text
stringlengths
8
4.13M
#[doc = "Reader of register COMP1_CSR"] pub type R = crate::R<u32, super::COMP1_CSR>; #[doc = "Writer for register COMP1_CSR"] pub type W = crate::W<u32, super::COMP1_CSR>; #[doc = "Register COMP1_CSR `reset()`'s with value 0"] impl crate::ResetValue for super::COMP1_CSR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `COMP1EN`"] pub type COMP1EN_R = crate::R<bool, bool>; #[doc = "Write proxy for field `COMP1EN`"] pub struct COMP1EN_W<'a> { w: &'a mut W, } impl<'a> COMP1EN_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `COMP1_INP_DAC`"] pub type COMP1_INP_DAC_R = crate::R<bool, bool>; #[doc = "Write proxy for field `COMP1_INP_DAC`"] pub struct COMP1_INP_DAC_W<'a> { w: &'a mut W, } impl<'a> COMP1_INP_DAC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `COMP1MODE`"] pub type COMP1MODE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `COMP1MODE`"] pub struct COMP1MODE_W<'a> { w: &'a mut W, } impl<'a> COMP1MODE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 2)) | (((value as u32) & 0x03) << 2); self.w } } #[doc = "Reader of field `COMP1INMSEL`"] pub type 
COMP1INMSEL_R = crate::R<u8, u8>; #[doc = "Write proxy for field `COMP1INMSEL`"] pub struct COMP1INMSEL_W<'a> { w: &'a mut W, } impl<'a> COMP1INMSEL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 4)) | (((value as u32) & 0x07) << 4); self.w } } #[doc = "Reader of field `COMP1OUTSEL`"] pub type COMP1OUTSEL_R = crate::R<u8, u8>; #[doc = "Write proxy for field `COMP1OUTSEL`"] pub struct COMP1OUTSEL_W<'a> { w: &'a mut W, } impl<'a> COMP1OUTSEL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 10)) | (((value as u32) & 0x0f) << 10); self.w } } #[doc = "Reader of field `COMP1POL`"] pub type COMP1POL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `COMP1POL`"] pub struct COMP1POL_W<'a> { w: &'a mut W, } impl<'a> COMP1POL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15); self.w } } #[doc = "Reader of field `COMP1HYST`"] pub type COMP1HYST_R = crate::R<u8, u8>; #[doc = "Write proxy for field `COMP1HYST`"] pub struct COMP1HYST_W<'a> { w: &'a mut W, } impl<'a> COMP1HYST_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 16)) | (((value as u32) & 0x03) << 16); self.w } } #[doc = "Reader of field `COMP1_BLANKING`"] pub type COMP1_BLANKING_R = crate::R<u8, u8>; #[doc = "Write proxy for field `COMP1_BLANKING`"] pub struct COMP1_BLANKING_W<'a> { w: &'a mut W, } impl<'a> COMP1_BLANKING_W<'a> { #[doc = r"Writes 
raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 18)) | (((value as u32) & 0x07) << 18); self.w } } #[doc = "Reader of field `COMP1OUT`"] pub type COMP1OUT_R = crate::R<bool, bool>; #[doc = "Reader of field `COMP1LOCK`"] pub type COMP1LOCK_R = crate::R<bool, bool>; #[doc = "Write proxy for field `COMP1LOCK`"] pub struct COMP1LOCK_W<'a> { w: &'a mut W, } impl<'a> COMP1LOCK_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bit 0 - Comparator 1 enable"] #[inline(always)] pub fn comp1en(&self) -> COMP1EN_R { COMP1EN_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Comparator 1 non inverting input connection to DAC output"] #[inline(always)] pub fn comp1_inp_dac(&self) -> COMP1_INP_DAC_R { COMP1_INP_DAC_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bits 2:3 - Comparator 1 mode"] #[inline(always)] pub fn comp1mode(&self) -> COMP1MODE_R { COMP1MODE_R::new(((self.bits >> 2) & 0x03) as u8) } #[doc = "Bits 4:6 - Comparator 1 inverting input selection"] #[inline(always)] pub fn comp1inmsel(&self) -> COMP1INMSEL_R { COMP1INMSEL_R::new(((self.bits >> 4) & 0x07) as u8) } #[doc = "Bits 10:13 - Comparator 1 output selection"] #[inline(always)] pub fn comp1outsel(&self) -> COMP1OUTSEL_R { COMP1OUTSEL_R::new(((self.bits >> 10) & 0x0f) as u8) } #[doc = "Bit 15 - Comparator 1 output polarity"] #[inline(always)] pub fn comp1pol(&self) -> COMP1POL_R { COMP1POL_R::new(((self.bits >> 15) & 0x01) != 0) } #[doc = "Bits 16:17 - Comparator 1 hysteresis"] #[inline(always)] pub fn comp1hyst(&self) -> COMP1HYST_R { 
COMP1HYST_R::new(((self.bits >> 16) & 0x03) as u8) } #[doc = "Bits 18:20 - Comparator 1 blanking source"] #[inline(always)] pub fn comp1_blanking(&self) -> COMP1_BLANKING_R { COMP1_BLANKING_R::new(((self.bits >> 18) & 0x07) as u8) } #[doc = "Bit 30 - Comparator 1 output"] #[inline(always)] pub fn comp1out(&self) -> COMP1OUT_R { COMP1OUT_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - Comparator 1 lock"] #[inline(always)] pub fn comp1lock(&self) -> COMP1LOCK_R { COMP1LOCK_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Comparator 1 enable"] #[inline(always)] pub fn comp1en(&mut self) -> COMP1EN_W { COMP1EN_W { w: self } } #[doc = "Bit 1 - Comparator 1 non inverting input connection to DAC output"] #[inline(always)] pub fn comp1_inp_dac(&mut self) -> COMP1_INP_DAC_W { COMP1_INP_DAC_W { w: self } } #[doc = "Bits 2:3 - Comparator 1 mode"] #[inline(always)] pub fn comp1mode(&mut self) -> COMP1MODE_W { COMP1MODE_W { w: self } } #[doc = "Bits 4:6 - Comparator 1 inverting input selection"] #[inline(always)] pub fn comp1inmsel(&mut self) -> COMP1INMSEL_W { COMP1INMSEL_W { w: self } } #[doc = "Bits 10:13 - Comparator 1 output selection"] #[inline(always)] pub fn comp1outsel(&mut self) -> COMP1OUTSEL_W { COMP1OUTSEL_W { w: self } } #[doc = "Bit 15 - Comparator 1 output polarity"] #[inline(always)] pub fn comp1pol(&mut self) -> COMP1POL_W { COMP1POL_W { w: self } } #[doc = "Bits 16:17 - Comparator 1 hysteresis"] #[inline(always)] pub fn comp1hyst(&mut self) -> COMP1HYST_W { COMP1HYST_W { w: self } } #[doc = "Bits 18:20 - Comparator 1 blanking source"] #[inline(always)] pub fn comp1_blanking(&mut self) -> COMP1_BLANKING_W { COMP1_BLANKING_W { w: self } } #[doc = "Bit 31 - Comparator 1 lock"] #[inline(always)] pub fn comp1lock(&mut self) -> COMP1LOCK_W { COMP1LOCK_W { w: self } } }
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::collections::HashMap;
use std::ops::Range;
use std::sync::Arc;

use common_exception::ErrorCode;
use common_exception::Result;
use common_expression::types::DataType;
use common_expression::ConstantFolder;
use common_expression::DataField;
use common_expression::DataSchemaRef;
use common_expression::Domain;
use common_expression::Expr;
use common_expression::FunctionContext;
use common_expression::Scalar;
use common_expression::TableSchemaRef;
use common_functions::BUILTIN_FUNCTIONS;
use storages_common_table_meta::meta::ClusterStatistics;
use storages_common_table_meta::meta::ColumnStatistics;

use crate::range_index::statistics_to_domain;

/// Prunes pages within a block using per-page cluster-key statistics:
/// the filter expression is constant-folded against each page's
/// [min, next-page-min) domain, and only the contiguous range of pages
/// where the predicate may still be true is kept.
#[derive(Clone)]
pub struct PageIndex {
    // The filter expression, with columns referred to by name.
    expr: Expr<String>,
    // Column name -> type, for every column `expr` references.
    column_refs: HashMap<String, DataType>,
    func_ctx: FunctionContext,
    cluster_key_id: u32, // index of the cluster key inside the schema
    cluster_key_fields: Vec<DataField>,
}

impl PageIndex {
    /// Builds a `PageIndex` from the filter `expr` and the table's cluster keys.
    ///
    /// NOTE(review): `field_with_name(...).unwrap()` panics if a cluster key
    /// name is absent from the schema — presumably guaranteed by the caller;
    /// confirm upstream invariant.
    pub fn try_create(
        func_ctx: FunctionContext,
        cluster_key_id: u32,
        cluster_keys: Vec<String>,
        expr: &Expr<String>,
        schema: TableSchemaRef,
    ) -> Result<Self> {
        let data_schema: DataSchemaRef = Arc::new((&schema).into());
        let cluster_key_fields = cluster_keys
            .iter()
            .map(|name| data_schema.field_with_name(name.as_str()).unwrap().clone())
            .collect::<Vec<_>>();
        Ok(Self {
            column_refs: expr.column_refs(),
            expr: expr.clone(),
            cluster_key_fields,
            cluster_key_id,
            func_ctx,
        })
    }

    /// Cheap pre-check without statistics: returns `Ok(false)` (skip the
    /// block) only when `expr` is already the constant `false`.
    pub fn try_apply_const(&self) -> Result<bool> {
        // if the exprs do not contain any cluster key column, we should return true
        if self.cluster_key_fields.is_empty()
            || !self
                .column_refs
                .iter()
                .any(|c| self.cluster_key_fields.iter().any(|f| f.name() == c.0))
        {
            return Ok(true);
        }

        // Only return false, which means to skip this block, when the expression is folded to a constant false.
        Ok(!matches!(self.expr, Expr::Constant {
            scalar: Scalar::Boolean(false),
            ..
        }))
    }

    /// Evaluates the filter against each page's statistics and returns
    /// `(keep_block, Some(page_range))` — the surviving contiguous range of
    /// pages — or `(true, None)` when pruning is not applicable.
    #[tracing::instrument(level = "debug", name = "page_index_eval", skip_all)]
    pub fn apply(&self, stats: &Option<ClusterStatistics>) -> Result<(bool, Option<Range<usize>>)> {
        let stats = match stats {
            Some(stats) => stats,
            None => return Ok((true, None)),
        };
        // Per-page minimum values; page i spans [pages[i], pages[i + 1]),
        // with the last page bounded above by the block-level max.
        let min_values = match stats.pages {
            Some(ref pages) => pages,
            None => return Ok((true, None)),
        };
        let max_value = Scalar::Tuple(stats.max.clone());
        // Stats built for a different cluster key definition are unusable.
        if self.cluster_key_id != stats.cluster_key_id {
            return Ok((true, None));
        }

        let pages = min_values.len();
        let mut start = 0;
        // NOTE(review): underflows (panics in debug) if `stats.pages` is
        // `Some` but empty — presumably writers never produce an empty page
        // list; confirm upstream invariant.
        let mut end = pages - 1;
        // Advance `start` past leading pages where the predicate folds to false.
        while start <= end {
            let min_value = &min_values[start];
            let max_value = if start + 1 < pages {
                &min_values[start + 1]
            } else {
                &max_value
            };
            if self.eval_single_page(min_value, max_value)? {
                break;
            }
            start += 1;
        }

        // Retreat `end` past trailing pages where the predicate folds to false.
        while end >= start {
            let min_value = &min_values[end];
            let max_value = if end + 1 < pages {
                &min_values[end + 1]
            } else {
                &max_value
            };
            if self.eval_single_page(min_value, max_value)? {
                break;
            }
            end -= 1;
        }

        // no page is pruned
        if start + pages == end + 1 {
            return Ok((true, None));
        }
        if start > end {
            Ok((false, None))
        } else {
            Ok((true, Some(start..end + 1)))
        }
    }

    /// Folds the expression over one page's [min, max] tuple statistics;
    /// returns `Ok(false)` only when it folds to the constant `false`.
    fn eval_single_page(&self, min_value: &Scalar, max_value: &Scalar) -> Result<bool> {
        let min_value = min_value
            .as_tuple()
            .ok_or_else(|| ErrorCode::StorageOther("cluster stats must be tuple scalar"))?;
        let max_value = max_value
            .as_tuple()
            .ok_or_else(|| ErrorCode::StorageOther("cluster stats must be tuple scalar"))?;

        let mut input_domains = HashMap::with_capacity(self.cluster_key_fields.len());
        for (idx, (min, max)) in min_value.iter().zip(max_value.iter()).enumerate() {
            if self
                .column_refs
                .contains_key(self.cluster_key_fields[idx].name())
            {
                let f = &self.cluster_key_fields[idx];
                // NOTE(review): `null_count: 1` looks like a conservative
                // placeholder (page stats don't track nulls) — confirm
                // `statistics_to_domain` treats this as "may contain null".
                let stat = ColumnStatistics {
                    min: min.clone(),
                    max: max.clone(),
                    null_count: 1,
                    in_memory_size: 0,
                    distinct_of_values: None,
                };
                let domain = statistics_to_domain(vec![&stat], f.data_type());
                input_domains.insert(f.name().clone(), domain);
            }

            // For Tuple scalars, if the first element is not equal, then the monotonically increasing property is broken.
            if min != max {
                break;
            }
        }

        if input_domains.is_empty() {
            return Ok(true);
        }

        // Fill missing stats to be full domain
        for (name, ty) in self.column_refs.iter() {
            if !input_domains.contains_key(name.as_str()) {
                input_domains.insert(name.clone(), Domain::full(ty));
            }
        }

        let (new_expr, _) = ConstantFolder::fold_with_domain(
            &self.expr,
            input_domains,
            self.func_ctx,
            &BUILTIN_FUNCTIONS,
        );

        // Only return false, which means to skip this block, when the expression is folded to a constant false.
        Ok(!matches!(new_expr, Expr::Constant {
            scalar: Scalar::Boolean(false),
            ..
        }))
    }
}
use std::num::NonZeroU32; use std::time::Duration; use governor::{Quota, DirectRateLimiter}; use governor::clock::Clock; use log::debug; pub struct RateLimiter { limiters: Vec<DirectRateLimiter>, } impl RateLimiter { pub fn new() -> RateLimiter { RateLimiter { limiters: Vec::new(), } } pub fn with_limit(mut self, max_burst: u32, duration: Duration) -> RateLimiter { let max_burst = NonZeroU32::new(max_burst).unwrap(); let quota = Quota::new(max_burst, duration).unwrap(); self.limiters.push(DirectRateLimiter::new(quota)); self } // Please notice: naive implementation. // We iterate over limiters which makes us drift to the future which reduces accuracy. To make // this impact less noticeable limiters should be added in order of decreasing duration. pub fn wait(&self, name: &str) { let mut limited = false; for limiter in &self.limiters { while let Err(until) = limiter.check() { if !limited { debug!("Rate limiting {}...", name); limited = true; } std::thread::sleep(until.wait_time_from(limiter.get_clock().now())); } } } }
// Crate root: external-crate wiring and public module tree.
// NOTE(review): groupings below are inferred from module names only — confirm.
#[macro_use]
extern crate log;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;

// Low-level utilities: conversions, hashing, randomness, elliptic-curve
// crypto (presumably, per the `ecc` name), and shared data types.
pub mod converters;
pub mod hashes;
pub mod random;
pub mod ecc;
pub mod types;
// Transport/storage/auth layers.
pub mod net;
pub mod db;
pub mod mac;
// Client and server entry points built on the modules above.
pub mod client;
pub mod server;
//! Where miscellaneous traits reside.

use std::{borrow::Borrow, ops::Deref};

#[allow(unused_imports)]
use core_extensions::SelfOps;

use crate::{
    pointer_trait::{CanTransmuteElement, TransmuteElement},
    sabi_types::{RMut, RRef},
    std_types::{RSlice, RStr, RString, RVec},
};

///////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////

/// For cloning a reference-like type into a (preferably ffi-safe) owned type.
pub trait IntoOwned: Copy + Deref {
    /// The owned equivalent of this type.
    type ROwned: Borrow<Self::Target>;

    /// Performs the clone.
    fn into_owned(self) -> Self::ROwned;
}

impl<T: Clone> IntoOwned for &T {
    type ROwned = T;

    fn into_owned(self) -> T {
        self.clone()
    }
}

impl IntoOwned for RStr<'_> {
    type ROwned = RString;

    fn into_owned(self) -> RString {
        self.into()
    }
}

impl<T: Clone> IntoOwned for RSlice<'_, T> {
    type ROwned = RVec<T>;

    fn into_owned(self) -> RVec<T> {
        self.to_rvec()
    }
}

///////////////////////////////////////////////////////////////////////////

/// Converts a `#[repr(Rust)]` type into its `#[repr(C)]` equivalent.
///
/// `#[repr(Rust)]` is the default representation for data types.
pub trait IntoReprC {
    /// The `#[repr(C)]` equivalent.
    type ReprC;

    /// Performs the conversion
    fn into_c(self) -> Self::ReprC;
}

/// Converts a `#[repr(C)]` type into its `#[repr(Rust)]` equivalent.
///
/// `#[repr(Rust)]` is the default representation for data types.
pub trait IntoReprRust {
    /// The `#[repr(Rust)]` equivalent.
    type ReprRust;

    /// Performs the conversion
    fn into_rust(self) -> Self::ReprRust;
}

///////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////

// Generates a `From<$from_ty> for $into_ty` impl plus the matching
// `IntoReprC` impl (which just delegates to `.into()`).
macro_rules! impl_from_rust_repr {
    (
        $(#[$meta:meta])*
        impl$([ $($impl_header:tt)* ])? From<$from_ty:ty> for $into_ty:ty
        $( where [ $( $where_clause:tt )* ] )?
        {
            fn($this:pat) $function_contents:block
        }
    ) => (
        $(#[$meta])*
        impl $(< $($impl_header)* >)? From<$from_ty> for $into_ty
        $(where $($where_clause)*)?
        {
            #[inline]
            fn from($this: $from_ty) -> $into_ty {
                $function_contents
            }
        }

        $(#[$meta])*
        impl $(< $($impl_header)* >)? $crate::traits::IntoReprC for $from_ty
        $(where $($where_clause)*)?
        {
            type ReprC = $into_ty;

            #[inline]
            fn into_c(self) -> Self::ReprC {
                self.into()
            }
        }
    )
}

// Mirror of `impl_from_rust_repr`: despite the `Into<..>` spelling in its
// input grammar, it emits a `From` impl plus the matching `IntoReprRust` impl.
macro_rules! impl_into_rust_repr {
    (
        $(#[$meta:meta])*
        impl$([ $($impl_header:tt)* ])? Into<$into_ty:ty> for $from_ty:ty
        $( where [ $( $where_clause:tt )* ] )?
        {
            fn($this:pat){
                $($function_contents:tt)*
            }
        }
    ) => (
        $(#[$meta])*
        impl $(< $($impl_header)* >)? From<$from_ty> for $into_ty
        $(where $($where_clause)*)?
        {
            #[inline]
            fn from($this: $from_ty) -> $into_ty {
                $($function_contents)*
            }
        }

        $(#[$meta])*
        impl $(< $($impl_header)* >)? $crate::traits::IntoReprRust for $from_ty
        $(where $($where_clause)*)?
        {
            type ReprRust = $into_ty;

            #[inline]
            fn into_rust(self) -> Self::ReprRust {
                self.into()
            }
        }
    )
}

///////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////

// Helper for type-erased pointers: converts between a pointer to the erased
// marker type (`Self`) and a pointer to the concrete `Self::Unerased` type.
// All methods are `unsafe`: the caller must guarantee the pointee really is
// `Self::Unerased`.
pub(crate) trait ErasedType<'a>: Sized {
    type Unerased;

    #[inline]
    unsafe fn from_unerased<P>(p: P) -> P::TransmutedPtr
    where
        P: CanTransmuteElement<Self, PtrTarget = Self::Unerased>,
    {
        unsafe { p.transmute_element::<Self>() }
    }

    #[inline]
    unsafe fn downcast_into<P>(p: P) -> P::TransmutedPtr
    where
        P: CanTransmuteElement<Self::Unerased, PtrTarget = Self>,
    {
        unsafe { p.transmute_element::<Self::Unerased>() }
    }

    #[inline]
    unsafe fn run_downcast_as<'b, F, R>(p: RRef<'b, Self>, func: F) -> R
    where
        Self::Unerased: 'b,
        F: FnOnce(&'b Self::Unerased) -> R,
    {
        unsafe { func(p.transmute_into_ref::<Self::Unerased>()) }
    }

    #[inline]
    unsafe fn run_downcast_as_mut<'b, F, R>(p: RMut<'b, Self>, func: F) -> R
    where
        Self::Unerased: 'b,
        F: FnOnce(&'b mut Self::Unerased) -> R,
    {
        unsafe { func(p.transmute_into_mut()) }
    }
}

///////////////////////////////////////////////////////////////////////////

/// Unwraps a type into its owned value.
pub trait IntoInner {
    /// The type of the value this owns.
    type Element;

    /// Unwraps this type into its owned value.
    fn into_inner_(self) -> Self::Element;
}

///////////////////////////////////////////////////////////////////////////
//! Tests auto-converted from "sass-spec/spec/parser/interpolate/44_selector/todo_single_escape" #[allow(unused)] use super::rsass; #[allow(unused)] use rsass::precision; // Ignoring "11_escaped_interpolated_value", tests with expected error not implemented yet. // Ignoring "21_escaped_interpolated_variable", tests with expected error not implemented yet. // Ignoring "31_escaped_literal", tests with expected error not implemented yet.
// Copyright 2019 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use { fuchsia_criterion::criterion::{black_box, Bencher, Criterion}, fuchsia_zircon as zx, }; pub fn benches(c: &mut Criterion) { c.bench_function("ticks_get", ticks_get); c.bench_function("monotonic_time", monotonic_time); } fn ticks_get(b: &mut Bencher) { b.iter(|| black_box(zx::ticks_get())); } fn monotonic_time(b: &mut Bencher) { b.iter(|| black_box(zx::Time::get(zx::ClockId::Monotonic))); }
#![feature(test)]
extern crate test;
extern crate password_maker_lib;

use test::test::Bencher;
use password_maker_lib::printout_password;

/// Benchmarks `printout_password` generating 75 000 passwords of length
/// 10 000 each.
///
/// Marked `#[ignore]` (run explicitly with `--ignored`) and compiled only
/// without debug assertions, because the workload is large. The redundant
/// explicit `-> ()` return type was removed (idiomatic Rust omits the unit
/// return type).
#[cfg(not(debug_assertions))]
#[bench]
#[ignore]
fn printout_password_bench(b: &mut Bencher) {
    let length = 10000;
    let amount = 75000;
    b.iter(|| {
        printout_password(&length, &amount);
    });
}
use crate::gpu::GPU; use crate::memory_map::*; // abstract memory into its logical parts instead of one big array // currently do not have an implementation for echo ram pub struct MemoryBus { bios: [u8; 0xff], rom_bank0: [u8; 0x4000], rom_bankn: [u8; 0x4000], // vram: [u8; 0x2000], eram: [u8; 0x2000], wram: [u8; 0x2000], // oam: [u8; 0xa0], hram: [u8; 0x7f], // memory: [u8; 0xffff], gpu: GPU, } impl MemoryBus { pub fn new(rom: Vec<u8>, game: Vec<u8>) -> MemoryBus { let mut bios = [0; 0xff]; for (i, &byte) in rom.iter().enumerate() { bios[i] = byte; } // load the game into memory let mut bank0 = [0; 0x4000]; let mut bankn = [0; 0x4000]; for i in 0..0x4000 { bank0[i] = game[i]; bankn[i] = game[i + 0x4000]; } /* let mut bankn = [0; 0x4000]; for i in 0..0x4000 { bankn[i] = game[i + 0x4000]; } */ MemoryBus { bios: bios, rom_bank0: bank0, rom_bankn: bankn, // vram: [0; 0x2000], eram: [0; 0x2000], wram: [0; 0x2000], // oam: [0; 0xa0], hram: [0; 0x7f], gpu: GPU::new() } } // broken pub fn read_byte(&self, address: u16) -> u8 { match address { BIOS_START..=BIOS_END => self.bios[address as usize], ROM_BANK_0_START..=ROM_BANK_0_END => self.rom_bank0[address as usize], ROM_BANK_N_START..=ROM_BANK_N_END => self.rom_bankn[(address - ROM_BANK_N_START) as usize], VRAM_START..=VRAM_END => self.gpu.vram[(address - VRAM_START) as usize], ERAM_START..=ERAM_END => self.eram[(address - ERAM_START) as usize], WRAM_START..=WRAM_END => self.wram[(address - WRAM_START) as usize], ECHO_START..=ECHO_END => self.wram[(address - ECHO_START) as usize], OAM_START..=OAM_END => self.gpu.vram[(address - OAM_START) as usize], IO_START..=IO_END => self.read_io_register(address), HRAM_START..=HRAM_END => self.hram[(address - HRAM_START) as usize], _ => panic!("Cannot access address 0x{}", address) } } // broken pub fn set_byte(&mut self, address: u16, new_byte: u8) { match address { BIOS_START..=BIOS_END => self.bios[address as usize] = new_byte, ROM_BANK_0_START..=ROM_BANK_0_END => 
self.rom_bank0[address as usize] = new_byte, ROM_BANK_N_START..=ROM_BANK_N_END => self.rom_bankn[(address - ROM_BANK_N_START) as usize] = new_byte, // VRAM_START..=VRAM_END => self.gpu.vram[(address - VRAM_START) as usize] = new_byte, VRAM_START..=VRAM_END => self.gpu.set_vram(address - VRAM_START, new_byte), ERAM_START..=ERAM_END => self.eram[(address - ERAM_START) as usize] = new_byte, WRAM_START..=WRAM_END => self.wram[(address - WRAM_START) as usize] = new_byte, ECHO_START..=ECHO_END => self.wram[(address - ECHO_START) as usize] = new_byte, // OAM_START..=OAM_END => self.gpu.oam[(address - OAM_START) as usize] = new_byte, OAM_START..=OAM_END => self.gpu.set_oam(address - VRAM_START, new_byte), IO_START..=IO_END => self.write_io_register(address, new_byte), HRAM_START..=HRAM_END => self.hram[(address - HRAM_START) as usize] = new_byte, _ => panic!("Cannot access address 0x{}", address) }; } // TODO fn read_io_register(&self, address: u16) -> u8 { 0 } // TODO fn write_io_register(&mut self, address: u16, new_byte: u8) { } }
use crate::Event;
use snafu::Snafu;

pub mod util;

// Each transform lives behind its own cargo feature so unused ones are
// compiled out.
#[cfg(feature = "transforms-add_fields")]
pub mod add_fields;
#[cfg(feature = "transforms-add_tags")]
pub mod add_tags;
#[cfg(feature = "transforms-ansi_stripper")]
pub mod ansi_stripper;
#[cfg(feature = "transforms-aws_ec2_metadata")]
pub mod aws_ec2_metadata;
#[cfg(feature = "transforms-coercer")]
pub mod coercer;
#[cfg(feature = "transforms-concat")]
pub mod concat;
#[cfg(feature = "transforms-dedupe")]
pub mod dedupe;
#[cfg(feature = "transforms-field_filter")]
pub mod field_filter;
#[cfg(feature = "transforms-filter")]
pub mod filter;
#[cfg(feature = "transforms-geoip")]
pub mod geoip;
#[cfg(feature = "transforms-grok_parser")]
pub mod grok_parser;
#[cfg(feature = "transforms-json_parser")]
pub mod json_parser;
#[cfg(feature = "transforms-log_to_metric")]
pub mod log_to_metric;
#[cfg(feature = "transforms-logfmt_parser")]
pub mod logfmt_parser;
#[cfg(feature = "transforms-lua")]
pub mod lua;
#[cfg(feature = "transforms-merge")]
pub mod merge;
#[cfg(feature = "transforms-reduce")]
pub mod reduce;
#[cfg(feature = "transforms-regex_parser")]
pub mod regex_parser;
#[cfg(feature = "transforms-remove_fields")]
pub mod remove_fields;
#[cfg(feature = "transforms-remove_tags")]
pub mod remove_tags;
#[cfg(feature = "transforms-rename_fields")]
pub mod rename_fields;
#[cfg(feature = "transforms-sampler")]
pub mod sampler;
#[cfg(feature = "transforms-split")]
pub mod split;
#[cfg(feature = "transforms-swimlanes")]
pub mod swimlanes;
#[cfg(feature = "transforms-tag_cardinality_limit")]
pub mod tag_cardinality_limit;
#[cfg(feature = "transforms-tokenizer")]
pub mod tokenizer;
#[cfg(feature = "wasm")]
pub mod wasm;

use futures01::Stream;

/// A stage of the event pipeline: maps one input `Event` to zero or one
/// output events (implementors override `transform`; the other methods are
/// derived defaults).
pub trait Transform: Send {
    /// Transforms one event; returning `None` drops it.
    fn transform(&mut self, event: Event) -> Option<Event>;

    /// Variant for transforms that can emit into a caller-provided buffer;
    /// the default just forwards the single `transform` result.
    fn transform_into(&mut self, output: &mut Vec<Event>, event: Event) {
        if let Some(transformed) = self.transform(event) {
            output.push(transformed);
        }
    }

    /// Lifts the transform over a futures-0.1 stream, flattening the
    /// per-event output buffers (so dropped events disappear from the
    /// stream).
    fn transform_stream(
        self: Box<Self>,
        input_rx: Box<dyn Stream<Item = Event, Error = ()> + Send>,
    ) -> Box<dyn Stream<Item = Event, Error = ()> + Send>
    where
        Self: 'static,
    {
        let mut me = self;
        Box::new(
            input_rx
                .map(move |event| {
                    let mut output = Vec::with_capacity(1);
                    me.transform_into(&mut output, event);
                    futures01::stream::iter_ok(output.into_iter())
                })
                .flatten(),
        )
    }
}

// Shared config-time errors for transforms that compile user-supplied
// expressions.
#[derive(Debug, Snafu)]
enum BuildError {
    #[snafu(display("Invalid regular expression: {}", source))]
    InvalidRegex { source: regex::Error },
    #[snafu(display("Invalid substring expression: {}", name))]
    InvalidSubstring { name: String },
}
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // A test where we (successfully) close over a reference into // an object. #![feature(box_syntax)] trait SomeTrait { fn get(&self) -> isize; } impl<'a> SomeTrait for &'a isize { fn get(&self) -> isize { **self } } fn make_object<'a,A:SomeTrait+'a>(v: A) -> Box<SomeTrait+'a> { box v as Box<SomeTrait+'a> } fn main() { let i: isize = 22; let obj = make_object(&i); assert_eq!(22, obj.get()); }
// Generates `RequiredTraits` and its support items for fixed lists of auto
// traits and regular (non-auto) traits: per-trait bitmask constants, const
// query methods, Debug/Display impls, an `ExtraChecks` impl enforcing
// "auto traits identical, regular traits a superset" across library
// versions, and the `ImpldTraitsError` type reported on mismatch.
macro_rules! declare_enabled_traits {
    (
        auto_traits[
            $(($auto_trait:ident, $auto_trait_query:ident, $auto_trait_path:path)),*
            $(,)*
        ]

        regular_traits[
            $(($regular_trait:ident, $regular_trait_query:ident, $regular_trait_path:path)),*
            $(,)*
        ]
    ) => (
        use crate::{
            abi_stability::extra_checks::{
                TypeCheckerMut, ExtraChecks, ForExtraChecksImplementor, ExtraChecksError,
            },
            type_layout::TypeLayout,
            std_types::{RCowSlice, RResult},
        };

        use core_extensions::strings::StringExt;

        // One bit per auto trait: the bit index is the trait's position in
        // the input list (via the generated `__Index` enum discriminants).
        #[allow(non_upper_case_globals)]
        mod auto_trait_mask {
            #[repr(u32)]
            enum __Index {
                $($auto_trait,)*
            }
            $(pub(super) const $auto_trait: u16 = 1u16 << __Index::$auto_trait as u32;)*
        }

        // Same scheme for regular traits, in a wider (u64) mask.
        #[allow(non_upper_case_globals)]
        mod regular_trait_mask {
            #[repr(u32)]
            enum __Index {
                $($regular_trait,)*
            }
            $(pub(super) const $regular_trait: u64 = 1u64 << __Index::$regular_trait as u32;)*
        }

        /// Describes which traits are required and enabled by the `I: `[`InterfaceType`]
        /// that this `RequiredTraits` is created from.
        ///
        /// # Purpose
        ///
        /// This is what [`DynTrait`] uses to check that the traits it
        /// requires are compatible between library versions,
        /// by using this type in
        /// [`#[sabi(extra_checks = <here>)]`](derive@crate::StableAbi#sabi_extra_checks_attr).
        ///
        /// This type requires that auto traits are the same across versions.
        /// Non-auto traits can be added in newer versions of a library.
        #[repr(C)]
        #[derive(Copy, Clone, StableAbi)]
        pub struct RequiredTraits {
            auto_traits: u16,
            regular_traits: u64,
        }

        impl RequiredTraits {
            /// Constructs a RequiredTraits.
            pub const fn new<I: InterfaceType>() -> Self {
                use crate::type_level::impl_enum::Implementability;
                RequiredTraits {
                    // Each `$(...)|*` repetition ORs together the masks of
                    // the traits `I` declares as implemented.
                    auto_traits: $(
                        if <I::$auto_trait as Implementability>::IS_IMPLD {
                            auto_trait_mask::$auto_trait
                        } else {
                            0
                        }
                    )|*,
                    regular_traits: $(
                        if <I::$regular_trait as Implementability>::IS_IMPLD {
                            regular_trait_mask::$regular_trait
                        } else {
                            0
                        }
                    )|*
                }
            }

            $(
                #[doc = concat!(
                    "Whether the [`",
                    stringify!($auto_trait_path),
                    "`] trait is required",
                )]
                pub const fn $auto_trait_query(self) -> bool {
                    (self.auto_traits & auto_trait_mask::$auto_trait) != 0
                }
            )*

            $(
                #[doc = concat!(
                    "Whether the [`",
                    stringify!($regular_trait_path),
                    "`] trait is required",
                )]
                pub const fn $regular_trait_query(self) -> bool {
                    (self.regular_traits & regular_trait_mask::$regular_trait) != 0
                }
            )*
        }

        impl Debug for RequiredTraits {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                use self::debug_impl_details::{EnabledAutoTraits, EnabledRegularTraits};
                f.debug_struct("RequiredTraits")
                    .field("auto_traits_bits", &self.auto_traits)
                    .field("auto_traits", &EnabledAutoTraits { traits: self.auto_traits })
                    .field("regular_traits_bits", &self.regular_traits)
                    .field("regular_traits", &EnabledRegularTraits { traits: self.regular_traits })
                    .finish()
            }
        }

        impl Display for RequiredTraits {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.write_str("RequiredTraits\n")?;
                f.write_str("Auto traits:")?;
                if self.auto_traits == 0 {
                    f.write_str("<no_traits>")?;
                } else {
                    $(
                        if (self.auto_traits & auto_trait_mask::$auto_trait) != 0 {
                            f.write_str(concat!(" ", stringify!($auto_trait)))?;
                        }
                    )*
                }
                writeln!(f,)?;
                f.write_str("Impld traits:")?;
                if self.regular_traits == 0 {
                    f.write_str("<no_traits>")?;
                } else {
                    $(
                        if (self.regular_traits & regular_trait_mask::$regular_trait) != 0 {
                            f.write_str(concat!(" ", stringify!($regular_trait)))?;
                        }
                    )*
                }
                writeln!(f,)?;
                Ok(())
            }
        }

        unsafe impl ExtraChecks for RequiredTraits {
            fn type_layout(&self) -> &'static TypeLayout {
                <Self as StableAbi>::LAYOUT
            }

            // Compatibility rule: auto traits must match exactly; the other
            // side's regular traits must be a superset of ours.
            fn check_compatibility(
                &self,
                _layout_containing_self: &'static TypeLayout,
                layout_containing_other: &'static TypeLayout,
                checker: TypeCheckerMut<'_>,
            ) -> RResult<(), ExtraChecksError> {
                Self::downcast_with_layout(layout_containing_other, checker, |other, _| {
                    if self.auto_traits != other.auto_traits {
                        Err(ImpldTraitsError {
                            kind: ImpldTraitsErrorKind::MismatchedAutoTraits,
                            expected: self.clone(),
                            found: other.clone(),
                        })
                    } else if (self.regular_traits & other.regular_traits) != self.regular_traits {
                        Err(ImpldTraitsError {
                            kind: ImpldTraitsErrorKind::UnimpldTraits,
                            expected: self.clone(),
                            found: other.clone(),
                        })
                    } else {
                        Ok(())
                    }
                })
            }

            fn nested_type_layouts(&self) -> RCowSlice<'_, &'static TypeLayout> {
                RCowSlice::from_slice(&[])
            }
        }

        // Helper wrappers so Debug prints trait *names* instead of raw bits.
        mod debug_impl_details {
            use super::*;

            pub(super) struct EnabledAutoTraits {
                pub(super) traits: u16,
            }

            impl Debug for EnabledAutoTraits {
                fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                    let mut ds = f.debug_set();
                    $(
                        if (self.traits & auto_trait_mask::$auto_trait) != 0 {
                            ds.entry(&stringify!($auto_trait));
                        }
                    )*
                    ds.finish()
                }
            }

            pub(super) struct EnabledRegularTraits {
                pub(super) traits: u64,
            }

            impl Debug for EnabledRegularTraits {
                fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                    let mut ds = f.debug_set();
                    $(
                        if (self.traits & regular_trait_mask::$regular_trait) != 0 {
                            ds.entry(&stringify!($regular_trait));
                        }
                    )*
                    ds.finish()
                }
            }
        }

        ////////////////////////////////////////////////////////////////////////

        // Error returned by `check_compatibility` above.
        #[derive(Debug, Clone)]
        pub struct ImpldTraitsError {
            kind: ImpldTraitsErrorKind,
            expected: RequiredTraits,
            found: RequiredTraits,
        }

        #[derive(Debug, Clone)]
        pub enum ImpldTraitsErrorKind {
            MismatchedAutoTraits,
            UnimpldTraits,
        }

        impl Display for ImpldTraitsError {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                let msg = match self.kind {
                    ImpldTraitsErrorKind::MismatchedAutoTraits =>
                        "Expected auto traits to be exactly the same",
                    // NOTE(review): runtime string is missing a space before
                    // `Found` — left byte-identical on purpose.
                    ImpldTraitsErrorKind::UnimpldTraits =>
                        "`Expected` does not contain a subset of the traits in`Found`",
                };
                f.write_str(msg)?;
                writeln!(f,)?;
                writeln!(f, "Expected:\n{}", self.expected.to_string().left_padder(4))?;
                writeln!(f, "Found:\n{}", self.found.to_string().left_padder(4))?;
                Ok(())
            }
        }

        impl std::error::Error for ImpldTraitsError {}
    )
}
use std::collections::HashMap; use std::fs; use std::path::{Path, PathBuf}; use std::sync::RwLock; use directories::ProjectDirs; pub const CLIENT_ID: &str = "d420a117a32841c2b3474932e49fb54b"; #[derive(Clone, Serialize, Deserialize, Debug, Default)] pub struct Config { pub default_keybindings: Option<bool>, pub keybindings: Option<HashMap<String, String>>, pub theme: Option<ConfigTheme>, pub use_nerdfont: Option<bool>, pub saved_state: Option<SavedState>, pub audio_cache: Option<bool>, pub backend: Option<String>, pub backend_device: Option<String>, pub volnorm: Option<bool>, pub volnorm_pregain: Option<f32>, pub notify: Option<bool>, } #[derive(Serialize, Deserialize, Debug, Default, Clone)] pub struct SavedState { pub volume: Option<u8>, pub shuffle: Option<bool>, pub repeat: Option<String>, } #[derive(Serialize, Deserialize, Debug, Default, Clone)] pub struct ConfigTheme { pub background: Option<String>, pub primary: Option<String>, pub secondary: Option<String>, pub title: Option<String>, pub playing: Option<String>, pub playing_selected: Option<String>, pub playing_bg: Option<String>, pub highlight: Option<String>, pub highlight_bg: Option<String>, pub error: Option<String>, pub error_bg: Option<String>, pub statusbar_progress: Option<String>, pub statusbar_progress_bg: Option<String>, pub statusbar: Option<String>, pub statusbar_bg: Option<String>, pub cmdline: Option<String>, pub cmdline_bg: Option<String>, } lazy_static! 
{ pub static ref BASE_PATH: RwLock<Option<PathBuf>> = RwLock::new(None); } pub fn load() -> Result<Config, String> { let path = config_path("config.toml"); load_or_generate_default(path, |_| Ok(Config::default()), false) } fn proj_dirs() -> ProjectDirs { match *BASE_PATH.read().expect("can't readlock BASE_PATH") { Some(ref basepath) => ProjectDirs::from_path(basepath.clone()).expect("invalid basepath"), None => { ProjectDirs::from("org", "affekt", "ncspot").expect("can't determine project paths") } } } pub fn config_path(file: &str) -> PathBuf { let proj_dirs = proj_dirs(); let cfg_dir = proj_dirs.config_dir(); trace!("{:?}", cfg_dir); if cfg_dir.exists() && !cfg_dir.is_dir() { fs::remove_file(cfg_dir).expect("unable to remove old config file"); } if !cfg_dir.exists() { fs::create_dir_all(cfg_dir).expect("can't create config folder"); } let mut cfg = cfg_dir.to_path_buf(); cfg.push(file); cfg } pub fn cache_path(file: &str) -> PathBuf { let proj_dirs = proj_dirs(); let cache_dir = proj_dirs.cache_dir(); if !cache_dir.exists() { fs::create_dir_all(cache_dir).expect("can't create cache folder"); } let mut pb = cache_dir.to_path_buf(); pb.push(file); pb } /// Configuration and credential file helper /// Creates a default configuration if none exist, otherwise will optionally overwrite /// the file if it fails to parse pub fn load_or_generate_default< P: AsRef<Path>, T: serde::Serialize + serde::de::DeserializeOwned, F: Fn(&Path) -> Result<T, String>, >( path: P, default: F, default_on_parse_failure: bool, ) -> Result<T, String> { let path = path.as_ref(); // Nothing exists so just write the default and return it if !path.exists() { let value = default(&path)?; return write_content_helper(&path, value); } // load the serialized content. 
Always report this failure let contents = std::fs::read_to_string(&path) .map_err(|e| format!("Unable to read {}: {}", path.to_string_lossy(), e))?; // Deserialize the content, optionally fall back to default if it fails let result = toml::from_str(&contents); if default_on_parse_failure && result.is_err() { let value = default(&path)?; return write_content_helper(&path, value); } result.map_err(|e| format!("Unable to parse {}: {}", path.to_string_lossy(), e)) } fn write_content_helper<P: AsRef<Path>, T: serde::Serialize>( path: P, value: T, ) -> Result<T, String> { let content = toml::to_string_pretty(&value).map_err(|e| format!("Failed serializing value: {}", e))?; fs::write(path.as_ref(), content) .map(|_| value) .map_err(|e| { format!( "Failed writing content to {}: {}", path.as_ref().display(), e ) }) }
#[macro_use] extern crate bencher; extern crate svgdom; use std::fs; use std::env; use std::io::Read; use bencher::Bencher; use svgdom::{Document, WriteBuffer}; const TEN_MIB: usize = 10 * 1024 * 1024; fn load_file(path: &str) -> String { let path = env::current_dir().unwrap().join(path); let mut file = fs::File::open(&path).unwrap(); let mut text = String::new(); file.read_to_string(&mut text).unwrap(); text } macro_rules! do_parse { ($name:ident, $path:expr) => ( fn $name(bencher: &mut Bencher) { let text = load_file($path); bencher.iter(|| { let _ = Document::from_str(&text).unwrap(); }) } ) } do_parse!(parse_small, "benches/small.svg"); do_parse!(parse_medium, "benches/medium.svg"); do_parse!(parse_large, "benches/large.svg"); macro_rules! do_write { ($name:ident, $path:expr) => ( fn $name(bencher: &mut Bencher) { let text = load_file($path); let doc = Document::from_str(&text).unwrap(); let mut ouput_data = Vec::with_capacity(TEN_MIB); bencher.iter(|| { doc.write_buf(&mut ouput_data); ouput_data.clear(); }) } ) } do_write!(write_small, "benches/small.svg"); do_write!(write_medium, "benches/medium.svg"); do_write!(write_large, "benches/large.svg"); benchmark_group!(benches1, parse_small, parse_medium, parse_large); benchmark_group!(benches2, write_small, write_medium, write_large); benchmark_main!(benches1, benches2);
use std::path::PathBuf; use crate::fs::LllDirList; use crate::history::{DirectoryHistory, LllHistory}; use crate::sort; use crate::ui; use crate::window::{LllPanel, LllView}; use crate::LllConfig; use crate::THEME_T; pub struct LllTab { pub history: LllHistory, pub curr_path: PathBuf, pub curr_list: LllDirList, } impl LllTab { pub fn new(curr_path: PathBuf, sort_option: &sort::SortOption) -> std::io::Result<Self> { let mut history = LllHistory::new(); history.populate_to_root(&curr_path, sort_option)?; let curr_list = history.pop_or_create(&curr_path, sort_option)?; let tab = LllTab { curr_path, history, curr_list, }; Ok(tab) } pub fn refresh(&mut self, views: &LllView, config_t: &LllConfig) { self.refresh_curr(&views.mid_win, config_t); self.refresh_path_status(&views.top_win); self.refresh_file_status(&views.bot_win); } pub fn refresh_curr(&mut self, win: &LllPanel, config_t: &LllConfig) { ui::display_contents( win, &mut self.curr_list, config_t, &ui::PRIMARY_DISPLAY_OPTION, ); } pub fn refresh_file_status(&self, win: &LllPanel) { ncurses::werase(win.win); ncurses::wmove(win.win, 0, 0); if let Some(index) = self.curr_list.index { let entry = &self.curr_list.contents[index]; ui::wprint_file_status(win, entry); } } pub fn refresh_path_status(&self, win: &LllPanel) { let path_str: &str = self.curr_path.to_str().unwrap(); ncurses::werase(win.win); ncurses::wattron(win.win, ncurses::COLOR_PAIR(THEME_T.directory.colorpair)); ncurses::waddstr(win.win, path_str); ncurses::waddstr(win.win, "/"); ncurses::wattroff(win.win, ncurses::COLOR_PAIR(THEME_T.directory.colorpair)); win.queue_for_refresh(); } }
#[cfg(all(not(target_arch = "wasm32"), test))] mod test; use liblumen_alloc::erts::exception; use liblumen_alloc::erts::term::prelude::Term; /// `orelse/2` infix operator. /// /// Short-circuiting, but doesn't enforce `right` is boolean. If you need to enforce `boolean` for /// both operands, use `or_2`. #[native_implemented::function(erlang:orelse/2)] pub fn result(left_boolean: Term, right_term: Term) -> exception::Result<Term> { let left_bool: bool = term_try_into_bool!(left_boolean)?; if left_bool { // always `true.into()`, but this is faster Ok(left_boolean) } else { Ok(right_term) } }
use crate::types::{
    Cipher, DecryptedShare, NrOfShuffles, PublicParameters, TopicId, TopicResult, VoteId, Wrapper,
};
use crate::{
    helpers::params::get_public_params, Ciphers, DecryptedShares, Error, Sealers, Tally, Trait,
};
use crypto::encryption::ElGamal;
use crypto::types::Cipher as BigCipher;
use frame_support::{
    ensure,
    storage::{StorageDoubleMap, StorageMap, StorageValue},
};
use num_bigint::BigUint;
use num_traits::One;
use sp_std::{collections::btree_map::BTreeMap, vec::Vec};

/// Combine the sealers' partial decryptions for `topic_id`, decrypt every
/// submitted cipher, count the votes per voting option and persist the tally
/// on chain.
///
/// Fails with `NotEnoughDecryptedShares` if any sealer has not submitted its
/// shares, and with `TopicHasAlreadyBeenTallied` if a tally for this topic
/// already exists in storage.
pub fn combine_shares_and_tally_topic<T: Trait>(
    vote_id: &VoteId,
    topic_id: &TopicId,
    encoded: bool,
    nr_of_shuffles: &NrOfShuffles,
) -> Result<TopicResult, Error<T>> {
    // get the public parameters and the system public key
    let params: PublicParameters = get_public_params::<T>(vote_id)?;
    let big_p: BigUint = BigUint::from_bytes_be(&params.p);
    let big_g: BigUint = BigUint::from_bytes_be(&params.g);

    // get all encrypted votes (ciphers)
    // for the topic with id: topic_id and the # of shuffles (nr_of_shuffles)
    let ciphers: Vec<Cipher> = Ciphers::get(topic_id, nr_of_shuffles);

    // type conversion: Vec<Cipher> (Vec<Vec<u8>>) to Vec<BigCipher> (Vec<BigUint>)
    let big_ciphers: Vec<BigCipher> = Wrapper(ciphers).into();

    // retrieve the decrypted shares of all sealers
    let sealers: Vec<T::AccountId> = Sealers::<T>::get();
    let mut partial_decryptions: Vec<Vec<BigUint>> = Vec::with_capacity(sealers.len());

    for sealer in sealers.iter() {
        // get the partial decryptions of each sealer
        let shares: Vec<DecryptedShare> =
            DecryptedShares::<T>::get::<&TopicId, &T::AccountId>(topic_id, &sealer);

        // make sure that each sealer has submitted his decrypted shares
        ensure!(!shares.is_empty(), Error::<T>::NotEnoughDecryptedShares);

        // type conversion: DecryptedShare (Vec<u8>) to BigUint
        let big_shares: Vec<BigUint> = shares
            .iter()
            .map(|s| BigUint::from_bytes_be(s))
            .collect::<Vec<BigUint>>();
        partial_decryptions.push(big_shares);
    }

    // combine all partial decryptions by all sealers
    let combined_partial_decryptions =
        ElGamal::combine_partial_decrypted_as(partial_decryptions, &big_p);

    // retrieve the plaintext votes
    // by combining the decrypted components a with their decrypted components b
    let iterator = big_ciphers.iter().zip(combined_partial_decryptions.iter());
    let mut plaintexts = iterator
        .map(|(cipher, decrypted_a)| {
            ElGamal::partial_decrypt_b(&cipher.b, decrypted_a, &big_p)
        })
        .collect::<Vec<BigUint>>();

    // if the votes were encoded, we need to decode them (brute force dlog)
    if encoded {
        plaintexts = plaintexts
            .iter()
            .map(|encoded| ElGamal::decode_message(encoded, &big_g, &big_p))
            .collect::<Vec<BigUint>>();
    }

    // get the tally for the vote with topic id: topic_id
    let tally: Option<TopicResult> = Tally::get::<&TopicId>(topic_id);

    // check that topic has not been tallied yet
    ensure!(tally.is_none(), Error::<T>::TopicHasAlreadyBeenTallied);

    // count the number of votes per voting option
    // store result as a map -> key: voting option, value: count
    let one = BigUint::one();
    let mut big_results: BTreeMap<BigUint, BigUint> = BTreeMap::new();
    plaintexts
        .into_iter()
        .for_each(|item| *big_results.entry(item).or_default() += &one);

    // type conversion: BTreeMap<BigUint, BigUint> to BTreeMap<Vec<u8>, Vec<u8>>
    // to be able to store the results on chain
    let mut results: TopicResult = BTreeMap::new();
    for (key, value) in big_results.iter() {
        results.insert(key.to_bytes_be(), value.to_bytes_be());
    }

    // store the results on chain
    Tally::insert::<&TopicId, TopicResult>(topic_id, results.clone());
    Ok(results)
}
// Copyright (c) 2014 Michael Woerister // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
use std::hash::Hash;
use sync::Arc;
use PersistentMap;
use item_store::{ItemStore, CopyStore, ShareStore};

// Node colors for a persistent red-black tree supporting deletion.
// `NegativeBlack` and `DoubleBlack` are the transient colors used by the
// deletion/rebalancing scheme — they only exist while a removal is being
// bubbled up the tree and never appear in a finished tree.
#[deriving(Clone, Eq, PartialEq)]
enum Color {
    NegativeBlack = 0,
    Red = 1,
    Black = 2,
    DoubleBlack = 3
}

impl Color {
    // The next "blacker" color; incrementing DoubleBlack is a logic error.
    fn inc(&self) -> Color {
        match *self {
            NegativeBlack => Red,
            Red => Black,
            Black => DoubleBlack,
            DoubleBlack => fail!("Can't inc DoubleBlack")
        }
    }

    // The next "redder" color; decrementing NegativeBlack is a logic error.
    fn dec(&self) -> Color {
        match *self {
            NegativeBlack => fail!("Can't dec NegativeBlack"),
            Red => NegativeBlack,
            Black => Red,
            DoubleBlack => Black
        }
    }
}

// Payload of an interior node: two subtrees plus the stored item.
struct NodeData<K, V, IS> {
    left: NodeRef<K, V, IS>,
    item: IS,
    right: NodeRef<K, V, IS>,
}

// A colored reference to a subtree. `data == None` marks a leaf sentinel;
// interior nodes share their payload through a refcounted pointer, which is
// what makes the tree persistent (cheap structural sharing between versions).
struct NodeRef<K, V, IS> {
    col: Color,
    data: Option<Arc<NodeData<K, V, IS>>>
}

impl<K, V, IS: ItemStore<K, V>> Clone for NodeRef<K, V, IS> {
    fn clone(&self) -> NodeRef<K, V, IS> {
        // Only the Arc handle is cloned, never the node payload.
        NodeRef { col: self.col, data: self.data.clone() }
    }
}

// Allocates a new interior node with the given color, children and item.
fn new_node<K: Ord+Clone+Send+Sync, V: Clone+Send+Sync, IS: ItemStore<K, V>>(
    color: Color,
    left: NodeRef<K, V, IS>,
    item: IS,
    right: NodeRef<K, V, IS>)
 -> NodeRef<K, V, IS> {
    let node = NodeRef {
        col: color,
        data: Some(
            Arc::new(
                NodeData {
                    left: left,
                    item: item,
                    right: right
                }
            )
        )
    };
    assert!(!node.is_leaf());
    return node;
}

// Creates a leaf sentinel. Leaves are always Black, or DoubleBlack while a
// deletion is being rebalanced.
fn new_leaf<K: Ord+Clone+Send+Sync, V: Clone+Send+Sync, IS: ItemStore<K, V>>(
    color: Color)
 -> NodeRef<K, V, IS> {
    assert!(color == Black || color == DoubleBlack);
    let leaf = NodeRef { col: color, data: None };
    assert!(leaf.is_leaf());
    return leaf;
}

impl<K: Ord+Clone+Send+Sync, V: Clone+Send+Sync, IS: ItemStore<K, V>> NodeRef<K, V, IS> {
    fn is_leaf(&self) -> bool {
        self.data.is_none()
    }

    // Borrow the payload; must only be called on interior nodes.
    fn get_data<'a>(&'a self) -> &'a NodeData<K, V, IS> {
        match self.data {
            Some(ref data_ref) => data_ref.deref(),
            None => unreachable!()
        }
    }

    // Repaint Red (must not be a leaf, since leaves are never Red).
    fn redden(self) -> NodeRef<K, V, IS> {
        assert!(!self.is_leaf());
        NodeRef { col: Red, data: self.data }
    }

    // Repaint Black; the payload is shared, only the color changes.
    fn blacken(self) -> NodeRef<K, V, IS> {
        NodeRef { col: Black, data: self.data }
    }

    // Make this reference one step "blacker".
    fn inc(mut self) -> NodeRef<K, V, IS> {
        self.col = self.col.inc();
        self
    }

    // Make this reference one step "redder".
    fn dec(mut self) -> NodeRef<K, V, IS> {
        self.col = self.col.dec();
        self
    }

    // Standard binary-search lookup, returning a borrow of the value.
    fn find<'a>(&'a self, search_key: &K) -> Option<&'a V> {
        match self.data {
            Some(ref data_ref) => {
                let data_ref = data_ref.deref();

                if *search_key < *data_ref.item.key() {
                    data_ref.left.find(search_key)
                } else if *search_key > *data_ref.item.key() {
                    data_ref.right.find(search_key)
                } else {
                    Some(data_ref.item.val())
                }
            }
            None => None
        }
    }

    // Calculates the max black nodes on path:
    // fn count_black_height(&self, combine: |u64, u64| -> u64) -> u64 {
    //     assert!(self.col == Red || self.col == Black);
    //     match self.data {
    //         Some(ref data_ref) => {
    //             let data_ref = data_ref.get();
    //             let this = if self.col == Black { 1 } else { 0 };
    //             let sub = combine(data_ref.left.count_black_height(|a, b| combine(a, b)),
    //                               data_ref.right.count_black_height(|a, b| combine(a, b)));
    //             this + sub
    //         }
    //         None => { 1 }
    //     }
    // }

    // Does this tree contain a red child of red?
    // fn no_red_red(&self) -> bool {
    //     assert!(self.col == Red || self.col == Black);
    //     if !self.is_leaf() {
    //         let (l, r) = self.children();
    //         assert!(l.col == Red || l.col == Black);
    //         assert!(r.col == Red || r.col == Black);
    //         if self.col == Black {
    //             return l.no_red_red() && r.no_red_red();
    //         }
    //         if self.col == Red && l.col == Black && r.col == Black {
    //             return l.no_red_red() && r.no_red_red();
    //         }
    //         return false;
    //     } else {
    //         return true;
    //     }
    // }

    // Is this tree black-balanced?
    // fn black_balanced(&self) -> bool {
    //     self.count_black_height(::std::num::max) == self.count_black_height(::std::num::min)
    // }

    // Returns the maxium (key . value) pair:
    fn find_max_kvp<'a>(&'a self) -> &'a IS {
        assert!(!self.is_leaf());
        let node = self.get_data();
        if node.right.is_leaf() {
            &node.item
        } else {
            node.right.find_max_kvp()
        }
    }

    // Inserts or overwrites `kvp`. `insertion_count` is set to 1 when a new
    // entry was added, and to 0 when an existing key was overwritten.
    fn modify_at(&self, kvp: IS, insertion_count: &mut uint) -> NodeRef<K, V, IS> {
        // The root is re-blackened after the recursive insert.
        self.modify_at_rec(kvp, insertion_count).blacken()
    }

    fn modify_at_rec(&self, kvp: IS, insertion_count: &mut uint) -> NodeRef<K, V, IS> {
        if self.is_leaf() {
            assert!(self.col == Black);
            *insertion_count = 1;
            // New entries always start out Red between two Black leaves.
            new_node(Red, new_leaf(Black), kvp, new_leaf(Black))
        } else {
            let node_data = self.get_data();
            let node_color = self.col;

            if *kvp.key() < *node_data.item.key() {
                new_node(node_color,
                         node_data.left.modify_at_rec(kvp, insertion_count),
                         node_data.item.clone(),
                         node_data.right.clone())
                    .balance()
            } else if *kvp.key() > *node_data.item.key() {
                new_node(node_color,
                         node_data.left.clone(),
                         node_data.item.clone(),
                         node_data.right.modify_at_rec(kvp, insertion_count))
                    .balance()
            } else {
                // Same key: replace the item in place, no rebalancing needed.
                *insertion_count = 0;
                new_node(node_color,
                         node_data.left.clone(),
                         kvp,
                         node_data.right.clone())
            }
        }
    }

    // Okasaki-style rebalancing, extended with the DoubleBlack/NegativeBlack
    // cases needed for deletion.
    // Rebalance after an insert/delete. Handles the four classic
    // red-child-of-red rotation cases, plus the DoubleBlack/NegativeBlack
    // cases that can only occur while a deletion bubbles up.
    fn balance(self) -> NodeRef<K, V, IS> {
        assert!(!self.is_leaf());
        {
            let root_data = self.get_data();
            let left_child = &root_data.left;
            let right_child = &root_data.right;

            // A (Double)Black grandparent with a Red child and Red grandchild
            // is rotated so the middle key becomes the (one step redder) root
            // of two Black subtrees.
            if self.col == Black || self.col == DoubleBlack {
                let result_col = self.col.dec();

                if left_child.col == Red {
                    assert!(!left_child.is_leaf());
                    let left_child_data = left_child.get_data();
                    let left_grand_child = &left_child_data.left;
                    let right_grand_child = &left_child_data.right;

                    // left-left case
                    if left_grand_child.col == Red {
                        assert!(!left_grand_child.is_leaf());
                        let left_grand_child_data = left_grand_child.get_data();
                        return new_node(result_col,
                                        new_node(Black,
                                                 left_grand_child_data.left.clone(),
                                                 left_grand_child_data.item.clone(),
                                                 left_grand_child_data.right.clone()),
                                        left_child.get_data().item.clone(),
                                        new_node(Black,
                                                 right_grand_child.clone(),
                                                 root_data.item.clone(),
                                                 right_child.clone())
                        );
                    }

                    // left-right case
                    if right_grand_child.col == Red {
                        assert!(!right_grand_child.is_leaf());
                        let right_grand_child_data = right_grand_child.get_data();
                        return new_node(result_col,
                                        new_node(Black,
                                                 left_grand_child.clone(),
                                                 left_child.get_data().item.clone(),
                                                 right_grand_child_data.left.clone()),
                                        right_grand_child_data.item.clone(),
                                        new_node(Black,
                                                 right_grand_child_data.right.clone(),
                                                 root_data.item.clone(),
                                                 right_child.clone()));
                    }
                }

                if right_child.col == Red {
                    assert!(!right_child.is_leaf());
                    let right_child_data = right_child.get_data();
                    let left_grand_child = &right_child_data.left;
                    let right_grand_child = &right_child_data.right;

                    // right-left case
                    if left_grand_child.col == Red {
                        assert!(!left_grand_child.is_leaf());
                        let left_grand_child_data = left_grand_child.get_data();
                        return new_node(result_col,
                                        new_node(Black,
                                                 left_child.clone(),
                                                 root_data.item.clone(),
                                                 left_grand_child_data.left.clone()),
                                        left_grand_child_data.item.clone(),
                                        new_node(Black,
                                                 left_grand_child_data.right.clone(),
                                                 right_child_data.item.clone(),
                                                 right_grand_child.clone()));
                    }

                    // right-right case
                    if right_grand_child.col == Red {
                        assert!(!right_grand_child.is_leaf());
                        let right_grand_child_data = right_grand_child.get_data();
                        return new_node(result_col,
                                        new_node(Black,
                                                 left_child.clone(),
                                                 root_data.item.clone(),
                                                 left_grand_child.clone()),
                                        right_child_data.item.clone(),
                                        new_node(Black,
                                                 right_grand_child_data.left.clone(),
                                                 right_grand_child_data.item.clone(),
                                                 right_grand_child_data.right.clone()));
                    }
                }
            }

            // Deletion-only cases: a DoubleBlack node with a NegativeBlack
            // child is rotated and the affected subtree recursively
            // rebalanced.
            if self.col == DoubleBlack {
                if right_child.col == NegativeBlack {
                    assert!(!right_child.is_leaf());
                    let right_child_data = right_child.get_data();
                    let left_grand_child = &right_child_data.left;
                    let right_grand_child = &right_child_data.right;

                    if !left_grand_child.is_leaf() &&
                       left_grand_child.col == Black &&
                       right_grand_child.col == Black {
                        let left_grand_child_data = left_grand_child.get_data();
                        return new_node(Black,
                                        new_node(Black,
                                                 left_child.clone(),
                                                 root_data.item.clone(),
                                                 left_grand_child_data.left.clone()),
                                        left_grand_child_data.item.clone(),
                                        new_node(Black,
                                                 left_grand_child_data.right.clone(),
                                                 right_child_data.item.clone(),
                                                 right_grand_child.clone().redden()).balance()
                        );
                    }
                }

                if left_child.col == NegativeBlack {
                    assert!(!left_child.is_leaf());
                    let left_child_data = left_child.get_data();
                    let left_grand_child = &left_child_data.left;
                    let right_grand_child = &left_child_data.right;

                    if left_grand_child.col == Black &&
                       !right_grand_child.is_leaf() &&
                       right_grand_child.col == Black {
                        let right_grand_child_data = right_grand_child.get_data();
                        return new_node(Black,
                                        new_node(Black,
                                                 left_grand_child.clone().redden(),
                                                 left_child_data.item.clone(),
                                                 right_grand_child_data.left.clone()).balance(),
                                        right_grand_child_data.item.clone(),
                                        new_node(Black,
                                                 right_grand_child_data.right.clone(),
                                                 root_data.item.clone(),
                                                 right_child.clone()));
                    }
                }
            }
        }

        // No rotation applied — the node is already balanced.
        return self;
    }

    // Deletes a key from this map. `removal_count` is set to 1 when the key
    // was present and to 0 otherwise.
    fn delete(&self, search_key: &K, removal_count: &mut uint) -> NodeRef<K, V, IS> {
        // Finds the node to be removed
        fn del<K: Ord+Clone+Send+Sync, V: Clone+Send+Sync, IS: ItemStore<K, V>>(
            node: &NodeRef<K, V, IS>,
            search_key: &K,
            removal_count: &mut uint)
         -> NodeRef<K, V, IS> {
            if !node.is_leaf() {
                let node_data = node.get_data();
                let node_key = node_data.item.key();

                if *search_key < *node_key {
                    bubble(node.col,
                           del(&node_data.left, search_key, removal_count),
                           node_data.item.clone(),
                           node_data.right.clone())
                } else if *search_key > *node_key {
                    bubble(node.col,
                           node_data.left.clone(),
                           node_data.item.clone(),
                           del(&node_data.right, search_key, removal_count))
                } else {
                    *removal_count = 1;
                    remove(node)
                }
            } else {
                // Key not present in the tree.
                *removal_count = 0;
                new_leaf(Black)
            }
        }

        // Removes this node. might leave behind a double-black node:
        fn remove<K: Ord+Clone+Send+Sync, V: Clone+Send+Sync, IS: ItemStore<K, V>>(
            node: &NodeRef<K, V, IS>)
         -> NodeRef<K, V, IS> {
            assert!(!node.is_leaf());
            let node_data = node.get_data();
            let left = &node_data.left;
            let right = &node_data.right;

            // Leaves on both sides: removing a Black node leaves a
            // DoubleBlack leaf so the lost black height stays accounted for.
            if left.is_leaf() && right.is_leaf() {
                return if node.col == Red {
                    new_leaf(Black)
                } else {
                    assert!(node.col == Black);
                    new_leaf(DoubleBlack)
                };
            }

            // A Red node with a single child is replaced by that child.
            if node.col == Red {
                if right.is_leaf() {
                    return left.clone();
                }
                if left.is_leaf() {
                    return right.clone();
                }
            }

            if node.col == Black {
                // A single Red child is repainted Black ...
                if left.col == Red && right.is_leaf() {
                    let left_child_data = left.get_data();
                    return new_node(Black,
                                    left_child_data.left.clone(),
                                    left_child_data.item.clone(),
                                    left_child_data.right.clone());
                }
                if left.is_leaf() && right.col == Red {
                    let right_child_data = right.get_data();
                    return new_node(Black,
                                    right_child_data.left.clone(),
                                    right_child_data.item.clone(),
                                    right_child_data.right.clone());
                }
                // ... while a single Black child becomes one step blacker.
                if left.is_leaf() && right.col == Black {
                    return right.clone().inc();
                }
                if left.col == Black && right.is_leaf() {
                    return left.clone().inc();
                }
            }

            // Two interior children: replace this node's item with the
            // maximum of the left subtree, then remove that maximum.
            if !left.is_leaf() && !right.is_leaf() {
                let kvp = left.find_max_kvp();
                return bubble(node.col,
                              remove_max(left),
                              kvp.clone(),
                              right.clone());
            }

            unreachable!();
        }

        // Kills a double-black, or moves it to the top:
        fn bubble<K: Ord+Clone+Send+Sync, V: Clone+Send+Sync, IS: ItemStore<K, V>>(
            color: Color,
            l: NodeRef<K, V, IS>,
            kvp: IS,
            r: NodeRef<K, V, IS>)
         -> NodeRef<K, V, IS> {
            if l.col == DoubleBlack || r.col == DoubleBlack {
                new_node(color.inc(), l.dec(), kvp, r.dec()).balance()
            } else {
                new_node(color, l, kvp, r)
            }
        }

        // Removes the max node:
        fn remove_max<K: Ord+Clone+Send+Sync, V: Clone+Send+Sync, IS: ItemStore<K, V>>(
            node: &NodeRef<K, V, IS>)
         -> NodeRef<K, V, IS> {
            assert!(!node.is_leaf());
            let node_data = node.get_data();
            if node_data.right.is_leaf() {
                remove(node)
            } else {
                bubble(node.col,
                       node_data.left.clone(),
                       node_data.item.clone(),
                       remove_max(&node_data.right))
            }
        }

        // Delete the key, and color the new root black
        del(self, search_key, removal_count).blacken()
    }
}

// The persistent tree itself: a root reference plus the entry count.
struct RedBlackTree<K, V, IS> {
    root: NodeRef<K, V, IS>,
    len: uint,
}

impl<K, V, IS: ItemStore<K, V>> Clone for RedBlackTree<K, V, IS> {
    fn clone(&self) -> RedBlackTree<K, V, IS> {
        // O(1): only the root reference is cloned; all nodes are shared.
        RedBlackTree { root: self.root.clone(), len: self.len }
    }
}

impl<K: Ord+Clone+Send+Sync, V: Clone+Send+Sync, IS: ItemStore<K, V>> RedBlackTree<K, V, IS> {
    pub fn new() -> RedBlackTree<K, V, IS> {
        RedBlackTree {
            root: new_leaf(Black),
            len: 0,
        }
    }

    pub fn find<'a>(&'a self, search_key: &K) -> Option<&'a V> {
        self.root.find(search_key)
    }

    // Returns the new tree and whether a new entry was inserted (false means
    // an existing key was overwritten).
    pub fn insert(self, kvp: IS) -> (RedBlackTree<K, V, IS>, bool) {
        // Sentinel value; the assert below checks it was actually written.
        let mut insertion_count = 0xdeadbeaf;
        let new_root = self.root.modify_at(kvp, &mut insertion_count);
        assert!(insertion_count != 0xdeadbeaf);
        (RedBlackTree { root: new_root, len: self.len + insertion_count },
         insertion_count != 0)
    }

    // Returns the new tree and whether the key was present.
    pub fn remove(self, key: &K) -> (RedBlackTree<K, V, IS>, bool) {
        let mut removal_count = 0xdeadbeaf;
        let new_root = self.root.delete(key, &mut removal_count);
        assert!(removal_count != 0xdeadbeaf);
        (RedBlackTree { root: new_root, len: self.len - removal_count },
         removal_count != 0)
    }

    // fn balanced(&self) -> bool {
    //     self.root.black_balanced()
    // }

    // fn no_red_red(&self) -> bool {
    //     self.root.no_red_red()
    // }
}

impl<K: Hash+Eq+Send+Sync+Ord+Clone, V: Send+Sync+Clone> PersistentMap<K, V>
for RedBlackTree<K, V, CopyStore<K, V>> {
    #[inline]
    fn insert(self, key: K, value: V) ->
(RedBlackTree<K, V, CopyStore<K, V>>, bool) {
        // Delegate to the inherent method, storing key and value by copy.
        self.insert(CopyStore::new(key, value))
    }

    #[inline]
    fn remove(self, key: &K) -> (RedBlackTree<K, V, CopyStore<K, V>>, bool) {
        self.remove(key)
    }
}

impl<K: Hash+Eq+Send+Sync+Ord+Clone, V: Send+Sync+Clone> PersistentMap<K, V>
for RedBlackTree<K, V, ShareStore<K, V>> {
    #[inline]
    fn insert(self, key: K, value: V) -> (RedBlackTree<K, V, ShareStore<K, V>>, bool) {
        // Delegate to the inherent method, storing the pair behind a shared
        // (refcounted) item store.
        self.insert(ShareStore::new(key, value))
    }

    #[inline]
    fn remove(self, key: &K) -> (RedBlackTree<K, V, ShareStore<K, V>>, bool) {
        self.remove(key)
    }
}

impl<K: Hash+Eq+Send+Sync+Ord+Clone, V: Send+Sync+Clone, IS: ItemStore<K, V>> Map<K, V>
for RedBlackTree<K, V, IS> {
    #[inline]
    fn find<'a>(&'a self, key: &K) -> Option<&'a V> {
        self.find(key)
    }
}

impl<K: Hash+Eq+Send+Sync+Ord+Clone, V: Send+Sync+Clone, IS: ItemStore<K, V>> Collection
for RedBlackTree<K, V, IS> {
    #[inline]
    fn len(&self) -> uint {
        self.len
    }
}

// Shared correctness tests and benchmarks, instantiated once with the copying
// item store and once with the shared (refcounted) item store.
#[cfg(test)]
mod tests {
    use super::RedBlackTree;
    use testing::Test;
    use test::Bencher;
    use item_store::{CopyStore, ShareStore};

    #[test]
    fn test_insert_copy() {
        Test::test_insert(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new());
    }

    #[test]
    fn test_insert_overwrite_copy() {
        Test::test_insert_overwrite(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new());
    }

    #[test]
    fn test_remove_copy() {
        Test::test_remove(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new());
    }

    #[bench]
    fn bench_insert_copy_10(bh: &mut Bencher) {
        Test::bench_insert(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new(), 10, bh);
    }

    #[bench]
    fn bench_insert_copy_1000(bh: &mut Bencher) {
        Test::bench_insert(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new(), 1000, bh);
    }

    #[bench]
    fn bench_insert_copy_100000(bh: &mut Bencher) {
        Test::bench_insert(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new(), 100000, bh);
    }

    #[bench]
    fn bench_find_copy_10(bh: &mut Bencher) {
        Test::bench_find(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new(), 10, bh);
    }

    #[bench]
    fn bench_find_copy_1000(bh: &mut Bencher) {
        Test::bench_find(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new(), 1000, bh);
    }

    #[bench]
    fn bench_find_copy_100000(bh: &mut Bencher) {
        Test::bench_find(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new(), 100000, bh);
    }

    #[bench]
    fn bench_remove_copy_10(bh: &mut Bencher) {
        Test::bench_remove(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new(), 10, bh);
    }

    #[bench]
    fn bench_remove_copy_1000(bh: &mut Bencher) {
        Test::bench_remove(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new(), 1000, bh);
    }

    #[bench]
    fn bench_remove_copy_100000(bh: &mut Bencher) {
        Test::bench_remove(RedBlackTree::<u64, u64, CopyStore<u64, u64>>::new(), 100000, bh);
    }

    #[test]
    fn test_insert_shared() {
        Test::test_insert(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new());
    }

    #[test]
    fn test_insert_overwrite_shared() {
        Test::test_insert_overwrite(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new());
    }

    #[test]
    fn test_remove_shared() {
        Test::test_remove(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new());
    }

    #[bench]
    fn bench_insert_shared_10(bh: &mut Bencher) {
        Test::bench_insert(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new(), 10, bh);
    }

    #[bench]
    fn bench_insert_shared_1000(bh: &mut Bencher) {
        Test::bench_insert(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new(), 1000, bh);
    }

    #[bench]
    fn bench_insert_shared_100000(bh: &mut Bencher) {
        Test::bench_insert(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new(), 100000, bh);
    }

    #[bench]
    fn bench_find_shared_10(bh: &mut Bencher) {
        Test::bench_find(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new(), 10, bh);
    }

    #[bench]
    fn bench_find_shared_1000(bh: &mut Bencher) {
        Test::bench_find(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new(), 1000, bh);
    }

    #[bench]
    fn bench_find_shared_100000(bh: &mut Bencher) {
        Test::bench_find(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new(), 100000, bh);
    }

    #[bench]
    fn bench_remove_shared_10(bh: &mut Bencher) {
        Test::bench_remove(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new(), 10, bh);
    }

    #[bench]
    fn bench_remove_shared_1000(bh: &mut Bencher) {
        Test::bench_remove(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new(), 1000, bh);
    }

    #[bench]
    fn bench_remove_shared_100000(bh: &mut Bencher) {
        Test::bench_remove(RedBlackTree::<u64, u64, ShareStore<u64, u64>>::new(), 100000, bh);
    }
}
use crate::architecture::*; use crate::loader::*; use crate::memory::backing::Memory; use crate::Error; use log::warn; use std::collections::BTreeMap; use std::fs::File; use std::io::Read; use std::path::{Path, PathBuf}; /// The address where the first library will be loaded const DEFAULT_LIB_BASE: u64 = 0x4000_0000; /// The step in address between where we will load libraries. const LIB_BASE_STEP: u64 = 0x0200_0000; // Some MIPS-specific DT entries. This will eventually land in Goblin. const DT_MIPS_LOCAL_GOTNO: u64 = 0x7000_000a; const DT_MIPS_GOTSYM: u64 = 0x7000_0013; const DT_MIPS_SYMTABNO: u64 = 0x7000_0011; /// A helper to build an ElfLinker using the builder pattern. #[derive(Clone, Debug)] pub struct ElfLinkerBuilder { filename: PathBuf, do_relocations: bool, just_interpreter: bool, ld_paths: Option<Vec<PathBuf>>, } impl ElfLinkerBuilder { /// Create a new ElfLinker pub fn new(filename: PathBuf) -> ElfLinkerBuilder { ElfLinkerBuilder { filename, do_relocations: true, just_interpreter: false, ld_paths: None, } } /// This ElfLinker should perform relocations (default true) pub fn do_relocations(mut self, do_relocations: bool) -> Self { self.do_relocations = do_relocations; self } /// This ElfLinker should only link in the program interpreter, specified /// by DT_INTERPRETER (default false) pub fn just_interpreter(mut self, just_interpreter: bool) -> Self { self.just_interpreter = just_interpreter; self } /// Set the paths where the ElfLinker should look for shared objects and /// depenedncies pub fn ld_paths<P: Into<PathBuf>>(mut self, ld_paths: Option<Vec<P>>) -> Self { self.ld_paths = ld_paths.map(|v| v.into_iter().map(|p| p.into()).collect::<Vec<PathBuf>>()); self } /// Get the ElfLinker for this ElfLinkerBuilder pub fn link(self) -> Result<ElfLinker, Error> { ElfLinker::new( self.filename, self.do_relocations, self.just_interpreter, self.ld_paths, ) } } /// Loader which links together multiple Elf files. 
///
/// Can do some rudimentary linking of binaries.
#[derive(Debug)]
pub struct ElfLinker {
    /// The filename (path included) of the file we're loading.
    filename: PathBuf,
    /// A mapping from lib name (for example `libc.so.6`) to Elf.
    loaded: BTreeMap<String, Elf>,
    /// The current memory mapping.
    memory: Memory,
    /// A mapping of function symbol names to addresses
    symbols: BTreeMap<String, u64>,
    /// The address we will place the next library at.
    next_lib_address: u64,
    /// Functions as specified by the user
    user_functions: Vec<u64>,
    /// If set, we will do relocations as we link
    do_relocations: bool,
    /// If set, we will only bring in the DT_INTERPRETER entry, as would happen
    /// if a process was loaded normally.
    just_interpreter: bool,
    /// The paths where ElfLinker will look for dependencies
    ld_paths: Option<Vec<PathBuf>>,
}

impl ElfLinker {
    /// Create a new ElfLinker.
    ///
    /// It is recommended you use ElfLinkerBuilder to build an ElfLinker.
    pub fn new(
        filename: PathBuf,
        do_relocations: bool,
        just_interpreter: bool,
        ld_paths: Option<Vec<PathBuf>>,
    ) -> Result<ElfLinker, Error> {
        let mut file = File::open(&filename)?;
        let mut buf = Vec::new();
        file.read_to_end(&mut buf)?;

        // get the endianness of this elf for the memory model
        let mut endian = Endian::Big;
        if let goblin::Object::Elf(elf_peek) = goblin::Object::parse(&buf)? {
            if elf_peek.header.endianness()?.is_little() {
                endian = Endian::Little;
            }
        } else {
            return Err(Error::InvalidFileFormat(format!(
                "{} was not an elf",
                filename.to_str().unwrap()
            )));
        }

        let mut elf_linker = ElfLinker {
            filename: filename.clone(),
            loaded: BTreeMap::new(),
            memory: Memory::new(endian),
            symbols: BTreeMap::new(),
            next_lib_address: DEFAULT_LIB_BASE,
            user_functions: Vec::new(),
            do_relocations,
            just_interpreter,
            ld_paths,
        };

        // The primary program is loaded at base address 0; libraries get
        // DEFAULT_LIB_BASE + n * LIB_BASE_STEP as they are pulled in.
        elf_linker.load_elf(&filename, 0)?;

        Ok(elf_linker)
    }

    /// Get the ELFs loaded and linked in this loader
    pub fn loaded(&self) -> &BTreeMap<String, Elf> {
        &self.loaded
    }

    /// Get the filename of the ELF we're loading
    pub fn filename(&self) -> &Path {
        &self.filename
    }

    /// Takes the path to an Elf, and a base address the Elf should be loaded
    /// at. Loads the Elf, all it's dependencies (DT_NEEDED), and then handles
    /// the supported relocations.
    pub fn load_elf(&mut self, filename: &Path, base_address: u64) -> Result<(), Error> {
        // If ld_paths were configured, try each search path in turn (stripping
        // a leading "/" so absolute names join cleanly) and use the first path
        // that exists; otherwise fall back to the filename as given.
        let path = self
            .ld_paths
            .as_ref()
            .map(|ld_paths| {
                ld_paths
                    .iter()
                    .map(|ld_path| {
                        let filename = if filename.starts_with("/") {
                            let filename = filename.to_str().unwrap();
                            Path::new(filename.split_at(1).1)
                        } else {
                            filename
                        };
                        ld_path.to_path_buf().join(filename)
                    })
                    .find(|path| path.exists())
                    .unwrap_or_else(|| filename.to_path_buf())
            })
            .unwrap_or_else(|| filename.to_path_buf());

        let elf = Elf::from_file_with_base_address(&path, base_address)?;

        // Update our memory map based on what's in the Elf
        for (address, section) in elf.memory()?.sections() {
            self.memory
                .set_memory(*address, section.data().to_owned(), section.permissions());
        }

        // Add this Elf to the loaded Elfs, keyed by its bare file name
        // (e.g. "libc.so.6"), which is how DT_NEEDED entries refer to it.
        let filename = filename.file_name().unwrap().to_str().unwrap().to_string();
        self.loaded.insert(filename.clone(), elf);

        {
            let elf = &self.loaded[&filename];
            // Add its exported symbols to our symbols. First definition wins:
            // an already-recorded symbol is never overwritten.
            for symbol in elf.exported_symbols() {
                if self.symbols.get(symbol.name()).is_some() {
                    continue;
                }
                self.symbols.insert(
                    symbol.name().to_string(),
                    elf.base_address() + symbol.address(),
                );
            }
        }

        if self.just_interpreter {
            // Only bring in the program interpreter, if one is declared.
            let interpreter_filename = self.loaded[&filename]
                .elf()
                .interpreter
                .map(|s| s.to_string());
            if let Some(interpreter_filename) = interpreter_filename {
                self.load_elf(Path::new(&interpreter_filename), DEFAULT_LIB_BASE)?;
            }
        } else {
            // Ensure all shared objects we rely on are loaded
            for so_name in self.loaded[&filename].dt_needed()? {
                if self.loaded.get(&so_name).is_none() {
                    // Bump first, then recurse; recursion may bump it again
                    // for transitive dependencies.
                    self.next_lib_address += LIB_BASE_STEP;
                    let next_lib_address = self.next_lib_address;
                    self.load_elf(Path::new(&so_name), next_lib_address)?;
                }
            }
        }

        if self.do_relocations {
            // Only x86 (EM_386) and MIPS relocations are supported.
            match self.loaded[&filename].elf().header.e_machine {
                goblin::elf::header::EM_386 => self.relocations_x86(&filename)?,
                goblin::elf::header::EM_MIPS => self.relocations_mips(&filename)?,
                _ => return Err(Error::ElfLinkerRelocationsUnsupported),
            }
        }

        Ok(())
    }

    /// Get the `Elf` for the primary elf loaded.
    pub fn get_elf(&self) -> Result<&Elf, Error> {
        let loaded = self.loaded();
        let filename = self
            .filename()
            .file_name()
            .and_then(|filename| filename.to_str())
            .ok_or("Could not get filename for ElfLinker's primary program")?;
        let elf = loaded
            .get(filename)
            .ok_or(format!("Could not get {} from ElfLinker", filename))?;
        Ok(elf)
    }

    /// If the primary `Elf` we're loading has an interpreter designated in its
    /// dynamic section, get the `Elf` for the interpreter.
    pub fn get_interpreter(&self) -> Result<Option<&Elf>, Error> {
        let elf = self.get_elf()?;
        let interpreter_elf = match elf.elf().interpreter {
            Some(interpreter_filename) => {
                let interpreter_filename = Path::new(interpreter_filename)
                    .file_name()
                    .and_then(|filename| filename.to_str())
                    .ok_or_else(|| {
                        Error::Custom(
                            "Failed to get filename portion of interpreter filename".to_string(),
                        )
                    })?;
                Some(self.loaded().get(interpreter_filename).ok_or(format!(
                    "Could not find interpreter {}",
                    interpreter_filename
                ))?)
            }
            None => None,
        };
        Ok(interpreter_elf)
    }

    /// Perform x86-specific relocations
    fn relocations_x86(&mut self, filename: &str) -> Result<(), Error> {
        // Process relocations
        let elf = &self.loaded[filename];
        let dynsyms = elf.elf().dynsyms;
        let dynstrtab = elf.elf().dynstrtab;
        // Walk RELA, REL and PLT relocation tables in one pass.
        for reloc in elf
            .elf()
            .dynrelas
            .iter()
            .chain(elf.elf().dynrels.iter().chain(elf.elf().pltrelocs.iter()))
        {
            match reloc.r_type {
                // Direct 32-bit: write the resolved symbol address.
                goblin::elf::reloc::R_386_32 => {
                    let sym = &dynsyms
                        .get(reloc.r_sym)
                        .expect("Unable to resolve relocation symbol");
                    let sym_name = &dynstrtab[sym.st_name];
                    let value = match self.symbols.get(sym_name) {
                        Some(v) => v.to_owned() as u32,
                        None => {
                            return Err(Error::Custom(format!(
                                "Could not resolve symbol {}",
                                sym_name
                            )))
                        }
                    };
                    self.memory
                        .set32(reloc.r_offset + elf.base_address(), value)?;
                }
                goblin::elf::reloc::R_386_GOT32 => {
                    return Err(Error::Custom("R_386_GOT32".to_string()))
                }
                goblin::elf::reloc::R_386_PLT32 => {
                    let sym = &dynsyms
                        .get(reloc.r_sym)
                        .expect("Unable to resolve relocation symbol");
                    let sym_name = &dynstrtab[sym.st_name];
                    return Err(Error::Custom(format!(
                        "R_386_PLT32 {:?}:0x{:x}:{}",
                        self.filename, reloc.r_offset, sym_name
                    )));
                }
                goblin::elf::reloc::R_386_COPY => {
                    return Err(Error::Custom("R_386_COPY".to_string()))
                }
                // GOT entry: like R_386_32, but an unresolved symbol is only
                // warned about, not fatal.
                goblin::elf::reloc::R_386_GLOB_DAT => {
                    let sym = &dynsyms
                        .get(reloc.r_sym)
                        .expect("Unable to resolve relocation symbol");
                    let sym_name = &dynstrtab[sym.st_name];
                    let value = match self.symbols.get(sym_name) {
                        Some(v) => v.to_owned() as u32,
                        None => {
                            warn!("Could not resolve symbol {}", sym_name);
                            continue;
                        }
                    };
                    self.memory
                        .set32(reloc.r_offset + elf.base_address(), value)?;
                }
                // PLT slot: unresolved symbols are fatal here.
                goblin::elf::reloc::R_386_JMP_SLOT => {
                    let sym = &dynsyms
                        .get(reloc.r_sym)
                        .expect("Unable to resolve relocation symbol");
                    let sym_name = &dynstrtab[sym.st_name];
                    let value = match self.symbols.get(sym_name) {
                        Some(v) => v.to_owned() as u32,
                        None => {
                            return Err(Error::Custom(format!(
                                "Could not resolve symbol {}",
                                sym_name
                            )))
                        }
                    };
                    self.memory
                        .set32(reloc.r_offset + elf.base_address(), value)?;
                }
                // Base-relative: add the load base to the value in place.
                goblin::elf::reloc::R_386_RELATIVE => {
                    let value = self.memory.get32(reloc.r_offset + elf.base_address());
                    let value = match value {
                        Some(value) => elf.base_address() as u32 + value,
                        None => {
                            return Err(Error::Custom(format!(
                                "Invalid address for R_386_RELATIVE {:?}:{:x}",
                                self.filename, reloc.r_offset,
                            )))
                        }
                    };
                    self.memory
                        .set32(reloc.r_offset + elf.base_address(), value)?;
                }
                goblin::elf::reloc::R_386_GOTPC => {
                    return Err(Error::Custom("R_386_GOT_PC".to_string()))
                }
                goblin::elf::reloc::R_386_TLS_TPOFF => {
                    return Err(Error::Custom(
                        "Ignoring R_386_TLS_TPOFF Relocation".to_string(),
                    ))
                }
                goblin::elf::reloc::R_386_IRELATIVE => {
                    return Err(Error::Custom(format!(
                        "R_386_IRELATIVE {:?}:0x{:x} going unprocessed",
                        self.filename, reloc.r_offset
                    )))
                }
                _ => {
                    return Err(Error::Custom(format!(
                        "unhandled relocation type {}",
                        reloc.r_type
                    )))
                }
            }
        }
        Ok(())
    }

    /// Perform MIPS-specific relocations
    fn relocations_mips(&mut self, filename: &str) -> Result<(), Error> {
        let elf = &self.loaded[filename];

        // Look up the d_val of the first dynamic entry with the given tag.
        fn get_dynamic(elf: &Elf, tag: u64) -> Option<u64> {
            elf.elf().dynamic.and_then(|dynamic| {
                dynamic
                    .dyns
                    .iter()
                    .find(|dyn_| dyn_.d_tag == tag)
                    .map(|dyn_| dyn_.d_val)
            })
        }

        // The number of local GOT entries. Also an index into the GOT
        // for the first external GOT entry.
        let local_gotno =
            get_dynamic(elf, DT_MIPS_LOCAL_GOTNO).ok_or("Could not get DT_MIPS_LOCAL_GOTNO")?;

        // Index of the first dynamic symbol table entry that corresponds
        // to an entry in the GOT.
        let gotsym = get_dynamic(elf, DT_MIPS_GOTSYM).ok_or("Could not get DT_MIPS_GOTSYM")?;

        // The number of entries in the dynamic symbol table
        let symtabno =
            get_dynamic(elf, DT_MIPS_SYMTABNO).ok_or("Could not get DT_MIPS_SYMTABNO")?;

        // The address of the GOT section
        let pltgot =
            get_dynamic(elf, goblin::elf::dynamic::DT_PLTGOT).ok_or("Could not get DT_PLTGOT")?;

        // Start by adding the base address to all entries in the GOT.
        // GOT entries are 4 bytes each (32-bit MIPS).
        for i in 0..(local_gotno + (symtabno - gotsym)) {
            let address = elf.base_address() + (i * 4) + pltgot;
            let value = self.memory.get32(address).ok_or(format!(
                "Could not get memory at address 0x{:x} for adding base address",
                address
            ))?;
            self.memory
                .set32(address, value.wrapping_add(elf.base_address() as u32))?;
        }

        let dynstrtab = elf.elf().dynstrtab;
        let dynsyms = elf.elf().dynsyms;

        // Walk the external portion of the GOT in lockstep with dynamic
        // symbols [gotsym, symtabno).
        let mut address = pltgot + elf.base_address() + (local_gotno * 4);
        for i in gotsym..(symtabno) {
            let sym = dynsyms
                .get(i as usize)
                .ok_or(format!("Could not get symbol {}", i))?;
            let symbol_name = dynstrtab
                .get_at(sym.st_name)
                .ok_or(format!("Could not get symbol name for {}", i))?;
            // Internal entries have already been relocated, so we only need to
            // relocate external entries
            if sym.st_shndx == 0 {
                if let Some(value) = self.symbols.get(symbol_name) {
                    self.memory.set32(address, *value as u32)?;
                } else {
                    // NOTE(review): this format! result is discarded — an
                    // unresolved external symbol is silently ignored. This
                    // looks like it was meant to be a warn!/error — confirm.
                    format!("Could not get symbol with name: \"{}\"", symbol_name);
                }
            }
            address += 4;
        }

        // handle all relocation entries
        for dynrel in elf.elf().dynrels.iter() {
            if dynrel.r_type == goblin::elf::reloc::R_MIPS_REL32 {
                let value = self
                    .memory
                    .get32(dynrel.r_offset + elf.base_address())
                    .ok_or(format!(
                        "Could not load R_MIPS_REL32 at 0x{:x}",
                        dynrel.r_offset + elf.base_address()
                    ))?;
                self.memory.set32(
                    dynrel.r_offset + elf.base_address(),
                    value + (elf.base_address() as u32),
                )?;
            }
        }

        Ok(())
    }

    /// Inform the linker of a function at the given address.
    ///
    /// This function will be added to calls to `function_entries` and will be automatically
    /// lifted when calling `to_program`.
    pub fn add_user_function(&mut self, address: u64) {
        self.user_functions.push(address);
    }
}

impl Loader for ElfLinker {
    // Returns a clone of the fully-linked memory image.
    fn memory(&self) -> Result<Memory, Error> {
        Ok(self.memory.clone())
    }

    // Function entries from every loaded ELF, plus user-registered addresses.
    fn function_entries(&self) -> Result<Vec<FunctionEntry>, Error> {
        let mut function_entries = Vec::new();
        for loaded in &self.loaded {
            function_entries.append(&mut loaded.1.function_entries()?);
        }
        for address in &self.user_functions {
            function_entries.push(FunctionEntry::new(*address, None));
        }
        Ok(function_entries)
    }

    // TODO Just maybe a bit too much unwrapping here.
    fn program_entry(&self) -> u64 {
        let filename = self
            .filename
            .as_path()
            .file_name()
            .unwrap()
            .to_str()
            .unwrap();
        self.loaded[filename].program_entry()
    }

    // Architecture of the primary ELF.
    fn architecture(&self) -> &dyn Architecture {
        let filename = self
            .filename
            .as_path()
            .file_name()
            .unwrap()
            .to_str()
            .unwrap();
        self.loaded[filename].architecture()
    }

    fn as_any(&self) -> &dyn Any {
        self
    }

    // All symbols from all loaded ELFs.
    fn symbols(&self) -> Vec<Symbol> {
        self.loaded
            .iter()
            .flat_map(|(_, elf)| elf.symbols())
            .collect()
    }
}
#[doc = "Reader of register C5ISR"] pub type R = crate::R<u32, super::C5ISR>; #[doc = "Reader of field `TEIF5`"] pub type TEIF5_R = crate::R<bool, bool>; #[doc = "Reader of field `CTCIF5`"] pub type CTCIF5_R = crate::R<bool, bool>; #[doc = "Reader of field `BRTIF5`"] pub type BRTIF5_R = crate::R<bool, bool>; #[doc = "Reader of field `BTIF5`"] pub type BTIF5_R = crate::R<bool, bool>; #[doc = "Reader of field `TCIF5`"] pub type TCIF5_R = crate::R<bool, bool>; #[doc = "Reader of field `CRQA5`"] pub type CRQA5_R = crate::R<bool, bool>; impl R { #[doc = "Bit 0 - Channel x transfer error interrupt flag This bit is set by hardware. It is cleared by software writing 1 to the corresponding bit in the DMA_IFCRy register."] #[inline(always)] pub fn teif5(&self) -> TEIF5_R { TEIF5_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Channel x Channel Transfer Complete interrupt flag This bit is set by hardware. It is cleared by software writing 1 to the corresponding bit in the DMA_IFCRy register. CTC is set when the last block was transferred and the channel has been automatically disabled. CTC is also set when the channel is suspended, as a result of writing EN bit to 0."] #[inline(always)] pub fn ctcif5(&self) -> CTCIF5_R { CTCIF5_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - Channel x block repeat transfer complete interrupt flag This bit is set by hardware. It is cleared by software writing 1 to the corresponding bit in the DMA_IFCRy register."] #[inline(always)] pub fn brtif5(&self) -> BRTIF5_R { BRTIF5_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - Channel x block transfer complete interrupt flag This bit is set by hardware. 
It is cleared by software writing 1 to the corresponding bit in the DMA_IFCRy register."] #[inline(always)] pub fn btif5(&self) -> BTIF5_R { BTIF5_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - channel x buffer transfer complete"] #[inline(always)] pub fn tcif5(&self) -> TCIF5_R { TCIF5_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 16 - channel x request active flag"] #[inline(always)] pub fn crqa5(&self) -> CRQA5_R { CRQA5_R::new(((self.bits >> 16) & 0x01) != 0) } }
use crate::{
    database::Database,
    services::{stdin::StdinService, stdout::StdoutService},
    utils::{module_for_path, packages_path},
    Exit, ProgramResult,
};
use candy_frontend::{ast_to_hir::AstToHir, hir, TracingConfig};
use candy_vm::{
    execution_controller::RunForever,
    fiber::EndedReason,
    heap::{HirId, SendPort, Struct, SymbolId},
    mir_to_lir::compile_lir,
    return_value_into_main_function,
    tracer::stack_trace::StackTracer,
    vm::{Status, Vm},
};
use clap::{Parser, ValueHint};
use std::{
    path::PathBuf,
    rc::Rc,
    time::{Duration, Instant},
};
use tracing::{debug, error};

/// Run a Candy program.
///
/// This command runs the given file, or, if no file is provided, the package of
/// your current working directory. The module should export a `main` function.
/// This function is then called with an environment.
#[derive(Parser, Debug)]
pub(crate) struct Options {
    /// The file or package to run. If none is provided, the package of your
    /// current working directory will be run.
    #[arg(value_hint = ValueHint::FilePath)]
    path: Option<PathBuf>,
}

// Compiles the module to LIR, runs the module body to discover `main`, then
// runs `main` with an environment providing stdin/stdout ports. Each phase
// (compilation, discovery, execution) is timed and logged at debug level.
pub(crate) fn run(options: Options) -> ProgramResult {
    let packages_path = packages_path();
    let db = Database::new_with_file_system_module_provider(packages_path);
    let module = module_for_path(options.path)?;

    // Tracing is disabled for plain runs.
    let tracing = TracingConfig::off();
    debug!("Running {module}.");

    // Phase 1: compile the module to LIR.
    let compilation_start = Instant::now();
    let mut tracer = StackTracer::default();
    let lir = Rc::new(compile_lir(&db, module, tracing).0);
    let compilation_end = Instant::now();
    debug!(
        "Compilation took {}.",
        format_duration(compilation_end - compilation_start),
    );

    // Phase 2: run the module body; its result should contain `main`.
    let mut ended = Vm::for_module(&*lir, &mut tracer).run_until_completion(&mut tracer);
    let main = match ended.reason {
        EndedReason::Finished(return_value) => {
            return_value_into_main_function(&lir.symbol_table, return_value).unwrap()
        }
        EndedReason::Panicked(panic) => {
            error!("The module panicked: {}", panic.reason);
            error!("{} is responsible.", panic.responsible);
            if let Some(span) = db.hir_id_to_span(panic.responsible) {
                error!("Responsible is at {span:?}.");
            }
            error!(
                "This is the stack trace:\n{}",
                tracer.format_panic_stack_trace_to_root_fiber(&db, &lir.as_ref().symbol_table),
            );
            return Err(Exit::CodePanicked);
        }
    };
    let discovery_end = Instant::now();
    debug!(
        "main function discovery took {}.",
        format_duration(discovery_end - compilation_end),
    );

    // Phase 3: run `main` with an environment exposing stdout/stdin ports.
    debug!("Running main function.");
    // TODO: Add more environment stuff.
    let mut vm = Vm::uninitialized(lir.clone());
    let mut stdout = StdoutService::new(&mut vm);
    let mut stdin = StdinService::new(&mut vm);
    let fields = [
        (
            SymbolId::STDOUT,
            SendPort::create(&mut ended.heap, stdout.channel),
        ),
        (
            SymbolId::STDIN,
            SendPort::create(&mut ended.heap, stdin.channel),
        ),
    ];
    let environment = Struct::create_with_symbol_keys(&mut ended.heap, true, fields).into();
    let mut tracer = StackTracer::default();
    let platform = HirId::create(&mut ended.heap, true, hir::Id::platform());
    vm.initialize_for_function(ended.heap, main, &[environment], platform, &mut tracer);
    // Drive the VM, pumping the stdio services between run slices until the
    // VM neither can run nor waits on operations.
    loop {
        match vm.status() {
            Status::CanRun => {
                vm.run(&mut RunForever, &mut tracer);
            }
            Status::WaitingForOperations => {}
            _ => break,
        }
        stdout.run(&mut vm);
        stdin.run(&mut vm);
        vm.free_unreferenced_channels();
    }
    let ended = vm.tear_down(&mut tracer);
    let result = match ended.reason {
        EndedReason::Finished(return_value) => {
            debug!("The main function returned: {return_value:?}");
            Ok(())
        }
        EndedReason::Panicked(panic) => {
            error!("The main function panicked: {}", panic.reason);
            error!("{} is responsible.", panic.responsible);
            error!(
                "This is the stack trace:\n{}",
                tracer.format_panic_stack_trace_to_root_fiber(&db, &lir.as_ref().symbol_table),
            );
            Err(Exit::CodePanicked)
        }
    };
    let execution_end = Instant::now();
    debug!(
        "Execution took {}.",
        format_duration(execution_end - discovery_end),
    );

    drop(lir); // Make sure the LIR is kept around until here.
    result
}

// Formats a duration as whole microseconds below 1 ms, else whole
// milliseconds.
fn format_duration(duration: Duration) -> String {
    if duration < Duration::from_millis(1) {
        format!("{} µs", duration.as_micros())
    } else {
        format!("{} ms", duration.as_millis())
    }
}
/// Reader of register ITLINE1
pub type R = crate::R<u32, super::ITLINE1>;
/// Reader of field `PVDOUT`
pub type PVDOUT_R = crate::R<bool, bool>;

impl R {
    /// Bit 0 - PVD supply monitoring interrupt request pending (EXTI line 16).
    #[inline(always)]
    pub fn pvdout(&self) -> PVDOUT_R {
        PVDOUT_R::new((self.bits & 1) == 1)
    }
}
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Set/get socket option levels, from socket.h.
pub const SOL_IP: i32 = 0;
pub const SOL_SOCKET: i32 = 1;
pub const SOL_TCP: i32 = 6;
pub const SOL_UDP: i32 = 17;
pub const SOL_IPV6: i32 = 41;
pub const SOL_ICMPV6: i32 = 58;
pub const SOL_RAW: i32 = 255;
pub const SOL_PACKET: i32 = 263;
pub const SOL_NETLINK: i32 = 270;

// Socket options from socket.h.
pub const SO_DEBUG: i32 = 1;
pub const SO_REUSEADDR: i32 = 2;
pub const SO_TYPE: i32 = 3;
pub const SO_ERROR: i32 = 4;
pub const SO_DONTROUTE: i32 = 5;
pub const SO_BROADCAST: i32 = 6;
pub const SO_SNDBUF: i32 = 7;
pub const SO_RCVBUF: i32 = 8;
pub const SO_KEEPALIVE: i32 = 9;
pub const SO_OOBINLINE: i32 = 10;
pub const SO_NO_CHECK: i32 = 11;
pub const SO_PRIORITY: i32 = 12;
pub const SO_LINGER: i32 = 13;
pub const SO_BSDCOMPAT: i32 = 14;
pub const SO_REUSEPORT: i32 = 15;
pub const SO_PASSCRED: i32 = 16;
pub const SO_PEERCRED: i32 = 17;
pub const SO_RCVLOWAT: i32 = 18;
pub const SO_SNDLOWAT: i32 = 19;
pub const SO_RCVTIMEO: i32 = 20;
pub const SO_SNDTIMEO: i32 = 21;
// Values 22-24 are security/attach options not mirrored here.
pub const SO_BINDTODEVICE: i32 = 25;
pub const SO_ATTACH_FILTER: i32 = 26;
pub const SO_DETACH_FILTER: i32 = 27;
// SO_GET_FILTER is an alias for SO_ATTACH_FILTER in the kernel headers.
pub const SO_GET_FILTER: i32 = SO_ATTACH_FILTER;
pub const SO_PEERNAME: i32 = 28;
pub const SO_TIMESTAMP: i32 = 29;
pub const SO_ACCEPTCONN: i32 = 30;
pub const SO_PEERSEC: i32 = 31;
pub const SO_SNDBUFFORCE: i32 = 32;
pub const SO_RCVBUFFORCE: i32 = 33;
pub const SO_PASSSEC: i32 = 34;
pub const SO_TIMESTAMPNS: i32 = 35;
pub const SO_MARK: i32 = 36;
pub const SO_TIMESTAMPING: i32 = 37;
pub const SO_PROTOCOL: i32 = 38;
pub const SO_DOMAIN: i32 = 39;
pub const SO_RXQ_OVFL: i32 = 40;
pub const SO_WIFI_STATUS: i32 = 41;
pub const SO_PEEK_OFF: i32 = 42;
pub const SO_NOFCS: i32 = 43;
pub const SO_LOCK_FILTER: i32 = 44;
pub const SO_SELECT_ERR_QUEUE: i32 = 45;
pub const SO_BUSY_POLL: i32 = 46;
pub const SO_MAX_PACING_RATE: i32 = 47;
pub const SO_BPF_EXTENSIONS: i32 = 48;
pub const SO_INCOMING_CPU: i32 = 49;
pub const SO_ATTACH_BPF: i32 = 50;
pub const SO_ATTACH_REUSEPORT_CBPF: i32 = 51;
pub const SO_ATTACH_REUSEPORT_EBPF: i32 = 52;
pub const SO_CNX_ADVICE: i32 = 53;
// 54 and 58 are skipped; presumably they correspond to SCM_*-only values
// in the kernel headers — confirm against include/uapi/asm-generic/socket.h.
pub const SO_MEMINFO: i32 = 55;
pub const SO_INCOMING_NAPI_ID: i32 = 56;
pub const SO_COOKIE: i32 = 57;
pub const SO_PEERGROUPS: i32 = 59;
pub const SO_ZEROCOPY: i32 = 60;
pub const SO_TXTIME: i32 = 61;

// shutdown(2) how commands, from <linux/net.h>.
pub const SHUT_RD: i32 = 0;
pub const SHUT_WR: i32 = 1;
pub const SHUT_RDWR: i32 = 2;

// enum socket_state, from uapi/linux/net.h.
pub const SS_FREE: i32 = 0; // Not allocated.
pub const SS_UNCONNECTED: i32 = 1; // Unconnected to any socket.
pub const SS_CONNECTING: i32 = 2; // In process of connecting.
pub const SS_CONNECTED: i32 = 3; // Connected to socket.
pub const SS_DISCONNECTING: i32 = 4; // In process of disconnecting.

// LingerOption is used by SetSockOpt/GetSockOpt to set/get the
// duration for which a socket lingers before returning from Close.
//
// +stateify savable
#[derive(Default, Debug)]
pub struct LingerOption {
    // NOTE(review): field names are UpperCamelCase, carried over from the
    // gVisor (Go) original rather than Rust snake_case convention.
    // Whether lingering is enabled.
    Enabled: bool,
    // Linger duration (kernel uses seconds in struct linger) — confirm units
    // against the callers.
    Timeout: i64,
}
use std::cmp::{Ord, Ordering}; use std::collections::{BinaryHeap, HashSet}; use std::time::Instant; use tokio::stream::StreamExt; use tracing::debug; use crate::protobuf::*; /// Maintains outstanding client orders to be retried when the timeout is reached pub(crate) struct ClientOrderManager { orders: BinaryHeap<OrderRequest>, done: HashSet<u64>, } impl ClientOrderManager { pub fn new() -> Self { Self { orders: BinaryHeap::new(), done: HashSet::new(), } } /// Adds a new order to the end of the order list that is ready to be sent pub fn add_order(&mut self, order: Order) { self.orders.push(OrderRequest::Ready(order)); } /// Adds a new order that becomes ready after the [timeout] is reached pub fn add_order_with_timeout(&mut self, order: Order, timeout: Instant) { self.orders .push(OrderRequest::WaitingResponse(order, timeout)); } /// Completes the order with the given sequence number. This is used to signal that the order no /// longer needs to be retried. pub fn complete_order(&mut self, seq_number: u64) { self.done.insert(seq_number); } /// If there are orders that are ready to be sent or retried, it returns an order, otherwise it /// returns None. 
pub fn poll_next_order(&mut self) -> Option<Order> { loop { let order = match self.orders.peek() { Some(OrderRequest::Ready(_)) => { self.orders.pop().map(|order_req| order_req.into_order()) } Some(OrderRequest::WaitingResponse(_, timeout)) => { if Instant::now() > *timeout { self.orders.pop().map(|o_req| o_req.into_order()) } else { None } } _ => None, }; debug!("Possible order: {:?}", order); match order { None => return None, Some(order) => { if self.done.contains(&order.seq_number) { debug!("Already ACK'd: {:?}", order); continue; } else { return Some(order); } } } } } } #[derive(Debug)] enum OrderRequest { Ready(Order), WaitingResponse(Order, Instant), Done(Order), } impl OrderRequest { fn into_order(self) -> Order { match self { OrderRequest::Ready(order) => order, OrderRequest::WaitingResponse(order, _) => order, OrderRequest::Done(order) => order, } } } impl Eq for Order {} impl Eq for OrderRequest {} impl PartialEq for OrderRequest { fn eq(&self, _other: &Self) -> bool { unimplemented!() } } impl Ord for Order { fn cmp(&self, other: &Self) -> Ordering { self.seq_number.cmp(&other.seq_number) } } impl Ord for OrderRequest { fn cmp(&self, other: &Self) -> Ordering { use OrderRequest::{Done, Ready, WaitingResponse}; match (self, other) { (Ready(ord), Ready(other_ord)) => ord.cmp(other_ord).reverse(), (Ready(_), _) => Ordering::Greater, (_, Ready(_)) => Ordering::Less, (WaitingResponse(order, timeout), WaitingResponse(other_order, other_timeout)) => { let primary_ord = timeout.cmp(other_timeout).reverse(); if primary_ord == Ordering::Equal { return order.cmp(other_order).reverse(); } primary_ord } (WaitingResponse(_, _), _) => Ordering::Greater, (_, WaitingResponse(_, _)) => Ordering::Less, (Done(ord), Done(other_ord)) => ord.cmp(other_ord).reverse(), } } } impl PartialOrd for Order { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl PartialOrd for OrderRequest { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { 
Some(self.cmp(other)) } } #[cfg(test)] mod test { use super::*; use std::time::Duration; #[test] fn test_ordering_works() { let mut hs = BinaryHeap::new(); hs.push(OrderRequest::Ready(generate_random_order_request(0, 1))); hs.push(OrderRequest::Ready(generate_random_order_request(0, 2))); hs.push(OrderRequest::WaitingResponse( generate_random_order_request(0, 3), Instant::now() + Duration::from_secs(10), )); hs.push(OrderRequest::WaitingResponse( generate_random_order_request(0, 4), Instant::now() + Duration::from_secs(1), )); hs.push(OrderRequest::Done(generate_random_order_request(0, 5))); while let Some(order_req) = hs.pop() { println!("{:?}", order_req); } } #[test] fn test_streaming_order_manager() { let mut om = ClientOrderManager::new(); let orders: Vec<Order> = (1..=5) .map(|_seq| generate_random_order_request(0, 2)) .collect(); orders.into_iter().for_each(|order| om.add_order(order)); let mut hs = BinaryHeap::new(); hs.push(OrderRequest::Ready(generate_random_order_request(0, 2))); hs.push(OrderRequest::Ready(generate_random_order_request(0, 2))); hs.push(OrderRequest::WaitingResponse( generate_random_order_request(0, 3), Instant::now() + Duration::from_secs(10), )); hs.push(OrderRequest::WaitingResponse( generate_random_order_request(0, 4), Instant::now() + Duration::from_secs(1), )); hs.push(OrderRequest::Done(generate_random_order_request(0, 5))); while let Some(order_req) = hs.pop() { println!("{:?}", order_req); } } fn generate_random_order_request(client_id: u64, seq_number: u64) -> Order { Order { client_id, price: 1754, seq_number, side: OrderSide::Buy as i32, size: 1, security_id: 0, } } }
//! Permissions use serde::{Deserialize, Serialize}; /// Permissions for a resource #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct Permission { /// Access Type pub action: Action, /// Resource object pub resource: crate::models::Resource, } impl Permission { /// Return instance of Permission pub fn new(action: Action, resource: crate::models::Resource) -> Self { Self { action, resource } } } /// Allowed Permission Action #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub enum Action { /// Read access Read, /// Write access Write, }
/// Sum the decimal digits of `number`.
///
/// Returns `None` when `number` is already a single digit (< 10), signalling
/// that the additive-persistence computation has terminated; otherwise
/// returns `Some(digit_sum)`.
///
/// (The previous doc comment incorrectly described this function as
/// computing the persistence itself.)
fn collapse(mut number: u128) -> Option<u128> {
    if number < 10 {
        return None;
    }
    let mut sum = 0;
    while number > 0 {
        sum += number % 10;
        number /= 10;
    }
    Some(sum)
}

/// Calculates the additive persistence of `number`: how many times its digits
/// must be summed before a single digit remains.
///
/// A number that is already a single digit (including 0) has persistence 0.
pub fn persistance(mut number: u128) -> usize {
    let mut iterations = 0;
    while let Some(digit_sum) = collapse(number) {
        number = digit_sum;
        iterations += 1;
    }
    iterations
}

#[cfg(test)]
mod tests {
    use super::persistance;

    #[test]
    fn test() {
        assert_eq!(persistance(13), 1);
        assert_eq!(persistance(1234), 2);
        assert_eq!(persistance(9876), 2);
        assert_eq!(persistance(199), 3);
        // Single digits are already collapsed.
        assert_eq!(persistance(7), 0);
        assert_eq!(persistance(0), 0);
    }
}
/// The `Array` global object
pub mod array;
/// The `Boolean` global object
pub mod boolean;
/// The `console` global object
pub mod console;
/// The `Error` global objects
pub mod error;
/// The `Function` global object
pub mod function;
/// The `JSON` global object
pub mod json;
/// The `Math` global object
pub mod math;
/// The `Number` global object and related global methods
pub mod number;
/// The `Object` global object
pub mod object;
/// The `String` global object
pub mod string;
/// The global URI methods
pub mod uri;
/// An arbitrary Javascript value
pub mod value;
#![feature(alloc)] use std::boxed; use std::fmt::Debug; use std::mem::{min_align_of_val, align_of_val, size_of_val}; use std::rt::heap::deallocate; use std::marker::PhantomData; #[allow(dead_code)] #[derive(Debug, Copy, Clone)] struct Something { value: u8 } #[allow(dead_code)] fn printalign<T: Debug>(v: T) { println!("Alignment of {:?} = {:?}", v, min_align_of_val(&v)); } #[allow(dead_code)] fn samplealignments() { // stack objects println!("Stack objects:"); printalign(8u8); printalign(16u16); printalign(32u32); printalign(64u64); printalign("str"); printalign(Something{value: 9}); // heap objects println!("Heap objects:"); printalign("String".to_string()); printalign(Box::new("str")); let f = Box::new(10u8); unsafe { let g = boxed::into_raw(f); printalign(g); Box::from_raw(g); } printalign(Box::new(Something{value: 11})); } struct Ptr<T> { p: *mut u8, phantom: PhantomData<T> } impl <T> Ptr<T> { fn new(value: T) -> Ptr<T> { let f = Box::new(value); let g = boxed::into_raw(f); Ptr{p: g as *mut u8, phantom: PhantomData} } } fn main() { let mut journal: [Option<usize>; 12] = [None; 12]; let mut journal_ptr = 0; let f = Box::new("foo"); let g = boxed::into_raw(f); journal[0] = Some(g as usize); journal_ptr += 1; let mut index = 0; while index < journal_ptr { if let Some(ptr) = journal[index] { unsafe { let raw = ptr as *mut u8; deallocate(raw, size_of_val(&raw), align_of_val(&raw)); //drop(Box::from_raw(ptr as *mut &str)); } } index += 1; } }
use std::io::{self, BufRead, Read};

/// A movement step: `right` columns and `down` rows per iteration.
struct Slope {
    right: usize,
    down: usize,
}

/// A zero-based grid position.
#[derive(Clone)]
struct Location {
    x: usize,
    y: usize,
}

/// The forest map plus the toboggan's current position.
#[derive(Clone)]
struct Map {
    // u8 so it is easy to count; 1 = tree, 0 = no tree.
    map: Vec<Vec<u8>>,
    current_pos: Location,
}

impl Map {
    /// Reads the map line-by-line: '.' becomes 0, anything else a tree (1).
    fn new<F: Read>(reader: F, initial_location: Location) -> Map {
        let reader = io::BufReader::new(reader);
        let mut map = Vec::<Vec<u8>>::new();
        for line in reader.lines() {
            let result = line
                .unwrap()
                .as_bytes()
                .to_vec()
                .into_iter()
                .map(|val| if val as char == '.' { 0 } else { 1 })
                .collect();
            map.push(result);
        }
        Map {
            map,
            current_pos: initial_location,
        }
    }

    /// Width of the map (all rows are assumed equal-length).
    fn width(&self) -> usize {
        self.map.first().unwrap().len()
    }

    /// Number of rows in the map.
    fn height(&self) -> usize {
        self.map.len()
    }

    /// Moves right with wrap-around (the pattern repeats horizontally).
    fn move_right(&mut self, step: usize) {
        self.current_pos.x = (self.current_pos.x + step) % self.width();
    }

    /// Moves down; returns false when the move would leave the map.
    fn move_down(&mut self, step: usize) -> bool {
        let next = self.current_pos.y + step;
        if next < self.height() {
            self.current_pos.y = next;
            true
        } else {
            false
        }
    }

    /// Value (0 or 1) at the current position.
    fn current_elem(&self) -> u8 {
        self.map[self.current_pos.y][self.current_pos.x]
    }
}

/// A toboggan sliding down `map` along `slope`.
struct Toboggan {
    map: Map,
    slope: Slope,
}

impl Toboggan {
    fn new(map: Map, slope: Slope) -> Self {
        Toboggan { map, slope }
    }

    /// Performs one step; returns false once the bottom is passed.
    fn next(&mut self) -> bool {
        self.map.move_right(self.slope.right);
        self.map.move_down(self.slope.down)
    }

    /// Counts trees hit on the way down (the start cell is not counted).
    fn slide(&mut self) -> usize {
        let mut nr_trees: usize = 0;
        while self.next() {
            nr_trees += self.map.current_elem() as usize;
        }
        nr_trees
    }
}

/// Solves AoC 2020 day 3: counts trees for several slopes and prints
/// the count for slope (3,1) and the product over all slopes.
pub fn solve_puzzle<F: Read>(reader: F) {
    let initial_location = Location { x: 0, y: 0 };
    let map = Map::new(reader, initial_location);
    let slopes = vec![
        Slope { right: 1, down: 1 },
        Slope { right: 3, down: 1 },
        Slope { right: 5, down: 1 },
        Slope { right: 7, down: 1 },
        Slope { right: 1, down: 2 },
    ];
    let nr_trees: Vec<usize> = slopes
        .into_iter()
        .map(|slope| Toboggan::new(map.clone(), slope).slide())
        .collect();
    println!("nr trees for slope nr 2={}", nr_trees[1]);
    println!(
        "product of nr trees on all slopes={}",
        // idiom fix: `.product()` instead of `fold(1, |acc, v| acc * v)`
        nr_trees.iter().product::<usize>()
    );
}
// Screen modules; only the selection screen is re-exported publicly.
mod selection;
mod snake;

pub use selection::SelectionScreen;
use efm32gg_hal::gpio;
use embedded_hal::digital::InputPin;
use efm32gg_hal::gpio::EFM32Pin;

/// A representation of the two user buttons on the STK3700
pub struct Buttons {
    button0: gpio::pins::PB9<gpio::Input>,
    button1: gpio::pins::PB10<gpio::Input>,
}

impl Buttons {
    /// Takes ownership of the (disabled) PB9/PB10 pins and configures them
    /// as inputs wired to the board's user buttons.
    pub fn new(pb9: gpio::pins::PB9<gpio::Disabled>, pb10: gpio::pins::PB10<gpio::Disabled>) -> Self {
        Buttons { button0: pb9.as_input(), button1: pb10.as_input() }
    }

    /// Return true if PB0 in depressed state at the time of the invocation.
    // NOTE: buttons are active-low, hence `is_low()`.
    pub fn button0_pressed(&self) -> bool {
        self.button0.is_low()
    }

    /// Return true if PB1 in depressed state at the time of the invocation.
    pub fn button1_pressed(&self) -> bool {
        self.button1.is_low()
    }
}
// Public submodules of this crate/section.
pub mod art_book;
pub mod book;
pub mod fifo;
pub mod optimised_fifo;
use console::{style, StyledObject};

/// Renders `msg` in the style used for URLs (blue, bold).
pub fn url(msg: &str) -> StyledObject<&str> {
    let styled = style(msg);
    styled.blue().bold()
}

/// Renders `msg` in the style used for warnings (red, bold).
pub fn warning(msg: &str) -> StyledObject<&str> {
    let styled = style(msg);
    styled.red().bold()
}

/// Renders `msg` in the style used for highlights (yellow, bold).
pub fn highlight(msg: &str) -> StyledObject<&str> {
    let styled = style(msg);
    styled.yellow().bold()
}
use std::ops::{Add, Sub, Mul, Div, AddAssign, SubAssign, MulAssign, DivAssign};

use crate::structs::offset4::Offset4;

/// Component-wise addition of two borrowed `Offset4`s.
impl<'a> Add for &'a Offset4 {
    type Output = Offset4;

    fn add(self, rhs: Self) -> Self::Output {
        let mut res = self.clone();
        res.0 += rhs.0;
        res.1 += rhs.1;
        res.2 += rhs.2;
        res.3 += rhs.3;
        res
    }
}

/// Component-wise subtraction.
impl<'a> Sub for &'a Offset4 {
    type Output = Offset4;

    fn sub(self, rhs: Self) -> Self::Output {
        let mut res = self.clone();
        res.0 -= rhs.0;
        res.1 -= rhs.1;
        res.2 -= rhs.2;
        res.3 -= rhs.3;
        res
    }
}

/// Component-wise multiplication.
impl<'a> Mul for &'a Offset4 {
    type Output = Offset4;

    fn mul(self, rhs: Self) -> Self::Output {
        let mut res = self.clone();
        res.0 *= rhs.0;
        res.1 *= rhs.1;
        res.2 *= rhs.2;
        // BUG FIX: was `res.3 += rhs.3;` — a copy-paste slip that added the
        // fourth component instead of multiplying it.
        res.3 *= rhs.3;
        res
    }
}

/// Component-wise division.
impl<'a> Div for &'a Offset4 {
    type Output = Offset4;

    fn div(self, rhs: Self) -> Self::Output {
        let mut res = self.clone();
        res.0 /= rhs.0;
        res.1 /= rhs.1;
        res.2 /= rhs.2;
        res.3 /= rhs.3;
        res
    }
}

impl AddAssign for Offset4 {
    fn add_assign(&mut self, rhs: Offset4) {
        self.0 += rhs.0;
        self.1 += rhs.1;
        self.2 += rhs.2;
        self.3 += rhs.3;
    }
}

// NOTE(review): the remaining *Assign impls are (unusually) implemented for
// `&mut Offset4` rather than `Offset4`; kept unchanged to preserve the
// existing caller-visible interface.
impl<'a> SubAssign for &'a mut Offset4 {
    fn sub_assign(&mut self, rhs: Self) {
        self.0 -= rhs.0;
        self.1 -= rhs.1;
        self.2 -= rhs.2;
        self.3 -= rhs.3;
    }
}

impl<'a> MulAssign for &'a mut Offset4 {
    fn mul_assign(&mut self, rhs: Self) {
        self.0 *= rhs.0;
        self.1 *= rhs.1;
        self.2 *= rhs.2;
        self.3 *= rhs.3;
    }
}

impl<'a> DivAssign for &'a mut Offset4 {
    fn div_assign(&mut self, rhs: Self) {
        self.0 /= rhs.0;
        self.1 /= rhs.1;
        self.2 /= rhs.2;
        self.3 /= rhs.3;
    }
}
use crate::structs::Component;
use libloading::{Library, Symbol};
use std::any::Any;
use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::read_dir;
use std::fs::DirEntry;
use std::path::PathBuf;

/// Signature of the `_plugin_create` symbol every plugin library must export.
pub type PluginCreate<'a> = unsafe fn(Library) -> *mut dyn Plugin;
pub type ComponentsHashMap = Vec<HashMap<String, Box<dyn Component>>>;
pub type SystemSymbol = unsafe fn(data: ComponentsHashMap);
pub type LoadedPlugin = Box<dyn Plugin>;

/// Returns true when `path` has the platform's dynamic-library extension.
#[cfg(windows)]
fn is_lib(path: PathBuf) -> bool {
    match path.extension() {
        Some(extension) => extension == "dll",
        _ => false,
    }
}

/// Returns true when `path` has the platform's dynamic-library extension.
#[cfg(unix)]
fn is_lib(path: PathBuf) -> bool {
    match path.extension() {
        Some(extension) => extension == "so",
        _ => false,
    }
}

/// Contract implemented by every dynamically loaded plugin.
pub trait Plugin: Any + Send + Sync + Debug {
    fn new() -> Self
    where
        Self: Sized;
    fn name(&self) -> &'static str;
    fn plugin_id(&self) -> &'static str;
    fn version(&self) -> &'static str;
    fn on_load(&self);
    fn systems(&self) -> Vec<&'static str>;
    /// Hands back the `Library` so it can outlive (and safely unload after)
    /// the plugin object.
    fn take_library(&mut self) -> Library;
}

pub struct PluginManager;

impl PluginManager {
    /// Loads every dynamic library in `folder_path` as a plugin.
    /// Unreadable entries and libraries that fail to load are skipped.
    pub unsafe fn load_plugin_folder(
        folder_path: PathBuf,
    ) -> Result<Vec<LoadedPlugin>, &'static str> {
        let plugins = read_dir(folder_path)
            // fix: the error value was unused — bind `_` instead of `e`
            .map_err(|_| "Cannot scan directory for plugins.")?
            // idiom: collapse `.filter(is_ok).map(unwrap)` into one step
            .filter_map(|e| e.ok())
            .filter(|e| is_lib(e.path()))
            .filter_map(|e| Self::load_plugin(e.path()).ok())
            .collect::<Vec<LoadedPlugin>>();
        Ok(plugins)
    }

    /// Loads a single plugin library and calls its `_plugin_create` constructor.
    pub unsafe fn load_plugin(filename: PathBuf) -> Result<LoadedPlugin, &'static str> {
        let lib = Library::new(filename).map_err(|_| "Unable to load the plugin")?;
        let constructor: Symbol<PluginCreate> = lib
            .get(b"_plugin_create")
            .map_err(|_| "The `_plugin_create` symbol wasn't found.")?;
        let boxed_raw = constructor(lib);
        let plugin = Box::from_raw(boxed_raw);
        Ok(plugin)
    }

    /// Runs `on_load` for each plugin. Takes a slice (was `&Vec<_>`);
    /// existing `&vec` callers still work via deref coercion.
    pub fn load(plugins: &[LoadedPlugin]) -> Result<(), &'static str> {
        for plugin in plugins {
            plugin.on_load();
            println!("Loaded plugin: {}", plugin.name());
        }
        Ok(())
    }

    /// Extracts the `Library` handles so they can be dropped after all
    /// plugin objects are gone.
    pub fn unload_plugins(mut plugins: Vec<LoadedPlugin>) -> Vec<Library> {
        plugins.iter_mut().map(|p| p.take_library()).collect()
    }
}
/// The kind of a piece on the board.
// NOTE(review): LEAF/TRUNK/BRANCH suggest a tree-themed game; upper-case
// variant names are non-idiomatic but kept — callers reference them.
#[derive(Debug, Clone)]
pub enum ChessKind {
    LEAF,
    TRUNK,
    BRANCH,
}

/// A zero-based (x, y) board coordinate.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Position {
    pub(crate) x: usize,
    pub(crate) y: usize,
}

/// A single piece: its kind, its board position and the camp owning it.
#[derive(Clone, Debug)]
pub struct Chess {
    pub kind: ChessKind,
    pub position: Position,
    pub camp: Camp,
}

/// The two sides a piece can belong to.
#[derive(Eq, PartialEq, Clone, Debug)]
pub enum Camp {
    GRAY,
    GREEN,
}

impl Chess {
    /// Creates a piece at `position` with the given kind and camp.
    pub fn new(position: Position, kind: ChessKind, camp: Camp) -> Self {
        Chess {
            kind,
            position,
            camp,
        }
    }
}
// Write code here. // // To see what the code looks like after macro expansion: // $ cargo expand // // To run the code: // $ cargo run use seq::seq; seq!(N in 0..16 { #[derive(Copy, Clone, PartialEq, Debug)] enum Interrupt { #( Irq#N, )* } }); fn main() { let interrupt = Interrupt::Irq8; assert_eq!(interrupt as u8, 8); assert_eq!(interrupt, Interrupt::Irq8); }
use leptos::*;

/// A styled `<button>` component that renders its children inside the button.
// Styling is Tailwind utility classes; behavior is purely presentational.
#[component]
pub fn Button(cx: Scope, children: Children) -> impl IntoView {
    view! { cx,
        <button class="p-1 my-2 transition bg-green-400 border-2 border-gray-900 rounded-md active:bg-green-300 hover:border-white">
            {children(cx)}
        </button>
    }
}
use std::convert::TryInto;

use proptest::strategy::Just;
use proptest::{prop_assert, prop_assert_eq};

use liblumen_alloc::erts::term::prelude::*;

use crate::erlang::make_tuple_2::result;
use crate::test::strategy;

// Property: a first argument that is not a valid arity must error badarg.
#[test]
fn without_arity_errors_badarg() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                strategy::term::is_not_arity(arc_process.clone()),
                strategy::term(arc_process.clone()),
            )
        },
        |(arc_process, arity, initial_value)| {
            prop_assert_is_not_arity!(result(&arc_process, arity, initial_value), arity);

            Ok(())
        },
    );
}

// Property: for any arity in 0..255, the result is a boxed tuple of exactly
// that length whose every element equals the initial value.
#[test]
fn with_arity_returns_tuple_with_arity_copies_of_initial_value() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                (0_usize..255_usize),
                strategy::term(arc_process),
            )
        },
        |(arc_process, arity_usize, initial_value)| {
            let arity = arc_process.integer(arity_usize);
            let result = result(&arc_process, arity, initial_value);

            prop_assert!(result.is_ok());

            let tuple_term = result.unwrap();

            prop_assert!(tuple_term.is_boxed());

            let boxed_tuple: Result<Boxed<Tuple>, _> = tuple_term.try_into();

            prop_assert!(boxed_tuple.is_ok());

            let tuple = boxed_tuple.unwrap();

            prop_assert_eq!(tuple.len(), arity_usize);

            for element in tuple.iter() {
                prop_assert_eq!(element, &initial_value);
            }

            Ok(())
        },
    );
}
use bigneon_api::config::{Config, Environment};
use bigneon_db::dev::*;
use bigneon_db::models::User;
use diesel::Connection;
use diesel::PgConnection;
use std::sync::Arc;

/// A shared Postgres connection for tests; the connection runs inside a
/// test transaction so nothing is persisted.
#[derive(Clone)]
pub struct TestDatabase {
    pub connection: Arc<PgConnection>,
}

#[allow(dead_code)]
impl TestDatabase {
    /// Connects using the test-environment config and immediately starts a
    /// test transaction (rolled back when the connection drops).
    pub fn new() -> TestDatabase {
        let config = Config::new(Environment::Test);
        let connection = PgConnection::establish(&config.database_url).unwrap_or_else(|_| {
            panic!(
                "Connection to {} could not be established.",
                config.database_url
            )
        });

        connection.begin_test_transaction().unwrap();

        TestDatabase {
            connection: Arc::new(connection),
        }
    }

    /// Builds an organization tied to `user` either as owner or as member.
    pub fn create_organization_with_user(&self, user: &User, owner: bool) -> OrganizationBuilder {
        let organization_builder = self.create_organization();
        if owner {
            organization_builder.with_owner(&user)
        } else {
            organization_builder.with_user(&user)
        }
    }

    // The builders below are thin factories over this database connection.

    pub fn create_artist(&self) -> ArtistBuilder {
        ArtistBuilder::new(&self.connection)
    }

    // NOTE(review): `create_cart` and `create_order` both return an
    // OrderBuilder — presumably intentional (a cart is an unpaid order).
    pub fn create_cart(&self) -> OrderBuilder {
        OrderBuilder::new(&self.connection)
    }

    pub fn create_event(&self) -> EventBuilder {
        EventBuilder::new(&self.connection)
    }

    pub fn create_order(&self) -> OrderBuilder {
        OrderBuilder::new(&self.connection)
    }

    pub fn create_organization(&self) -> OrganizationBuilder {
        OrganizationBuilder::new(&self.connection)
    }

    pub fn create_organization_invite(&self) -> OrgInviteBuilder {
        OrgInviteBuilder::new(&self.connection)
    }

    pub fn create_payment_method(&self) -> PaymentMethodBuilder {
        PaymentMethodBuilder::new(&self.connection)
    }

    pub fn create_region(&self) -> RegionBuilder {
        RegionBuilder::new(&self.connection)
    }

    pub fn create_user(&self) -> UserBuilder {
        UserBuilder::new(&self.connection)
    }

    pub fn create_venue(&self) -> VenueBuilder {
        VenueBuilder::new(&self.connection)
    }

    pub fn create_fee_schedule(&self) -> FeeScheduleBuilder {
        FeeScheduleBuilder::new(&self.connection)
    }
}
use std::io::{Read}; use pest::{Parser}; use template::parse::{self, TemplateParser, Rule}; use template::{ComponentTemplate}; /// A template, used to define how a group of components should be layouted and initialized based /// on model data. #[derive(Debug)] pub struct Template { pub root: ComponentTemplate, } impl Template { /// Parses a template from a reader, such as a `File`. pub fn from_reader<R: Read>(mut reader: R) -> Result<Self, String> { let mut text = String::new(); reader.read_to_string(&mut text).unwrap(); Self::from_str(&text) } /// Parses a template from a string. pub fn from_str(text: &str) -> Result<Self, String> { // Parse and extract the template pair let pairs = TemplateParser::parse(Rule::template, text) // This gives a pretty error to our caller .map_err(|e| format!("{}", e))?; let template_pair = pairs.into_iter().next().unwrap(); let document = parse::parse_document(template_pair)?; if document.len() == 0 { return Err("No component found in template".into()) } if document.len() > 1 { return Err("More than one root component found in template, only one allowed".into()) } Ok(Template { root: document.into_iter().next().unwrap(), }) } } #[cfg(test)] mod test { use template::{Template}; use {Value}; #[test] fn it_parses_single_root() { let result = Template::from_str("root\n"); println!("Result: {:?}", result); assert!(result.is_ok()); assert_eq!(result.unwrap().root.class, "root"); } #[test] fn it_parses_root_with_child() { let result = Template::from_str("root\n child\n"); println!("Result: {:?}", result); assert!(result.is_ok()); let component = result.unwrap().root; assert_eq!(component.class, "root"); assert_eq!(component.children.len(), 1, "Incorrect children length on root"); assert_eq!(component.children[0].class, "child"); } #[test] fn it_parses_root_with_nested_children() { let result = Template::from_str("root\n child\n nested_child\n"); println!("Result: {:?}", result); assert!(result.is_ok()); let component = result.unwrap().root; 
assert_eq!(component.class, "root"); assert_eq!(component.children.len(), 1, "Incorrect children length on root"); assert_eq!(component.children[0].class, "child"); assert_eq!(component.children[0].children.len(), 1, "Incorrect children length on child"); assert_eq!(component.children[0].children[0].class, "nested_child"); } #[test] fn it_parses_root_with_two_children() { let result = Template::from_str("root\n child1\n child2\n"); println!("Result: {:?}", result); assert!(result.is_ok()); let component = result.unwrap().root; assert_eq!(component.class, "root"); assert_eq!(component.children.len(), 2, "Incorrect children length on root"); assert_eq!(component.children[0].class, "child1"); assert_eq!(component.children[1].class, "child2"); } #[test] fn it_parses_varied_children_depth() { let result = Template::from_str("root\n child1\n nested_child\n child2\n"); println!("Result: {:?}", result); assert!(result.is_ok()); let component = result.unwrap().root; assert_eq!(component.class, "root"); assert_eq!(component.children.len(), 2, "Incorrect children length on root"); assert_eq!(component.children[0].class, "child1"); assert_eq!(component.children[1].class, "child2"); assert_eq!(component.children[0].children.len(), 1, "Incorrect children length on child1"); assert_eq!(component.children[0].children[0].class, "nested_child"); } #[test] fn it_parses_root_attributes() { let result = Template::from_str("root { key: \"value\" }\n"); println!("Result: {:?}", result); assert!(result.is_ok()); let component = result.unwrap().root; assert_eq!(component.class, "root"); assert_eq!(component.attributes.len(), 1); assert_eq!(component.attributes.get("key"), Some(&Value::String("value".into()))); } #[test] fn it_parses_newlines_in_attributes_while_parsing_children() { let result = Template::from_str( r#"root { key: "value", key2: "value2", } child "# ); println!("Result: {:?}", result); assert!(result.is_ok()); let component = result.unwrap().root; assert_eq!(component.class, 
"root"); assert_eq!(component.children.len(), 1, "Incorrect children length on root"); assert_eq!(component.children[0].class, "child"); } #[test] fn it_parses_number_attributes() { let result = Template::from_str("root { key1: 5, key2: 2.5, key3: 69% }\n"); println!("Result: {:?}", result); assert!(result.is_ok()); let component = result.unwrap().root; assert_eq!(component.class, "root"); assert_eq!(component.attributes.len(), 3); assert_eq!(component.attributes.get("key1"), Some(&Value::Integer(5))); assert_eq!(component.attributes.get("key2"), Some(&Value::Float(2.5))); assert_eq!(component.attributes.get("key3"), Some(&Value::Percentage(69))); } #[test] fn it_parses_tuple_attributes() { let result = Template::from_str("root { key: (50, \"text\") }\n"); println!("Result: {:?}", result); assert!(result.is_ok()); let component = result.unwrap().root; assert_eq!(component.class, "root"); assert_eq!(component.attributes.len(), 1); assert_eq!( component.attributes.get("key"), Some(&Value::Tuple(vec!(Value::Integer(50), Value::String("text".into())))) ); } #[test] fn it_fails_two_roots() { let result = Template::from_str("root\nroot2\n"); println!("Result: {:?}", result); assert!(result.is_err()); } #[test] fn it_fails_two_roots_with_child() { let result = Template::from_str("root\n child\nroot2\n"); println!("Result: {:?}", result); assert!(result.is_err()); } #[test] fn it_fails_excessive_indentation() { let result = Template::from_str("root\n excessive_child1\n"); println!("Result: {:?}", result); assert!(result.is_err()); } #[test] fn it_fails_non_4_indentation() { let result1 = Template::from_str("root\n bad_child\n"); let result2 = Template::from_str("root\n bad_child\n"); println!("Result1: {:?}", result1); println!("Result2: {:?}", result2); assert!(result1.is_err()); assert!(result2.is_err()); } #[test] fn it_fails_duplicate_keys() { let result = Template::from_str("root { key1: 5, key1: 10 }\n"); println!("Result: {:?}", result); assert!(result.is_err()); } }
//! Compile-time assertions to ensure that invariants are met.
//!
//! _All_ assertions within this crate are performed at **compile-time**. This
//! allows for finding errors quickly and early when it comes to ensuring
//! certain features or aspects of a codebase.
//!
//! # Usage
//!
//! This crate is available [on crates.io][crate] and can be used by adding the
//! following to your project's `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! static_assertions = "0.2.5"
//! ```
//!
//! and this to your crate root:
//!
//! ```
//! #[macro_use]
//! extern crate static_assertions;
//! # fn main() {}
//! ```
//!
//! # Examples
//!
//! Very thorough examples are provided in the docs for each individual macro.
//! Failure case examples are also documented.
//!
//! # Limitations
//!
//! Due to implementation details, some macros can only be used normally from
//! within the context of a function. To use these macros in other contexts, a
//! unique label must be provided.
//!
//! ```compile_fail
//! # #[macro_use] extern crate static_assertions;
//! # fn main() {}
//! // error: expected item after attributes
//! const_assert!(true == true);
//! ```
//!
//! This can be fixed via:
//!
//! ```
//! # #[macro_use] extern crate static_assertions;
//! # fn main() {}
//! const_assert!(label; true == true);
//! ```
//!
//! This issue can be followed [here][issue1]. Feedback and potential solutions
//! are welcome!
//!
//! [issue1]: https://github.com/nvzqz/static-assertions-rs/issues/1
//! [crate]: https://crates.io/crates/static_assertions

#![no_std]
#![deny(unused_macros)]

// Re-exported so macro expansions can reach `core` even when the user crate
// renames or lacks a `core` in scope.
#[doc(hidden)]
pub extern crate core as _core;

/// Asserts that a given configuration is set.
///
/// # Examples
///
/// A project may not support a set of configurations and thus you may want to
/// report why:
///
/// ```
/// # #[macro_use]
/// # extern crate static_assertions;
/// // We should only be compiling for Unix or Linux
/// # #[cfg(any(unix, linux))]
/// assert_cfg!(any(unix, linux));
/// # fn main() {}
/// ```
///
/// If users need to specify a database back-end:
///
/// ```
/// # #[macro_use]
/// # extern crate static_assertions;
/// # #[cfg(target_pointer_width = "0")] // Impossible
/// assert_cfg!("Must exclusively use MySQL or MongoDB as database back-end",
///             all(not(all(feature = "mysql", feature = "mongodb")),
///                 any(    feature = "mysql", feature = "mongodb")));
/// # fn main() {}
/// ```
///
/// We can't be compiling for both Unix _and_ Windows simultaneously:
///
/// ```compile_fail
/// # #[macro_use] extern crate static_assertions;
/// # fn main() {
/// assert_cfg!("No, that's not how it works! ಠ_ಠ", all(unix, windows));
/// # }
/// ```
#[macro_export]
macro_rules! assert_cfg {
    // No arguments: trivially passes.
    () => {};
    // With a custom message: emit it when the cfg predicate does NOT hold.
    ($msg:expr, $($cfg:tt)*) => {
        #[cfg(not($($cfg)*))]
        compile_error!($msg);
    };
    // Without a message: derive one from the stringified predicate.
    ($($cfg:tt)*) => {
        #[cfg(not($($cfg)*))]
        compile_error!(concat!("Cfg does not pass: ", stringify!($($cfg)*)));
    };
}

/// Asserts that types are equal in size.
///
/// When performing operations such as pointer casts or dealing with [`usize`]
/// versus [`u64`] versus [`u32`], the size of your types matter. This is where
/// this macro comes into play.
///
/// # Alternatives
///
/// There are also [`assert_eq_size_val`](macro.assert_eq_size_val.html) and
/// [`assert_eq_size_ptr`](macro.assert_eq_size_ptr.html). Instead of specifying
/// types to compare, values' sizes can be directly compared against each other.
///
/// # Examples
///
/// ```
/// # #[macro_use]
/// # extern crate static_assertions;
/// // Can be declared outside of a function if labeled
/// assert_eq_size!(bytes; (u8, u8), u16);
///
/// fn main() {
///     // Supports unlimited arguments:
///     assert_eq_size!([u8; 4], (u16, u16), u32);
/// }
/// ```
///
/// The following produces a compilation failure because `u32` has 4 times the
/// size of `u8`:
///
/// ```compile_fail
/// # #[macro_use] extern crate static_assertions;
/// # fn main() {
/// assert_eq_size!(u32, u8);
/// # }
/// ```
///
/// [`usize`]: https://doc.rust-lang.org/std/primitive.usize.html
/// [`u64`]: https://doc.rust-lang.org/std/primitive.u64.html
/// [`u32`]: https://doc.rust-lang.org/std/primitive.u32.html
#[macro_export]
macro_rules! assert_eq_size {
    // `transmute` only compiles when source and target types have equal size,
    // so merely naming the function performs the check.
    ($x:ty, $($xs:ty),+ $(,)*) => {
        $(let _ = $crate::_core::mem::transmute::<$x, $xs>;)+
    };
    // Labeled form: wrap in a throwaway fn so it works outside functions.
    ($label:ident; $($xs:tt)+) => {
        #[allow(dead_code, non_snake_case)]
        fn $label() { assert_eq_size!($($xs)+); }
    };
}

/// Asserts that values pointed to are equal in size.
///
/// This especially is useful for when coercing pointers between different types
/// and ensuring the underlying values are the same size.
///
/// # Examples
///
/// ```
/// # #[macro_use]
/// # extern crate static_assertions;
/// fn operation(x: &(u32, u32), y: &[u16; 4]) {
///     assert_eq_size_ptr!(x, y);
/// }
/// # fn main() {}
/// ```
///
/// Byte arrays of different lengths have different sizes:
///
/// ```compile_fail
/// # #[macro_use] extern crate static_assertions;
/// # fn main() {
/// static BYTES: &[u8; 4] = &[
///     /* ... */
///     # 0; 4
/// ];
///
/// static TABLE: &[u8; 16] = &[
///     /* ... */
///     # 0; 16
/// ];
///
/// assert_eq_size_ptr!(BYTES, TABLE);
/// # }
/// ```
#[macro_export]
macro_rules! assert_eq_size_ptr {
    // The closure is never called; the size check happens because
    // `transmute` inside it must type-check. `forget` prevents a
    // double-drop of the `ptr::read` copy if it were ever run.
    ($x:expr, $($xs:expr),+ $(,)*) => {
        #[allow(unknown_lints, unsafe_code, forget_copy, useless_transmute)]
        || unsafe {
            use $crate::_core::{mem, ptr};
            let mut copy = ptr::read($x);
            $(ptr::write(&mut copy, mem::transmute(ptr::read($xs)));)+
            mem::forget(copy);
        };
    }
}

/// Asserts that values are equal in size.
///
/// This macro doesn't consume its arguments and thus works for
/// non-[`Clone`]able values.
///
/// # Examples
///
/// ```
/// # #[macro_use]
/// # extern crate static_assertions;
/// # fn main() {
/// struct Byte(u8);
///
/// let x = 10u8;
/// let y = Byte(42); // Works for non-cloneable types
///
/// assert_eq_size_val!(x, y);
/// assert_eq_size_val!(x, y, 0u8);
/// # }
/// ```
///
/// Even though both values are 0, they are of types with different sizes:
///
/// ```compile_fail
/// # #[macro_use] extern crate static_assertions;
/// # fn main() {
/// assert_eq_size_val!(0u8, 0u32);
/// # }
/// ```
///
/// [`Clone`]: https://doc.rust-lang.org/std/clone/trait.Clone.html
#[macro_export]
macro_rules! assert_eq_size_val {
    // Delegates to the pointer form by taking references.
    ($x:expr, $($xs:expr),+ $(,)*) => {
        assert_eq_size_ptr!(&$x, $(&$xs),+);
    }
}

/// Asserts that constant expressions evaluate to `true`.
///
/// There also exists [`const_assert_eq`](macro.const_assert_eq.html) for
/// validating whether a sequence of expressions are equal to one another.
///
/// # Examples
///
/// Constant expressions can be ensured to have certain properties via this
/// macro. If the expression evaluates to `false`, the file will fail to compile.
/// This is synonymous to [`static_assert` in C++][static_assert].
///
/// As a [limitation](index.html#limitations), a unique label is required if
/// the macro is used outside of a function.
///
/// ```
/// # #[macro_use]
/// # extern crate static_assertions;
/// const FIVE: usize = 5;
///
/// const_assert!(twenty_five; FIVE * FIVE == 25);
///
/// fn main() {
///     const_assert!(2 + 2 == 4);
///     const_assert!(FIVE - FIVE == 0);
/// }
/// ```
///
/// Some expressions are blatantly false:
///
/// ```compile_fail
/// # #[macro_use] extern crate static_assertions;
/// # fn main() {
/// const_assert!(1 >= 2);
/// # }
/// ```
///
/// [static_assert]: http://en.cppreference.com/w/cpp/language/static_assert
#[macro_export]
macro_rules! const_assert {
    // `!true as usize` is 0, so the array length is valid; `!false` makes
    // the length `0 - 1`, which underflows at compile time and errors.
    ($($xs:expr),+ $(,)*) => {
        #[allow(unknown_lints, eq_op)]
        let _ = [(); 0 - !($($xs)&&+) as usize];
    };
    ($label:ident; $($xs:tt)+) => {
        #[allow(dead_code, non_snake_case)]
        fn $label() { const_assert!($($xs)+); }
    };
}

/// Asserts that constants are equal in value.
///
/// # Examples
///
/// Works as a shorthand for `const_assert!(a == b)`:
///
/// ```
/// # #[macro_use]
/// # extern crate static_assertions;
/// const TWO: usize = 2;
/// const_assert_eq!(two; TWO * TWO, TWO + TWO, 4);
///
/// fn main() {
///     const NUM: usize = 32;
///     const_assert_eq!(NUM + NUM, 64);
/// }
/// ```
///
/// Just because 2 × 2 = 2 + 2 doesn't mean it holds true for other numbers:
///
/// ```compile_fail
/// # #[macro_use] extern crate static_assertions;
/// # fn main() {
/// const_assert_eq!(4 + 4, 4 * 4);
/// # }
/// ```
#[macro_export]
macro_rules! const_assert_eq {
    ($x:expr, $($xs:expr),+ $(,)*) => {
        const_assert!($($x == $xs),+);
    };
    ($label:ident; $x:expr, $($xs:expr),+ $(,)*) => {
        const_assert!($label; $($x == $xs),+);
    };
}

/// Asserts that the traits are object-safe.
///
/// This is useful for when changes are made to a trait that accidentally
/// prevent it from being used as an object. Such a case would be adding a
/// generic method and forgetting to add `where Self: Sized` after it. If left
/// unnoticed, that mistake will affect crate users and break both forward and
/// backward compatibility.
///
/// # Examples
///
/// ```
/// # #[macro_use]
/// # extern crate static_assertions;
/// assert_obj_safe!(basic; Send, Sync, AsRef<str>);
///
/// mod inner {
///     // Works with traits that are not in the calling module
///     pub trait BasicTrait {
///         fn foo(&self);
///     }
/// }
///
/// trait MySafeTrait {
///     fn bar(&self) -> u32;
/// }
///
/// fn main() {
///     assert_obj_safe!(MySafeTrait);
///     assert_obj_safe!(inner::BasicTrait);
/// }
/// ```
///
/// Generics without `where Self: Sized` are not allowed in object-safe traits:
///
/// ```compile_fail
/// # #[macro_use] extern crate static_assertions;
/// trait MyUnsafeTrait {
///     fn baz<T>(&self) -> T;
/// }
///
/// # fn main() {
/// assert_obj_safe!(MyUnsafeTrait);
/// # }
/// ```
#[macro_export]
macro_rules! assert_obj_safe {
    // Naming `&dyn Trait` (here via `&$xs`) only compiles for object-safe traits.
    ($($xs:ty),+ $(,)*) => {
        $(let _: &$xs;)+
    };
    ($label:ident; $($xs:tt)+) => {
        #[allow(dead_code, non_snake_case)]
        fn $label() { assert_obj_safe!($($xs)+); }
    };
}

/// Asserts that the type has the given fields.
///
/// # Examples
///
/// This may be used when types have odd fields as a result of `#[cfg]`.
///
/// ```
/// # #[macro_use] extern crate static_assertions;
/// # fn main() {
/// struct Ty {
///     #[cfg(windows)]
///     value: u8,
///     #[cfg(not(windows))]
///     value: usize,
/// }
///
/// /* ... */
///
/// // Always have `value` regardless of OS
/// assert_fields!(Ty, value);
/// # }
/// ```
///
/// Range does not have a field named `middle`:
///
/// ```compile_fail
/// # #[macro_use] extern crate static_assertions;
/// # fn main() {
/// use std::ops::Range;
///
/// assert_fields!(Range<u32>, middle);
/// # }
/// ```
#[macro_export]
macro_rules! assert_fields {
    // A struct pattern naming the field only compiles if the field exists.
    ($t:path, $($f:ident),+) => {
        #[allow(unknown_lints, unneeded_field_pattern)]
        { $(let $t { $f: _, .. };)+ }
    };
    ($label:ident; $($xs:tt)+) => {
        #[allow(dead_code, non_snake_case)]
        fn $label() { assert_fields!($($xs)+); }
    };
}

/// Asserts that the type implements the given traits.
///
/// # Examples
///
/// Can be used to ensure types implement [`Send`], [`Sync`], and other traits:
///
/// ```
/// # #[macro_use] extern crate static_assertions;
/// # fn main() {}
/// assert_impl!(str; String, Send, Sync, From<&'static str>);
/// assert_impl!(vec; &'static [u8], Into<Vec<u8>>);
/// ```
///
/// Raw pointers cannot be sent between threads safely:
///
/// ```compile_fail
/// # #[macro_use] extern crate static_assertions;
/// # fn main() {
/// assert_impl!(*const u8, Send);
/// # }
/// ```
///
/// [`Send`]: https://doc.rust-lang.org/std/marker/trait.Send.html
/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
#[macro_export]
macro_rules! assert_impl {
    // Instantiating a generic fn bounded by the traits checks the impls.
    ($x:ty, $($t:path),+ $(,)*) => {
        {
            fn assert_impl<T>() where T: ?Sized $(+ $t)+ {}
            assert_impl::<$x>();
        }
    };
    ($label:ident; $($xs:tt)+) => {
        #[allow(dead_code, non_snake_case)]
        fn $label() { assert_impl!($($xs)+); }
    };
}
// Integration tests for the sqlx SQLite driver: connecting, error paths,
// execution, multi-statement queries and `describe`.
use futures::TryStreamExt;
use sqlx::{sqlite::SqliteQueryAs, Connect, Connection, Executor, Sqlite, SqliteConnection};
use sqlx_test::new;

#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn it_connects() -> anyhow::Result<()> {
    Ok(new::<Sqlite>().await?.ping().await?)
}

#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn it_fails_to_connect() -> anyhow::Result<()> {
    // empty connection string
    assert!(SqliteConnection::connect("").await.is_err());

    assert!(
        SqliteConnection::connect("sqlite:///please_do_not_run_sqlx_tests_as_root")
            .await
            .is_err()
    );

    Ok(())
}

#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn it_fails_to_parse() -> anyhow::Result<()> {
    let mut conn = new::<Sqlite>().await?;

    // deliberate typo ("SEELCT") to exercise the syntax-error path
    let res = conn.execute("SEELCT 1").await;
    assert!(res.is_err());

    let err = res.unwrap_err().to_string();
    assert_eq!("near \"SEELCT\": syntax error", err);

    Ok(())
}

#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn it_handles_empty_queries() -> anyhow::Result<()> {
    let mut conn = new::<Sqlite>().await?;
    let affected = conn.execute("").await?;

    assert_eq!(affected, 0);

    Ok(())
}

#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn it_executes() -> anyhow::Result<()> {
    let mut conn = new::<Sqlite>().await?;

    let _ = conn
        .execute(
            r#"
CREATE TEMPORARY TABLE users (id INTEGER PRIMARY KEY)
            "#,
        )
        .await?;

    for index in 1..=10_i32 {
        let cnt = sqlx::query("INSERT INTO users (id) VALUES (?)")
            .bind(index)
            .execute(&mut conn)
            .await?;

        assert_eq!(cnt, 1);
    }

    // 1 + 2 + ... + 10 = 55
    let sum: i32 = sqlx::query_as("SELECT id FROM users")
        .fetch(&mut conn)
        .try_fold(0_i32, |acc, (x,): (i32,)| async move { Ok(acc + x) })
        .await?;

    assert_eq!(sum, 55);

    Ok(())
}

#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn it_can_execute_multiple_statements() -> anyhow::Result<()> {
    let mut conn = new::<Sqlite>().await?;

    let affected = conn
        .execute(
            r#"
CREATE TEMPORARY TABLE users (id INTEGER PRIMARY KEY, other INTEGER);
INSERT INTO users DEFAULT VALUES;
            "#,
        )
        .await?;

    assert_eq!(affected, 1);

    for index in 2..5_i32 {
        let (id, other): (i32, i32) = sqlx::query_as(
            r#"
INSERT INTO users (other) VALUES (?);
SELECT id, other FROM users WHERE id = last_insert_rowid();
            "#,
        )
        .bind(index)
        .fetch_one(&mut conn)
        .await?;

        assert_eq!(id, index);
        assert_eq!(other, index);
    }

    Ok(())
}

#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn it_describes() -> anyhow::Result<()> {
    let mut conn = new::<Sqlite>().await?;

    let _ = conn
        .execute(
            r#"
CREATE TEMPORARY TABLE describe_test (
    _1 int primary key,
    _2 text not null,
    _3 blob,
    _4 boolean,
    _5 float,
    _6 varchar(255),
    _7 double,
    _8 bigint
)
            "#,
        )
        .await?;

    let describe = conn
        .describe("select nt.*, false from describe_test nt")
        .await?;

    // Declared column types map onto SQLite type affinities.
    assert_eq!(describe.result_columns[0].type_info.as_ref().unwrap().to_string(), "INTEGER");
    assert_eq!(describe.result_columns[1].type_info.as_ref().unwrap().to_string(), "TEXT");
    assert_eq!(describe.result_columns[2].type_info.as_ref().unwrap().to_string(), "BLOB");
    assert_eq!(describe.result_columns[3].type_info.as_ref().unwrap().to_string(), "BOOLEAN");
    assert_eq!(describe.result_columns[4].type_info.as_ref().unwrap().to_string(), "DOUBLE");
    assert_eq!(describe.result_columns[5].type_info.as_ref().unwrap().to_string(), "TEXT");
    assert_eq!(describe.result_columns[6].type_info.as_ref().unwrap().to_string(), "DOUBLE");
    assert_eq!(describe.result_columns[7].type_info.as_ref().unwrap().to_string(), "INTEGER");

    // Expressions can not be described
    assert!(describe.result_columns[8].type_info.is_none());

    Ok(())
}
use juniper::GraphQLScalar;

// NOTE(review): looks like a compile-fail fixture — `#[graphql(transparent)]`
// presumably requires exactly one field, while this tuple struct has two;
// TODO confirm against the trybuild/UI-test harness that consumes this file.
#[derive(GraphQLScalar)]
#[graphql(transparent)]
struct Scalar(i32, i32);

// Entry point is intentionally empty; only the derive expansion matters here.
fn main() {}
use crate::terrain::*; use std::fmt; use rand::Rng; #[derive(Clone, Debug, PartialEq, Eq)] pub struct Forest { layout: Vec<ForestFeature>, size: usize, } impl Forest { /// /// Generates a forest of specified size /// the height and width are always the same /// size /// pub fn new(size: usize) -> Self { let mut rng = rand::thread_rng(); Forest { layout: (0..(size * size)) .map(|_| match rng.gen_range(0, 100) { 0..=2 => ForestFeature::Bear(BearInfo::new()), 3..=53 => ForestFeature::Tree(FloraVariant::Tree(12)), 54..=64 => ForestFeature::LumberJack(Woodcutter::new()), _ => ForestFeature::Empty, }) .collect(), size, } } /// /// takes an x and y cordiante gives a copy the resulting forestfeature pub fn get(&self, x: usize, y: usize) -> ForestFeature { self.layout[x * self.size + y] } /// /// Converts an absoluute location into /// the projected x,y cordinate pub fn get_x_y(&self, i: usize) -> (usize, usize) { (i / self.size, i % self.size) } /// /// Takes an x,y coridnate and a feature to assign then /// overwrites the location with the new feature /// pub fn assign_at(&mut self, x: usize, y: usize, f: ForestFeature) { self.layout[x * self.size + y] = f; } /// /// Returns a census struct that /// has the populations of all /// the forestfeatures /// /// pub fn get_terrain_counts(&self) -> Census { self.layout.iter().fold( Census { tree_count: 0, bear_count: 0, lumberjack_count: 0, }, |acc, x| match x { ForestFeature::Bear(_) => Census { tree_count: acc.tree_count, bear_count: acc.bear_count + 1, lumberjack_count: acc.lumberjack_count, }, ForestFeature::LumberJack(_) => Census { tree_count: acc.tree_count, bear_count: acc.bear_count, lumberjack_count: acc.lumberjack_count + 1, }, ForestFeature::Tree(_) => Census { tree_count: acc.tree_count + 1, ..acc }, ForestFeature::BearTree(_, _) => Census { tree_count: acc.tree_count + 1, bear_count: acc.bear_count + 1, lumberjack_count: acc.lumberjack_count, }, ForestFeature::LumberSeedling(_, _) => Census { tree_count: 
acc.tree_count + 1, bear_count: acc.bear_count, lumberjack_count: acc.lumberjack_count + 1, }, ForestFeature::Empty => acc, }, ) } /// /// Takes a closure that filters for ceratin Foresfeature types /// applies the closure to every Forestfeature and /// returns a vector of all valid locations as tuple (x,y) /// pub fn get_locations(&self, criteria: fn(ForestFeature) -> bool) -> Vec<(usize, usize)> { self.layout .iter() .enumerate() .filter_map(|(i, x)| { if criteria(*x) { Some(self.get_x_y(i)) } else { None } }) .collect() } /// /// Takes a closure that filters for ceratin Foresfeature types /// with an option return value<T> /// applies the closure to every Forestfeature and /// returns a vector of all valid locations as vector of Feature locations<T> /// /// pub fn get_locations_with_info<T>( &self, criteria: fn(ForestFeature) -> Option<T>, ) -> Vec<FeatureLocation<T>> { self.layout .iter() .enumerate() .filter_map(|(i, x)| match criteria(*x) { Some(loc) => { let (x, y) = self.get_x_y(i); Some(FeatureLocation { x, y, feature: loc }) } _ => None, }) .collect() } /// /// Calls the aging implementation on all /// forestfeatures in the forest /// pub fn age_features(&mut self) { self.layout = self.layout.iter().map(|x| x.age()).collect(); } /// sums the amount of wood chopped by the lumber jacks pub fn amount_of_wood_chopped(&self) -> u32 { self.layout .iter() .filter_map(|x| match x { ForestFeature::LumberJack(l) => Some(l.lumber_collected), ForestFeature::LumberSeedling(l, _) => Some(l.lumber_collected), _ => None, }) .sum() } /// /// gets a vector of interactable neighbours locations /// interactable is specified by closure. 
/// /// Examples of interactable are neighbours to move to or /// plant to /// pub fn get_interactable_neighbours_locations( &self, x: usize, y: usize, criteria: fn(ForestFeature) -> bool, ) -> Vec<(usize, usize)> { let mut returned_vec = Vec::with_capacity(8); for delta_x in [-1, 0, 1].iter().cloned() { for delta_y in [-1, 0, 1].iter().cloned() { let new_x = delta_x + x as i32; let new_y = delta_y + y as i32; if (delta_x == 0 && delta_y == 0) || new_x as usize >= self.size || new_y as usize >= self.size || new_x < 0 || new_y < 0 { continue; } let new_x = new_x as usize; let new_y = new_y as usize; let neighbour = self.get(new_x, new_y); if criteria(neighbour) { returned_vec.push((new_x, new_y)) } } } returned_vec } /// /// changes a forest feature into another by slice of locations /// forest feature transformation is specified by /// closure /// pub fn transform_features( &mut self, new_locations: &[(usize, usize)], transformation: fn(ForestFeature) -> ForestFeature, ) { for position in new_locations { let (new_x, new_y) = position; let new_loc = self.get(*new_x, *new_y); self.assign_at(*new_x, *new_y, transformation(new_loc)); } } /// /// changes a forest feature into another /// forest feature transformation is specified by /// closure /// pub fn transform_feature( &mut self, new_locations: (usize, usize), transformation: fn(ForestFeature) -> ForestFeature, ) { let (new_x, new_y) = new_locations; let new_loc = self.get(new_x, new_y); self.assign_at(new_x, new_y, transformation(new_loc)); } /// /// Used to simulate movement in a forest /// take a FeatureLocation that stores stuff /// about the moving feature plus its location /// coridnates to new location /// and too closures to simulate movement one changes the orign /// the other the destionation T is any statistical information that the destination closure may return /// pub fn move_to<T, U>( &mut self, migrator: FeatureLocation<U>, new_loc: (usize, usize), move_form: fn(ForestFeature) -> ForestFeature, 
move_to: fn(ForestFeature, U) -> (ForestFeature, T), ) -> T { let old_loc = self.get(migrator.x, migrator.y); self.assign_at(migrator.x, migrator.y, move_form(old_loc)); let (new_x, new_y) = new_loc; let new_loc = self.get(new_x, new_y); let (new_feature, resul) = move_to(new_loc, migrator.feature); self.assign_at(new_x, new_y, new_feature); resul } /// /// Changes the features to be initial state /// this will reset movement and lumber statistics /// pub fn reset_feature_state(&mut self) { for x in self.layout.iter_mut() { match x { ForestFeature::Bear(b) => b.finished_moving = false, ForestFeature::BearTree(b, _) => b.finished_moving = false, ForestFeature::LumberJack(l) => { l.finished_moving = false; l.lumber_collected = 0 } ForestFeature::LumberSeedling(l, _) => { l.finished_moving = false; l.lumber_collected = 0 } _ => (), } } } } impl fmt::Display for Forest { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> { for line in self.layout.as_slice().chunks(self.size) { for &terrain in line { let symbol = match terrain { ForestFeature::Empty => '⬛', ForestFeature::Tree(t) => match t { FloraVariant::Sapling(_) => '🎄', FloraVariant::Tree(_) => '🌲', FloraVariant::Elder(_) => '🌳', }, ForestFeature::LumberJack(_) => '👷', ForestFeature::Bear(_) => '🐻', ForestFeature::BearTree(_, _) => '🍯', ForestFeature::LumberSeedling(_, _) => '🌴', }; write!(f, "{}", symbol)?; } writeln!(f)?; } Ok(()) } } pub mod actions { use super::Forest as SimpleForest; use crate::terrain::*; use rand::rngs::ThreadRng; use rand::Rng; use std::cmp::Ordering; /// /// single hread simulate /// pub fn simulate(size: usize) { let mut rng = rand::thread_rng(); let mut simulated_forest = SimpleForest::new(size); /* The main simulation */ for year in 1..400 { let mut annual_wood_chop = 0; let mut annual_mualing = 0; let mut annual_sapling_plant = 0; for month in 1..=12 { simulated_forest.age_features(); let saplings_planted_this_month = process_spawning(&mut 
simulated_forest, &mut rng); let mut mauled_lumberjacks = Vec::with_capacity(size * size); /* Movement of lumebrjacks and bear */ for move_phase in 0..5 { if move_phase < 3 { process_lumberjacks(&mut simulated_forest, &mut rng); } mauled_lumberjacks.append(&mut process_bear(&mut simulated_forest, &mut rng)); } let mauled_lumberjacks_this_month = mauled_lumberjacks.len(); let wood_chopped_this_month = mauled_lumberjacks .into_iter() .fold(0, |acc, x| acc + x.lumber_collected) + simulated_forest.amount_of_wood_chopped(); annual_wood_chop += wood_chopped_this_month; annual_mualing += mauled_lumberjacks_this_month as u32; annual_sapling_plant += saplings_planted_this_month; /* reset statistics and movment */ simulated_forest.reset_feature_state(); /* End of month printing */ println!("{}", simulated_forest); println!( "month {} year {}, units of wood chopped this month: {}, lumberjacks_malued: {}, saplings planted: {},", month,year,wood_chopped_this_month, mauled_lumberjacks_this_month, saplings_planted_this_month ); println!("{:-<1$}", "", size * 2); } /* Enf of year printing and statsitics */ let censare = populate( &mut simulated_forest, &mut rng, annual_wood_chop, annual_mualing, ); println!("yearly census {}", censare); println!( "year {}, wood chopped this year:{} ,lumberJacks mauled:{} ,saplings planted:{} ", year, annual_wood_chop, annual_mualing, annual_sapling_plant ); println!("{:_<1$}", "", size * 2); /* Terminate no forest to simulate */ if censare.tree_count == 0 { println!("The once great forest has disappeared"); std::process::exit(0); } } } // /// Process the spawning logic of tress /// this will plant new saplings /// fn process_spawning(simulated_forest: &mut SimpleForest, rng: &mut ThreadRng) -> u32 { /* Describe what mature tress are to the forest */ let mature_tress_criteria = |x| match x { ForestFeature::Tree(t) => match t { FloraVariant::Tree(_) => Some(MatureTree::Tree), FloraVariant::Elder(_) => Some(MatureTree::Elder), _ => None, }, _ => 
None, }; let mature_tress = simulated_forest.get_locations_with_info(mature_tress_criteria); let mut num_saplings_planted = 0; let plantable_criteria = |x| match x { ForestFeature::Empty => true, _ => false, }; let spawn_seedling = |new_loc| { if let ForestFeature::Empty = new_loc { ForestFeature::Tree(FloraVariant::Sapling(Seedling { current_age: 0 })) } else { panic!("planting in a non empty zone {:?}", new_loc); } }; /* main planting proceess happens here */ for m in mature_tress { /* Random chance of planting */ let gen = rng.gen_range(0, 10); match m.feature { MatureTree::Tree if gen == 0 => { let potential_locations = simulated_forest .get_interactable_neighbours_locations(m.x, m.y, plantable_criteria); if potential_locations.is_empty() { continue; } let (new_site_x, new_site_y) = potential_locations .get(rng.gen_range(0, potential_locations.len())) .unwrap(); simulated_forest.transform_feature((*new_site_x, *new_site_y), spawn_seedling); num_saplings_planted += 1; } MatureTree::Elder if gen == 0 || gen == 1 => { let potential_locations = simulated_forest .get_interactable_neighbours_locations(m.x, m.y, plantable_criteria); if potential_locations.is_empty() { continue; } let (new_site_x, new_site_y) = potential_locations .get(rng.gen_range(0, potential_locations.len())) .unwrap(); simulated_forest.transform_feature((*new_site_x, *new_site_y), spawn_seedling); num_saplings_planted += 1; } _ => continue, } } num_saplings_planted } /// Movement of lumberjacks handled here fn process_lumberjacks(simulated_forest: &mut SimpleForest, rng: &mut ThreadRng) { let movable_lumberjack_criteria = |x| match x { ForestFeature::LumberJack(l) if !l.finished_moving => Some(l), ForestFeature::LumberSeedling(l, _) if !l.finished_moving => Some(l), _ => None, }; simulated_forest.get_locations_with_info::<Woodcutter>(movable_lumberjack_criteria); let lumber_jack_locations = simulated_forest.get_locations_with_info::<Woodcutter>(movable_lumberjack_criteria); let loacation_criteria 
= |x| match x { ForestFeature::Empty | ForestFeature::Tree(_) => true, _ => false, }; for current_lumberjack in lumber_jack_locations { let new_destinations = simulated_forest.get_interactable_neighbours_locations( current_lumberjack.x, current_lumberjack.y, loacation_criteria, ); if new_destinations.is_empty() { continue; } let (new_x, new_y) = new_destinations .get(rng.gen_range(0, new_destinations.len())) .unwrap(); let move_from_actions = |old_loc| match old_loc { ForestFeature::LumberJack(_) => ForestFeature::Empty, ForestFeature::LumberSeedling(_, sap) => { ForestFeature::Tree(FloraVariant::Sapling(sap)) } old_place => panic!("lumberjack disappered {:?}", old_place), }; let move_to_actions = |new_loc, migrator| { let res = match new_loc { ForestFeature::Empty => ForestFeature::LumberJack(migrator), ForestFeature::Tree(l) => match l { FloraVariant::Sapling(a) => ForestFeature::LumberSeedling(migrator, a), FloraVariant::Tree(_) => ForestFeature::LumberJack(Woodcutter { lumber_collected: migrator.lumber_collected + 1, finished_moving: true, }), FloraVariant::Elder(_) => ForestFeature::LumberJack(Woodcutter { lumber_collected: migrator.lumber_collected + 2, finished_moving: true, }), }, _ => panic!("moved lumber to invalid position"), }; (res, ()) }; simulated_forest.move_to( current_lumberjack, (*new_x, *new_y), move_from_actions, move_to_actions, ); } } ///Movement of bears fn process_bear(simulated_forest: &mut SimpleForest, rng: &mut ThreadRng) -> Vec<Woodcutter> { let movable_criteria = |x| match x { ForestFeature::Bear(b) if !b.finished_moving => Some(b), ForestFeature::BearTree(b, _) if !b.finished_moving => Some(b), _ => None, }; let bear_locations = simulated_forest.get_locations_with_info(movable_criteria); let mut maulings = Vec::with_capacity(bear_locations.len()); let criteria = |x| match x { ForestFeature::Empty | ForestFeature::Tree(_) | ForestFeature::LumberJack(_) | ForestFeature::LumberSeedling(_, _) => true, _ => false, }; for current_bear in 
bear_locations { let new_destinations = simulated_forest.get_interactable_neighbours_locations( current_bear.x, current_bear.y, criteria, ); if new_destinations.is_empty() { continue; } let (new_x, new_y) = new_destinations .get(rng.gen_range(0, new_destinations.len())) .unwrap(); let move_from_actions = |old_loc| match old_loc { ForestFeature::Bear(_) => ForestFeature::Empty, ForestFeature::BearTree(_, tree) => match tree { FloraVariant::Elder(l) => ForestFeature::Tree(FloraVariant::Elder(l)), FloraVariant::Tree(l) => ForestFeature::Tree(FloraVariant::Tree(l)), FloraVariant::Sapling(sap) => ForestFeature::Tree(FloraVariant::Sapling(sap)), }, old_place => panic!("bear disappered {:?}", old_place), }; let move_to_actions = |new_loc, bear| match new_loc { ForestFeature::Empty => (ForestFeature::Bear(bear), None), ForestFeature::Tree(l) => match l { FloraVariant::Sapling(a) => ( ForestFeature::BearTree(bear, FloraVariant::Sapling(a)), None, ), FloraVariant::Tree(tree) => ( ForestFeature::BearTree(bear, FloraVariant::Tree(tree)), None, ), FloraVariant::Elder(tree) => ( ForestFeature::BearTree(bear, FloraVariant::Elder(tree)), None, ), }, ForestFeature::LumberJack(l) => ( ForestFeature::Bear(BearInfo { finished_moving: true, }), Some(l), ), ForestFeature::LumberSeedling(l, sap) => ( ForestFeature::BearTree( BearInfo { finished_moving: true, }, FloraVariant::Sapling(sap), ), Some(l), ), _ => panic!("moved bear to invalid position"), }; let result = simulated_forest.move_to( current_bear, (*new_x, *new_y), move_from_actions, move_to_actions, ); maulings.push(result); } maulings.into_iter().filter_map(|x| x).collect() } fn populate( simulated_forest: &mut SimpleForest, rng: &mut ThreadRng, wood_collected: u32, maul_incidents: u32, ) -> Census { let censare = simulated_forest.get_terrain_counts(); let delta_lumber = populate_lumberjacks( simulated_forest, rng, wood_collected, censare.lumberjack_count, ); let delta_bears = populate_bears(simulated_forest, rng, 
maul_incidents); Census { bear_count: (censare.bear_count as i32 + delta_bears) as u32, lumberjack_count: (censare.lumberjack_count as i32 + delta_lumber) as u32, ..censare } } fn populate_lumberjacks( simulated_forest: &mut SimpleForest, rng: &mut ThreadRng, wood_collected: u32, lumberjack_count: u32, ) -> i32 { /*Calculate lumber jack changes*/ let delta_num_lumberjacks = if lumberjack_count == 0 { 1 } else if lumberjack_count > wood_collected { -1 } else { (wood_collected / lumberjack_count) as i32 }; match delta_num_lumberjacks.cmp(&0) { Ordering::Equal => {} Ordering::Greater => { let spawnable_criteria = |x| match x { ForestFeature::Empty => true, ForestFeature::Tree(t) => match t { FloraVariant::Sapling(_) => true, _ => false, }, _ => false, }; let mut empty_locations = simulated_forest.get_locations(spawnable_criteria); let mut lumber_spawn_sites = Vec::with_capacity(delta_num_lumberjacks as usize); /* Collect randomly chosen spawn sites */ for _x in 0..delta_num_lumberjacks { let new_lumber_location = empty_locations.remove(rng.gen_range(0, empty_locations.len())); lumber_spawn_sites.push(new_lumber_location); } /*Instruct the forest on how to spawn the new lumberjacks*/ let spawnable_transformation = |new_loc| match new_loc { ForestFeature::Empty => ForestFeature::LumberJack(Woodcutter::new()), ForestFeature::Tree(l) => match l { FloraVariant::Sapling(a) => { ForestFeature::LumberSeedling(Woodcutter::new(), a) } _ => panic!("spawning lumberJack invalid tree"), }, _ => panic!("spawning lumberJack invalid location"), }; simulated_forest.transform_features(&lumber_spawn_sites, spawnable_transformation); } Ordering::Less => { let criteria = |x| match x { ForestFeature::LumberJack(_) => true, ForestFeature::LumberSeedling(_, _) => true, _ => false, }; let mut cullable_lumberjacks = simulated_forest.get_locations(criteria); let culled_lumber_jack = cullable_lumberjacks.remove(rng.gen_range(0, cullable_lumberjacks.len())); let remove_lumber_transform = |new_loc| 
match new_loc { ForestFeature::LumberJack(_) => ForestFeature::Empty, ForestFeature::LumberSeedling(_, s) => { ForestFeature::Tree(FloraVariant::Sapling(s)) } _ => panic!("spawning lumberJack invalid location"), }; simulated_forest.transform_feature(culled_lumber_jack, remove_lumber_transform); } } delta_num_lumberjacks } fn populate_bears( simulated_forest: &mut SimpleForest, rng: &mut ThreadRng, maul_incidents: u32, ) -> i32 { match maul_incidents.cmp(&0) { Ordering::Equal | Ordering::Less => { /*where bears can spawn*/ let spawnable_criteria = |x| match x { ForestFeature::Empty => true, ForestFeature::Tree(_) => true, _ => false, }; let mut bear_spawn_spots = simulated_forest.get_locations(spawnable_criteria); let new_bear_location = bear_spawn_spots.remove(rng.gen_range(0, bear_spawn_spots.len())); /*Instructions on how to spawn bears*/ let spawnable_transformation = |new_loc| match new_loc { ForestFeature::Empty => ForestFeature::Bear(BearInfo::new()), ForestFeature::Tree(l) => ForestFeature::BearTree(BearInfo::new(), l), _ => panic!("spawning lumberJack invalid location"), }; simulated_forest.transform_feature(new_bear_location, spawnable_transformation); 1 } Ordering::Greater => { let criteria = |x| match x { ForestFeature::Bear(_) => true, ForestFeature::BearTree(_, _) => true, _ => false, }; let mut cullable_bears = simulated_forest.get_locations(criteria); let culled_bear = cullable_bears.remove(rng.gen_range(0, cullable_bears.len())); let remove_bear_transform = |new_loc| match new_loc { ForestFeature::Bear(_) => ForestFeature::Empty, ForestFeature::BearTree(_, t) => ForestFeature::Tree(t), _ => panic!("spawning lumberJack invalid location"), }; simulated_forest.transform_feature(culled_bear, remove_bear_transform); -1 } } } }
use std::{sync::Arc, collections::HashMap}; use imgui::{Context, sys::ImDrawCmd, internal::RawWrapper, FontSource, TextureId}; use sourcerenderer_core::{graphics::{Backend, Device, MemoryUsage, BufferUsage, Scissor, Viewport, SampleCount, TextureUsage, Format, TextureDimension, TextureInfo, TextureViewInfo}, Vec2, Vec2UI, Vec2I, Platform}; pub struct UI<P: Platform> { imgui: Context, texture_map: HashMap<imgui::TextureId, Arc<<P::GraphicsBackend as Backend>::TextureView>>, window_size: Vec2UI } impl<P: Platform> UI<P> { pub fn new(device: &Arc<<P::GraphicsBackend as Backend>::Device>, window_size: Vec2UI) -> Self { let mut imgui = imgui::Context::create(); imgui.set_platform_name(Some("Dreieck".to_string())); imgui.style_mut().use_dark_colors(); let mut texture_map: HashMap<imgui::TextureId, Arc<<P::GraphicsBackend as Backend>::TextureView>> = HashMap::new(); const FONT_TEXTURE_ID: usize = 1; imgui.fonts().add_font(&[FontSource::DefaultFontData { config: None }]); let font_tex_data = imgui.fonts().build_rgba32_texture(); let font_texture = device.create_texture(&TextureInfo { dimension: TextureDimension::Dim2D, format: Format::RGBA8UNorm, width: font_tex_data.width, height: font_tex_data.height, depth: 1, mip_levels: 1, array_length: 1, samples: SampleCount::Samples1, usage: TextureUsage::COPY_DST | TextureUsage::SAMPLED, supports_srgb: false, }, Some("DearImguiFontMap")); let font_data = device.upload_data(font_tex_data.data, MemoryUsage::UncachedRAM, BufferUsage::COPY_SRC); device.init_texture(&font_texture, &font_data, 0, 0, 0); device.flush_transfers(); let font_texture_view = device.create_texture_view(&font_texture, &TextureViewInfo::default(), Some("DearImguiFontMapView")); imgui.fonts().tex_id = TextureId::new(FONT_TEXTURE_ID); texture_map.insert(imgui.fonts().tex_id, font_texture_view); Self { imgui, texture_map, window_size } } pub fn set_window_size(&mut self, size: Vec2UI) { self.window_size = size; } pub fn update(&mut self) { let io = 
self.imgui.io_mut(); io.display_size = [ self.window_size.x as f32, self.window_size.y as f32 ]; io.display_framebuffer_scale = [ 1f32, 1f32 ]; let frame = self.imgui.frame(); frame.text("Hi"); let mut opened = false; frame.show_demo_window(&mut opened); } pub fn draw_data(&mut self, device: &Arc<<P::GraphicsBackend as Backend>::Device>) -> UIDrawData<P::GraphicsBackend> { let draw = self.imgui.render(); let mut draw_lists = Vec::<UICmdList<P::GraphicsBackend>>::with_capacity(draw.draw_lists_count()); let fb_size = Vec2::new(draw.display_size[0] * draw.framebuffer_scale[0], draw.display_size[1] * draw.framebuffer_scale[1]); let scale = Vec2::new( 2f32 / draw.display_size[0], 2f32 / draw.display_size[1], ); let translate = Vec2::new( -1f32 - draw.display_pos[0] * scale.x, -1f32 - draw.display_pos[1] * scale.y, ); let clip_offset = Vec2::new(draw.display_pos[0], draw.display_pos[1]); let clip_scale = Vec2::new(draw.framebuffer_scale[0], draw.framebuffer_scale[1]); let viewport = Viewport { position: Vec2::new(0f32, 0f32), extent: fb_size, min_depth: 0.0f32, max_depth: 1.0f32, }; for list in draw.draw_lists() { let vertex_buffer = device.upload_data(list.vtx_buffer(), MemoryUsage::MappableVRAM, BufferUsage::VERTEX); let index_buffer = device.upload_data(list.idx_buffer(), MemoryUsage::MappableVRAM, BufferUsage::INDEX); let mut draws = Vec::<UIDraw<P::GraphicsBackend>>::new(); for cmd in list.commands() { match cmd { imgui::DrawCmd::Elements { count, cmd_params } => { let mut clip_min = Vec2::new((cmd_params.clip_rect[0] - clip_offset.x) * clip_scale.x, (cmd_params.clip_rect[1] - clip_offset.y) * clip_scale.y); let mut clip_max = Vec2::new((cmd_params.clip_rect[2] - clip_offset.x) * clip_scale.x, (cmd_params.clip_rect[3] - clip_offset.y) * clip_scale.y); if clip_min.x < 0.0f32 { clip_min.x = 0.0f32; } if clip_min.y < 0.0f32 { clip_min.y = 0.0f32; } if clip_max.x > fb_size.x { clip_max.x = fb_size.y; } if clip_max.y > fb_size.y { clip_max.y = fb_size.y; } if clip_max.x 
<= clip_min.x || clip_max.y <= clip_min.y { continue; } draws.push(UIDraw { scissor: Scissor { position: Vec2I::new(clip_min.x as i32, clip_min.y as i32), extent: Vec2UI::new((clip_max.x - clip_min.x) as u32, (clip_max.y - clip_min.y) as u32), }, texture: self.texture_map.get(&cmd_params.texture_id).cloned(), vertex_offset: cmd_params.vtx_offset as u32, first_index: cmd_params.idx_offset as u32, index_count: count as u32 }); } imgui::DrawCmd::ResetRenderState => {}, imgui::DrawCmd::RawCallback { callback, raw_cmd } => { unsafe { callback(list.raw(), raw_cmd); } } } } draw_lists.push(UICmdList { vertex_buffer, index_buffer, draws }); } return UIDrawData { draw_lists, viewport, scale, translate, }; } } pub struct UIDrawData<B: Backend> { pub draw_lists: Vec<UICmdList<B>>, pub viewport: Viewport, pub scale: Vec2, pub translate: Vec2 } pub struct UICmdList<B: Backend> { pub vertex_buffer: Arc<B::Buffer>, pub index_buffer: Arc<B::Buffer>, pub draws: Vec<UIDraw<B>> } pub struct UIDraw<B: Backend> { pub texture: Option<Arc<B::TextureView>>, pub vertex_offset: u32, pub first_index: u32, pub index_count: u32, pub scissor: Scissor } impl<B: Backend> Default for UIDrawData<B> { fn default() -> Self { Self { draw_lists: Vec::new(), viewport: Viewport { position: Vec2::new(0f32, 0f32), extent: Vec2::new(0f32, 0f32), min_depth: 0f32, max_depth: 0f32 }, scale: Vec2::new(1f32, 1f32), translate: Vec2::new(0f32, 0f32) } } }
//! Monitoring metrics. use std::sync::Arc; use chrono::{DateTime, Utc}; use rpki::tal::TalInfo; //------------ Metrics ------------------------------------------------------- #[derive(Clone, Debug)] pub struct Metrics { /// Time when these metrics have been collected. time: DateTime<Utc>, /// Per-TAL metrics. tals: Vec<TalMetrics>, } impl Metrics { pub fn new() -> Self { Metrics { time: Utc::now(), tals: Vec::new() } } pub fn push_tal(&mut self, tal: TalMetrics) { self.tals.push(tal) } pub fn timestamp(&self) -> i64 { self.time.timestamp() } pub fn tals(&self) -> &[TalMetrics] { &self.tals } pub fn log(self) { info!("Summary:"); for tal in self.tals { info!( "{}: {} valid ROAs, {} VRPs.", tal.tal.name(), tal.roas, tal.vrps ) } } } impl Default for Metrics { fn default() -> Self { Self::new() } } //------------ TalMetrics ---------------------------------------------------- #[derive(Clone, Debug)] pub struct TalMetrics { /// The TAL. pub tal: Arc<TalInfo>, /// Number of ROAs. pub roas: u32, /// Number of VRPs. pub vrps: u32, } impl TalMetrics { pub fn new(tal: Arc<TalInfo>) -> Self { TalMetrics { tal, roas: 0, vrps: 0 } } }
#[doc = "Reader of register D2CFGR"] pub type R = crate::R<u32, super::D2CFGR>; #[doc = "Writer for register D2CFGR"] pub type W = crate::W<u32, super::D2CFGR>; #[doc = "Register D2CFGR `reset()`'s with value 0"] impl crate::ResetValue for super::D2CFGR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "D2 domain APB1 prescaler\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum D2PPRE1_A { #[doc = "0: rcc_hclk not divided"] DIV1 = 0, #[doc = "4: rcc_hclk divided by 2"] DIV2 = 4, #[doc = "5: rcc_hclk divided by 4"] DIV4 = 5, #[doc = "6: rcc_hclk divided by 8"] DIV8 = 6, #[doc = "7: rcc_hclk divided by 16"] DIV16 = 7, } impl From<D2PPRE1_A> for u8 { #[inline(always)] fn from(variant: D2PPRE1_A) -> Self { variant as _ } } #[doc = "Reader of field `D2PPRE1`"] pub type D2PPRE1_R = crate::R<u8, D2PPRE1_A>; impl D2PPRE1_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, D2PPRE1_A> { use crate::Variant::*; match self.bits { 0 => Val(D2PPRE1_A::DIV1), 4 => Val(D2PPRE1_A::DIV2), 5 => Val(D2PPRE1_A::DIV4), 6 => Val(D2PPRE1_A::DIV8), 7 => Val(D2PPRE1_A::DIV16), i => Res(i), } } #[doc = "Checks if the value of the field is `DIV1`"] #[inline(always)] pub fn is_div1(&self) -> bool { *self == D2PPRE1_A::DIV1 } #[doc = "Checks if the value of the field is `DIV2`"] #[inline(always)] pub fn is_div2(&self) -> bool { *self == D2PPRE1_A::DIV2 } #[doc = "Checks if the value of the field is `DIV4`"] #[inline(always)] pub fn is_div4(&self) -> bool { *self == D2PPRE1_A::DIV4 } #[doc = "Checks if the value of the field is `DIV8`"] #[inline(always)] pub fn is_div8(&self) -> bool { *self == D2PPRE1_A::DIV8 } #[doc = "Checks if the value of the field is `DIV16`"] #[inline(always)] pub fn is_div16(&self) -> bool { *self == D2PPRE1_A::DIV16 } } #[doc = "Write proxy for field `D2PPRE1`"] pub struct D2PPRE1_W<'a> { w: &'a mut W, } impl<'a> D2PPRE1_W<'a> { #[doc = r"Writes 
`variant` to the field"] #[inline(always)] pub fn variant(self, variant: D2PPRE1_A) -> &'a mut W { unsafe { self.bits(variant.into()) } } #[doc = "rcc_hclk not divided"] #[inline(always)] pub fn div1(self) -> &'a mut W { self.variant(D2PPRE1_A::DIV1) } #[doc = "rcc_hclk divided by 2"] #[inline(always)] pub fn div2(self) -> &'a mut W { self.variant(D2PPRE1_A::DIV2) } #[doc = "rcc_hclk divided by 4"] #[inline(always)] pub fn div4(self) -> &'a mut W { self.variant(D2PPRE1_A::DIV4) } #[doc = "rcc_hclk divided by 8"] #[inline(always)] pub fn div8(self) -> &'a mut W { self.variant(D2PPRE1_A::DIV8) } #[doc = "rcc_hclk divided by 16"] #[inline(always)] pub fn div16(self) -> &'a mut W { self.variant(D2PPRE1_A::DIV16) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 4)) | (((value as u32) & 0x07) << 4); self.w } } #[doc = "D2 domain APB2 prescaler"] pub type D2PPRE2_A = D2PPRE1_A; #[doc = "Reader of field `D2PPRE2`"] pub type D2PPRE2_R = crate::R<u8, D2PPRE1_A>; #[doc = "Write proxy for field `D2PPRE2`"] pub struct D2PPRE2_W<'a> { w: &'a mut W, } impl<'a> D2PPRE2_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: D2PPRE2_A) -> &'a mut W { unsafe { self.bits(variant.into()) } } #[doc = "rcc_hclk not divided"] #[inline(always)] pub fn div1(self) -> &'a mut W { self.variant(D2PPRE1_A::DIV1) } #[doc = "rcc_hclk divided by 2"] #[inline(always)] pub fn div2(self) -> &'a mut W { self.variant(D2PPRE1_A::DIV2) } #[doc = "rcc_hclk divided by 4"] #[inline(always)] pub fn div4(self) -> &'a mut W { self.variant(D2PPRE1_A::DIV4) } #[doc = "rcc_hclk divided by 8"] #[inline(always)] pub fn div8(self) -> &'a mut W { self.variant(D2PPRE1_A::DIV8) } #[doc = "rcc_hclk divided by 16"] #[inline(always)] pub fn div16(self) -> &'a mut W { self.variant(D2PPRE1_A::DIV16) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn 
bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 8)) | (((value as u32) & 0x07) << 8); self.w } } impl R { #[doc = "Bits 4:6 - D2 domain APB1 prescaler"] #[inline(always)] pub fn d2ppre1(&self) -> D2PPRE1_R { D2PPRE1_R::new(((self.bits >> 4) & 0x07) as u8) } #[doc = "Bits 8:10 - D2 domain APB2 prescaler"] #[inline(always)] pub fn d2ppre2(&self) -> D2PPRE2_R { D2PPRE2_R::new(((self.bits >> 8) & 0x07) as u8) } } impl W { #[doc = "Bits 4:6 - D2 domain APB1 prescaler"] #[inline(always)] pub fn d2ppre1(&mut self) -> D2PPRE1_W { D2PPRE1_W { w: self } } #[doc = "Bits 8:10 - D2 domain APB2 prescaler"] #[inline(always)] pub fn d2ppre2(&mut self) -> D2PPRE2_W { D2PPRE2_W { w: self } } }
// Module declaration only — the implementation lives in the `streaming`
// submodule (streaming.rs or streaming/mod.rs).
pub mod streaming;
// Copyright (c) The Starcoin Core Contributors // SPDX-License-Identifier: Apache-2.0 use serde::{Deserialize, Serialize}; #[derive(Debug, Hash, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct ContractEvent {}
// NOTE(review): in-game traffic-signal editor. `new` builds the modal menu
// and cycle diagram; `event` handles cycle selection/reordering, per-turn
// priority toggling with Space (Banned -> Yield/Priority -> Banned, with
// crosswalks restricted via `could_be_priority_turn`), preset selection,
// reset, duration edits (wizard), and cycle add/delete; `draw` colors each
// turn icon by its priority in the current cycle. Every mutation clones the
// signal, edits it, then commits via `change_traffic_signal` (which diffs
// against the freshly recomputed original to decide whether to store or
// remove a map-edit override) and rebuilds the diagram. The statement order
// is load-bearing, so the code below is left byte-identical and only this
// summary is added.
use crate::common::CommonState; use crate::edit::apply_map_edits; use crate::game::{State, Transition, WizardState}; use crate::helpers::ID; use crate::render::{draw_signal_cycle, DrawCtx, DrawOptions, DrawTurn, TrafficSignalDiagram}; use crate::ui::{ShowEverything, UI}; use abstutil::Timer; use ezgui::{hotkey, Choice, Color, EventCtx, GeomBatch, GfxCtx, Key, ModalMenu}; use geom::Duration; use map_model::{ControlTrafficSignal, Cycle, IntersectionID, TurnID, TurnPriority, TurnType}; // TODO Warn if there are empty cycles or if some turn is completely absent from the signal. pub struct TrafficSignalEditor { menu: ModalMenu, icon_selected: Option<TurnID>, diagram: TrafficSignalDiagram, } impl TrafficSignalEditor { pub fn new(id: IntersectionID, ctx: &mut EventCtx, ui: &mut UI) -> TrafficSignalEditor { ui.primary.current_selection = None; let menu = ModalMenu::new( &format!("Traffic Signal Editor for {}", id), vec![ vec![ (hotkey(Key::UpArrow), "select previous cycle"), (hotkey(Key::DownArrow), "select next cycle"), ], vec![ (hotkey(Key::D), "change cycle duration"), (hotkey(Key::K), "move current cycle up"), (hotkey(Key::J), "move current cycle down"), (hotkey(Key::Backspace), "delete current cycle"), (hotkey(Key::N), "add a new empty cycle"), (hotkey(Key::M), "add a new pedestrian scramble cycle"), ], vec![ (hotkey(Key::R), "reset to original"), (hotkey(Key::P), "choose a preset signal"), ( hotkey(Key::B), "convert to dedicated pedestrian scramble cycle", ), ], vec![(hotkey(Key::Escape), "quit")], ], ctx, ); TrafficSignalEditor { menu, icon_selected: None, diagram: TrafficSignalDiagram::new(id, 0, &ui.primary.map, ctx), } } } impl State for TrafficSignalEditor { fn event(&mut self, ctx: &mut EventCtx, ui: &mut UI) -> Transition { self.menu.handle_event(ctx, None); ctx.canvas.handle_event(ctx.input); self.diagram.event(ctx, &mut self.menu); if ctx.redo_mouseover() { self.icon_selected = None; if let Some(pt) = ctx.canvas.get_cursor_in_map_space() { for t in ui
.primary .draw_map .get_turns(self.diagram.i, &ui.primary.map) { if t.contains_pt(pt) { self.icon_selected = Some(t.id); break; } } } } let mut signal = ui.primary.map.get_traffic_signal(self.diagram.i).clone(); if let Some(id) = self.icon_selected { let cycle = &mut signal.cycles[self.diagram.current_cycle()]; // Just one key to toggle between the 3 states let next_priority = match cycle.get_priority(id) { TurnPriority::Banned => { if ui.primary.map.get_t(id).turn_type == TurnType::Crosswalk { if cycle.could_be_priority_turn(id, &ui.primary.map) { Some(TurnPriority::Priority) } else { None } } else { Some(TurnPriority::Yield) } } TurnPriority::Stop => { panic!("Can't have TurnPriority::Stop in a traffic signal"); } TurnPriority::Yield => { if cycle.could_be_priority_turn(id, &ui.primary.map) { Some(TurnPriority::Priority) } else { Some(TurnPriority::Banned) } } TurnPriority::Priority => Some(TurnPriority::Banned), }; if let Some(pri) = next_priority { if ctx.input.contextual_action( Key::Space, format!("toggle from {:?} to {:?}", cycle.get_priority(id), pri), ) { cycle.edit_turn(ui.primary.map.get_t(id), pri); change_traffic_signal(signal, self.diagram.i, ui, ctx); return Transition::Keep; } } } if self.menu.action("quit") { return Transition::Pop; } if self.menu.action("change cycle duration") { return Transition::Push(make_change_cycle_duration( signal.cycles[self.diagram.current_cycle()].duration, )); } else if self.menu.action("choose a preset signal") { return Transition::Push(make_change_preset(self.diagram.i)); } else if self.menu.action("reset to original") { signal = ControlTrafficSignal::get_possible_policies(&ui.primary.map, self.diagram.i) .remove(0) .1; change_traffic_signal(signal, self.diagram.i, ui, ctx); self.diagram = TrafficSignalDiagram::new(self.diagram.i, 0, &ui.primary.map, ctx); return Transition::Keep; } let has_sidewalks = ui .primary .map .get_turns_in_intersection(self.diagram.i) .iter() .any(|t| t.between_sidewalks()); let
current_cycle = self.diagram.current_cycle(); if current_cycle != 0 && self.menu.action("move current cycle up") { signal.cycles.swap(current_cycle, current_cycle - 1); change_traffic_signal(signal, self.diagram.i, ui, ctx); self.diagram = TrafficSignalDiagram::new(self.diagram.i, current_cycle - 1, &ui.primary.map, ctx); } else if current_cycle != signal.cycles.len() - 1 && self.menu.action("move current cycle down") { signal.cycles.swap(current_cycle, current_cycle + 1); change_traffic_signal(signal, self.diagram.i, ui, ctx); self.diagram = TrafficSignalDiagram::new(self.diagram.i, current_cycle + 1, &ui.primary.map, ctx); } else if signal.cycles.len() > 1 && self.menu.action("delete current cycle") { signal.cycles.remove(current_cycle); let num_cycles = signal.cycles.len(); change_traffic_signal(signal, self.diagram.i, ui, ctx); self.diagram = TrafficSignalDiagram::new( self.diagram.i, if current_cycle == num_cycles { current_cycle - 1 } else { current_cycle }, &ui.primary.map, ctx, ); } else if self.menu.action("add a new empty cycle") { signal .cycles .insert(current_cycle, Cycle::new(self.diagram.i)); change_traffic_signal(signal, self.diagram.i, ui, ctx); self.diagram = TrafficSignalDiagram::new(self.diagram.i, current_cycle, &ui.primary.map, ctx); } else if has_sidewalks && self.menu.action("add a new pedestrian scramble cycle") { let mut cycle = Cycle::new(self.diagram.i); for t in ui.primary.map.get_turns_in_intersection(self.diagram.i) { if t.between_sidewalks() { cycle.edit_turn(t, TurnPriority::Priority); } } signal.cycles.insert(current_cycle, cycle); change_traffic_signal(signal, self.diagram.i, ui, ctx); self.diagram = TrafficSignalDiagram::new(self.diagram.i, current_cycle, &ui.primary.map, ctx); } else if has_sidewalks && self .menu .action("convert to dedicated pedestrian scramble cycle") { signal.convert_to_ped_scramble(&ui.primary.map); change_traffic_signal(signal, self.diagram.i, ui, ctx); self.diagram =
TrafficSignalDiagram::new(self.diagram.i, 0, &ui.primary.map, ctx); } Transition::Keep } fn draw(&self, g: &mut GfxCtx, ui: &UI) { { let mut opts = DrawOptions::new(); opts.suppress_traffic_signal_details = Some(self.diagram.i); ui.draw(g, opts, &ui.primary.sim, &ShowEverything::new()); } let mut batch = GeomBatch::new(); let ctx = DrawCtx { cs: &ui.cs, map: &ui.primary.map, draw_map: &ui.primary.draw_map, sim: &ui.primary.sim, }; let map = &ui.primary.map; let cycle = &map.get_traffic_signal(self.diagram.i).cycles[self.diagram.current_cycle()]; for t in &ui.primary.draw_map.get_turns(self.diagram.i, map) { let arrow_color = match cycle.get_priority(t.id) { TurnPriority::Priority => ui .cs .get_def("priority turn in current cycle", Color::GREEN), TurnPriority::Yield => ui .cs .get_def("yield turn in current cycle", Color::rgb(255, 105, 180)), TurnPriority::Banned => ui.cs.get_def("turn not in current cycle", Color::BLACK), TurnPriority::Stop => panic!("Can't have TurnPriority::Stop in a traffic signal"), }; t.draw_icon( &mut batch, &ctx.cs, arrow_color, self.icon_selected == Some(t.id), ); } draw_signal_cycle(cycle, None, &mut batch, &ctx); if let Some(id) = self.icon_selected { DrawTurn::draw_dashed( map.get_t(id), &mut batch, ui.cs.get_def("selected turn", Color::RED), ); } batch.draw(g); self.diagram.draw(g, &ctx); self.menu.draw(g); if let Some(t) = self.icon_selected { CommonState::draw_osd(g, ui, &Some(ID::Turn(t))); } else { CommonState::draw_osd(g, ui, &None); } } } fn change_traffic_signal( signal: ControlTrafficSignal, i: IntersectionID, ui: &mut UI, ctx: &mut EventCtx, ) { let orig = ControlTrafficSignal::new(&ui.primary.map, i, &mut Timer::throwaway()); let mut new_edits = ui.primary.map.get_edits().clone(); if orig == signal { new_edits.traffic_signal_overrides.remove(&i); } else { new_edits.traffic_signal_overrides.insert(i, signal); } apply_map_edits(&mut ui.primary, &ui.cs, ctx, new_edits); } fn make_change_cycle_duration(current_duration: Duration)
-> Box<dyn State> { WizardState::new(Box::new(move |wiz, ctx, _| { let new_duration = wiz.wrap(ctx).input_usize_prefilled( "How long should this cycle be?", format!("{}", current_duration.inner_seconds() as usize), )?; Some(Transition::PopWithData(Box::new(move |state, ui, ctx| { let mut editor = state.downcast_mut::<TrafficSignalEditor>().unwrap(); let mut signal = ui.primary.map.get_traffic_signal(editor.diagram.i).clone(); let idx = editor.diagram.current_cycle(); signal.cycles[idx].duration = Duration::seconds(new_duration as f64); change_traffic_signal(signal, editor.diagram.i, ui, ctx); editor.diagram = TrafficSignalDiagram::new(editor.diagram.i, idx, &ui.primary.map, ctx); }))) })) } fn make_change_preset(i: IntersectionID) -> Box<dyn State> { WizardState::new(Box::new(move |wiz, ctx, ui| { let (_, new_signal) = wiz.wrap(ctx) .choose("Use which preset for this intersection?", || { Choice::from(ControlTrafficSignal::get_possible_policies( &ui.primary.map, i, )) })?; Some(Transition::PopWithData(Box::new(move |state, ui, ctx| { let mut editor = state.downcast_mut::<TrafficSignalEditor>().unwrap(); change_traffic_signal(new_signal, editor.diagram.i, ui, ctx); editor.diagram = TrafficSignalDiagram::new(editor.diagram.i, 0, &ui.primary.map, ctx); }))) })) }
/// Placeholder that accepts two independent `i32` borrows and does nothing
/// with them.
///
/// The original declared two explicit lifetime parameters (`'a`, `'b`);
/// lifetime elision assigns each reference parameter its own fresh lifetime,
/// producing exactly the same signature, so the explicit lifetimes were
/// redundant (clippy::needless_lifetimes). Callers are unaffected.
fn foo(t: &i32, f: &i32) {
    // Intentionally empty; acknowledge the parameters to avoid unused warnings.
    let _ = (t, f);
}
use std::io;

use dsrv::server::Server;

/// Entry point: configures logging and runs the server on its default address.
fn main() -> io::Result<()> {
    // BUGFIX: the filter string previously read
    // "user_auth=info,dsrv=info cargo run" — shell text pasted into the
    // RUST_LOG value, which corrupts the last env_logger-style directive.
    std::env::set_var("RUST_LOG", "user_auth=info,dsrv=info");
    Server::new().run(None)
}
use std::io::{stdin, BufRead};
use std::fmt::Write;

/// Sum of the first three values of a slice.
/// Panics (as the original indexing did) if fewer than three are supplied.
fn sum_of_first_three(values: &[i64]) -> i64 {
    values[0] + values[1] + values[2]
}

/// Reads one line of whitespace-separated integers from stdin and prints the
/// sum of the first three.
fn main() {
    let stdin = stdin();
    let first_line = stdin
        .lock()
        .lines()
        .next()
        .expect("expected one line of input")
        .expect("failed to read from stdin");
    let values: Vec<i64> = first_line
        .split_whitespace()
        .map(|s| s.parse().expect("input values must be integers"))
        .collect();
    // Build the output string separately, as the original did, then print it.
    let mut output = String::new();
    write!(output, "{}", sum_of_first_three(&values)).unwrap();
    println!("{}", output);
}
// NOTE(review): integration tests for `file_finder::find_files`.
// `find_files_one_test` deliberately invokes `find_files` twice to verify
// that results do not leak between calls (see its inline comment). Code left
// byte-identical.
pub mod test_helpers; mod file_finder_test_suite { use crate::test_helpers::*; use hackscanner_lib::*; #[test] fn find_files_test() { let rules = get_rules_multiple_results(); let matches = file_finder::find_files(get_test_dir(), &rules); assert_multiple_paths(matches); } #[test] fn find_files_one_test() { let rules = get_rules_single_result(); // Call `find` multiple times to make sure the results are cleared between calls file_finder::find_files(get_test_dir(), &rules); let matches = file_finder::find_files(get_test_dir(), &rules); assert_single_path(matches); } }
// NOTE(review): `const_assert!` front-end plus its nightly/stable
// `_const_assert!` back-ends and `const_assert_eq!`. Both back-ends use the
// `[(); 0 - !(expr) as usize]` trick: a false expression makes the array
// length underflow at compile time, failing the build. Only a doc-comment
// typo was corrected ("macro If" -> "macro. If"); code left byte-identical.
/// Asserts that constant expressions evaluate to `true`. /// /// Constant expressions can be ensured to have certain properties via this /// macro. If the expression evaluates to `false`, the file will fail to compile. /// This is synonymous to [`static_assert` in C++][static_assert]. /// /// # Alternatives /// /// There also exists [`const_assert_eq`](macro.const_assert_eq.html) for /// validating whether a sequence of expressions are equal to one another. /// /// # Examples /// /// On stable Rust, using the macro requires a unique “label” when used in a /// module scope: /// #[cfg_attr(feature = "nightly", doc = "```ignore")] #[cfg_attr(not(feature = "nightly"), doc = "```")] /// # #[macro_use] /// # extern crate static_assertions; /// # fn main() {} /// const_assert!(meaning_of_life; 42 == !!42); /// ``` /// /// The [labeling limitation](index.html#limitations) is not necessary if /// compiling on nightly Rust with the `nightly` feature enabled: /// #[cfg_attr(feature = "nightly", doc = "```")] #[cfg_attr(not(feature = "nightly"), doc = "```ignore")] /// #![feature(underscore_const_names)] /// # #[macro_use] extern crate static_assertions; /// /// const FIVE: usize = 5; /// /// const_assert!(FIVE * FIVE == 25); /// /// fn main() { ///     const_assert!(2 + 2 == 4); ///     const_assert!(FIVE - FIVE == 0); /// } /// ``` /// /// Some expressions are blatantly false: /// /// ```compile_fail /// # #[macro_use] extern crate static_assertions; /// # fn main() { /// const_assert!(1 >= 2); /// # } /// ``` /// /// [static_assert]: http://en.cppreference.com/w/cpp/language/static_assert #[macro_export(local_inner_macros)] macro_rules! const_assert { ($($xs:tt)+) => { _const_assert!($($xs)+); }; } #[doc(hidden)] #[cfg(feature = "nightly")] #[macro_export(local_inner_macros)] #[allow(dead_code)] macro_rules!
_const_assert { ($($xs:expr),+ $(,)*) => { #[allow(unknown_lints, eq_op)] const _: [(); 0 - !($($xs)&&+) as usize] = []; }; } #[doc(hidden)] #[cfg(not(feature = "nightly"))] #[macro_export(local_inner_macros)] macro_rules! _const_assert { ($($xs:expr),+ $(,)*) => { #[allow(unknown_lints, eq_op)] let _ = [(); 0 - !($($xs)&&+) as usize]; }; ($label:ident; $($xs:tt)+) => { #[allow(dead_code, non_snake_case)] fn $label() { const_assert!($($xs)+); } }; } /// Asserts that constants are equal in value. /// /// # Examples /// /// Works as a shorthand for `const_assert!(a == b)`: /// #[cfg_attr(feature = "nightly", doc = "```ignore")] #[cfg_attr(not(feature = "nightly"), doc = "```")] /// # #[macro_use] /// # extern crate static_assertions; /// const TWO: usize = 2; /// const_assert_eq!(two; TWO * TWO, TWO + TWO, 4); /// /// fn main() { ///     const NUM: usize = 32; ///     const_assert_eq!(NUM + NUM, 64); /// } /// ``` /// /// Just because 2 × 2 = 2 + 2 doesn't mean it holds true for other numbers: /// /// ```compile_fail /// # #[macro_use] extern crate static_assertions; /// # fn main() { /// const_assert_eq!(4 + 4, 4 * 4); /// # } /// ``` #[macro_export(local_inner_macros)] macro_rules! const_assert_eq { ($x:expr, $($xs:expr),+ $(,)*) => { const_assert!($($x == $xs),+); }; ($label:ident; $x:expr, $($xs:expr),+ $(,)*) => { const_assert!($label; $($x == $xs),+); }; }
// NOTE(review): userspace shims over the `abi` wasm import module. Each safe
// wrapper forwards to the raw extern fn and demuxes the u64 status word via
// `nabi::Error::demux`. The `print!`/`println!` macros route formatting
// through the host `print` call, and `WeeAlloc` is installed as the global
// allocator.
//
// NOTE(review): in `physical_map`, the value bound to `page_count` is the
// type's size rounded UP to the next 64 KiB boundary *in bytes* — and it
// adds a whole extra 64 KiB when the size is already page-aligned
// (`size + (1 << 16) - rem` with `rem == 0`). Despite the name, this is not
// a count of pages; confirm whether the host-side `physical_map` expects
// bytes or pages before relying on it. Code left byte-identical.
#![feature( wasm_import_module, global_allocator, )] extern crate wee_alloc; extern crate nabi; #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[macro_export] macro_rules! print { ($($arg:tt)*) => {{ $crate::print(&format!($($arg)*)); }}; } #[macro_export] macro_rules! println { () => (print!("\n")); ($fmt:expr) => (print!(concat!($fmt, "\n"))); ($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, "\n"), $($arg)*)); } mod abi { #[wasm_import_module = "abi"] extern { pub fn print(ptr: *const u8, len: usize); pub fn wasm_compile(ptr: *const u8, len: usize) -> u64; pub fn process_create(code_handle: u32, chan_handle: u32) -> u64; pub fn process_start(process_handle: u32) -> u64; pub fn channel_create(handle0: &mut u32, handle1: &mut u32) -> u64; pub fn channel_write(handle: u32, ptr: *const u8, len: usize) -> u64; pub fn channel_read(handle: u32, ptr: *mut u8, len: usize, msg_len_out: &mut usize) -> u64; pub fn physical_map(phys_addr: u64, page_count: usize) -> u64; } } pub fn print(x: &str) { unsafe { abi::print(x.as_ptr(), x.len()); } } pub fn compile_wasm(wasm: &[u8]) -> nabi::Result<u32> { let ret = unsafe { abi::wasm_compile(wasm.as_ptr(), wasm.len()) }; nabi::Error::demux(ret) } pub fn process_create(handle: u32, chan_handle: u32) -> nabi::Result<u32> { let ret = unsafe { abi::process_create(handle, chan_handle) }; nabi::Error::demux(ret) } pub fn process_start(handle: u32) -> nabi::Result<u32> { let ret = unsafe { abi::process_start(handle) }; nabi::Error::demux(ret) } pub fn channel_create() -> nabi::Result<(u32, u32)> { let (mut handle_tx, mut handle_rx) = (0, 0); let ret = unsafe { abi::channel_create(&mut handle_tx, &mut handle_rx) }; nabi::Error::demux(ret) .map(|_| (handle_tx, handle_rx)) } pub fn channel_write(handle: u32, data: &[u8]) -> nabi::Result<()> { let ret = unsafe { abi::channel_write(handle, data.as_ptr(), data.len()) }; nabi::Error::demux(ret).map(|_| ()) } pub fn channel_read(handle: u32, buffer: &mut [u8])
-> (usize, nabi::Result<()>) { let mut msg_size_out = 0; let ret = unsafe { abi::channel_read(handle, buffer.as_mut_ptr(), buffer.len(), &mut msg_size_out) }; (msg_size_out, nabi::Error::demux(ret).map(|_| ())) } pub fn physical_map<T: Sized>(phys_addr: u64) -> nabi::Result<&'static mut T> { use std::mem; let page_count = { let rem = mem::size_of::<T>() % (1 << 16); mem::size_of::<T>() + (1 << 16) - rem }; let ret = unsafe { abi::physical_map(phys_addr, page_count) }; nabi::Error::demux(ret) .map(|offset| unsafe { mem::transmute::<_, &'static mut T>(offset) }) }
// NOTE(review): module root for the MySQL driver — public re-exports of the
// connection/cursor/row/type types, private protocol submodules, the
// `MySqlPool` alias, and the query/row glue macros. Declarations only; code
// left byte-identical.
//! **MySQL** database and connection types. pub use arguments::MySqlArguments; pub use connection::MySqlConnection; pub use cursor::MySqlCursor; pub use database::MySql; pub use error::MySqlError; pub use row::MySqlRow; pub use type_info::MySqlTypeInfo; pub use value::{MySqlData, MySqlValue}; mod arguments; mod connection; mod cursor; mod database; mod error; mod executor; mod io; mod protocol; mod row; mod rsa; mod stream; mod tls; mod type_info; pub mod types; mod util; mod value; /// An alias for [`crate::pool::Pool`], specialized for **MySQL**. #[cfg_attr(docsrs, doc(cfg(feature = "mysql")))] pub type MySqlPool = crate::pool::Pool<MySqlConnection>; make_query_as!(MySqlQueryAs, MySql, MySqlRow); impl_map_row_for_row!(MySql, MySqlRow); impl_from_row_for_tuples!(MySql, MySqlRow);
extern crate iqdb;

/// CLI entry point: takes an image URL as the first argument, queries every
/// available iqdb service for it, and prints each match with Debug formatting.
fn main() {
    let image_url = ::std::env::args().nth(1).expect("Expect 1 image URL");
    let services = iqdb::available_services().unwrap();
    let results = iqdb::search_by_url(&image_url, &services).unwrap();
    for result in results.iter() {
        println!("{:?}", result);
    }
}
/*
Copyright (c) 2015, 2016 Saurav Sachidanand

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

//! Stars

/**
Computes the combined magnitude of two stars

The combined magnitude is `m2 - 2.5*log10(brightness_ratio + 1)`
(Meeus, *Astronomical Algorithms*).

# Arguments

* `m1`: Magnitude of star 1
* `m2`: Magnitude of star 2
**/
#[inline]
pub fn combined_mag(m1: f64, m2: f64) -> f64 {
    // BUGFIX: the log10 was missing; without it the raw brightness ratio was
    // subtracted, yielding nonsense (e.g. two equal 0-magnitude stars must
    // combine to about -0.75, not -5.0).
    m2 - 2.5 * (brightness_ratio(m1, m2) + 1.0).log10()
}

/**
Computes the combined magnitude of two or more stars

# Arguments

* `m`: Array of magnitudes of stars
**/
pub fn combined_mag_of_many(m: &[f64]) -> f64 {
    // Sum the linear brightnesses (10^(-0.4 m)) and convert back to a
    // magnitude.
    let mut sum = 0.0;
    for i in m.iter() {
        sum += 10_f64.powf(-0.4 * i);
    }

    -2.5 * sum.log10()
}

/**
Computes the brightness ratio of two stars

# Arguments

* `m1`: Magnitude of star 1
* `m2`: Magnitude of star 2
**/
#[inline]
pub fn brightness_ratio(m1: f64, m2: f64) -> f64 {
    10.0_f64.powf(0.4 * (m2 - m1))
}

/**
Computes the difference in magnitude of two stars

# Arguments

* `br`: Brightness ratio of two stars
**/
#[inline]
pub fn mag_diff(br: f64) -> f64 {
    2.5 * br.log10()
}

/**
Computes the absolute magnitude of a star from its parallax

# Arguments

* `par`: Parallax of the star *| in radians* (converted to arcseconds
         internally)
* `am`: Apparent magnitude of the star
**/
#[inline]
pub fn abs_mag_frm_parallax(par: f64, am: f64) -> f64 {
    am + 5.0 + 5.0 * (par.to_degrees() * 3600.0).log10()
}

/**
Computes the absolute magnitude of a star from its distance from earth

# Arguments

* `d`: The star's distance to earth *(parsecs)*
* `am`: Apparent magnitude of the star
**/
#[inline]
pub fn abs_mag_frm_dist(d: f64, am: f64) -> f64 {
    am + 5.0 - 5.0 * d.log10()
}

/**
Computes the angle between a vector from a star to the north celestial pole
of the Earth and a vector from the same star to the north pole of the
ecliptic

# Returns

* `angle`: The desired angle *| in radians*

# Arguments

* `eclip_long`: The star's ecliptical longitude *| in radians*
* `eclip_lat`: The star's ecliptical latitude *| in radians*
* `oblq_eclip`: Obliquity of the ecliptic *| in radians*
**/
#[inline]
pub fn angl_between_north_celes_and_eclip_pole(
    eclip_long: f64,
    eclip_lat: f64,
    oblq_eclip: f64,
) -> f64 {
    (eclip_long.cos() * oblq_eclip.tan())
        .atan2(eclip_lat.sin() * eclip_long.sin() * oblq_eclip.tan() - eclip_lat.cos())
}

/**
Computes the equatorial coordinates of a star at at a different time from
it's motion in space

This function Computes the equatorial coordinates of a star at a different
time by taking into account it's proper motion, distance and radial velocity.

# Returns

`(new_asc, new_dec)`

* `new_asc`: Right ascension at the different time *| in radians*
* `new_dec`: Declination at the different time *| in radians*

# Arguments

* `asc0`: Right ascension of the star initially *| in radians*
* `dec0`: Declination of the star initially *| in radians*
* `r`: Distance of the star (*parsecs*)
* `delta_r`: Radial velocity of the star (*parsecs/second*)
* `proper_motion_asc`: Proper motion of the star in right ascension
                       *| in radians*
* `proper_motion_dec`: Proper motion of the star in declination *| in radians*
* `t`: Decimal years from the inital time; negative in the past and positive
       in the future
**/
pub fn eq_coords_frm_motion(
    asc0: f64,
    dec0: f64,
    r: f64,
    delta_r: f64,
    proper_motion_asc: f64,
    proper_motion_dec: f64,
    t: f64,
) -> (f64, f64) {
    // Rectangular coordinates of the star.
    let x = r * dec0.cos() * asc0.cos();
    let y = r * dec0.cos() * asc0.sin();
    let z = r * dec0.sin();

    // Proper motion converted from radians; 206265 is arcseconds per radian.
    // NOTE(review): the 13751 divisor is the original author's constant —
    // confirm against the source formula before changing.
    let delta_asc = 3600.0 * proper_motion_asc.to_degrees() / 13751.0;
    let delta_dec = 3600.0 * proper_motion_dec.to_degrees() / 206265.0;

    // Space-velocity components.
    let delta_x = (x / r) * delta_r - z * delta_dec * asc0.cos() - y * delta_asc;
    let delta_y = (y / r) * delta_r - z * delta_dec * asc0.sin() + x * delta_asc;
    let delta_z = (z / r) * delta_r + r * delta_dec * dec0.cos();

    // Position advanced by t years.
    let x1 = x + t * delta_x;
    let y1 = y + t * delta_y;
    let z1 = z + t * delta_z;

    let asc = y1.atan2(x1);
    let dec = z1.atan2((x1 * x1 + y1 * y1).sqrt());

    (asc, dec)
}

/**
Converts a star's proper motion components given in equatorial coordinates
into the corresponding components in ecliptical coordinates.

# Returns

`(pmotion_long, pmotion_lat)` — proper motion in ecliptical longitude and
latitude *| in radians* (NOTE(review): semantics inferred from the variable
names and formula shape; confirm against the crate documentation)

# Arguments

* `asc`: Right ascension *| in radians*
* `dec`: Declination *| in radians*
* `pmotion_asc`: Proper motion in right ascension *| in radians*
* `pmotion_dec`: Proper motion in declination *| in radians*
* `ecl_lat`: Ecliptical latitude *| in radians*
* `oblq_eclip`: Obliquity of the ecliptic *| in radians*
**/
pub fn proper_motion_in_eq_coords(
    asc: f64,
    dec: f64,
    pmotion_asc: f64,
    pmotion_dec: f64,
    ecl_lat: f64,
    oblq_eclip: f64,
) -> (f64, f64) {
    let ecl_lat_cos = ecl_lat.cos();

    let pmotion_long = (pmotion_dec * oblq_eclip.sin() * asc.cos()
        + pmotion_asc
            * dec.cos()
            * (oblq_eclip.cos() * dec.cos() + oblq_eclip.sin() * dec.sin() * asc.sin()))
        / (ecl_lat_cos * ecl_lat_cos);

    let pmotion_lat = (pmotion_dec
        * (oblq_eclip.cos() * dec.cos() + oblq_eclip.sin() * dec.sin() * asc.sin())
        - pmotion_asc * oblq_eclip.sin() * asc.cos() * dec.cos())
        / ecl_lat_cos;

    (pmotion_long, pmotion_lat)
}
// Public submodule declarations; implementations live in `blank.rs` and
// `height_map.rs`.
pub mod blank;
pub mod height_map;
// NOTE(review): svd2rust-generated accessors for the Ethernet MAC MDIO
// address register (MACMDIOAR): reset value (0), per-field read proxies (R)
// and write proxies (W) with the bit offsets/masks documented inline
// (MB bit 0, C45E bit 1, GOC bits 2:3, SKAP bit 4, CR bits 8:11, NTC bits
// 12:14, RDA bits 16:20, PA bits 21:25, BTB bit 26, PSE bit 27). Generated
// code — regenerate from the SVD instead of editing by hand; left
// byte-identical.
#[doc = "Reader of register MACMDIOAR"] pub type R = crate::R<u32, super::MACMDIOAR>; #[doc = "Writer for register MACMDIOAR"] pub type W = crate::W<u32, super::MACMDIOAR>; #[doc = "Register MACMDIOAR `reset()`'s with value 0"] impl crate::ResetValue for super::MACMDIOAR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `MB`"] pub type MB_R = crate::R<bool, bool>; #[doc = "Write proxy for field `MB`"] pub struct MB_W<'a> { w: &'a mut W, } impl<'a> MB_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `C45E`"] pub type C45E_R = crate::R<bool, bool>; #[doc = "Write proxy for field `C45E`"] pub struct C45E_W<'a> { w: &'a mut W, } impl<'a> C45E_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `GOC`"] pub type GOC_R = crate::R<u8, u8>; #[doc = "Write proxy for field `GOC`"] pub struct GOC_W<'a> { w: &'a mut W, } impl<'a> GOC_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 2)) | (((value as u32) & 0x03) << 2); self.w } } #[doc = "Reader of field `SKAP`"] pub type SKAP_R = crate::R<bool, bool>; #[doc = "Write proxy for field `SKAP`"] pub struct SKAP_W<'a> { w: &'a mut W, }
impl<'a> SKAP_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Reader of field `CR`"] pub type CR_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CR`"] pub struct CR_W<'a> { w: &'a mut W, } impl<'a> CR_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8); self.w } } #[doc = "Reader of field `NTC`"] pub type NTC_R = crate::R<u8, u8>; #[doc = "Write proxy for field `NTC`"] pub struct NTC_W<'a> { w: &'a mut W, } impl<'a> NTC_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 12)) | (((value as u32) & 0x07) << 12); self.w } } #[doc = "Reader of field `RDA`"] pub type RDA_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RDA`"] pub struct RDA_W<'a> { w: &'a mut W, } impl<'a> RDA_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 16)) | (((value as u32) & 0x1f) << 16); self.w } } #[doc = "Reader of field `PA`"] pub type PA_R = crate::R<u8, u8>; #[doc = "Write proxy for field `PA`"] pub struct PA_W<'a> { w: &'a mut W, } impl<'a> PA_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 21)) | (((value as u32) & 0x1f) << 21); self.w } } #[doc = "Reader of field `BTB`"] pub type BTB_R = crate::R<bool, bool>; #[doc = "Write proxy for field `BTB`"] pub
struct BTB_W<'a> { w: &'a mut W, } impl<'a> BTB_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26); self.w } } #[doc = "Reader of field `PSE`"] pub type PSE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PSE`"] pub struct PSE_W<'a> { w: &'a mut W, } impl<'a> PSE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 27)) | (((value as u32) & 0x01) << 27); self.w } } impl R { #[doc = "Bit 0 - MII Busy"] #[inline(always)] pub fn mb(&self) -> MB_R { MB_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Clause 45 PHY Enable"] #[inline(always)] pub fn c45e(&self) -> C45E_R { C45E_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bits 2:3 - MII Operation Command"] #[inline(always)] pub fn goc(&self) -> GOC_R { GOC_R::new(((self.bits >> 2) & 0x03) as u8) } #[doc = "Bit 4 - Skip Address Packet"] #[inline(always)] pub fn skap(&self) -> SKAP_R { SKAP_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bits 8:11 - CSR Clock Range"] #[inline(always)] pub fn cr(&self) -> CR_R { CR_R::new(((self.bits >> 8) & 0x0f) as u8) } #[doc = "Bits 12:14 - Number of Training Clocks"] #[inline(always)] pub fn ntc(&self) -> NTC_R { NTC_R::new(((self.bits >> 12) & 0x07) as u8) } #[doc = "Bits 16:20 - Register/Device Address"] #[inline(always)] pub fn rda(&self) -> RDA_R { RDA_R::new(((self.bits >> 16) & 0x1f) as u8) } #[doc = "Bits 21:25 - 
Physical Layer Address"] #[inline(always)] pub fn pa(&self) -> PA_R { PA_R::new(((self.bits >> 21) & 0x1f) as u8) } #[doc = "Bit 26 - Back to Back transactions"] #[inline(always)] pub fn btb(&self) -> BTB_R { BTB_R::new(((self.bits >> 26) & 0x01) != 0) } #[doc = "Bit 27 - Preamble Suppression Enable"] #[inline(always)] pub fn pse(&self) -> PSE_R { PSE_R::new(((self.bits >> 27) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - MII Busy"] #[inline(always)] pub fn mb(&mut self) -> MB_W { MB_W { w: self } } #[doc = "Bit 1 - Clause 45 PHY Enable"] #[inline(always)] pub fn c45e(&mut self) -> C45E_W { C45E_W { w: self } } #[doc = "Bits 2:3 - MII Operation Command"] #[inline(always)] pub fn goc(&mut self) -> GOC_W { GOC_W { w: self } } #[doc = "Bit 4 - Skip Address Packet"] #[inline(always)] pub fn skap(&mut self) -> SKAP_W { SKAP_W { w: self } } #[doc = "Bits 8:11 - CSR Clock Range"] #[inline(always)] pub fn cr(&mut self) -> CR_W { CR_W { w: self } } #[doc = "Bits 12:14 - Number of Training Clocks"] #[inline(always)] pub fn ntc(&mut self) -> NTC_W { NTC_W { w: self } } #[doc = "Bits 16:20 - Register/Device Address"] #[inline(always)] pub fn rda(&mut self) -> RDA_W { RDA_W { w: self } } #[doc = "Bits 21:25 - Physical Layer Address"] #[inline(always)] pub fn pa(&mut self) -> PA_W { PA_W { w: self } } #[doc = "Bit 26 - Back to Back transactions"] #[inline(always)] pub fn btb(&mut self) -> BTB_W { BTB_W { w: self } } #[doc = "Bit 27 - Preamble Suppression Enable"] #[inline(always)] pub fn pse(&mut self) -> PSE_W { PSE_W { w: self } } }
// NOTE(review): Rust learning scratchpad exercising vectors, HashMap,
// match/if-let, a guessing game, and ownership/move semantics. `main` only
// calls `test_vector()`; the other exercises are commented out. Inline
// comments and user-facing strings are in Chinese; the strings are runtime
// output (println!) and must not be altered. The `mod lib;` declaration is
// unconventional (`lib` is a reserved crate-root name) — worth renaming, but
// left untouched here. Code left byte-identical.
use std::cmp::Ordering; use std::collections; use std::io; use std::num::TryFromIntError; use std::option::Option::Some; use rand::Rng; use t_c::test_vector; use crate::structures::{Coin, State}; use std::borrow::Borrow; mod structures; mod variables; mod lib; mod front_of_house; mod t_c; fn main() { // guess_game(); // place_holder(); // if_let(); // test_map(); // test_vector::main(); // test_ownership(); test_vector(); } fn test_vector() { let mut m = vec![1, 2, 3, 4, 5]; // 修改vec中元素的值 for mut i in &mut m { *i += 10; } match m.pop() { Some(io) => println!("{}", io), None => {println!("空数组")} } for i in m { println!("i is : {}", i); } let row = vec![ SpreadSheetCell::Int(10), SpreadSheetCell::Float(10.1), SpreadSheetCell::Text("行".to_string()), SpreadSheetCell::Int(108) ]; for rand in row { println!("row: {:?}", rand) } } // tests the place holder _ fn place_holder() { let some_u8_value = 0o01; match some_u8_value { 1 => println!("one"), 2 => println!("two"), _ => println!("others") } } fn test_map() { let mut map = collections::HashMap::new(); map.insert("hello", "世界1"); map.insert("hello1", "世界2"); println!("key is {} and value is {}", "hello", match map.get_mut("hello") { Some(str) => &str, None => "key不存在", }); } // tests if let fn if_let() { let some_u8_value = Some(0x10); if let Some(16) = some_u8_value { println!("16"); } if some_u8_value == Some(16) { println!("真16?"); } // origin let mut count = 0; let coin = Coin::Quarter(State::M); let coin1 = &coin; let coin2 = &coin; // 拿到coin的引用 match coin1 { Coin::Quarter(state) => { println!("State is : {}", state.get_u8()); } _ => count += 1, } // replace via if let or else. 
if let Coin::Quarter(s) = coin2 { println!("状态为: {}", s.get_u8()); } else { count += 1; } } fn guess_game() { println!("猜数字游戏:"); let secret_number = rand::thread_rng().gen_range(1..101); println!("生成的谜底数字为: {}", secret_number); println!("请你开始猜吧!"); loop { let mut guess = String::new(); io::stdin() .read_line(&mut guess) .expect("读取数据失败"); let guess: u32 = match guess.trim().parse() { Ok(num) => num, Err(error) => { println!("监控到异常: {}, 请输入正确的数字!", error); continue; } }; println!("你猜的数字为: {}", guess); match guess.cmp(&secret_number) { Ordering::Less => { println!("猜小了"); } Ordering::Equal => { println!("你赢了"); break; } Ordering::Greater => { println!("猜大了"); } } } } // 理解下ownership, borrowing, move, slice. fn test_ownership() { fn test_inner(s: &str) { println!("test_inner s is : {}", s); } // 函数,将获取s的ownership. borrowing from s; will move s's ownership. fn test_inner_str(s: String) { println!("test_inner_str s is : {}", s); } // let s = "hello"; let s = "hello".to_string(); // let mut s2 = "hello"; // let mut s3 = "hello".to_string(); test_inner(&s); println!("now s is : {}", s); // 创建新的字符串 let m = s; test_inner_str(m); // value moved, memory s already released after let m = s; // + +=的函数签名的&self, +=self模式,会获取数据的ownership // println!("now s is : {}", s); // test_outer(&s); // println!("now s is : {}", s); // test_outer_str(s); // println!("now s is : {}", s); } fn test_outer(s: &str) { println!("test_inner s is : {}", s); } fn test_outer_str(s: String) { println!("test_inner_str s is : {}", s); } #[derive(Debug)] enum SpreadSheetCell { Int(i32), Float(f64), Text(String), }
fn main() { cpp_build::Config::new() .compiler("g++") .flag("-fopenmp") .flag("-std=c++17") .opt_level(3) .build("src/calculation/openmp/cpp_interface.rs"); }
fn main() { for i in 1..=3 { println!("count-{}", i); } print!("aaa"); print!("bbb"); }
use std::collections::hash_map::*; use std::ffi::OsStr; use std::fs::File; use std::env; use std::io::{self, BufReader}; use std::path::{Path, PathBuf}; use std::str; use hangouts_json_parser::{raw, Hangouts}; fn usage() { eprintln!("usage: {} <json path> <participant name>", env::args().next().unwrap()); } fn chrono(parts: Result<(i64, u32), std::num::ParseIntError>) -> chrono::DateTime<chrono::Utc> { let (secs, nsecs) = parts.expect("bad timestamp"); chrono::TimeZone::timestamp(&chrono::Utc, secs, nsecs) } fn start_formatting(formatting: &raw::Formatting) { if formatting.bold { print!("<b>"); } if formatting.italics { print!("<i>"); } if formatting.strikethrough { print!("<s>"); } if formatting.underline { print!("<ul>"); } } fn end_formatting(formatting: &raw::Formatting) { if formatting.bold { print!("</b>"); } if formatting.italics { print!("</i>"); } if formatting.strikethrough { print!("</s>"); } if formatting.underline { print!("</ul>"); } } fn parent_path(s: &OsStr) -> Result<PathBuf, io::Error> { let mut canonical = Path::new(s).canonicalize()?; canonical.pop(); Ok(canonical) } fn urldecode(s: &str) -> Result<String, String> { let mut bytes = vec![]; let mut skip = 0; for (i, byte) in s.as_bytes().iter().cloned().enumerate() { if skip > 0 { skip -= 1; continue; } if byte == b'%' { let num_str = s.get(i + 1 .. 
i + 3) .ok_or("%-encoded character cut short")?; let n = u8::from_str_radix(num_str, 16) .map_err(|e| format!("invalid %-encoded character: {}", e))?; bytes.push(n); skip = 2; } else { bytes.push(byte); } } String::from_utf8(bytes) .map_err(|e| format!("{}", e)) } fn find_local_file(url: &str, base_path: &Path) -> Option<PathBuf> { let url_filename = url .rsplit_terminator('/') .next().unwrap(); let decoded_filename = urldecode(url_filename) .map_err(|e| { eprintln!("bad URL {:?}: {}", url_filename, e); e }) .ok()?; let mut filename = decoded_filename.clone(); let mut localpath = base_path.join(&filename); loop { if localpath.exists() { return Some(localpath); } if localpath.extension().is_none() { localpath.set_extension("jpg"); if localpath.exists() { return Some(localpath); } } // Try unwrapping another layer of urlencoding. if let Ok(decoded) = urldecode(&filename) { if decoded == filename { break; } filename = decoded; localpath.set_file_name(&filename); } else { break; } } // Try again, additionally replacing some characters because this is what Google does sometimes. filename = decoded_filename; loop { filename = filename.replace('+', " "); filename = filename.replace('?', "_"); localpath.set_file_name(&filename); if localpath.exists() { return Some(localpath); } if localpath.extension().is_none() { localpath.set_extension("jpg"); if localpath.exists() { return Some(localpath); } } // Try unwrapping another layer of urlencoding. if let Ok(decoded) = urldecode(&filename) { if decoded == filename { break; } filename = decoded; } else { break; } } None } fn file_url(path: &Path) -> String { let s = path.to_str().expect("non-utf8 path"); if cfg!(windows) && s.starts_with(r"\\?\") { // Browsers don't like "\\?\" paths; remove the prefix. 
format!("file:///{}", &s[4..]) } else { format!("file:///{}", s) } } fn main() -> Result<(), io::Error> { let path = env::args_os() .nth(1) .unwrap_or_else(|| { usage(); std::process::exit(2); }); let base_path = parent_path(&path).unwrap_or_else(|e| { eprintln!("Error: could not canonicalize path {:?}: {}", path, e); std::process::exit(2); }); let participant_name = env::args() .nth(2) .unwrap_or_else(|| { usage(); std::process::exit(2); }); let mut hangouts: Hangouts = serde_json::from_reader(BufReader::new(File::open(path)?))?; let convo = hangouts.conversations .iter_mut() .find(|convo| convo.header.details.participant_data.iter().any(|p| p.fallback_name.as_ref() == Some(&participant_name))) .unwrap_or_else(|| { eprintln!("No matching conversation found with a person named {:?}", participant_name); std::process::exit(1); }); let names: HashMap<raw::ParticipantId, String> = convo.header.details.participant_data .iter() .map(|p| (p.id.clone(), p.fallback_name.as_deref().unwrap_or("[unknown]").to_owned())) .collect(); println!("<!DOCTYPE html>"); println!("<html>"); println!("<head><meta charset=\"utf-8\"/></head>"); println!("<body>"); convo.events.sort_unstable_by_key(|event| event.header.timestamp.clone()); for event in &convo.events { let dt = chrono(event.header.timestamp()).format("%Y-%m-%d %H:%M:%S"); let name = &names[&event.header.sender_id]; println!("[{}] {}: ", dt, name); match event.data { raw::EventData::ChatMessage { ref message_content, .. 
} => { for segment in &message_content.segments { match segment { raw::ChatSegment::Text { ref text, formatting, } => { start_formatting(&formatting); println!("{}", text); end_formatting(&formatting); } raw::ChatSegment::Link { text, link_data, formatting } => { start_formatting(&formatting); println!("<a href=\"{}\">{}</a>", link_data.link_target, text); end_formatting(&formatting); } raw::ChatSegment::LineBreak { text: _ } => { println!("<br />"); } } } for attachment in &message_content.attachments { if let Some(ref photo) = attachment.embed_item.plus_photo { if let Some(path) = find_local_file( &photo.url, &base_path) { println!("<img src=\"{}\"", file_url(&path)); } else { println!("<img src=\"{}\"", photo.url); } // TODO: maybe make a thumbnail: smaller image and link to full one? println!("width=\"100%\"/>"); } else { println!("[an attachment: <pre>{:#?}</pre>]", attachment); } } } raw::EventData::HangoutEvent { ref data, .. } => { match data { raw::HangoutEvent::StartHangout => { println!("<i>[call started]</i><br />"); } raw::HangoutEvent::EndHangout { ref hangout_duration_secs } => { println!("<i>[call ended; duration was {} seconds]</i><br />", hangout_duration_secs); } } } raw::EventData::MembershipChange { .. } => { println!("<i>[membership change: {:#?}]</i>", event.data); } raw::EventData::ConversationRename { ref old_name, ref new_name } => { println!("<i>[conversation renamed from {:?} to {:?}]</i>", old_name, new_name); } } println!("<hr />"); } println!("</body></html>"); Ok(()) }
extern crate chrono; #[macro_use] extern crate clap; #[macro_use] extern crate nom; extern crate uuid; use clap::{App, Arg, SubCommand}; use std::fs::File; use std::io::Read; use std::path::Path; mod vm; mod instruction; mod repl; mod assembler; mod scheduler; fn main() { let yaml = load_yaml!("cli.yml"); let matches = App::from_yaml(yaml).get_matches(); let target_file = matches.value_of("INPUT_FILE"); match target_file { Some(filename) => { let program = read_file(filename); let mut asm = assembler::Assembler::new(); let mut vm = vm::VM::new(); let program = asm.assemble(&program); match program { Ok(mut p) => { vm.add_bytes(&mut p); let events = vm.run(); println!("VM Events"); println!("--------------------------"); for event in &events { println!("{:#?}", event); }; std::process::exit(0); } Err(_e) => {} } } None => { start_repl(); } } } fn start_repl() { let mut repl = repl::REPL::new(); repl.run(); } fn read_file(tmp: &str) -> String { let filename = Path::new(tmp); match File::open(Path::new(&filename)) { Ok(mut fh) => { let mut contents = String::new(); match fh.read_to_string(&mut contents) { Ok(_) => contents, Err(e) => { println!("There was an error reading file: {:?}", e); std::process::exit(1); } } } Err(e) => { println!("File not found: {:?}", e); std::process::exit(1) } } }
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::any::Any;
use std::sync::Arc;

use async_trait::async_trait;
use async_trait::unboxed_simple;
use common_base::runtime::GlobalIORuntime;
use common_base::runtime::TrySpawn;
use common_exception::Result;
use common_expression::DataBlock;
use common_pipeline_core::processors::port::InputPort;
use common_pipeline_core::processors::processor::Event;
use common_pipeline_core::processors::Processor;

/// An asynchronous pipeline sink: consumes `DataBlock`s one at a time.
/// Lifecycle: `on_start` once, then `consume` per block, then `on_finish` once.
#[async_trait]
pub trait AsyncSink: Send {
    const NAME: &'static str;

    /// Called once before the first `consume`. Default: no-op.
    async fn on_start(&mut self) -> Result<()> {
        Ok(())
    }

    /// Called once after input is exhausted (or on early finish). Default: no-op.
    async fn on_finish(&mut self) -> Result<()> {
        Ok(())
    }

    /// Consumes one block. `AsyncSinker::async_process` stores the returned
    /// bool into its `finished` flag, so returning `true` ends the transfer
    /// early.
    #[unboxed_simple]
    async fn consume(&mut self, data_block: DataBlock) -> Result<bool>;
}

/// Adapter that drives an [`AsyncSink`] as a pipeline `Processor`.
pub struct AsyncSinker<T: AsyncSink + 'static> {
    inner: Option<T>,              // taken out in Drop to run missed callbacks
    finished: bool,                // set when `consume` returns true
    input: Arc<InputPort>,
    input_data: Option<DataBlock>, // block pulled from `input` but not yet consumed
    called_on_start: bool,
    called_on_finish: bool,
}

impl<T: AsyncSink + 'static> AsyncSinker<T> {
    /// Wraps `inner` into a boxed `Processor` reading from `input`.
    pub fn create(input: Arc<InputPort>, inner: T) -> Box<dyn Processor> {
        Box::new(AsyncSinker {
            input,
            finished: false,
            input_data: None,
            inner: Some(inner),
            called_on_start: false,
            called_on_finish: false,
        })
    }
}

impl<T: AsyncSink + 'static> Drop for AsyncSinker<T> {
    /// If the processor is dropped before `on_start`/`on_finish` ran,
    /// spawn a task on the global IO runtime to run the missed callbacks
    /// (their errors are deliberately ignored — the sink is already gone).
    fn drop(&mut self) {
        if !self.called_on_start || !self.called_on_finish {
            if let Some(mut inner) = self.inner.take() {
                GlobalIORuntime::instance().spawn({
                    // Copy the flags out: the async block outlives `self`.
                    let called_on_start = self.called_on_start;
                    let called_on_finish = self.called_on_finish;
                    async move {
                        if !called_on_start {
                            let _ = inner.on_start().await;
                        }
                        if !called_on_finish {
                            let _ = inner.on_finish().await;
                        }
                    }
                });
            }
        }
    }
}

#[async_trait::async_trait]
impl<T: AsyncSink + 'static> Processor for AsyncSinker<T> {
    fn name(&self) -> String {
        T::NAME.to_string()
    }

    fn as_any(&mut self) -> &mut dyn Any {
        self
    }

    /// State machine deciding the next scheduler action; `Event::Async`
    /// always routes into `async_process` below.
    fn event(&mut self) -> Result<Event> {
        // `on_start` has not run yet — request an async step to run it.
        if !self.called_on_start {
            return Ok(Event::Async);
        }
        // A pulled block is waiting to be consumed.
        if self.input_data.is_some() {
            return Ok(Event::Async);
        }
        // `consume` reported completion: run `on_finish`, then close the port.
        if self.finished {
            if !self.called_on_finish {
                return Ok(Event::Async);
            }
            self.input.finish();
            return Ok(Event::Finished);
        }
        // Upstream closed: still owe one async step for `on_finish`.
        if self.input.is_finished() {
            return match !self.called_on_finish {
                true => Ok(Event::Async),
                false => Ok(Event::Finished),
            };
        }
        // Pull the next block if available, otherwise ask for more data.
        match self.input.has_data() {
            true => {
                self.input_data = Some(self.input.pull_data().unwrap()?);
                Ok(Event::Async)
            }
            false => {
                self.input.set_need_data();
                Ok(Event::NeedData)
            }
        }
    }

    /// Runs exactly one lifecycle step per invocation, in priority order:
    /// `on_start`, then `consume` of a pending block, then `on_finish`.
    async fn async_process(&mut self) -> Result<()> {
        if !self.called_on_start {
            self.called_on_start = true;
            self.inner.as_mut().unwrap().on_start().await?;
        } else if let Some(data_block) = self.input_data.take() {
            self.finished = self.inner.as_mut().unwrap().consume(data_block).await?;
        } else if !self.called_on_finish {
            self.called_on_finish = true;
            self.inner.as_mut().unwrap().on_finish().await?;
        }
        Ok(())
    }
}
use rule::Rule; #[test] fn char_in() { let digit = Rule::new(|_, l| Ok((l.chars().next().unwrap() as u32) - 48)); digit.char_in('0', '9'); let af = Rule::new(|_, l| Ok((l.chars().next().unwrap() as u32) - 55)); af.char_in('A', 'F'); let hex = Rule::default(); hex.any_of(vec![&digit, &af]); let parser: Rule<u32> = Rule::new(|b, _| { let mut m = 1u32; let mut n = 0u32; for i in b.iter().rev() { n += i * m; m <<= 4; } Ok(n) }); parser.between(1, 8, &hex); if let Ok(branches) = parser.scan("A") { assert_eq!(branches[0], 10); } else { assert!(false); } if let Ok(branches) = parser.scan("12345678") { assert_eq!(branches[0], 305419896); } else { assert!(false); } if let Ok(branches) = parser.scan("FF") { assert_eq!(branches[0], 255); } else { assert!(false); } if let Ok(branches) = parser.scan("FFFFFFFF") { assert_eq!(branches[0], u32::max_value()); } else { assert!(false); } if let Ok(_) = parser.scan("FFFFFFFFF") { assert!(false); } else { assert!(true); } if let Ok(_) = parser.scan("FFxFF") { assert!(false); } else { assert!(true); } if let Ok(_) = parser.scan("") { assert!(false); } else { assert!(true); } }
extern crate pretty_bytes; use std::mem; use std::net::UdpSocket; use std::process::exit; use pretty_bytes::converter::convert; use crate::tftp::shared::{data_channel::{DataChannel, DataChannelMode}, err_packet::ErrorPacket, request_packet::{ReadRequestPacket, WriteRequestPacket}, Serializable, STRIDE_SIZE, TFTPPacket}; use crate::tftp::shared::data_channel::DataChannelOwner; struct TFTPClient { packet_buffer: Option<Vec<u8>>, data_channel: DataChannel, error: Option<String>, transfer_size: u64, } impl TFTPClient { /// Constructs a new TFTPClient. fn new(file_name: &str, mode: DataChannelMode) -> Self { let data_channel = DataChannel::new(file_name, mode, DataChannelOwner::Client); let data_channel = match data_channel { Ok(channel) => channel, Err(e) => { eprintln!("[ERROR] {}", e.err()); exit(-2) } }; // Keep the information we need to know // in the object and initialize them // to some default values. TFTPClient { packet_buffer: None, data_channel, error: None, transfer_size: 0, } } /// Places a RRQ in the packet buffer to be sent to the server. pub fn download(file_name: &str) -> TFTPClient { let mut client = TFTPClient::new(file_name, DataChannelMode::Rx); let rrq = Box::new(ReadRequestPacket::new(file_name, "octet")); client.packet_buffer = Some(rrq.serialize()); client } /// Places a WRQ in the packet buffer to be sent /// to the server, then opens the file to be read. pub fn upload(file_name: &str) -> TFTPClient { let mut client = TFTPClient::new(file_name, DataChannelMode::Tx); let wrq = Box::new(WriteRequestPacket::new(file_name, "octet")); client.packet_buffer = Some(wrq.serialize()); client } /// Returns the first packet in the packet /// buffer to be sent to the server. pub fn get_next_packet(&mut self) -> Vec<u8> { self.transfer_size += self.data_channel.transfer_size() as u64; let packet_at_hand = self.data_channel.packet_at_hand(); if packet_at_hand.is_none() { // RRQ / WRQ are managed here. 
return mem::replace(&mut self.packet_buffer, None).unwrap(); } packet_at_hand.unwrap() } /// Tells whether the client's packet buffer /// has any pending packets to be sent. pub fn is_done(&self) -> bool { self.data_channel.is_done() } /// Facade to client logic, parses the given buffer to a TFTP packet /// then acts accordingly. pub fn process_packet(&mut self, buf: &[u8]) { let packet = crate::tftp::shared::parse_udp_packet(&buf); match packet { TFTPPacket::DATA(data) => { self.data_channel.on_data(data); } TFTPPacket::ACK(ack) => { self.data_channel.on_ack(ack); } TFTPPacket::ERR(err) => self.on_err(err), t => panic!(format!("Unexpected packet type: [{:?}]", t)), }; } pub fn on_packet_sent(&mut self) { self.data_channel.on_packet_sent(); } /// Returns true if the client entered an error /// state. fn is_err(&self) -> bool { self.error.is_some() } /// Number of bytes transferred. fn transferred_bytes(&self) -> u64 { self.transfer_size } /// Extracts the error message from the client. fn get_err(self) -> String { self.error.unwrap() } /// Set the error state for the client. fn on_err(&mut self, err: ErrorPacket) { self.error = Some(String::from(err.err())); } } fn check_done(client: &TFTPClient) { if client.is_done() { let size = convert(client.transferred_bytes() as f64); println!("{} bytes transferred successfully.", size); exit(0); } } /// Entry point for TFTP client. pub fn client_main(server_address: &str, filename: &str, upload: bool) -> std::io::Result<()> { // Make a UDPSocket on any port on localhost. 
let sock = UdpSocket::bind("0.0.0.0:58955")?; let mut server_address = server_address.to_string(); let mut client = if upload { println!("Uploading..."); TFTPClient::upload(filename) } else { println!("Downloading..."); TFTPClient::download(filename) }; println!("[CLIENT_ADDRESS]: {}", sock.local_addr().unwrap()); loop { let mut buf = [0; 1024]; if client.is_err() { eprintln!("[ERROR] {}", client.get_err()); exit(-3); } let next_packet = &client.get_next_packet(); sock.send_to(next_packet, server_address)?; client.on_packet_sent(); check_done(&client); // Download ends here, when sending the last ACK. let (count, addr) = sock.recv_from(&mut buf)?; // The server opens a UDP socket for each new client. // that's why we need to change the address to send // data to, otherwise we'll get an error from the // server. I didn't notice that on the first time I // tried and was getting an error, inspecting src/dst // port revealed that. (and it's mentioned in the RFC) server_address = addr.to_string(); let raw_packet = &buf[..count]; client.process_packet(raw_packet); check_done(&client); // Upload ends here, when receiving the last ACK. } }
//! A Rust wrapper around the `easy-ecc` C library. #![warn(missing_docs)] #[macro_use] extern crate serde_hex; extern crate libc; pub mod secp256r1;
use std::fs;
use std::path::PathBuf;

use guarding_core::rule_executor::rule_error::MismatchType;

use crate::exec_guarding;

// Path to the Java fixture project every test below runs against.
fn test_dir() -> PathBuf {
    PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("_fixtures")
        .join("java")
}

// Runs the `size.guarding` rule file and checks the reported file-size
// mismatch (expected/actual values, type, and message) in detail.
#[test]
fn should_working_in_process() {
    let path = test_dir().join("size.guarding");
    let content = fs::read_to_string(path).expect("not file");
    let code_dir = test_dir();
    let errors = exec_guarding(content, code_dir);
    assert_eq!(1, errors.len());

    assert_eq!("50".to_string(), errors[0].expected);
    assert_eq!("26".to_string(), errors[0].actual);
    assert_eq!(MismatchType::FileSize, errors[0].mismatch_type);
    assert_eq!("file.len = 26, expected: len > 50".to_string(), errors[0].msg);
}

// A file.len equality rule that does not hold reports one error.
#[test]
fn should_get_errors_when_lt() {
    let content = "package(\".\")::file.len should = 27;";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(1, errors.len());
}

// Package-filtered rule that matches the fixture passes cleanly.
#[test]
fn should_support_filter() {
    let content = "package(\"com.phodal.pepper.refactor.parser\")::file.len should = 3;";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(0, errors.len());
}

// Two class-count bounds in one rule file, both satisfied.
#[test]
fn should_support_for_class_filter() {
    let content = "class(\".\")::len should < 25; class(\".\")::len should > 20;";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(0, errors.len());
}

// Counting classes by implemented interface.
#[test]
fn should_support_for_extends_count() {
    let content = "class(implementation \"BaseParser\")::len = 2";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(0, errors.len());
}

// `endsWith`: wrong suffix flags both implementors; right suffix passes.
#[test]
fn should_support_for_extends_ends_with() {
    let content = "class(implementation \"BaseParser\")::name should endsWith \"Parser2\";";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(1, errors.len());
    assert_eq!(2, errors[0].items.len());

    let correct_content = "class(implementation \"BaseParser\")::name should endsWith \"Parser\";";
    let errors = exec_guarding(correct_content.to_string(), test_dir());
    assert_eq!(0, errors.len());
}

// `startsWith`: only one of the two implementors violates the prefix.
#[test]
fn should_support_for_starts_with() {
    let content = "class(implementation \"BaseParser\")::name should startsWith \"Json\";";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(1, errors.len());
    assert_eq!(1, errors[0].items.len());
}

// `resideIn` with a `....`-wildcarded package path.
#[test]
fn should_support_for_reside_in() {
    let content = "class(implementation \"BaseParser\") resideIn package(\"....parser2\");";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(1, errors.len());

    let content = "class(implementation \"BaseParser\") resideIn package(\"....parser\");";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(0, errors.len());
}

// Negated form of the previous test: `not resideIn` inverts the outcomes.
#[test]
fn should_support_for_not_reside_in() {
    let content = "class(implementation \"BaseParser\") not resideIn package(\"....parser2\");";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(0, errors.len());

    let content = "class(implementation \"BaseParser\") not resideIn package(\"....parser\");";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(1, errors.len());
}

// `contains` on class names: a missing substring flags both implementors.
#[test]
fn should_support_for_contains() {
    let content = "class(implementation \"BaseParser\")::name should contains \"Lexer\";";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(1, errors.len());
    assert_eq!(2, errors[0].items.len());

    let content = "class(implementation \"BaseParser\")::name should contains \"Parser\";";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(0, errors.len());
}

// Negated `contains` inverts the previous outcomes.
#[test]
fn should_support_for_not_contains() {
    let content = "class(implementation \"BaseParser\")::name should not contains \"Lexer\";";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(0, errors.len());

    let content = "class(implementation \"BaseParser\")::name should not contains \"Parser\";";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(1, errors.len());
}

// `only accessed`: the violating file shows up in the error items.
#[test]
fn should_support_for_accessed() {
    let content =
        "class(\"java.util.Map\") only accessed([\"com.phodal.pepper.refactor.staticclass\"]);";
    let errors = exec_guarding(content.to_string(), test_dir());
    assert_eq!(1, errors.len());
    assert!(errors[0].items[0].contains("MyDictionary.java"))
}
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::collections::VecDeque;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::sync::Arc;

use common_base::runtime::TrackedFuture;
use common_base::runtime::TrySpawn;
use common_exception::Result;
use petgraph::dot::Config;
use petgraph::dot::Dot;
use petgraph::prelude::EdgeIndex;
use petgraph::prelude::NodeIndex;
use petgraph::prelude::StableGraph;
use petgraph::Direction;
use tracing::debug;

use crate::pipelines::executor::executor_condvar::WorkersCondvar;
use crate::pipelines::executor::executor_tasks::ExecutorTasksQueue;
use crate::pipelines::executor::executor_worker_context::ExecutorTask;
use crate::pipelines::executor::executor_worker_context::ExecutorWorkerContext;
use crate::pipelines::executor::processor_async_task::ProcessorAsyncTask;
use crate::pipelines::executor::PipelineExecutor;
use crate::pipelines::pipeline::Pipeline;
use crate::pipelines::processors::connect;
use crate::pipelines::processors::port::InputPort;
use crate::pipelines::processors::port::OutputPort;
use crate::pipelines::processors::processor::Event;
use crate::pipelines::processors::processor::ProcessorPtr;
use crate::pipelines::processors::DirectedEdge;
use crate::pipelines::processors::UpdateList;
use crate::pipelines::processors::UpdateTrigger;

// Scheduling state of one graph node; `Processing` covers both the sync
// and async queues.
enum State {
    Idle,
    // Preparing,
    Processing,
    Finished,
}

// One processor in the executing graph, plus its scheduling state and the
// edge-update list its ports write into.
struct Node {
    state: std::sync::Mutex<State>,
    processor: ProcessorPtr,
    updated_list: Arc<UpdateList>,
    #[allow(dead_code)]
    inputs_port: Vec<Arc<InputPort>>,
    #[allow(dead_code)]
    outputs_port: Vec<Arc<OutputPort>>,
}

impl Node {
    // Builds a node in `Idle` state with a fresh (empty) update list.
    pub fn create(
        processor: &ProcessorPtr,
        inputs_port: &[Arc<InputPort>],
        outputs_port: &[Arc<OutputPort>],
    ) -> Arc<Node> {
        Arc::new(Node {
            state: std::sync::Mutex::new(State::Idle),
            processor: processor.clone(),
            updated_list: UpdateList::create(),
            inputs_port: inputs_port.to_vec(),
            outputs_port: outputs_port.to_vec(),
        })
    }

    // Drains this node's pending edge updates into `queue`.
    pub unsafe fn trigger(&self, queue: &mut VecDeque<DirectedEdge>) {
        self.updated_list.trigger(queue)
    }

    // Creates a raw trigger for edge `index`; installed on a port below.
    pub unsafe fn create_trigger(&self, index: EdgeIndex) -> *mut UpdateTrigger {
        self.updated_list.create_trigger(index)
    }
}

// The processor graph being executed: nodes are processors, edges are
// port-to-port connections.
struct ExecutingGraph {
    graph: StableGraph<Arc<Node>, ()>,
}

type StateLockGuard = ExecutingGraph;

impl ExecutingGraph {
    // Builds the graph from a single pipeline.
    pub fn create(mut pipeline: Pipeline) -> Result<ExecutingGraph> {
        let mut graph = StableGraph::new();
        Self::init_graph(&mut pipeline, &mut graph);
        Ok(ExecutingGraph { graph })
    }

    // Builds one graph containing every pipeline in `pipelines`.
    pub fn from_pipelines(mut pipelines: Vec<Pipeline>) -> Result<ExecutingGraph> {
        let mut graph = StableGraph::new();
        for pipeline in &mut pipelines {
            Self::init_graph(pipeline, &mut graph);
        }
        Ok(ExecutingGraph { graph })
    }

    // Adds all pipe items as nodes, wires consecutive pipes' output ports to
    // the next pipe's input ports, and installs the port update triggers.
    fn init_graph(pipeline: &mut Pipeline, graph: &mut StableGraph<Arc<Node>, ()>) {
        // Partially-built edge: the source half is known when the edge is
        // pushed; the target half is filled in by the *next* pipe's items.
        #[derive(Debug)]
        struct Edge {
            source_port: usize,
            source_node: NodeIndex,
            target_port: usize,
            target_node: NodeIndex,
        }

        let mut pipes_edges: Vec<Vec<Edge>> = Vec::new();
        for pipe in &pipeline.pipes {
            // Each pipe must consume exactly the outputs of the previous one.
            assert_eq!(
                pipe.input_length,
                pipes_edges.last().map(|x| x.len()).unwrap_or_default()
            );

            let mut edge_index = 0;
            let mut pipe_edges = Vec::with_capacity(pipe.output_length);

            for item in &pipe.items {
                let node = Node::create(&item.processor, &item.inputs_port, &item.outputs_port);

                let graph_node_index = graph.add_node(node.clone());
                unsafe {
                    item.processor.set_id(graph_node_index);
                }

                // Complete the previous pipe's dangling edges with this
                // item's input ports.
                for offset in 0..item.inputs_port.len() {
                    let last_edges = pipes_edges.last_mut().unwrap();
                    last_edges[edge_index].target_port = offset;
                    last_edges[edge_index].target_node = graph_node_index;
                    edge_index += 1;
                }

                // Start a dangling edge for each of this item's outputs.
                for offset in 0..item.outputs_port.len() {
                    pipe_edges.push(Edge {
                        source_port: offset,
                        source_node: graph_node_index,
                        target_port: 0,
                        target_node: Default::default(),
                    });
                }
            }

            pipes_edges.push(pipe_edges);
        }

        // The last pipe cannot contain any output edge.
        assert!(pipes_edges.last().map(|x| x.is_empty()).unwrap_or_default());
        pipes_edges.pop();

        // Materialize the edges and connect the actual ports with triggers
        // so port activity is reported back through each node's UpdateList.
        for pipe_edges in &pipes_edges {
            for edge in pipe_edges {
                let edge_index = graph.add_edge(edge.source_node, edge.target_node, ());

                unsafe {
                    let (target_node, target_port) = (edge.target_node, edge.target_port);
                    let input_trigger = graph[target_node].create_trigger(edge_index);
                    graph[target_node].inputs_port[target_port].set_trigger(input_trigger);
                    let (source_node, source_port) = (edge.source_node, edge.source_port);
                    let output_trigger = graph[source_node].create_trigger(edge_index);
                    graph[source_node].outputs_port[source_port].set_trigger(output_trigger);
                    connect(
                        &graph[target_node].inputs_port[target_port],
                        &graph[source_node].outputs_port[source_port],
                    );
                }
            }
        }
    }

    /// Seeds the schedule queue by scheduling every sink (node with no
    /// outgoing edges).
    ///
    /// # Safety
    ///
    /// Method is thread unsafe and require thread safe call
    pub unsafe fn init_schedule_queue(
        locker: &StateLockGuard,
        capacity: usize,
    ) -> Result<ScheduleQueue> {
        let mut schedule_queue = ScheduleQueue::with_capacity(capacity);
        for sink_index in locker.graph.externals(Direction::Outgoing) {
            ExecutingGraph::schedule_queue(locker, sink_index, &mut schedule_queue)?;
        }
        Ok(schedule_queue)
    }

    /// Walks from `index`, polling `event()` on each reachable node whose
    /// state is Idle, pushing Sync/Async work into `schedule_queue` and
    /// following the edge updates each event produces.
    ///
    /// # Safety
    ///
    /// Method is thread unsafe and require thread safe call
    pub unsafe fn schedule_queue(
        locker: &StateLockGuard,
        index: NodeIndex,
        schedule_queue: &mut ScheduleQueue,
    ) -> Result<()> {
        let mut need_schedule_nodes = VecDeque::new();
        let mut need_schedule_edges = VecDeque::new();

        need_schedule_nodes.push_back(index);
        while !need_schedule_nodes.is_empty() || !need_schedule_edges.is_empty() {
            // To avoid lock too many times, we will try to cache lock.
            let mut state_guard_cache = None;

            if need_schedule_nodes.is_empty() {
                let edge = need_schedule_edges.pop_front().unwrap();
                let target_index = DirectedEdge::get_target(&edge, &locker.graph)?;
                let node = &locker.graph[target_index];
                let node_state = node.state.lock().unwrap();
                // Only Idle nodes are (re)scheduled; others are already
                // queued or done. The acquired guard is cached for reuse.
                if matches!(*node_state, State::Idle) {
                    state_guard_cache = Some(node_state);
                    need_schedule_nodes.push_back(target_index);
                }
            }

            if let Some(schedule_index) = need_schedule_nodes.pop_front() {
                let node = &locker.graph[schedule_index];
                if state_guard_cache.is_none() {
                    state_guard_cache = Some(node.state.lock().unwrap());
                }
                let event = node.processor.event()?;
                if tracing::enabled!(tracing::Level::TRACE) {
                    tracing::trace!(
                        "node id: {:?}, name: {:?}, event: {:?}",
                        node.processor.id(),
                        node.processor.name(),
                        event
                    );
                }
                // Map the processor's event to the node state and, for
                // Sync/Async, enqueue the processor for execution.
                let processor_state = match event {
                    Event::Finished => State::Finished,
                    Event::NeedData | Event::NeedConsume => State::Idle,
                    Event::Sync => {
                        schedule_queue.push_sync(node.processor.clone());
                        State::Processing
                    }
                    Event::Async => {
                        schedule_queue.push_async(node.processor.clone());
                        State::Processing
                    }
                };
                // Collect the edges this event touched; their targets are
                // visited next.
                node.trigger(&mut need_schedule_edges);
                *state_guard_cache.unwrap() = processor_state;
            }
        }
        Ok(())
    }
}

// Work produced by one scheduling pass, split by execution style.
pub struct ScheduleQueue {
    pub sync_queue: VecDeque<ProcessorPtr>,
    pub async_queue: VecDeque<ProcessorPtr>,
}

impl ScheduleQueue {
    pub fn with_capacity(capacity: usize) -> ScheduleQueue {
        ScheduleQueue {
            sync_queue: VecDeque::with_capacity(capacity),
            async_queue: VecDeque::with_capacity(capacity),
        }
    }

    #[inline]
    pub fn push_sync(&mut self, processor: ProcessorPtr) {
        self.sync_queue.push_back(processor);
    }

    #[inline]
    pub fn push_async(&mut self, processor: ProcessorPtr) {
        self.async_queue.push_back(processor);
    }

    // Hands every remaining sync task to the global queue for other workers.
    pub fn schedule_tail(mut self, global: &ExecutorTasksQueue, ctx: &mut ExecutorWorkerContext) {
        let mut tasks = VecDeque::with_capacity(self.sync_queue.len());
        while let Some(processor) = self.sync_queue.pop_front() {
            tasks.push_back(ExecutorTask::Sync(processor));
        }
        global.push_tasks(ctx, tasks)
    }

    // Dispatches everything: async tasks are spawned on the runtime, the
    // first sync task stays with this worker, the rest go to the global queue.
    pub fn schedule(
        mut self,
        global: &Arc<ExecutorTasksQueue>,
        context: &mut ExecutorWorkerContext,
        executor: &PipelineExecutor,
    ) {
        debug_assert!(!context.has_task());

        while let Some(processor) = self.async_queue.pop_front() {
            Self::schedule_async_task(
                processor,
                context.query_id.clone(),
                executor,
                context.get_worker_num(),
                context.get_workers_condvar().clone(),
                global.clone(),
            )
        }

        if !self.sync_queue.is_empty() {
            // Keep one sync task on this worker for immediate execution.
            self.schedule_sync(global, context);
        }

        if !self.sync_queue.is_empty() {
            // Anything still queued is offered to the other workers.
            self.schedule_tail(global, context);
        }
    }

    // Spawns one async processor step on the executor's async runtime,
    // tracked so completion re-enters the global task queue.
    pub fn schedule_async_task(
        proc: ProcessorPtr,
        query_id: Arc<String>,
        executor: &PipelineExecutor,
        wakeup_worker_num: usize,
        workers_condvar: Arc<WorkersCondvar>,
        global_queue: Arc<ExecutorTasksQueue>,
    ) {
        unsafe {
            workers_condvar.inc_active_async_worker();
            let process_future = proc.async_process();
            executor
                .async_runtime
                .spawn(TrackedFuture::create(ProcessorAsyncTask::create(
                    query_id,
                    wakeup_worker_num,
                    proc.clone(),
                    global_queue,
                    workers_condvar,
                    process_future,
                )));
        }
    }

    // Assigns the front sync task directly to the current worker context.
    fn schedule_sync(&mut self, _: &ExecutorTasksQueue, ctx: &mut ExecutorWorkerContext) {
        if let Some(processor) = self.sync_queue.pop_front() {
            ctx.set_task(ExecutorTask::Sync(processor));
        }
    }
}

// Public wrapper around `ExecutingGraph` exposing the executor-facing API.
pub struct RunningGraph(ExecutingGraph);

impl RunningGraph {
    pub fn create(pipeline: Pipeline) -> Result<RunningGraph> {
        let graph_state = ExecutingGraph::create(pipeline)?;
        debug!("Create running graph:{:?}", graph_state);
        Ok(RunningGraph(graph_state))
    }

    pub fn from_pipelines(pipelines: Vec<Pipeline>) -> Result<RunningGraph> {
        let graph_state = ExecutingGraph::from_pipelines(pipelines)?;
        debug!("Create running graph:{:?}", graph_state);
        Ok(RunningGraph(graph_state))
    }

    /// # Safety
    ///
    /// Method is thread unsafe and require thread safe call
    pub unsafe fn init_schedule_queue(&self, capacity: usize) -> Result<ScheduleQueue> {
        ExecutingGraph::init_schedule_queue(&self.0, capacity)
    }

    /// # Safety
    ///
    /// Method is thread unsafe and require thread safe call
    pub unsafe fn schedule_queue(&self, node_index: NodeIndex) -> Result<ScheduleQueue> {
        let mut schedule_queue = ScheduleQueue::with_capacity(0);
        ExecutingGraph::schedule_queue(&self.0, node_index, &mut schedule_queue)?;
        Ok(schedule_queue)
    }

    // Interrupts every processor in the graph.
    pub fn interrupt_running_nodes(&self) {
        unsafe {
            for node_index in self.0.graph.node_indices() {
                self.0.graph[node_index].processor.interrupt();
            }
        }
    }

    // Renders every node's id/name/state for diagnostics.
    pub fn format_graph_nodes(&self) -> String {
        pub struct NodeDisplay {
            id: usize,
            name: String,
            state: String,
        }

        impl Debug for NodeDisplay {
            fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
                f.debug_struct("Node")
                    .field("name", &self.name)
                    .field("id", &self.id)
                    .field("state", &self.state)
                    .finish()
            }
        }

        let mut nodes_display = Vec::with_capacity(self.0.graph.node_count());
        for node_index in self.0.graph.node_indices() {
            unsafe {
                let state = self.0.graph[node_index].state.lock().unwrap();
                nodes_display.push(NodeDisplay {
                    id: self.0.graph[node_index].processor.id().index(),
                    name: self.0.graph[node_index].processor.name(),
                    state: String::from(match *state {
                        State::Idle => "Idle",
                        State::Processing => "Processing",
                        State::Finished => "Finished",
                    }),
                });
            }
        }

        format!("{:?}", nodes_display)
    }
}

impl Debug for Node {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        unsafe { write!(f, "{}", self.processor.name()) }
    }
}

impl Debug for ExecutingGraph {
    // Renders the graph in Graphviz DOT format (edge labels suppressed).
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        write!(
            f,
            "{:?}",
            Dot::with_config(&self.graph, &[Config::EdgeNoLabel])
        )
    }
}

impl Debug for RunningGraph {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        // let graph = self.0.read();
        write!(f, "{:?}", self.0)
    }
}

impl Debug for ScheduleQueue {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        #[derive(Debug)]
        #[allow(dead_code)]
        struct QueueItem {
            id: usize,
            name: String,
        }

        unsafe {
            let mut sync_queue = Vec::with_capacity(self.sync_queue.len());
            let mut async_queue = Vec::with_capacity(self.async_queue.len());
            for item in &self.sync_queue {
                sync_queue.push(QueueItem {
                    id: item.id().index(),
                    name: item.name().to_string(),
                })
            }
            for item in &self.async_queue {
                async_queue.push(QueueItem {
                    id: item.id().index(),
                    name: item.name().to_string(),
                })
            }
            f.debug_struct("ScheduleQueue")
                .field("sync_queue", &sync_queue)
                .field("async_queue", &async_queue)
                .finish()
        }
    }
}
use std::fs::File;
use std::io::BufReader;
use std::io::Read;

/// Returns the product `a * b * c` of the first triple of values
/// (taken at indices `i <= j <= k`, repeats allowed, scanning in index
/// order) whose sum is 2020, or `None` if no such triple exists.
fn product_of_2020_triple(nums: &[i64]) -> Option<i64> {
    for (i, &a) in nums.iter().enumerate() {
        for (j, &b) in nums.iter().enumerate().skip(i) {
            for &c in &nums[j..] {
                if a + b + c == 2020 {
                    // Return immediately: the original `break` only left the
                    // innermost loop, so the same answer could be printed
                    // several times.
                    return Some(a * b * c);
                }
            }
        }
    }
    None
}

fn main() {
    let file = File::open("input").expect("Failed to read");
    let mut buf_reader = BufReader::new(file);
    let mut contents = String::new();
    buf_reader
        .read_to_string(&mut contents)
        .expect("Failed to bufferize file");

    // Parse every line exactly once up front. The original re-walked the
    // `lines` iterator with `.clone().nth(..)` on every access, making each
    // element read an O(n) scan and re-parsing the same lines over and over.
    // Lines that fail to parse are skipped, matching the old `continue`.
    let nums: Vec<i64> = contents
        .lines()
        .filter_map(|line| line.trim().parse().ok())
        .collect();

    if let Some(product) = product_of_2020_triple(&nums) {
        println!("{}", product);
    }
}
use itertools::Itertools; use regex::Regex; use std::borrow::Borrow; use std::fs::File; use std::io::{BufRead, BufReader, Error, ErrorKind, Read}; use std::str::FromStr; use lazy_static::lazy_static; // 1.3.0 lazy_static! { static ref byr_present: Regex = Regex::new(r"\bbyr\s*:").unwrap(); static ref iyr_present: Regex = Regex::new(r"\biyr\s*:").unwrap(); static ref eyr_present: Regex = Regex::new(r"\beyr\s*:").unwrap(); static ref hgt_present: Regex = Regex::new(r"\bhgt\s*:").unwrap(); static ref hcl_present: Regex = Regex::new(r"\bhcl\s*:").unwrap(); static ref ecl_present: Regex = Regex::new(r"\becl\s*:").unwrap(); static ref pid_present: Regex = Regex::new(r"\bpid\s*:").unwrap(); static ref byr: Regex = Regex::new(r"\bbyr\s*:\s*(\d{4})\b").unwrap(); static ref iyr: Regex = Regex::new(r"\biyr\s*:\s*(\d{4})\b").unwrap(); static ref eyr: Regex = Regex::new(r"\beyr\s*:\s*(\d{4})\b").unwrap(); static ref hgt: Regex = Regex::new(r"\bhgt\s*:\s*(\d+)(in|cm)\b").unwrap(); static ref hcl: Regex = Regex::new(r"\bhcl\s*:\s*(#[0-9a-f]{6})\b").unwrap(); static ref ecl: Regex = Regex::new(r"\becl\s*:\s*(amb|blu|brn|gry|grn|hzl|oth)\b").unwrap(); static ref pid: Regex = Regex::new(r"\bpid\s*:\s*(\d{9})\b").unwrap(); } pub fn validate_byr(text: &String) -> bool { byr.is_match(text) && match byr.captures_iter(text).next() { Some(cap) => { let year: u32 = FromStr::from_str(&cap[1]).unwrap(); year >= 1920 && year <= 2002 } None => { println!(" byr false {}", text); false } } } pub fn validate_iyr(text: &String) -> bool { iyr.is_match(text) && match iyr.captures_iter(text).next() { Some(cap) => { let year: u32 = FromStr::from_str(&cap[1]).unwrap(); year >= 2010 && year <= 2020 } None => { println!(" iyr false {}", text); false } } } pub fn validate_eyr(text: &String) -> bool { eyr.is_match(text) && match eyr.captures_iter(text).next() { Some(cap) => { let year: u32 = FromStr::from_str(&cap[1]).unwrap(); year >= 2020 && year <= 2030 } None => { println!(" eyr false {}", text); false 
} } } pub fn validate_hgt(text: &String) -> bool { hgt.is_match(text) && match hgt.captures_iter(text).next() { Some(cap) => { let height: i32 = FromStr::from_str(&cap[1]).unwrap(); let unit = &cap[2]; match unit { "in" => height >= 59 && height <= 76, "cm" => height >= 150 && height <= 193, _ => false, } } None => { println!(" hgt false {}", text); false } } } pub fn validate_hcl(text: &String) -> bool { hcl.is_match(text) } pub fn validate_ecl(text: &String) -> bool { ecl.is_match(text) } pub fn validate_pid(text: &String) -> bool { pid.is_match(text) } pub fn validate_passport1(text: &String) -> bool { hcl_present.is_match(text) && pid_present.is_match(text) && ecl_present.is_match(text) && hgt_present.is_match(text) && eyr_present.is_match(text) && iyr_present.is_match(text) && byr_present.is_match(text) } pub fn validate_passport2(text: &String) -> bool { validate_byr(text) && validate_ecl(text) && validate_eyr(text) && validate_hcl(text) && validate_hgt(text) && validate_ecl(text) && validate_pid(text) } pub fn day_four() -> Result<(), Error> { let count = include_str!("../day4.txt") .split("\n\n") // .filter(|text| validate_passport1(&text.to_string())) .filter(|text| validate_passport2(&text.to_string())) .count(); dbg!(count); Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_byr() { assert_eq!(validate_byr(&"byr: 2002".to_owned()), true); assert_eq!(validate_byr(&"byr: 1920".to_owned()), true); assert_eq!(validate_byr(&"byr: 2003".to_owned()), false); assert_eq!(validate_byr(&"byr: 1915".to_owned()), false); } #[test] fn test_hgt() { assert_eq!(validate_hgt(&"hgt: 60in".to_owned()), true); assert_eq!(validate_hgt(&"hgt: 190cm".to_owned()), true); assert_eq!(validate_hgt(&"hgt: 190in".to_owned()), false); assert_eq!(validate_hgt(&"hgt: 190".to_owned()), false); } #[test] fn test_hcl() { assert_eq!(validate_hcl(&"hcl: #123abc".to_owned()), true); assert_eq!(validate_hcl(&"hcl: #123abz".to_owned()), false); assert_eq!(validate_hcl(&"hcl: 
123abc".to_owned()), false); } #[test] fn test_ecl() { assert_eq!(validate_ecl(&"ecl: amb".to_owned()), true); assert_eq!(validate_ecl(&"ecl: blu".to_owned()), true); assert_eq!(validate_ecl(&"ecl: brn".to_owned()), true); assert_eq!(validate_ecl(&"ecl: gry".to_owned()), true); assert_eq!(validate_ecl(&"ecl: grn".to_owned()), true); assert_eq!(validate_ecl(&"ecl: hzl".to_owned()), true); assert_eq!(validate_ecl(&"ecl: oth".to_owned()), true); assert_eq!(validate_ecl(&"ecl: otha".to_owned()), false); assert_eq!(validate_ecl(&"ecl: wat".to_owned()), false); } #[test] fn test_pid() { assert_eq!(validate_pid(&"pid: 000000001".to_owned()), true); assert_eq!(validate_pid(&"pid: 0123456789".to_owned()), false); } #[test] fn test_passport() { assert_eq!( validate_passport2( &"eyr:1972 cid:100 hcl:#18171d ecl:amb hgt:170 pid:186cm iyr:2018 byr:1926" .to_owned() ), false ); assert_eq!( validate_passport2( &"iyr:2019 hcl:#602927 eyr:1967 hgt:170cm ecl:grn pid:012533040 byr:1946" .to_owned() ), false ); assert_eq!( validate_passport2( &"hcl:dab227 iyr:2012 ecl:brn hgt:182cm pid:021572410 eyr:2020 byr:1992 cid:277" .to_owned() ), false ); assert_eq!( validate_passport2( &"hgt:59cm ecl:zzz eyr:2038 hcl:74454a iyr:2023 pid:3556412378 byr:2007".to_owned() ), false ); assert_eq!( validate_passport2( &"pid:087499704 hgt:74in ecl:grn iyr:2012 eyr:2030 byr:1980 hcl:#623a2f".to_owned() ), true ); assert_eq!( validate_passport2( &"eyr:2029 ecl:blu cid:129 byr:1989 iyr:2014 pid:896056539 hcl:#a97842 hgt:165cm" .to_owned() ), true ); assert_eq!( validate_passport2( &"hcl:#888785 hgt:164cm byr:2001 iyr:2015 cid:88 pid:545766238 ecl:hzl eyr:2022" .to_owned() ), true ); assert_eq!( validate_passport2( &"iyr:2010 hgt:158cm hcl:#b6652a ecl:blu byr:1944 eyr:2021 pid:093154719" .to_owned() ), true ); } }
use fastanvil::{
    render_region, CCoord, Dimension, HeightMode, JavaChunk, RCoord, RegionFileLoader,
    TopShadeRenderer,
};
use std::path::{Path, PathBuf};

use crate::palette::BlockPalette;

/// Render one Minecraft region file to a 512x512 PNG in `./out/`.
///
/// A region is 32x32 chunks of 16x16 blocks, hence the 512x512 image.
/// The region coordinates are taken from `path`'s file name; the region
/// data itself is always loaded from the hard-coded
/// `mcserver/world/region/` directory.
///
/// NOTE(review): declared `async` but contains no `.await` — presumably to
/// fit an async caller's signature; confirm.
pub async fn render_map(palette: BlockPalette, path: PathBuf) {
    let loader =
        RegionFileLoader::<JavaChunk>::new(Path::new("mcserver/world/region/").to_path_buf());
    let dim = Dimension::new(Box::new(loader));
    // Panics if the file name does not have the expected `r.<x>.<z>.mca` shape.
    let (x, z) = coords_from_region(path.clone().as_path()).unwrap();
    let drawer = TopShadeRenderer::new(&palette, HeightMode::Calculate);
    let map = render_region(RCoord(x), RCoord(z), dim, drawer);
    let mut img = image::ImageBuffer::new(512, 512);
    // Copy each 16x16 chunk of rendered pixels into its slot in the image.
    for xc in 0..32 {
        for zc in 0..32 {
            let chunk = map.chunk(CCoord(xc), CCoord(zc));
            for z in 0..16 {
                for x in 0..16 {
                    // Chunk pixels are stored row-major (z rows of 16).
                    let pixel = chunk[z * 16 + x];
                    // Shadow the loop variables with absolute image coordinates.
                    let x = xc * 16 + x as isize;
                    let z = zc * 16 + z as isize;
                    img.put_pixel(x as u32, z as u32, image::Rgba(pixel))
                }
            }
        }
    }
    println!("./out/{}.{}.png", x, z);
    img.save_with_format(format!("./out/{}.{}.png", x, z), image::ImageFormat::Png)
        .unwrap();
}

/// Extract the `(x, z)` region coordinates from a region file name of the
/// form `r.<x>.<z>.mca` (the first dot-separated component is skipped).
/// Returns `None` on any parse/encoding failure.
fn coords_from_region(region: &Path) -> Option<(isize, isize)> {
    let filename = region.file_name()?.to_str()?;
    let mut parts = filename.split('.').skip(1);
    Some((parts.next()?.parse().ok()?, parts.next()?.parse().ok()?))
}
use std::collections::BTreeMap;
use std::io::{self, BufRead};

/// Map from an object to the objects that directly orbit it.
type Children<'a> = BTreeMap<&'a str, Vec<&'a str>>;

/// Total number of direct and indirect orbits in the subtree rooted at `n`,
/// where `d` is the depth (orbit count) of `n` itself.
fn f(n: &str, d: u32, children: &Children) -> u32 {
    // A leaf (no entry in `children`) contributes only its own depth.
    let descendants: u32 = children
        .get(n)
        .map_or(0, |cs| cs.iter().map(|c| f(c, d + 1, children)).sum());
    d + descendants
}

/// Chain of ancestors of `n`, nearest parent first, ending at the root.
/// Returns an empty vector for the root itself.
fn ancestors<'a>(mut n: &'a str, parents: &BTreeMap<&'a str, &'a str>) -> Vec<&'a str> {
    let mut chain = Vec::new();
    while let Some(p) = parents.get(n) {
        chain.push(*p);
        n = p;
    }
    chain
}

fn main() {
    let stdin = io::stdin();
    let handle = stdin.lock();

    // Parent -> children and child -> parent maps built from `A)B` lines.
    let mut children: Children = BTreeMap::new();
    children.insert("COM", Vec::new());
    let mut parents = BTreeMap::new();

    let lines: Vec<String> = handle.lines().map(|line| line.unwrap()).collect();
    let rows: Vec<Vec<&str>> = lines.iter().map(|line| line.split(')').collect()).collect();
    for row in rows {
        children.entry(row[0]).or_default().push(row[1]);
        parents.insert(row[1], row[0]);
    }

    // Part 1: total orbit count.
    let part1 = f("COM", 0, &children);
    println!("{}", part1);

    // Part 2: transfer distance = length of the two ancestor chains after
    // stripping their common suffix (shared ancestors, root side).
    let mut you = ancestors("YOU", &parents);
    let mut san = ancestors("SAN", &parents);
    while you.last() == san.last() {
        you.pop();
        san.pop();
    }
    println!("{}", you.len() + san.len());
}
// Copyright 2021 rust-ipfs-api Developers // // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. // use thiserror::Error; #[derive(Debug, Error)] pub enum Error { #[error("api returned error `{0}`")] Api(ipfs_api_prelude::ApiError), #[error("hyper client error `{0}`")] Client(#[from] hyper::Error), #[error("http error `{0}`")] Http(#[from] http::Error), #[error("ipfs client error `{0}`")] IpfsClientError(#[from] ipfs_api_prelude::Error), } impl From<ipfs_api_prelude::ApiError> for Error { fn from(err: ipfs_api_prelude::ApiError) -> Self { Error::Api(err) } }
use crate::errors::Result;
use cap_std::fs::Dir;
use std::fs::File;
use std::path::Path;

/// Open `path` as a cap-std directory handle.
///
/// SAFETY: `Dir::open_ambient_dir` is unsafe because it uses ambient
/// authority (resolves an arbitrary host path) rather than deriving the
/// handle from an existing capability; this helper is the deliberate
/// escape hatch for that.
pub fn open_dir<P: AsRef<Path>>(path: P) -> Result<Dir> {
    Ok(unsafe { Dir::open_ambient_dir(path)? })
}

/// Open `path` and wrap it as a WASI preopen-able directory.
pub fn open_wasi_dir<P: AsRef<Path>>(path: P) -> Result<wasi_cap_std_sync::dir::Dir> {
    Ok(wasi_cap_std_sync::dir::Dir::from_cap_std(open_dir(path)?))
}

/// Create (or truncate) `path` and wrap it as a WASI file handle.
///
/// NOTE(review): despite the `open_` name this uses `File::create`, which
/// truncates an existing file and opens write-only — confirm that callers
/// only use this for output files.
pub fn open_wasi_file<P: AsRef<Path>>(path: P) -> Result<wasi_cap_std_sync::file::File> {
    let file = File::create(path)?;
    // SAFETY: wrapping an already-opened std handle grants no new ambient
    // authority beyond what `File::create` above already used.
    let file = unsafe { cap_std::fs::File::from_std(file) };
    Ok(wasi_cap_std_sync::file::File::from_cap_std(file))
}
//! Wrapper around a `rustc` invocation captured from Cargo: parses the
//! command line into `RustcOpts` so the compiler can be re-run with a
//! chosen subset of its `--extern` flags removed (selected via a
//! `FixedBitSet` of indices into the extern list).

use cargo::core::manifest::Target;
use cargo::core::package_id::PackageId;
use cargo::util::errors::{CargoResult, ProcessError};
use cargo::util::process_builder::ProcessBuilder;
use derive_more::Display;
use failure::{Fallible, ResultExt as _};
use fixedbitset::FixedBitSet;
use once_cell::sync::Lazy;
use regex::Regex;
use structopt::StructOpt;

use std::ffi::{OsStr, OsString};
use std::ops::Range;
use std::process::Output;
use std::str::{self, FromStr};

/// One diagnostic from `rustc`'s JSON error output (only the fields this
/// crate needs are deserialized).
#[derive(Debug, serde::Deserialize)]
pub(crate) struct ErrorMessage {
    pub(crate) message: String,
    pub(crate) code: Option<ErrorMessageCode>,
}

/// The `code` object of a JSON diagnostic.
#[derive(Debug, serde::Deserialize)]
pub(crate) struct ErrorMessageCode {
    pub(crate) code: String,
}

/// A parsed `rustc` invocation for one compilation target of one package.
#[derive(Debug)]
pub(crate) struct Rustc<'a> {
    // The original process (program + args) handed over by Cargo.
    cmd: ProcessBuilder,
    // The same arguments, parsed so they can be filtered and re-emitted.
    opts: RustcOpts,
    id: PackageId,
    target: &'a Target,
}

impl<'a> Rustc<'a> {
    /// Parse the arguments of `cmd` into `RustcOpts`.
    ///
    /// Fails when the argument list does not parse as a `rustc` command line.
    pub(crate) fn new(cmd: ProcessBuilder, id: PackageId, target: &'a Target) -> CargoResult<Self> {
        // `from_iter_safe` expects argv[0] (the program name) first.
        let mut args = vec![cmd.get_program()];
        args.extend(cmd.get_args());
        let opts = RustcOpts::from_iter_safe(&args)
            .with_context(|_| failure::err_msg(format!("Failed to parse {:?}", args)))?;
        Ok(Self {
            cmd,
            opts,
            id,
            target,
        })
    }

    /// All `--extern` arguments of the original invocation.
    pub(crate) fn externs(&self) -> &[Extern] {
        &self.opts.r#extern
    }

    /// Run the compiler with the `exclude`d externs removed and JSON error
    /// output forced, capturing the diagnostics from stderr.
    ///
    /// Returns `Ok(None)` when the compiler succeeds, `Ok(Some(messages))`
    /// when it fails with captured output, and `Err(_)` when the failure
    /// carries no captured output at all.
    pub(crate) fn capture_error_messages(
        &mut self,
        exclude: &FixedBitSet,
        on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>,
        on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>,
    ) -> CargoResult<Option<Vec<ErrorMessage>>> {
        self.eprint_exclusion(exclude, on_stderr_line)?;
        self.cmd.args_replace(&self.opts.to_args(exclude, true));
        // Stderr streaming is swallowed (`|_| Ok(())`): on failure the
        // captured stderr is recovered from the `ProcessError` below and
        // parsed as JSON instead of being echoed.
        if let Err(err) = self
            .cmd
            .exec_with_streaming(on_stdout_line, &mut |_| Ok(()), true)
        {
            // Walk the error chain for the first `ProcessError` that
            // actually captured the child's output.
            let output = err
                .iter_chain()
                .flat_map(|e| e.downcast_ref::<ProcessError>())
                .flat_map(|ProcessError { output, .. }| output)
                .next();
            let stderr = match output {
                None => return Err(err),
                Some(Output { stderr, .. }) => str::from_utf8(stderr)?,
            };
            // Lines that are not valid `ErrorMessage` JSON are silently
            // skipped by `flat_map`.
            Ok(Some(
                stderr.lines().flat_map(serde_json::from_str).collect(),
            ))
        } else {
            Ok(None)
        }
    }

    /// Run the compiler with the `exclude`d externs removed, streaming
    /// stdout/stderr through the given callbacks.
    ///
    /// NOTE(review): this passes `error_format_json = true` to `to_args`,
    /// just like `capture_error_messages` — confirm JSON error output is
    /// intended here as well.
    pub(crate) fn run(
        &mut self,
        exclude: &FixedBitSet,
        on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>,
        on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>,
    ) -> CargoResult<()> {
        self.eprint_exclusion(exclude, on_stderr_line)?;
        self.cmd.args_replace(&self.opts.to_args(exclude, true));
        self.cmd
            .exec_with_streaming(on_stdout_line, on_stderr_line, false)
            .map(|_| ())
    }

    /// Print a small tree to stderr showing, for every `--extern`, whether
    /// it is kept ("on") or excluded ("off") in this run.
    fn eprint_exclusion(
        &self,
        exclude: &FixedBitSet,
        on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>,
    ) -> CargoResult<()> {
        // Pad names to the longest extern name so the on/off column lines up.
        let width = self
            .opts
            .r#extern
            .iter()
            .map(|e| e.name().len())
            .max()
            .unwrap_or(0);
        on_stderr_line(&format!("`{}`", self.id))?;
        on_stderr_line(&format!("└─── {}", self.target))?;
        for (i, r#extern) in self.opts.r#extern.iter().enumerate() {
            // The last entry gets the corner glyph.
            let mut msg = if i < self.opts.r#extern.len() - 1 {
                format!(" ├─── {}: ", r#extern.name())
            } else {
                format!(" └─── {}: ", r#extern.name())
            };
            (0..width - r#extern.name().len()).for_each(|_| msg.push(' '));
            msg += if exclude[i] { "off" } else { "on" };
            on_stderr_line(&msg)?;
        }
        Ok(())
    }
}

/// Mirror of the `rustc` command line as emitted by Cargo, parsed with
/// structopt so individual flags can be inspected and re-serialized.
#[derive(Debug, StructOpt)]
pub(crate) struct RustcOpts {
    #[structopt(long, parse(from_os_str))]
    cfg: Vec<OsString>,
    #[structopt(short = "L", parse(from_os_str))]
    link_path: Vec<OsString>,
    #[structopt(short = "l", parse(from_os_str))]
    link_crate: Vec<OsString>,
    #[structopt(long, parse(from_os_str))]
    crate_type: Vec<OsString>,
    #[structopt(long)]
    crate_name: Option<String>,
    #[structopt(long, parse(from_os_str))]
    edition: Option<OsString>,
    #[structopt(long, parse(from_os_str))]
    emit: Option<OsString>,
    #[structopt(long, parse(from_os_str))]
    print: Option<OsString>,
    // `-g` shorthand (debug info).
    #[structopt(short = "g")]
    debuginfo_2: bool,
    // `-O` shorthand (optimizations).
    #[structopt(short = "O")]
    opt_level_2: bool,
    #[structopt(short = "o", parse(from_os_str))]
    output: Option<OsString>,
    #[structopt(long, parse(from_os_str))]
    out_dir: Option<OsString>,
    #[structopt(long, parse(from_os_str))]
    explain: Vec<OsString>,
    #[structopt(long, parse(from_os_str))]
    target: Option<OsString>,
    #[structopt(long)]
    test: bool,
    #[structopt(short = "W", parse(from_os_str))]
    warn: Vec<OsString>,
    #[structopt(short = "A", parse(from_os_str))]
    allow: Vec<OsString>,
    #[structopt(short = "D", parse(from_os_str))]
    deny: Vec<OsString>,
    #[structopt(short = "F", parse(from_os_str))]
    forbid: Vec<OsString>,
    #[structopt(long, parse(from_os_str))]
    cap_lints: Option<OsString>,
    #[structopt(short = "C", parse(from_os_str))]
    codegen: Vec<OsString>,
    #[structopt(short = "v")]
    verbose: bool,
    // The flags this crate actually manipulates; parsed into `Extern` so
    // the crate name is available without re-parsing.
    #[structopt(long = "extern")]
    r#extern: Vec<Extern>,
    #[structopt(long, parse(from_os_str))]
    extern_private: Vec<OsString>,
    #[structopt(long, parse(from_os_str))]
    sysroot: Option<OsString>,
    #[structopt(long, parse(from_os_str))]
    error_format: Option<OsString>,
    #[structopt(long, parse(from_os_str))]
    json: Option<OsString>,
    #[structopt(long, parse(from_os_str))]
    color: Option<OsString>,
    #[structopt(long, parse(from_os_str))]
    remap_path_prefix: Option<OsString>,
    // Positional argument: the root source file.
    #[structopt(parse(from_os_str))]
    input: OsString,
}

impl RustcOpts {
    pub(crate) fn out_dir(&self) -> Option<&OsStr> {
        self.out_dir.as_deref()
    }

    pub(crate) fn crate_name(&self) -> Option<&str> {
        self.crate_name.as_deref()
    }

    /// Re-serialize the parsed options into a `rustc` argument list,
    /// dropping every `--extern` whose index is set in `exclude`, and
    /// forcing `--error-format json` when `error_format_json` is true.
    ///
    /// Note: short flags (`-W`, `-A`, `-D`, `-F`, `-C`, `-v`) are re-emitted
    /// in their long spelling.
    #[allow(clippy::cognitive_complexity)]
    fn to_args(&self, exclude: &FixedBitSet, error_format_json: bool) -> Vec<&OsStr> {
        let mut args = Vec::<&OsStr>::new();
        for cfg in &self.cfg {
            args.push("--cfg".as_ref());
            args.push(cfg);
        }
        for l in &self.link_path {
            args.push("-L".as_ref());
            args.push(l);
        }
        for l in &self.link_crate {
            args.push("-l".as_ref());
            args.push(l);
        }
        for crate_type in &self.crate_type {
            args.push("--crate-type".as_ref());
            args.push(crate_type);
        }
        if let Some(crate_name) = &self.crate_name {
            args.push("--crate-name".as_ref());
            args.push(crate_name.as_ref());
        }
        if let Some(edition) = &self.edition {
            args.push("--edition".as_ref());
            args.push(edition);
        }
        if let Some(emit) = &self.emit {
            args.push("--emit".as_ref());
            args.push(emit);
        }
        if let Some(print) = &self.print {
            args.push("--print".as_ref());
            args.push(print);
        }
        if self.debuginfo_2 {
            args.push("-g".as_ref());
        }
        if self.opt_level_2 {
            args.push("-O".as_ref());
        }
        if let Some(o) = &self.output {
            args.push("-o".as_ref());
            args.push(o);
        }
        if let Some(out_dir) = &self.out_dir {
            args.push("--out-dir".as_ref());
            args.push(out_dir);
        }
        for explain in &self.explain {
            args.push("--explain".as_ref());
            args.push(explain);
        }
        if self.test {
            args.push("--test".as_ref());
        }
        if let Some(target) = &self.target {
            args.push("--target".as_ref());
            args.push(target);
        }
        for warn in &self.warn {
            args.push("--warn".as_ref());
            args.push(warn);
        }
        for allow in &self.allow {
            args.push("--allow".as_ref());
            args.push(allow);
        }
        for deny in &self.deny {
            args.push("--deny".as_ref());
            args.push(deny);
        }
        for forbid in &self.forbid {
            args.push("--forbid".as_ref());
            args.push(forbid);
        }
        if let Some(cap_lints) = &self.cap_lints {
            args.push("--cap-lints".as_ref());
            args.push(cap_lints);
        }
        for codegen in &self.codegen {
            args.push("--codegen".as_ref());
            args.push(codegen);
        }
        if self.verbose {
            args.push("--verbose".as_ref());
        }
        // The whole point of this function: skip the excluded externs.
        for (i, r#extern) in self.r#extern.iter().enumerate() {
            if !exclude[i] {
                args.push("--extern".as_ref());
                args.push(r#extern.as_ref());
            }
        }
        for extern_private in &self.extern_private {
            args.push("--extern-private".as_ref());
            args.push(extern_private);
        }
        if let Some(sysroot) = &self.sysroot {
            args.push("--sysroot".as_ref());
            args.push(sysroot);
        }
        // Forced JSON takes precedence over whatever was parsed.
        if error_format_json {
            args.push("--error-format".as_ref());
            args.push("json".as_ref());
        } else if let Some(error_format) = &self.error_format {
            args.push("--error-format".as_ref());
            args.push(error_format);
        }
        if let Some(json) = &self.json {
            args.push("--json".as_ref());
            args.push(json);
        }
        if let Some(color) = &self.color {
            args.push("--color".as_ref());
            args.push(color);
        }
        if let Some(remap_path_prefix) = &self.remap_path_prefix {
            args.push("--remap-path-prefix".as_ref());
            args.push(remap_path_prefix);
        }
        args.push(&self.input);
        args
    }
}

/// A single `--extern NAME=PATH` argument. Stores the original string plus
/// the byte range of `NAME`, so the crate name can be borrowed without
/// re-parsing.
#[derive(Display, Debug, PartialEq, Eq, Hash)]
#[display(fmt = "{}", string)]
pub(crate) struct Extern {
    string: String,
    name: Range<usize>,
}

impl Extern {
    /// The crate name part (left of the `=`).
    pub(crate) fn name(&self) -> &str {
        &self.string[self.name.clone()]
    }
}

impl FromStr for Extern {
    type Err = failure::Error;

    fn from_str(s: &str) -> Fallible<Self> {
        // `lazy_regex!` is a macro defined elsewhere in this crate; the
        // pattern requires the `name=path` form (a bare `--extern name` is
        // rejected).
        static EXTERN: Lazy<Regex> = lazy_regex!(r"\A([a-zA-Z0-9_]+)=.*\z");
        let caps = EXTERN.captures(s).ok_or_else(|| {
            failure::err_msg(format!("{:?} does not match {:?}", s, EXTERN.as_str()))
        })?;
        Ok(Self {
            string: s.to_owned(),
            name: 0..caps[1].len(),
        })
    }
}

impl AsRef<OsStr> for Extern {
    fn as_ref(&self) -> &OsStr {
        self.string.as_ref()
    }
}
use msfs::sim_connect::{
    data_definition, Period, SimConnect, SimConnectRecv, SIMCONNECT_OBJECT_ID_USER,
};

/// Flight-state variables logged under request id 0.
#[data_definition]
#[derive(Debug)]
struct Data {
    // `epsilon` — presumably the minimum change needed before SimConnect
    // sends an update; TODO confirm against the msfs crate docs.
    #[name = "RADIO HEIGHT"]
    #[epsilon = 0.01]
    height: f64,
    #[name = "AIRSPEED INDICATED"]
    #[epsilon = 0.01]
    airspeed: f64,
}

/// Primary flight-control positions logged under request id 1.
#[data_definition]
#[derive(Debug)]
struct Controls {
    #[name = "ELEVATOR POSITION"]
    elevator: f64,
    #[name = "AILERON POSITION"]
    ailerons: f64,
    #[name = "RUDDER POSITION"]
    rudder: f64,
    #[name = "ELEVATOR TRIM POSITION"]
    elevator_trim: f64,
}

/// Throttle lever positions for engines 1 and 2, logged under request id 2
/// (tuple struct: fields are addressed by index).
#[data_definition]
#[derive(Debug)]
struct Throttle(
    #[name = "GENERAL ENG THROTTLE LEVER POSITION:1"] f64,
    #[name = "GENERAL ENG THROTTLE LEVER POSITION:2"] f64,
);

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The callback dispatches on `dwRequestID`, which must match the ids
    // passed to `request_data_on_sim_object` below, and decodes the payload
    // into the corresponding data-definition struct.
    let mut sim = SimConnect::open("LOG", |sim, recv| match recv {
        SimConnectRecv::SimObjectData(event) => match event.dwRequestID {
            0 => {
                println!("{:?}", event.into::<Data>(sim).unwrap());
            }
            1 => {
                println!("{:?}", event.into::<Controls>(sim).unwrap());
            }
            2 => {
                println!("{:?}", event.into::<Throttle>(sim).unwrap());
            }
            _ => {}
        },
        // Any other message kind is just logged.
        _ => println!("{:?}", recv),
    })?;

    // Subscribe to all three data sets for the user aircraft, once per
    // simulation frame.
    sim.request_data_on_sim_object::<Data>(0, SIMCONNECT_OBJECT_ID_USER, Period::SimFrame)?;
    sim.request_data_on_sim_object::<Controls>(1, SIMCONNECT_OBJECT_ID_USER, Period::SimFrame)?;
    sim.request_data_on_sim_object::<Throttle>(2, SIMCONNECT_OBJECT_ID_USER, Period::SimFrame)?;

    // Pump the SimConnect message queue forever, polling every 10 ms.
    loop {
        sim.call_dispatch()?;
        std::thread::sleep(std::time::Duration::from_millis(10));
    }
}
use std::ops::{Add, Div, Mul, Sub};

/// A 2-D point/vector with `f64` components.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Vec2 {
    pub x: f64,
    pub y: f64,
}

/// An axis-aligned bounding box stored as its min/max corners.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Bbox {
    min: Vec2,
    max: Vec2,
}

/// A circle given by its center and (non-negative) radius.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Circle {
    pub center: Vec2,
    pub radius: f64,
}

/// Barycentric coordinates `(w0, w1, w2)` of a point with respect to a
/// triangle; by construction the weights sum to 1 (see `triangle`).
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct BarycentricCoords {
    w0: f64,
    w1: f64,
    w2: f64,
}

impl Vec2 {
    /// The origin `(0, 0)`.
    pub fn zero() -> Self {
        Vec2::new(0.0, 0.0)
    }

    pub fn new(x: f64, y: f64) -> Self {
        Vec2 { x, y }
    }

    /// Euclidean distance to `p`.
    pub fn dist(&self, p: Vec2) -> f64 {
        self.dist2(p).sqrt()
    }

    /// Squared distance to `p` (avoids the sqrt when only comparing).
    pub fn dist2(&self, p: Vec2) -> f64 {
        (*self - p).norm2()
    }

    /// Euclidean length.
    pub fn norm(&self) -> f64 {
        self.norm2().sqrt()
    }

    /// Squared length.
    pub fn norm2(&self) -> f64 {
        self.x.powi(2) + self.y.powi(2)
    }
}

impl Bbox {
    /// A degenerate box containing only `p`; grow it with `expand`.
    pub fn new(p: Vec2) -> Self {
        Bbox { min: p, max: p }
    }

    pub fn min(&self) -> Vec2 {
        self.min
    }

    pub fn max(&self) -> Vec2 {
        self.max
    }

    pub fn center(&self) -> Vec2 {
        (self.min + self.max) / 2.0
    }

    /// Split the box into the four sub-boxes meeting at `p`
    /// (`p` must lie inside the box).
    pub fn split(&self, p: Vec2) -> [Bbox; 4] {
        debug_assert!(self.contains(p));
        [
            Bbox {
                min: self.min,
                max: p,
            },
            Bbox {
                min: Vec2::new(p.x, self.min.y),
                max: Vec2::new(self.max.x, p.y),
            },
            Bbox {
                min: Vec2::new(self.min.x, p.y),
                max: Vec2::new(p.x, self.max.y),
            },
            Bbox {
                min: p,
                max: self.max,
            },
        ]
    }

    /// Grow the box (if needed) so it contains `p`.
    pub fn expand(&mut self, p: Vec2) {
        self.min.x = self.min.x.min(p.x);
        self.min.y = self.min.y.min(p.y);
        self.max.x = self.max.x.max(p.x);
        self.max.y = self.max.y.max(p.y);
    }

    /// Pad the box outward by `amount` on all four sides.
    pub fn enlarge(&mut self, amount: f64) {
        self.min.x -= amount;
        self.min.y -= amount;
        self.max.x += amount;
        self.max.y += amount;
    }

    /// Inclusive containment test (points on the border count as inside).
    pub fn contains(&self, p: Vec2) -> bool {
        self.min.x <= p.x && self.min.y <= p.y && self.max.x >= p.x && self.max.y >= p.y
    }

    /// Overlap of two boxes, or `None` when they are disjoint.
    pub fn intersection(&self, other: Bbox) -> Option<Bbox> {
        let min_x = self.min.x.max(other.min.x);
        let min_y = self.min.y.max(other.min.y);
        let max_x = self.max.x.min(other.max.x);
        let max_y = self.max.y.min(other.max.y);
        if min_x > max_x || min_y > max_y {
            None
        } else {
            Some(Bbox {
                min: Vec2::new(min_x, min_y),
                max: Vec2::new(max_x, max_y),
            })
        }
    }

    /// Width/height as a vector.
    pub fn dimensions(&self) -> Vec2 {
        self.max - self.min
    }

    pub fn area(&self) -> f64 {
        let d = self.dimensions();
        d.x * d.y
    }
}

/// True when the three points lie on one line (twice the signed triangle
/// area is zero).
// NOTE(review): exact `== 0.0` float comparison — detects only *exactly*
// collinear points; near-degenerate triangles pass. The proptests below
// feed integer-valued coordinates, where this exactness is meaningful.
pub fn collinear(a: Vec2, b: Vec2, c: Vec2) -> bool {
    let area = a.x * (b.y - c.y) + b.x * (c.y - a.y) + c.x * (a.y - b.y);
    area == 0.0
}

impl Circle {
    pub fn new(center: Vec2, radius: f64) -> Self {
        debug_assert!(radius >= 0.0);
        Circle { center, radius }
    }

    /// The unique circle through the three (non-collinear) points; the
    /// computation is translated so `a` is the origin for stability.
    pub fn circumcircle(a: Vec2, b: Vec2, c: Vec2) -> Self {
        // https://en.wikipedia.org/wiki/Circumscribed_circle#Cartesian_coordinates_2
        let b = b - a;
        let c = c - a;
        let d = 2.0 * (b.x * c.y - b.y * c.x);
        let x = (c.y * (b.x.powi(2) + b.y.powi(2)) - b.y * (c.x.powi(2) + c.y.powi(2))) / d;
        let y = (b.x * (c.x.powi(2) + c.y.powi(2)) - c.x * (b.x.powi(2) + b.y.powi(2))) / d;
        Circle::new(a + Vec2::new(x, y), Vec2::new(x, y).norm())
    }

    /// Containment with a fixed 1e-4 tolerance, so boundary points survive
    /// floating-point rounding.
    pub fn contains(&self, p: Vec2) -> bool {
        self.center.dist(p) - self.radius <= 1e-4
    }

    /// The smallest axis-aligned box enclosing the circle.
    pub fn bbox(&self) -> Bbox {
        let mut b = Bbox::new(self.center);
        b.enlarge(self.radius);
        b
    }
}

impl BarycentricCoords {
    /// Barycentric coordinates of `p` with respect to triangle `[a, b, c]`.
    // NOTE(review): since `w2 = 1 - w0 - w1`, the sum is 1 up to rounding
    // and the `> 1.0` guard below almost never fires — points *outside*
    // the triangle (negative weights) still return `Some`. Confirm whether
    // rejecting outside points was intended.
    pub fn triangle([a, b, c]: [Vec2; 3], p: Vec2) -> Option<Self> {
        let d = (b.y - c.y) * (a.x - c.x) + (c.x - b.x) * (a.y - c.y);
        let w0 = ((b.y - c.y) * (p.x - c.x) + (c.x - b.x) * (p.y - c.y)) / d;
        let w1 = ((c.y - a.y) * (p.x - c.x) + (a.x - c.x) * (p.y - c.y)) / d;
        let w2 = 1.0 - w0 - w1;
        if w0 + w1 + w2 > 1.0 {
            None
        } else {
            Some(BarycentricCoords { w0, w1, w2 })
        }
    }

    /// Map these coordinates back to a point of the given triangle.
    pub fn to_point(&self, triangle: [Vec2; 3]) -> Vec2 {
        triangle[0] * self.w0 + triangle[1] * self.w1 + triangle[2] * self.w2
    }

    /// Weighted average of per-vertex scalar values.
    pub fn interpolate(&self, vals: [f64; 3]) -> f64 {
        vals[0] * self.w0 + vals[1] * self.w1 + vals[2] * self.w2
    }
}

// Componentwise arithmetic; the `f64` variants apply the scalar to both
// components. Note `Mul for Vec2` is the Hadamard (componentwise) product,
// not a dot product.
impl Add for Vec2 {
    type Output = Vec2;

    fn add(mut self, rhs: Vec2) -> Self::Output {
        self.x += rhs.x;
        self.y += rhs.y;
        self
    }
}

impl Add<f64> for Vec2 {
    type Output = Vec2;

    fn add(mut self, rhs: f64) -> Self::Output {
        self.x += rhs;
        self.y += rhs;
        self
    }
}

impl Sub for Vec2 {
    type Output = Vec2;

    fn sub(mut self, rhs: Vec2) -> Self::Output {
        self.x -= rhs.x;
        self.y -= rhs.y;
        self
    }
}

impl Sub<f64> for Vec2 {
    type Output = Vec2;

    fn sub(mut self, rhs: f64) -> Self::Output {
        self.x -= rhs;
        self.y -= rhs;
        self
    }
}

impl Mul for Vec2 {
    type Output = Vec2;

    fn mul(mut self, rhs: Vec2) -> Self::Output {
        self.x *= rhs.x;
        self.y *= rhs.y;
        self
    }
}

impl Mul<f64> for Vec2 {
    type Output = Vec2;

    fn mul(mut self, rhs: f64) -> Self::Output {
        self.x *= rhs;
        self.y *= rhs;
        self
    }
}

impl Div for Vec2 {
    type Output = Vec2;

    fn div(mut self, rhs: Vec2) -> Self::Output {
        self.x /= rhs.x;
        self.y /= rhs.y;
        self
    }
}

impl Div<f64> for Vec2 {
    type Output = Vec2;

    fn div(mut self, rhs: f64) -> Self::Output {
        self.x /= rhs;
        self.y /= rhs;
        self
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use proptest::prelude::*;

    // Coordinates are u32 values converted to f64, so every generated point
    // has non-negative integer coordinates — this keeps the exact float
    // comparisons in the properties below meaningful.
    fn rand_vec2() -> impl Strategy<Value = Vec2> {
        (any::<u32>(), any::<u32>()).prop_map(|(a, b)| Vec2::new(a.into(), b.into()))
    }

    #[test]
    fn test_triangle_circumcircle() {
        assert_eq!(
            Circle::circumcircle(Vec2::zero(), Vec2::new(3.0, 4.0), Vec2::new(0.0, 4.0)),
            Circle::new(Vec2::new(1.5, 2.0), 2.5)
        );
        assert_eq!(
            Circle::circumcircle(
                Vec2::new(1.0, 1.0),
                Vec2::new(4.0, 5.0),
                Vec2::new(1.0, 5.0)
            ),
            Circle::new(Vec2::new(2.5, 3.0), 2.5)
        );
    }

    proptest! {
        // The four axis-aligned extreme points and the center are always
        // inside the circle and its bounding box.
        #[test]
        fn prop_circle_always_contains_known_points(c in rand_vec2(), r in any::<u32>()) {
            let r = r.into();
            let circle = Circle::new(c, r);
            let circle_bbox = circle.bbox();
            let known_points = [
                c,
                c + Vec2::new(0.0, 1.0) * r,
                c + Vec2::new(0.0, -1.0) * r,
                c + Vec2::new(1.0, 0.0) * r,
                c + Vec2::new(-1.0, 0.0) * r,
            ];
            for p in known_points.iter() {
                prop_assert!(circle.contains(*p));
                prop_assert!(circle_bbox.contains(*p));
            }
        }
    }

    proptest! {
        // Splitting at the center yields four boxes fully contained in the
        // parent.
        #[test]
        fn prop_bbox_split_center(a in rand_vec2(), b in rand_vec2()) {
            let mut bbox = Bbox::new(a);
            bbox.expand(b);
            let center = bbox.center();
            prop_assert!(bbox.contains(center));
            let children = bbox.split(center);
            for child in children.iter() {
                prop_assert_eq!(bbox.intersection(*child), Some(*child));
                prop_assert!(bbox.contains(child.min()));
                prop_assert!(bbox.contains(child.max()));
            }
        }
    }

    proptest! {
        // Each triangle vertex has the corresponding unit barycentric
        // weight, and round-tripping through `to_point` is exact; the
        // centroid round-trips within tolerance.
        #[test]
        fn prop_triangle_vertices_valid_bary(
            a in rand_vec2(),
            b in rand_vec2(),
            c in rand_vec2(),
        ) {
            prop_assume!(!collinear(a, b, c));
            let manifest = [
                (a, (1.0, 0.0, 0.0)),
                (b, (0.0, 1.0, 0.0)),
                (c, (0.0, 0.0, 1.0)),
            ];
            for (v, exp_bary) in manifest.iter() {
                let bary = BarycentricCoords::triangle([a, b, c], *v).unwrap();
                prop_assert_eq!(
                    bary,
                    BarycentricCoords {
                        w0: exp_bary.0,
                        w1: exp_bary.1,
                        w2: exp_bary.2
                    }
                );
                prop_assert_eq!(bary.to_point([a, b, c]), *v);
            }
            let centroid = (a + b + c) / 3.0;
            let mid_bary = BarycentricCoords::triangle([a, b, c], centroid).unwrap();
            prop_assert!(
                mid_bary.to_point([a, b, c]).dist2(centroid) < 1e-4,
                "centoid {:?} bary {:?}",
                centroid,
                mid_bary
            );
        }
    }

    proptest! {
        // The circumcircle contains all three vertices (by construction)
        // and the centroid (inside the triangle, hence inside the circle).
        #[test]
        fn prop_triangle_circumcircle_contains_vertices(
            a in rand_vec2(),
            b in rand_vec2(),
            c in rand_vec2(),
        ) {
            prop_assume!(!collinear(a, b, c));
            let circle = Circle::circumcircle(a, b, c);
            let known_points = [
                a,
                b,
                c,
                (a + b + c) / 3.0,
            ];
            for v in known_points.iter() {
                prop_assert!(
                    circle.contains(*v),
                    "circle {:?} p: {:?} dist: {:?}",
                    circle,
                    v,
                    (circle.center - *v).norm() - circle.radius
                );
            }
        }
    }
}
// Re-export the game sub-crates under short names for use in this binary.
pub use game_camera as camera;
pub use game_controller as controller;
pub use game_core as core;
pub use game_input as input;
pub use game_physics as physics;
pub use game_tiles as tiles;
pub use game_wasi as wasi;

mod plugins;

use game_lib::bevy::{
    diagnostic::{DiagnosticsPlugin, FrameTimeDiagnosticsPlugin},
    ecs::schedule::ReportExecutionOrderAmbiguities,
    prelude::*,
};
use physics::{Drag, Gravity, PhysicsState};
use tiles::EntityWorldPosition;

game_lib::fix_bevy_derive!(game_lib::bevy);

/// Application entry point: assembles the Bevy app from the engine, game
/// and debug plugins, configures window/physics resources, then hands
/// control to the Bevy runner.
#[bevy_main]
fn main() {
    App::build()
        // Sky-blue clear color.
        .insert_resource(ClearColor(Color::rgb_u8(135, 206, 235)))
        .insert_resource(WindowDescriptor {
            title: "Tiles".into(),
            ..Default::default()
        })
        // Log system-ordering ambiguities during development.
        .insert_resource(ReportExecutionOrderAmbiguities)
        .add_plugins(DefaultPlugins)
        // Diagnostics (frame-time reporting).
        .add_plugin(FrameTimeDiagnosticsPlugin)
        .add_plugin(DiagnosticsPlugin)
        // Engine-level plugins.
        .add_plugin(crate::core::CorePlugin)
        .add_plugin(crate::core::random::RandomPlugin)
        .add_plugin(crate::core::loading::LoadingPlugin)
        .add_plugin(crate::tiles::TilePlugin)
        .add_plugin(crate::camera::CameraPlugin)
        .add_plugin(crate::physics::PhysicsPlugin)
        .add_plugin(crate::input::InputPlugin)
        .add_plugin(crate::controller::ControllerPlugin)
        // Game-specific plugins from this binary's `plugins` module.
        .add_plugin(crate::plugins::PlayerPlugin)
        .add_plugin(crate::plugins::ConfigPlugin)
        .add_plugin(crate::plugins::DebugPlugin)
        .add_plugin(crate::plugins::TimedPlugin)
        // .add_plugin(crate::wasi::WasmPlugin)
        .insert_resource(PhysicsState {
            // Commented-out lines are earlier tuning values kept for
            // reference. Arguments are presumably (terminal velocity, mass,
            // gravity) — TODO confirm against `Drag::from_terminal_velocity`.
            // drag: Drag::from_terminal_velocity(10.0, 62.0, 98.1),
            drag: Drag::from_terminal_velocity(300.0, 62.0, 9.81),
            // gravity: Gravity(EntityWorldPosition::ZERO),
            gravity: Gravity(EntityWorldPosition::Y * -9.81),
            // gravity: Gravity(EntityWorldPosition::ZERO),
            ..Default::default()
        })
        .run();
}
//! A dynamically sized, multi-channel audio buffer.

use audio_core::{
    Buf, Channel, ChannelMut, Channels, ChannelsMut, ExactSizeBuf, ResizableBuf, Sample,
};
use std::cmp;
use std::fmt;
use std::hash;
use std::mem;
use std::ops;
use std::ptr;
use std::slice;

mod iter;

pub use self::iter::{Iter, IterMut};

/// A dynamically sized, multi-channel audio buffer.
///
/// An audio buffer can only be resized if it contains a type which is
/// sample-apt. For more information of what this means, see [Sample].
///
/// This kind of buffer stores each channel in its own heap-allocated slice of
/// memory, meaning they can be manipulated more cheaply independently of each
/// other than say [Interleaved][crate::Interleaved] or
/// [Sequential][crate::Sequential]. These would have to re-organize every
/// constituent channel when resizing, while [Dynamic] generally only requires
/// [growing and shrinking][std::alloc::Allocator] of a memory region.
///
/// This kind of buffer is a good choice if you need to
/// [resize][Dynamic::resize] frequently.
//
// NOTE(review): all lengths and capacities are tracked out-of-band in the
// fields below; the `RawSlice` values themselves carry only a pointer. Every
// unsafe call site must therefore pass the matching field, or the
// reconstruction through `Vec::from_raw_parts` is unsound.
pub struct Dynamic<T> {
    /// The stored data for each channel. The outer slice is `channels_cap`
    /// entries long; each inner slice is `frames_cap` elements long.
    data: RawSlice<RawSlice<T>>,
    /// The number of channels stored.
    channels: usize,
    /// The capacity of channels stored.
    channels_cap: usize,
    /// The length of each channel.
    frames: usize,
    /// Allocated capacity of each channel. Each channel is guaranteed to be
    /// filled with as many values as is specified in this capacity.
    frames_cap: usize,
}

impl<T> Dynamic<T> {
    /// Construct a new empty audio buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::<f32>::new();
    ///
    /// assert_eq!(buffer.frames(), 0);
    /// ```
    pub fn new() -> Self {
        Self {
            data: RawSlice::empty(),
            channels: 0,
            channels_cap: 0,
            frames: 0,
            frames_cap: 0,
        }
    }

    /// Allocate an audio buffer with the given topology. A "topology" is a
    /// given number of `channels` and the corresponding number of `frames` in
    /// their buffers.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::<f32>::with_topology(4, 256);
    ///
    /// assert_eq!(buffer.frames(), 256);
    /// assert_eq!(buffer.channels(), 4);
    /// ```
    pub fn with_topology(channels: usize, frames: usize) -> Self
    where
        T: Sample,
    {
        let mut data = RawSlice::uninit(channels);

        for n in 0..channels {
            // Safety: We just allocated the vector w/ a capacity matching channels.
            unsafe {
                data.write(n, RawSlice::zeroed(frames));
            }
        }

        Self {
            // Safety: we just initialized the associated array with the
            // expected topology.
            data,
            channels,
            channels_cap: channels,
            frames,
            frames_cap: frames,
        }
    }

    /// Allocate an audio buffer from a fixed-size array.
    ///
    /// See [dynamic!].
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::<f32>::from_array([[2.0; 256]; 4]);
    ///
    /// assert_eq!(buffer.frames(), 256);
    /// assert_eq!(buffer.channels(), 4);
    ///
    /// for chan in &buffer {
    ///     assert_eq!(chan, vec![2.0; 256]);
    /// }
    /// ```
    pub fn from_array<const F: usize, const C: usize>(channels: [[T; F]; C]) -> Self
    where
        T: Copy,
    {
        return Self {
            // Safety: We just created the box with the data so we know that
            // it's initialized.
            data: unsafe { data_from_array(channels) },
            channels: C,
            channels_cap: C,
            frames: F,
            frames_cap: F,
        };

        // Move each channel array into its own boxed slice and collect the
        // resulting raw pointers into the outer raw slice.
        #[inline]
        unsafe fn data_from_array<T, const F: usize, const C: usize>(
            values: [[T; F]; C],
        ) -> RawSlice<RawSlice<T>>
        where
            T: Copy,
        {
            let mut data = Vec::with_capacity(C);

            for frames in std::array::IntoIter::new(values) {
                let slice = Box::<[T]>::from(frames);
                let slice = ptr::NonNull::new_unchecked(Box::into_raw(slice) as *mut T);
                data.push(RawSlice { data: slice });
            }

            // The Vec is leaked through ManuallyDrop; `Dynamic` now owns the
            // allocation and frees it in `Drop` using `channels_cap`.
            RawSlice {
                data: ptr::NonNull::new_unchecked(mem::ManuallyDrop::new(data).as_mut_ptr()),
            }
        }
    }

    /// Allocate a dynamic audio buffer from a fixed-size array acting as a
    /// template for all the channels.
    ///
    /// See [dynamic!].
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::from_frames([1.0, 2.0, 3.0, 4.0], 4);
    ///
    /// assert_eq!(buffer.frames(), 4);
    /// assert_eq!(buffer.channels(), 4);
    /// ```
    pub fn from_frames<const N: usize>(frames: [T; N], channels: usize) -> Self
    where
        T: Copy,
    {
        return Self {
            data: data_from_frames(frames, channels),
            channels,
            channels_cap: channels,
            frames: N,
            frames_cap: N,
        };

        // Copy the template frames into every freshly allocated channel.
        fn data_from_frames<T, const N: usize>(
            frames: [T; N],
            channels: usize,
        ) -> RawSlice<RawSlice<T>>
        where
            T: Copy,
        {
            // Safety: we control and can trust all of the allocated buffer sizes.
            unsafe {
                let mut data = RawSlice::uninit(channels);

                for c in 0..channels {
                    let slice = RawSlice::uninit(N);
                    ptr::copy_nonoverlapping(frames.as_ptr(), slice.as_ptr(), N);
                    data.write(c, slice);
                }

                data
            }
        }
    }

    /// Get the number of frames in the channels of an audio buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::<f32>::new();
    ///
    /// assert_eq!(buffer.frames(), 0);
    /// buffer.resize(256);
    /// assert_eq!(buffer.frames(), 256);
    /// ```
    pub fn frames(&self) -> usize {
        self.frames
    }

    /// Check how many channels there are in the buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::<f32>::new();
    ///
    /// assert_eq!(buffer.channels(), 0);
    /// buffer.resize_channels(2);
    /// assert_eq!(buffer.channels(), 2);
    /// ```
    pub fn channels(&self) -> usize {
        self.channels
    }

    /// Construct an iterator over all available channels.
    ///
    /// # Examples
    ///
    /// ```
    /// use rand::Rng as _;
    ///
    /// let mut buffer = audio::Dynamic::<f32>::with_topology(4, 256);
    ///
    /// let all_zeros = vec![0.0; 256];
    ///
    /// for chan in buffer.iter() {
    ///     assert_eq!(chan, &all_zeros[..]);
    /// }
    /// ```
    pub fn iter(&self) -> Iter<'_, T> {
        // Safety: we're using a trusted length to build the slice.
        unsafe { Iter::new(self.data.as_ref(self.channels), self.frames) }
    }

    /// Construct a mutable iterator over all available channels.
    ///
    /// # Examples
    ///
    /// ```
    /// use rand::Rng as _;
    ///
    /// let mut buffer = audio::Dynamic::<f32>::with_topology(4, 256);
    /// let mut rng = rand::thread_rng();
    ///
    /// for chan in buffer.iter_mut() {
    ///     rng.fill(chan);
    /// }
    /// ```
    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
        // Safety: we're using a trusted length to build the slice.
        unsafe { IterMut::new(self.data.as_mut(self.channels), self.frames) }
    }

    /// Set the size of the buffer. The size is the size of each channel's
    /// buffer.
    ///
    /// If the size of the buffer increases as a result, the new regions in the
    /// frames will be zeroed. If the size decreases, the region will be left
    /// untouched. So if followed by another increase, the data will be "dirty".
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::<f32>::new();
    ///
    /// assert_eq!(buffer.channels(), 0);
    /// assert_eq!(buffer.frames(), 0);
    ///
    /// buffer.resize_channels(4);
    /// buffer.resize(256);
    ///
    /// assert_eq!(buffer[1][128], 0.0);
    /// buffer[1][128] = 42.0;
    ///
    /// assert_eq!(buffer.channels(), 4);
    /// assert_eq!(buffer.frames(), 256);
    /// ```
    ///
    /// Decreasing and increasing the size will not touch a buffer that has
    /// already been allocated.
    ///
    /// ```rust
    /// # let mut buffer = audio::Dynamic::<f32>::with_topology(4, 256);
    /// assert_eq!(buffer[1][128], 0.0);
    /// buffer[1][128] = 42.0;
    ///
    /// buffer.resize(64);
    /// assert!(buffer[1].get(128).is_none());
    ///
    /// buffer.resize(256);
    /// assert_eq!(buffer[1][128], 42.0);
    /// ```
    pub fn resize(&mut self, frames: usize)
    where
        T: Sample,
    {
        if self.frames_cap < frames {
            let from = self.frames_cap;
            // Amortized growth: at least doubles (see RawSlice::next_cap), so
            // repeated small resizes do not reallocate every call.
            let to = <RawSlice<T>>::next_cap(from, frames);

            if self.channels_cap > 0 {
                let additional = to - from;

                // Note: every allocated channel (up to channels_cap, not just
                // channels) is grown, preserving the invariant that all
                // channel slices are exactly frames_cap long.
                for n in 0..self.channels_cap {
                    // Safety: We control the known sizes, so we can guarantee
                    // that the slice is allocated and sized to exactly `from`.
                    unsafe {
                        self.data
                            .get_unchecked_mut(n)
                            .reserve_zeroed(from, additional)
                    };
                }
            }

            self.frames_cap = to;
        }

        self.frames = frames;
    }

    /// Set the number of channels in use.
    ///
    /// If the size of the buffer increases as a result, the new channels will
    /// be zeroed. If the size decreases, the channels that fall outside of the
    /// new size will be dropped.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::<f32>::new();
    ///
    /// assert_eq!(buffer.channels(), 0);
    /// assert_eq!(buffer.frames(), 0);
    ///
    /// buffer.resize_channels(4);
    /// buffer.resize(256);
    ///
    /// assert_eq!(buffer.channels(), 4);
    /// assert_eq!(buffer.frames(), 256);
    /// ```
    pub fn resize_channels(&mut self, channels: usize)
    where
        T: Sample,
    {
        if channels == self.channels {
            return;
        }

        if channels > self.channels_cap {
            let old_cap = self.channels_cap;
            let new_cap = <RawSlice<RawSlice<T>>>::next_cap(old_cap, channels);
            let additional = new_cap - old_cap;

            // Safety: We trust that the old capacity is correct.
            unsafe {
                self.data.reserve_uninit(old_cap, additional);
            }

            // Pre-allocate every new channel slot up to the new capacity so
            // the "all slots up to channels_cap are allocated" invariant holds.
            for n in old_cap..new_cap {
                let slice = RawSlice::zeroed(self.frames_cap);

                // Safety: we control the capacity of channels and have just
                // guaranteed above that it is appropriate.
                unsafe {
                    self.data.write(n, slice);
                }
            }

            self.channels_cap = new_cap;
        }

        debug_assert!(channels <= self.channels_cap);
        self.channels = channels;
    }

    /// Get a reference to the buffer of the given channel.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::<f32>::new();
    ///
    /// buffer.resize_channels(4);
    /// buffer.resize(256);
    ///
    /// let expected = vec![0.0; 256];
    ///
    /// assert_eq!(Some(&expected[..]), buffer.get(0));
    /// assert_eq!(Some(&expected[..]), buffer.get(1));
    /// assert_eq!(Some(&expected[..]), buffer.get(2));
    /// assert_eq!(Some(&expected[..]), buffer.get(3));
    /// assert_eq!(None, buffer.get(4));
    /// ```
    pub fn get(&self, channel: usize) -> Option<&[T]> {
        if channel < self.channels {
            // Safety: We control the length of each channel so we can assert that
            // it is both allocated and initialized up to `len`.
            unsafe { Some(self.data.get_unchecked(channel).as_ref(self.frames)) }
        } else {
            None
        }
    }

    /// Get the given channel or initialize the buffer with the default value.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::<f32>::new();
    ///
    /// buffer.resize(256);
    ///
    /// let expected = vec![0f32; 256];
    ///
    /// assert_eq!(buffer.get_or_default(0), &expected[..]);
    /// assert_eq!(buffer.get_or_default(1), &expected[..]);
    ///
    /// assert_eq!(buffer.channels(), 2);
    /// ```
    pub fn get_or_default(&mut self, channel: usize) -> &[T]
    where
        T: Sample,
    {
        self.resize_channels(channel + 1);

        // Safety: We initialized the given index just above and we know the
        // trusted length.
        unsafe { self.data.get_unchecked(channel).as_ref(self.frames) }
    }

    /// Get a mutable reference to the buffer of the given channel.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use rand::Rng as _;
    ///
    /// let mut buffer = audio::Dynamic::<f32>::new();
    ///
    /// buffer.resize_channels(2);
    /// buffer.resize(256);
    ///
    /// let mut rng = rand::thread_rng();
    ///
    /// if let Some(left) = buffer.get_mut(0) {
    ///     rng.fill(left);
    /// }
    ///
    /// if let Some(right) = buffer.get_mut(1) {
    ///     rng.fill(right);
    /// }
    /// ```
    pub fn get_mut(&mut self, channel: usize) -> Option<&mut [T]> {
        if channel < self.channels {
            // Safety: We control the length of each channel so we can assert that
            // it is both allocated and initialized up to `len`.
            unsafe { Some(self.data.get_unchecked_mut(channel).as_mut(self.frames)) }
        } else {
            None
        }
    }

    /// Get the given channel or initialize the buffer with the default value.
    ///
    /// If a channel that is out of bound is queried, the buffer will be empty.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use rand::Rng as _;
    ///
    /// let mut buffer = audio::Dynamic::<f32>::new();
    ///
    /// buffer.resize(256);
    ///
    /// let mut rng = rand::thread_rng();
    ///
    /// rng.fill(buffer.get_or_default_mut(0));
    /// rng.fill(buffer.get_or_default_mut(1));
    ///
    /// assert_eq!(buffer.channels(), 2);
    /// ```
    pub fn get_or_default_mut(&mut self, channel: usize) -> &mut [T]
    where
        T: Sample,
    {
        self.resize_channels(channel + 1);

        // Safety: We initialized the given index just above and we know the
        // trusted length.
        unsafe { self.data.get_unchecked_mut(channel).as_mut(self.frames) }
    }

    /// Convert into a vector of vectors.
    ///
    /// This is provided for the [Dynamic] type because it's a very cheap
    /// operation due to its memory topology. No copying of the underlying
    /// buffers is necessary.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::<f32>::new();
    /// buffer.resize_channels(4);
    /// buffer.resize(512);
    ///
    /// let expected = vec![0.0; 512];
    ///
    /// let buffers = buffer.into_vectors();
    /// assert_eq!(buffers.len(), 4);
    /// assert_eq!(buffers[0], &expected[..]);
    /// assert_eq!(buffers[1], &expected[..]);
    /// assert_eq!(buffers[2], &expected[..]);
    /// assert_eq!(buffers[3], &expected[..]);
    /// ```
    pub fn into_vectors(self) -> Vec<Vec<T>> {
        self.into_vectors_if(|_| true)
    }

    /// Convert into a vector of vectors using a condition.
    ///
    /// This is provided for the [Dynamic] type because it's a very cheap
    /// operation due to its memory topology. No copying of the underlying
    /// buffers is necessary.
    ///
    /// Channels which do not match the condition will be replaced with an
    /// empty vector (their backing allocation is freed).
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut buffer = audio::Dynamic::<f32>::new();
    /// buffer.resize_channels(4);
    /// buffer.resize(512);
    ///
    /// let expected = vec![0.0; 512];
    ///
    /// let buffers = buffer.into_vectors_if(|n| n != 1);
    /// assert_eq!(buffers.len(), 4);
    /// assert_eq!(buffers[0], &expected[..]);
    /// assert_eq!(buffers[1], &[][..]);
    /// assert_eq!(buffers[2], &expected[..]);
    /// assert_eq!(buffers[3], &expected[..]);
    /// ```
    pub fn into_vectors_if(self, mut condition: impl FnMut(usize) -> bool) -> Vec<Vec<T>> {
        // ManuallyDrop prevents `Drop for Dynamic` from running; ownership of
        // every allocation is transferred out (or freed) explicitly below.
        let mut this = mem::ManuallyDrop::new(self);
        let mut vecs = Vec::with_capacity(this.channels);

        let frames_cap = this.frames_cap;

        for n in 0..this.channels {
            // Safety: The capacities and lengths are trusted since they're part
            // of the audio buffer.
            unsafe {
                let mut slice = this.data.read(n);

                if condition(n) {
                    vecs.push(slice.into_vec(this.frames, frames_cap));
                } else {
                    slice.drop_in_place(frames_cap);
                    vecs.push(Vec::new());
                }
            }
        }

        // Drop the tail of the channels capacity which is not in use.
        for n in this.channels..this.channels_cap {
            // Safety: The capacities and lengths are trusted since they're part
            // of the audio buffer.
            unsafe {
                this.data.get_unchecked_mut(n).drop_in_place(frames_cap);
            }
        }

        // Drop the backing vector for all channels.
        //
        // Safety: we trust the capacity provided here.
        unsafe {
            let cap = this.channels_cap;
            this.data.drop_in_place(cap);
        }

        vecs
    }
}

impl<T> Default for Dynamic<T> {
    fn default() -> Self {
        Self::new()
    }
}

// Safety: dynamic is simply a container of T's, any Send/Sync properties are
// inherited.
unsafe impl<T> Send for Dynamic<T> where T: Send {}
unsafe impl<T> Sync for Dynamic<T> where T: Sync {}

/// Allocate an audio buffer from a fixed-size array.
///
/// See [dynamic!].
impl<T, const F: usize, const C: usize> From<[[T; F]; C]> for Dynamic<T>
where
    T: Copy,
{
    #[inline]
    fn from(channels: [[T; F]; C]) -> Self {
        Self::from_array(channels)
    }
}

impl<T> fmt::Debug for Dynamic<T>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

impl<T> cmp::PartialEq for Dynamic<T>
where
    T: cmp::PartialEq,
{
    fn eq(&self, other: &Self) -> bool {
        self.iter().eq(other.iter())
    }
}

impl<T> cmp::Eq for Dynamic<T> where T: cmp::Eq {}

impl<T> cmp::PartialOrd for Dynamic<T>
where
    T: cmp::PartialOrd,
{
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        self.iter().partial_cmp(other.iter())
    }
}

impl<T> cmp::Ord for Dynamic<T>
where
    T: cmp::Ord,
{
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        self.iter().cmp(other.iter())
    }
}

impl<T> hash::Hash for Dynamic<T>
where
    T: hash::Hash,
{
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        for channel in self.iter() {
            channel.hash(state);
        }
    }
}

impl<'a, T> IntoIterator for &'a Dynamic<T> {
    type IntoIter = Iter<'a, T>;
    type Item = <Self::IntoIter as Iterator>::Item;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T> IntoIterator for &'a mut Dynamic<T> {
    type IntoIter = IterMut<'a, T>;
    type Item = <Self::IntoIter as Iterator>::Item;

    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<T> ops::Index<usize> for Dynamic<T> {
    type Output = [T];

    fn index(&self, index: usize) -> &Self::Output {
        match self.get(index) {
            Some(slice) => slice,
            None => panic!("index `{}` is not a channel", index),
        }
    }
}

impl<T> ops::IndexMut<usize> for Dynamic<T> {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        match self.get_mut(index) {
            Some(slice) => slice,
            None => panic!("index `{}` is not a channel", index,),
        }
    }
}

impl<T> Drop for Dynamic<T> {
    fn drop(&mut self) {
        // Free every allocated channel slot (up to capacity, not just the
        // in-use `channels`), then the outer array itself.
        for n in 0..self.channels_cap {
            // Safety: We're being dropped, so there's no subsequent access to
            // the current collection.
            unsafe {
                self.data
                    .get_unchecked_mut(n)
                    .drop_in_place(self.frames_cap);
            }
        }

        // Safety: We trust the length of the underlying array.
        unsafe {
            self.data.drop_in_place(self.channels_cap);
        }
    }
}

impl<T> ExactSizeBuf for Dynamic<T> {
    fn frames(&self) -> usize {
        self.frames
    }
}

impl<T> Buf for Dynamic<T> {
    fn frames_hint(&self) -> Option<usize> {
        Some(self.frames)
    }

    fn channels(&self) -> usize {
        self.channels
    }
}

impl<T> Channels<T> for Dynamic<T> {
    fn channel(&self, channel: usize) -> Channel<'_, T> {
        Channel::linear(&self[channel])
    }
}

impl<T> ResizableBuf for Dynamic<T>
where
    T: Sample,
{
    fn resize(&mut self, frames: usize) {
        Self::resize(self, frames);
    }

    fn resize_topology(&mut self, channels: usize, frames: usize) {
        Self::resize(self, frames);
        Self::resize_channels(self, channels);
    }
}

impl<T> ChannelsMut<T> for Dynamic<T>
where
    T: Copy,
{
    fn channel_mut(&mut self, channel: usize) -> ChannelMut<'_, T> {
        ChannelMut::linear(&mut self[channel])
    }

    fn copy_channels(&mut self, from: usize, to: usize) {
        assert! {
            from < self.channels,
            "copy from channel {} is out of bounds 0-{}",
            from,
            self.channels
        };
        assert! {
            to < self.channels,
            "copy to channel {} which is out of bounds 0-{}",
            to,
            self.channels
        };

        if from != to {
            // Safety: We're making sure not to access any mutable buffers which are
            // not initialized. The `from != to` check above guarantees the two
            // slices do not alias (each channel has its own allocation).
            unsafe {
                let from = self.data.get_unchecked(from).as_ref(self.frames);
                let to = self.data.get_unchecked_mut(to).as_mut(self.frames);
                to.copy_from_slice(from);
            }
        }
    }
}

/// A raw slice: a bare pointer to a heap allocation whose length and capacity
/// are tracked externally by the caller ([Dynamic] above).
#[repr(transparent)]
struct RawSlice<T> {
    data: ptr::NonNull<T>,
}

impl<T> RawSlice<T> {
    // Mirrors the minimum-capacity heuristic used by the standard library's
    // RawVec: avoid tiny allocations for small element types.
    const MIN_NON_ZERO_CAP: usize = if mem::size_of::<T>() == 1 {
        8
    } else if mem::size_of::<T>() <= 256 {
        4
    } else {
        1
    };

    /// Calculate the next capacity: at least double the current one, at least
    /// the requested one, and never below MIN_NON_ZERO_CAP.
    fn next_cap(from: usize, to: usize) -> usize {
        let to = usize::max(from * 2, to);
        usize::max(Self::MIN_NON_ZERO_CAP, to)
    }

    /// Construct an empty raw slice.
    fn empty() -> Self {
        Self {
            // An empty Vec performs no allocation; its pointer is a dangling
            // but well-aligned sentinel that is never dereferenced at len 0.
            data: unsafe { ptr::NonNull::new_unchecked(Vec::new().as_mut_ptr()) },
        }
    }

    /// Construct a new raw slice with the given capacity leaving the memory
    /// uninitialized.
    fn uninit(cap: usize) -> Self {
        // Safety: We're just allocating the vector so we know it's correctly
        // sized and aligned.
        unsafe {
            let data = Vec::with_capacity(cap);
            let data = ptr::NonNull::new_unchecked(mem::ManuallyDrop::new(data).as_mut_ptr());
            Self { data }
        }
    }

    /// Construct a new raw slice with the given capacity, zero-initialized.
    fn zeroed(cap: usize) -> Self
    where
        T: Sample,
    {
        // Safety: the type constraint of `T` guarantees that an all-zeros bit
        // pattern is legal.
        unsafe {
            let mut data = Vec::with_capacity(cap);
            ptr::write_bytes(data.as_mut_ptr(), 0, cap);
            let data = ptr::NonNull::new_unchecked(mem::ManuallyDrop::new(data).as_mut_ptr());
            Self { data }
        }
    }

    /// Resize the slice in place by reserving `additional` more elements in it.
    ///
    /// # Safety
    ///
    /// The provided `len` must match the length for which it was allocated.
    /// This will change the underlying allocation, so subsequent calls must
    /// provide the new length of `len + additional`.
    unsafe fn reserve_zeroed(&mut self, len: usize, additional: usize)
    where
        T: Sample,
    {
        // Note: we need to provide `len` for the `reserve_exact` calculation
        // below to be correct.
        let mut channel = Vec::from_raw_parts(self.data.as_ptr(), len, len);
        channel.reserve_exact(additional);

        // Safety: the type constraint of `T` guarantees that an all-zeros bit pattern is legal.
        ptr::write_bytes(channel.as_mut_ptr().add(len), 0, additional);

        self.data = ptr::NonNull::new_unchecked(mem::ManuallyDrop::new(channel).as_mut_ptr());
    }

    /// Resize the slice in place by reserving `additional` more elements in it
    /// without initializing them.
    ///
    /// # Safety
    ///
    /// The provided `len` must match the length for which it was allocated.
    /// This will change the underlying allocation, so subsequent calls must
    /// provide the new length of `len + additional`.
    unsafe fn reserve_uninit(&mut self, len: usize, additional: usize) {
        // Note: we need to provide `len` for the `reserve_exact` calculation
        // below to be correct.
        let mut channel = Vec::from_raw_parts(self.data.as_ptr(), len, len);
        channel.reserve_exact(additional);
        self.data = ptr::NonNull::new_unchecked(mem::ManuallyDrop::new(channel).as_mut_ptr());
    }

    /// Get a reference to the value at the given offset.
    ///
    /// # Safety
    ///
    /// The caller is responsible for asserting that the value at the given
    /// location has an initialized bit pattern and is not out of bounds.
    unsafe fn get_unchecked(&self, n: usize) -> &T {
        &*self.data.as_ptr().add(n)
    }

    /// Get a mutable reference to the value at the given offset.
    ///
    /// # Safety
    ///
    /// The caller is responsible for asserting that the value at the given
    /// location has an initialized bit pattern and is not out of bounds.
    unsafe fn get_unchecked_mut(&mut self, n: usize) -> &mut T {
        &mut *self.data.as_ptr().add(n)
    }

    /// Read the value at the given offset.
    ///
    /// # Safety
    ///
    /// The caller is responsible for asserting that the value at the given
    /// location has an initialized bit pattern and is not out of bounds.
    unsafe fn read(&self, n: usize) -> T {
        ptr::read(self.data.as_ptr().add(n))
    }

    /// Write a value at the given offset.
    ///
    /// # Safety
    ///
    /// The caller is responsible for asserting that the written to offset is
    /// not out of bounds.
    unsafe fn write(&mut self, n: usize, value: T) {
        ptr::write(self.data.as_ptr().add(n), value)
    }

    /// Get the raw base pointer of the slice.
    fn as_ptr(self) -> *mut T {
        self.data.as_ptr()
    }

    /// Get the raw slice as a slice.
    ///
    /// # Safety
    ///
    /// The incoming len must represent a valid slice of initialized data.
    /// The produced lifetime must be bounded to something valid!
    unsafe fn as_ref<'a>(self, len: usize) -> &'a [T] {
        slice::from_raw_parts(self.data.as_ptr() as *const _, len)
    }

    /// Get the raw slice as a mutable slice.
    ///
    /// # Safety
    ///
    /// The incoming len must represent a valid slice of initialized data.
    /// The produced lifetime must be bounded to something valid!
    unsafe fn as_mut<'a>(self, len: usize) -> &'a mut [T] {
        slice::from_raw_parts_mut(self.data.as_ptr(), len)
    }

    /// Drop the slice in place.
    ///
    /// Note: reconstructing with length 0 means element destructors do NOT
    /// run — only the allocation is freed. All users store `T: Sample`
    /// (plain data) or `RawSlice` values here, which have no destructors.
    ///
    /// # Safety
    ///
    /// The provided `len` must match the allocated length of the raw slice.
    ///
    /// After calling drop, the slice must not be used ever again because the
    /// memory it is pointing to has been freed.
    unsafe fn drop_in_place(&mut self, len: usize) {
        let _ = Vec::from_raw_parts(self.data.as_ptr(), 0, len);
    }

    /// Convert into a vector.
    ///
    /// # Safety
    ///
    /// The provided `len` and `cap` must match the ones used when allocating
    /// the slice.
    ///
    /// The underlying slices must be dropped and forgotten after this
    /// operation.
    pub(crate) unsafe fn into_vec(self, len: usize, cap: usize) -> Vec<T> {
        Vec::from_raw_parts(self.data.as_ptr(), len, cap)
    }
}

// Note: no auto impl cause of `T`.
impl<T> Clone for RawSlice<T> {
    #[inline]
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Copy for RawSlice<T> {}
//! Path resolution and shared constants.
//!
//! In debug builds, assets and user data live next to the working directory
//! (`resources/` and `debug_run/`); in release builds, assets live under
//! `/usr/share/<app>` and user data under the platform's local-data dir.

use lazy_static::lazy_static;
use std::path::PathBuf;

#[cfg(not(debug_assertions))]
use dirs::data_local_dir;
#[cfg(not(debug_assertions))]
use std::path::Path;
use std::sync::Mutex;

// Paths
#[cfg(debug_assertions)]
lazy_static! {
    // Canonicalized working directory, used as the root for debug paths.
    static ref ORG_PATH: PathBuf = std::env::current_dir()
        .expect("Couldn't get current_dir")
        .canonicalize()
        .expect("Failed to canonicalize current_dir");
    // Mutex because release builds mutate this in set_app_name; the debug
    // variant keeps the same shape for API parity.
    static ref ASSETS_PATH: Mutex<PathBuf> = Mutex::new(ORG_PATH.join("resources"));
    static ref USER_DATA_PATH: PathBuf = ORG_PATH.join("debug_run");
}

#[cfg(not(debug_assertions))]
lazy_static! {
    // Empty until set_app_name is called with the application name.
    static ref ASSETS_PATH: Mutex<PathBuf> = Mutex::new(PathBuf::new());
    static ref USER_DATA_PATH: PathBuf = data_local_dir().expect("No home dir");
}

/// Set the application name used to locate installed assets.
/// No-op in debug builds, where assets are resolved relative to the
/// working directory instead.
#[cfg(debug_assertions)]
pub fn set_app_name(_name: &str) {}

#[cfg(not(debug_assertions))]
pub fn set_app_name(name: &str) {
    (*(*ASSETS_PATH).lock().unwrap()) = Path::new("/usr/share").join(name).to_path_buf();
}

// Which root a PathRef resolves against.
enum PathRefKind {
    UserData,
    Own,
}

/// A static, relative path that resolves against one of the app's roots
/// (assets or user data) at runtime.
pub struct PathRef {
    path_ref: &'static str,
    kind: PathRefKind,
}

impl PathRef {
    /// A path relative to the application's own assets directory.
    pub const fn own(path_ref: &'static str) -> Self {
        Self {
            path_ref,
            kind: PathRefKind::Own,
        }
    }

    /// A path relative to the user's data directory.
    pub const fn user_cfg(path_ref: &'static str) -> Self {
        Self {
            path_ref,
            kind: PathRefKind::UserData,
        }
    }

    /// Resolve to an absolute path against the appropriate root.
    pub fn resolve(&self) -> PathBuf {
        match self.kind {
            PathRefKind::UserData => USER_DATA_PATH.join(self.path_ref),
            PathRefKind::Own => (*ASSETS_PATH.lock().unwrap()).join(self.path_ref),
        }
    }
}

/// An ordered list of candidate locations for the same resource.
pub struct MultipathRef {
    refs: &'static [PathRef],
}

impl MultipathRef {
    /// NOTE(review): assumes `refs` is non-empty — `get`/`save_path` index
    /// `refs[0]` unconditionally and panic on an empty slice.
    pub const fn new(refs: &'static [PathRef]) -> Self {
        Self { refs }
    }

    // Will get the first that exists from the list, falling back to the
    // first candidate (even if missing) so callers get a usable path.
    pub fn get(&self) -> PathBuf {
        for r in self.refs {
            let p = r.resolve();
            if p.exists() {
                return p;
            }
        }

        self.refs[0].resolve()
    }

    /// The preferred location for writing (the first candidate).
    pub fn save_path(&self) -> PathBuf {
        self.refs[0].resolve()
    }

    /// All candidates that currently exist on disk.
    pub fn all(&self) -> Vec<PathBuf> {
        self.refs
            .iter()
            .map(|r| r.resolve())
            .filter(|p| p.exists())
            .collect()
    }
}

// Messages
pub const AUDIO_REC_START_ERR_MSG: &str =
    "Failed while trying to start audio recording, please report this";
pub const AUDIO_REC_STOP_ERR_MSG: &str =
    "Failed while trying to stop audio recording, please report this";
pub const CLOCK_TOO_EARLY_MSG: &str = "Somehow the system's clock time is before unix epoch, this is not supported, check your system's time and the CMOS battery";

// Other
pub const LILY_VER: &str = std::env!("CARGO_PKG_VERSION");
pub const DEFAULT_HOTWORD_SENSITIVITY: f32 = 0.43;
pub const DEFAULT_SAMPLES_PER_SECOND: u32 = 16000;

#[cfg(feature = "client")]
pub const RECORD_BUFFER_SIZE: usize = 32_000; // This amounts to 2s of audio
#[cfg(feature = "client")]
pub const MAX_SAMPLES_PER_SECOND: u32 = 48_000;
/// The current state of a button
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
#[repr(u8)]
pub enum ButtonState {
    /// The button was activated this frame
    Pressed = 0,
    /// The button is active but was not activated this frame
    Held = 1,
    /// The button was released this frame
    Released = 2,
    /// The button is not active but was not released this frame
    NotPressed = 3,
}

impl ButtonState {
    /// Fold a freshly sampled state into the previous one, producing the
    /// state for the new frame.
    ///
    /// Only the down/up status of `new` is considered; whether the result is
    /// a transition (`Pressed`/`Released`) or a steady state
    /// (`Held`/`NotPressed`) comes from comparing against `self`.
    pub(crate) fn update(&self, new: ButtonState) -> ButtonState {
        let was_down = self.is_down();
        let now_down = new.is_down();

        if now_down {
            if was_down {
                ButtonState::Held
            } else {
                ButtonState::Pressed
            }
        } else if was_down {
            ButtonState::Released
        } else {
            ButtonState::NotPressed
        }
    }

    /// Determine if the button is either Pressed or Held
    pub fn is_down(&self) -> bool {
        matches!(*self, ButtonState::Pressed | ButtonState::Held)
    }

    /// Convert the button from a temporary state to a permanent state
    ///
    /// Pressed states become Held, Released states become NotPressed
    #[must_use]
    pub fn clear_temporary(&self) -> ButtonState {
        match *self {
            ButtonState::Pressed | ButtonState::Held => ButtonState::Held,
            ButtonState::Released | ButtonState::NotPressed => ButtonState::NotPressed,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn clear_temporary() {
        let all = [
            ButtonState::Pressed,
            ButtonState::Held,
            ButtonState::Released,
            ButtonState::NotPressed,
        ];

        // Clearing temporaries must never change the down/up status.
        for button in &all {
            assert_eq!(button.is_down(), button.clear_temporary().is_down());
        }
    }
}
use std::io;
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
use std::env;

/// Return the last line of the file at `file_path`.
///
/// Yields an empty string for an empty file; any read error encountered
/// while scanning is returned to the caller.
fn tail(file_path: &str) -> io::Result<String> {
    let file = File::open(file_path)?;

    // Walk every line, keeping only the most recent result. If the reader
    // errors partway through, that error is what ends up returned — the
    // same as taking the final item of the iterator.
    let mut last = Ok(String::new());

    for line in BufReader::new(file).lines() {
        last = line;
    }

    last
}

fn main() {
    match tail("hoge.txt") {
        Ok(line) => println!("{}", line),
        Err(e) => println!("! {:?}", e.kind()),
    }
}
use futures_core::future::BoxFuture;

use crate::any::{Any, AnyConnectOptions, AnyKind};
use crate::connection::Connection;
use crate::error::Error;

#[cfg(feature = "postgres")]
use crate::postgres;

#[cfg(feature = "sqlite")]
use crate::sqlite;

#[cfg(feature = "mssql")]
use crate::mssql;

#[cfg(feature = "mysql")]
use crate::mysql;

use crate::transaction::Transaction;

mod establish;
mod executor;

/// A connection to _any_ SQLx database.
///
/// The database driver used is determined by the scheme
/// of the connection url.
///
/// ```text
/// postgres://postgres@localhost/test
/// sqlite://a.sqlite
/// ```
#[derive(Debug)]
pub struct AnyConnection(pub(super) AnyConnectionKind);

// The driver-specific connection this `AnyConnection` wraps. One variant per
// enabled database feature.
#[derive(Debug)]
// Used internally in `sqlx-macros`
#[doc(hidden)]
pub enum AnyConnectionKind {
    #[cfg(feature = "postgres")]
    Postgres(postgres::PgConnection),

    #[cfg(feature = "mssql")]
    Mssql(mssql::MssqlConnection),

    #[cfg(feature = "mysql")]
    MySql(mysql::MySqlConnection),

    #[cfg(feature = "sqlite")]
    Sqlite(sqlite::SqliteConnection),
}

impl AnyConnectionKind {
    /// Which database driver this connection uses.
    pub fn kind(&self) -> AnyKind {
        match self {
            #[cfg(feature = "postgres")]
            AnyConnectionKind::Postgres(_) => AnyKind::Postgres,

            #[cfg(feature = "mysql")]
            AnyConnectionKind::MySql(_) => AnyKind::MySql,

            #[cfg(feature = "sqlite")]
            AnyConnectionKind::Sqlite(_) => AnyKind::Sqlite,

            #[cfg(feature = "mssql")]
            AnyConnectionKind::Mssql(_) => AnyKind::Mssql,
        }
    }
}

impl AnyConnection {
    /// Which database driver this connection uses.
    pub fn kind(&self) -> AnyKind {
        self.0.kind()
    }

    // Used internally in `sqlx-macros`
    #[doc(hidden)]
    pub fn private_get_mut(&mut self) -> &mut AnyConnectionKind {
        &mut self.0
    }
}

// Dispatch a method call to the wrapped driver connection (shared borrow).
macro_rules! delegate_to {
    ($self:ident.$method:ident($($arg:ident),*)) => {
        match &$self.0 {
            #[cfg(feature = "postgres")]
            AnyConnectionKind::Postgres(conn) => conn.$method($($arg),*),

            #[cfg(feature = "mysql")]
            AnyConnectionKind::MySql(conn) => conn.$method($($arg),*),

            #[cfg(feature = "sqlite")]
            AnyConnectionKind::Sqlite(conn) => conn.$method($($arg),*),

            #[cfg(feature = "mssql")]
            AnyConnectionKind::Mssql(conn) => conn.$method($($arg),*),
        }
    };
}

// Dispatch a method call to the wrapped driver connection (mutable borrow).
macro_rules! delegate_to_mut {
    ($self:ident.$method:ident($($arg:ident),*)) => {
        match &mut $self.0 {
            #[cfg(feature = "postgres")]
            AnyConnectionKind::Postgres(conn) => conn.$method($($arg),*),

            #[cfg(feature = "mysql")]
            AnyConnectionKind::MySql(conn) => conn.$method($($arg),*),

            #[cfg(feature = "sqlite")]
            AnyConnectionKind::Sqlite(conn) => conn.$method($($arg),*),

            #[cfg(feature = "mssql")]
            AnyConnectionKind::Mssql(conn) => conn.$method($($arg),*),
        }
    };
}

impl Connection for AnyConnection {
    type Database = Any;

    type Options = AnyConnectOptions;

    // `close`/`close_hard` consume `self`, so the delegate macros (which
    // borrow) cannot be used; each match arm moves the inner connection out.
    fn close(self) -> BoxFuture<'static, Result<(), Error>> {
        match self.0 {
            #[cfg(feature = "postgres")]
            AnyConnectionKind::Postgres(conn) => conn.close(),

            #[cfg(feature = "mysql")]
            AnyConnectionKind::MySql(conn) => conn.close(),

            #[cfg(feature = "sqlite")]
            AnyConnectionKind::Sqlite(conn) => conn.close(),

            #[cfg(feature = "mssql")]
            AnyConnectionKind::Mssql(conn) => conn.close(),
        }
    }

    fn close_hard(self) -> BoxFuture<'static, Result<(), Error>> {
        match self.0 {
            #[cfg(feature = "postgres")]
            AnyConnectionKind::Postgres(conn) => conn.close_hard(),

            #[cfg(feature = "mysql")]
            AnyConnectionKind::MySql(conn) => conn.close_hard(),

            #[cfg(feature = "sqlite")]
            AnyConnectionKind::Sqlite(conn) => conn.close_hard(),

            #[cfg(feature = "mssql")]
            AnyConnectionKind::Mssql(conn) => conn.close_hard(),
        }
    }

    fn ping(&mut self) -> BoxFuture<'_, Result<(), Error>> {
        delegate_to_mut!(self.ping())
    }

    fn begin(&mut self) -> BoxFuture<'_, Result<Transaction<'_, Self::Database>, Error>>
    where
        Self: Sized,
    {
        Transaction::begin(self)
    }

    fn cached_statements_size(&self) -> usize {
        match &self.0 {
            #[cfg(feature = "postgres")]
            AnyConnectionKind::Postgres(conn) => conn.cached_statements_size(),

            #[cfg(feature = "mysql")]
            AnyConnectionKind::MySql(conn) => conn.cached_statements_size(),

            #[cfg(feature = "sqlite")]
            AnyConnectionKind::Sqlite(conn) => conn.cached_statements_size(),

            // no cache
            #[cfg(feature = "mssql")]
            AnyConnectionKind::Mssql(_) => 0,
        }
    }

    fn clear_cached_statements(&mut self) -> BoxFuture<'_, Result<(), Error>> {
        match &mut self.0 {
            #[cfg(feature = "postgres")]
            AnyConnectionKind::Postgres(conn) => conn.clear_cached_statements(),

            #[cfg(feature = "mysql")]
            AnyConnectionKind::MySql(conn) => conn.clear_cached_statements(),

            #[cfg(feature = "sqlite")]
            AnyConnectionKind::Sqlite(conn) => conn.clear_cached_statements(),

            // no cache
            #[cfg(feature = "mssql")]
            AnyConnectionKind::Mssql(_) => Box::pin(futures_util::future::ok(())),
        }
    }

    #[doc(hidden)]
    fn flush(&mut self) -> BoxFuture<'_, Result<(), Error>> {
        delegate_to_mut!(self.flush())
    }

    #[doc(hidden)]
    fn should_flush(&self) -> bool {
        delegate_to!(self.should_flush())
    }
}

#[cfg(feature = "postgres")]
impl From<postgres::PgConnection> for AnyConnection {
    fn from(conn: postgres::PgConnection) -> Self {
        AnyConnection(AnyConnectionKind::Postgres(conn))
    }
}

#[cfg(feature = "mssql")]
impl From<mssql::MssqlConnection> for AnyConnection {
    fn from(conn: mssql::MssqlConnection) -> Self {
        AnyConnection(AnyConnectionKind::Mssql(conn))
    }
}

#[cfg(feature = "mysql")]
impl From<mysql::MySqlConnection> for AnyConnection {
    fn from(conn: mysql::MySqlConnection) -> Self {
        AnyConnection(AnyConnectionKind::MySql(conn))
    }
}

#[cfg(feature = "sqlite")]
impl From<sqlite::SqliteConnection> for AnyConnection {
    fn from(conn: sqlite::SqliteConnection) -> Self {
        AnyConnection(AnyConnectionKind::Sqlite(conn))
    }
}
// Vulkan KHR display-surface creation flags. The enum lists the named bits
// (currently only the reserved zero value); the transparent newtype below is
// the combined-bitmask type wired up through the `SetupVkFlags` macro.
pub use VkDisplaySurfaceCreateFlagsKHR::*;

#[repr(u32)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VkDisplaySurfaceCreateFlagsKHR {
    // No flags are currently defined by the spec; this is the zero sentinel.
    VK_DISPLAY_SURFACE_CREATE_NULL_BIT = 0,
}

use crate::SetupVkFlags;

/// Bitmask of [VkDisplaySurfaceCreateFlagsKHR] values (FFI-compatible u32).
#[repr(C)]
#[derive(Clone, Copy, Eq, PartialEq, Hash)]
pub struct VkDisplaySurfaceCreateFlagBitsKHR(u32);

SetupVkFlags!(
    VkDisplaySurfaceCreateFlagsKHR,
    VkDisplaySurfaceCreateFlagBitsKHR
);
use crate::vec3::{Vec3, Point3}; use rand::{self, Rng}; const POINT_COUNT: i32 = 256; pub struct Perlin { ran_vec: Vec<Vec3>, perm_x: Vec<i32>, perm_y: Vec<i32>, perm_z: Vec<i32>, } impl Perlin { fn permute(p: &mut Vec<i32>) { let mut rng = rand::thread_rng(); // TODO Refactor later to use more ideal way, e.g. reverse & iterator let mut i = POINT_COUNT - 1; while i > 0 { let target = rng.gen_range(0, i); p.swap(i as usize, target as usize); i -= 1; } } pub fn perlin_generate_perm() -> Vec<i32> { let mut p: Vec<i32> = (0..POINT_COUNT).collect(); Self::permute(&mut p); p } pub fn new() -> Self { let mut rng = rand::thread_rng(); let ran_vec = (0..POINT_COUNT) .map(|_| Vec3{ x: rng.gen_range(-1., 1.), y: rng.gen_range(-1., 1.), z: rng.gen_range(-1., 1.)} .normalize()) .collect(); Self { ran_vec: ran_vec, perm_x: Self::perlin_generate_perm(), perm_y: Self::perlin_generate_perm(), perm_z: Self::perlin_generate_perm(), } } pub fn noise(&self, p: &Point3) -> f32 { let mut u = p.x - p.x.floor(); let mut v = p.y - p.y.floor(); let mut w = p.z - p.z.floor(); let i = p.x.floor() as i32; let j = p.y.floor() as i32; let k = p.z.floor() as i32; let mut c: [[[Vec3; 2];2];2] = [[[Vec3{ x: 0., y: 0., z: 0.}; 2]; 2]; 2]; for di in 0i32..2 { for dj in 0i32..2 { for dk in 0i32..2 { c[di as usize][dj as usize][dk as usize] = self.ran_vec[(self.perm_x[((i + di) & 255) as usize] ^ self.perm_y[((j + dj) & 255) as usize] ^ self.perm_z[((k + dk) & 255) as usize]) as usize]; } } } perlin_interp(c, u, v, w) } pub fn turb(&self, p: &Point3, depth: usize) -> f32 { let mut accum = 0.; let mut temp_p = *p; let mut weight = 1.; for _i in 0..depth { accum += weight * self.noise(&temp_p); weight *= 0.5; temp_p = 2. * temp_p; } accum.abs() } } fn trilinear_interp(c: [[[f32; 2]; 2]; 2], u: f32, v: f32, w: f32) -> f32 { let mut accum: f32 = 0.; for i in 0..2 { for j in 0..2 { for k in 0..2 { accum += (i as f32 * u + ((1 - i) as f32) * (1. - u)) * (j as f32 * v + ((1 - j) as f32) * (1. 
- v)) * (k as f32 * w + ((1 - k) as f32) * (1. - w)) * c[i][j][k]; } } } accum } fn perlin_interp(c: [[[Vec3; 2]; 2]; 2], u: f32, v: f32, w: f32) -> f32 { let uu = u * u * (3. - 2. * u); let vv = v * v * (3. - 2. * v); let ww = w * w * (3. - 2. * w); let mut accum = 0.; for i in 0..2 { for j in 0..2 { for k in 0..2 { let weight_v = Vec3{ x: u - i as f32, y: v - j as f32, z: w - k as f32, }; accum += (i as f32 * uu + (1 - i) as f32 * (1. - uu)) * (j as f32 * vv + (1 - j) as f32 * (1. - vv)) * (k as f32 * ww + (1 - k) as f32 * (1. - ww)) * c[i][j][k].dot(weight_v); } } } accum }
/// Returns the most frequently occurring value in `arr`.
///
/// Counts occurrences with a `HashMap`, then selects the entry with the
/// highest count in a single O(n) pass (`max_by_key`) instead of sorting all
/// entries (O(n log n)). Ties are broken arbitrarily, as before (the original
/// sorted `HashMap` entries, whose order is unspecified).
///
/// # Panics
/// Panics if `arr` is empty (the original indexed `kvp_arr[0]` and panicked
/// there too).
pub fn find_special_integer(arr: Vec<i32>) -> i32 {
    use std::collections::HashMap;

    let mut counts: HashMap<i32, usize> = HashMap::new();
    for val in &arr {
        *counts.entry(*val).or_insert(0) += 1;
    }

    counts
        .into_iter()
        .max_by_key(|&(_, count)| count)
        .map(|(val, _)| val)
        .expect("input must be non-empty")
}

#[cfg(test)]
mod find_special_integer_tests {
    use super::*;

    #[test]
    fn find_special_integer_one() {
        // arrange
        let test = vec![1, 2, 2, 6, 6, 6, 6, 7, 10];

        // act
        let result = find_special_integer(test);

        // assert
        assert_eq!(result, 6);
    }
}
// Thin binary entry point: all logic lives in the `yumeko` library crate.
extern crate yumeko;

fn main() {
    // Delegate straight to the library's blackjack routine.
    yumeko::play_blackjack();
}
// NOTE(review): this file looks like machine-generated register-access code
// (svd2rust-style) for the DSCLKCFG (Deep Sleep Clock Configuration) register.
// Prefer regenerating from the device SVD over hand-editing.

#[doc = "Reader of register DSCLKCFG"]
pub type R = crate::R<u32, super::DSCLKCFG>;
#[doc = "Writer for register DSCLKCFG"]
pub type W = crate::W<u32, super::DSCLKCFG>;
#[doc = "Register DSCLKCFG `reset()`'s with value 0"]
impl crate::ResetValue for super::DSCLKCFG {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DSSYSDIV`"]
pub type DSSYSDIV_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `DSSYSDIV`"]
pub struct DSSYSDIV_W<'a> {
    w: &'a mut W,
}
impl<'a> DSSYSDIV_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear bits 0..=9, then OR in the low 10 bits of `value`.
        self.w.bits = (self.w.bits & !0x03ff) | ((value as u32) & 0x03ff);
        self.w
    }
}
#[doc = "Deep Sleep Oscillator Source\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum DSOSCSRC_A {
    #[doc = "0: PIOSC"]
    PIOSC = 0,
    #[doc = "2: LFIOSC"]
    LFIOSC = 2,
    #[doc = "3: MOSC"]
    MOSC = 3,
    #[doc = "4: Hibernation Module RTCOSC"]
    RTC = 4,
}
impl From<DSOSCSRC_A> for u8 {
    #[inline(always)]
    fn from(variant: DSOSCSRC_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `DSOSCSRC`"]
pub type DSOSCSRC_R = crate::R<u8, DSOSCSRC_A>;
impl DSOSCSRC_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, DSOSCSRC_A> {
        use crate::Variant::*;
        // Values without a named variant (e.g. 1) fall through to `Res(i)`.
        match self.bits {
            0 => Val(DSOSCSRC_A::PIOSC),
            2 => Val(DSOSCSRC_A::LFIOSC),
            3 => Val(DSOSCSRC_A::MOSC),
            4 => Val(DSOSCSRC_A::RTC),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `PIOSC`"]
    #[inline(always)]
    pub fn is_piosc(&self) -> bool {
        *self == DSOSCSRC_A::PIOSC
    }
    #[doc = "Checks if the value of the field is `LFIOSC`"]
    #[inline(always)]
    pub fn is_lfiosc(&self) -> bool {
        *self == DSOSCSRC_A::LFIOSC
    }
    #[doc = "Checks if the value of the field is `MOSC`"]
    #[inline(always)]
    pub fn is_mosc(&self) -> bool {
        *self == DSOSCSRC_A::MOSC
    }
    #[doc = "Checks if the value of the field is `RTC`"]
    #[inline(always)]
    pub fn is_rtc(&self) -> bool {
        *self == DSOSCSRC_A::RTC
    }
}
#[doc = "Write proxy for field `DSOSCSRC`"]
pub struct DSOSCSRC_W<'a> {
    w: &'a mut W,
}
impl<'a> DSOSCSRC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DSOSCSRC_A) -> &'a mut W {
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "PIOSC"]
    #[inline(always)]
    pub fn piosc(self) -> &'a mut W {
        self.variant(DSOSCSRC_A::PIOSC)
    }
    #[doc = "LFIOSC"]
    #[inline(always)]
    pub fn lfiosc(self) -> &'a mut W {
        self.variant(DSOSCSRC_A::LFIOSC)
    }
    #[doc = "MOSC"]
    #[inline(always)]
    pub fn mosc(self) -> &'a mut W {
        self.variant(DSOSCSRC_A::MOSC)
    }
    #[doc = "Hibernation Module RTCOSC"]
    #[inline(always)]
    pub fn rtc(self) -> &'a mut W {
        self.variant(DSOSCSRC_A::RTC)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 20..=23.
        self.w.bits = (self.w.bits & !(0x0f << 20)) | (((value as u32) & 0x0f) << 20);
        self.w
    }
}
#[doc = "Reader of field `MOSCDPD`"]
pub type MOSCDPD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MOSCDPD`"]
pub struct MOSCDPD_W<'a> {
    w: &'a mut W,
}
impl<'a> MOSCDPD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 30.
        self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
        self.w
    }
}
#[doc = "Reader of field `PIOSCPD`"]
pub type PIOSCPD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PIOSCPD`"]
pub struct PIOSCPD_W<'a> {
    w: &'a mut W,
}
impl<'a> PIOSCPD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 31.
        self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:9 - Deep Sleep Clock Divisor"]
    #[inline(always)]
    pub fn dssysdiv(&self) -> DSSYSDIV_R {
        DSSYSDIV_R::new((self.bits & 0x03ff) as u16)
    }
    #[doc = "Bits 20:23 - Deep Sleep Oscillator Source"]
    #[inline(always)]
    pub fn dsoscsrc(&self) -> DSOSCSRC_R {
        DSOSCSRC_R::new(((self.bits >> 20) & 0x0f) as u8)
    }
    #[doc = "Bit 30 - MOSC Disable Power Down"]
    #[inline(always)]
    pub fn moscdpd(&self) -> MOSCDPD_R {
        MOSCDPD_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 31 - PIOSC Power Down"]
    #[inline(always)]
    pub fn pioscpd(&self) -> PIOSCPD_R {
        PIOSCPD_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bits 0:9 - Deep Sleep Clock Divisor"]
    #[inline(always)]
    pub fn dssysdiv(&mut self) -> DSSYSDIV_W {
        DSSYSDIV_W { w: self }
    }
    #[doc = "Bits 20:23 - Deep Sleep Oscillator Source"]
    #[inline(always)]
    pub fn dsoscsrc(&mut self) -> DSOSCSRC_W {
        DSOSCSRC_W { w: self }
    }
    #[doc = "Bit 30 - MOSC Disable Power Down"]
    #[inline(always)]
    pub fn moscdpd(&mut self) -> MOSCDPD_W {
        MOSCDPD_W { w: self }
    }
    #[doc = "Bit 31 - PIOSC Power Down"]
    #[inline(always)]
    pub fn pioscpd(&mut self) -> PIOSCPD_W {
        PIOSCPD_W { w: self }
    }
}
// `error_chain!` can recurse deeply #![recursion_limit = "1024"] #[macro_use] extern crate error_chain; // #[macro_use] // extern crate serde_derive; #[macro_use] extern crate log; pub mod classifier; mod dir_entry; pub mod errors; pub mod file_finder; mod fs; mod join; mod matcher; pub mod rating; mod rule; mod severity; pub use crate::dir_entry::*; pub use crate::errors::*; pub use crate::file_finder::find_files; pub use crate::join::join_violations; pub use crate::rating::*; pub use crate::rule::*; pub use crate::severity::Severity; #[cfg(all(not(target_env = "msvc"), feature = "jemalloc"))] use jemallocator::Jemalloc; #[cfg(all(not(target_env = "msvc"), feature = "jemalloc"))] #[global_allocator] static GLOBAL: Jemalloc = Jemalloc;
#![allow(clippy::comparison_chain)] #![allow(clippy::collapsible_if)] use std::cmp::Reverse; use std::cmp::{max, min}; use std::collections::{BTreeSet, HashMap, HashSet}; use std::fmt::Debug; use itertools::Itertools; use whiteread::parse_line; const ten97: usize = 1000_000_007; /// 2の逆元 mod ten97.割りたいときに使う const inv2ten97: u128 = 500_000_004; const mmm: usize = 998244353; fn main() { let (n, d): (usize, usize) = parse_line().unwrap(); let mut ans = 0; let stop = (d as f64 / 2.0).ceil() as usize; // dbg!(stop); for up in (stop..=d).rev() { let down = d - up; if n < up { continue; } let tmp1 = mypow(2, n - up) - 1; let mut tmp2 = mypow(2, up); let downtmp = if down == 0 || down == 1 { 1 } else { mypow(2, down - 1) }; // dbg!(up, down, tmp1, tmp2, downtmp); tmp2 *= downtmp; tmp2 %= mmm; if up == d / 2 { ans += tmp1 * tmp2; } else { ans += tmp1 * tmp2; ans += tmp1 * tmp2; } ans %= mmm; // dbg!(ans); } println!("{}", ans); } fn mypow(a: usize, mut b: usize) -> usize { let mut ans = 1; let mut ppp = a; while b > 0 { if b % 2 == 1 { ans *= ppp; ans %= mmm; } ppp *= ppp; ppp %= mmm; b /= 2; } ans }
// Models pub mod scores; pub mod maps; pub mod users;
// FFI bindings shim. The allow(...) attributes silence the style lints that
// machine-generated bindings (e.g. bindgen output) inevitably trigger.
#![allow(dead_code)]
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]

// Select the platform-specific generated bindings at compile time.
#[cfg(not(target_os = "android"))]
include!("bindings.rs");
#[cfg(target_os = "android")]
include!("bindings_android.rs");
use std::env; use std::error::Error; use std::fs; pub struct Config { pattern: String, filename: String, case_sensitive: bool, } impl Config { pub fn new(args: &[String]) -> Result<Config, &'static str> { if args.len() < 3 { return Err("Not enough arguments"); } let pattern = args[1].clone(); let filename = args[2].clone(); let case_sensitive = env::var("CASE_INSENSITIVE").is_err(); Ok(Config { pattern, filename , case_sensitive}) } // Extension one day, use `clap` crate for parsing command line arguments // <https://mattgathu.github.io/writing-cli-app-rust/> } pub fn run(config: Config) -> Result<(), Box<dyn Error>> { let contents = fs::read_to_string(&config.filename)?; let results = if config.case_sensitive { search(&config.pattern, &contents) } else { search_case_insensitive(&config.pattern, &contents) }; for line in results { println!("{}", line) } Ok(()) // This says "we're using this fn for its side effects only" } fn search<'a>(pattern: &str, contents: &'a str) -> Vec<&'a str> { let mut ret = Vec::new(); for line in contents.lines() { if line.contains(&pattern){ ret.push(line); } } ret } fn search_case_insensitive<'a>(pattern: &str, contents: &'a str) -> Vec<&'a str> { let pattern = pattern.to_lowercase(); let mut ret = Vec::new(); for line in contents.lines() { if line.to_lowercase().contains(&pattern) { ret.push(line); } } ret } #[cfg(test)] mod tests { use super::*; #[test] fn case_sensitive() { let pattern = "duct"; // This is an interesting way of doing long multi-line strings let contents = "\ Rust: safe, fast, productive. Pick three. I love Duct Tape."; assert_eq!(vec!["safe, fast, productive."], search(pattern, contents)); } #[test] fn case_insensitive() { let pattern = "rUsT"; let contents = "\ Rust: safe, fast, productive. Pick three. Trust me."; assert_eq!(vec!["Rust:", "Trust me."], search_case_insensitive(pattern, contents)); } }
use std::ffi::OsString;

use async_fuse::FileType;
use menmos_client::Query;

use super::{Error, Result};
use crate::MenmosFS;

/// A single directory entry produced by `readdir_impl`.
#[derive(Debug)]
pub struct ReadDirEntry {
    // Inode number of the entry.
    pub ino: u64,
    // Offset to hand back to resume listing after this entry (index + 1).
    pub offset: i64,
    // Entry file type (directory, regular file, ...).
    pub kind: FileType,
    // Entry name.
    pub name: OsString,
}

/// Result payload of a `readdir` call: the listed entries.
pub struct ReadDirReply {
    pub entries: Vec<ReadDirEntry>,
}

impl MenmosFS {
    /// Lists the entries of the directory identified by `ino`, skipping the
    /// first `offset` entries.
    ///
    /// Virtual directories are listed from `virtual_directories_inodes`; any
    /// other inode is resolved through `inode_to_blobid` and treated as a
    /// directory blob whose children are queried. Client errors are logged
    /// and surfaced as `Error::IOError`; an unknown inode is `Error::NotFound`.
    pub async fn readdir_impl(&self, ino: u64, offset: i64) -> Result<ReadDirReply> {
        log::info!("readdir i{}", ino);

        let entries = if let Some(v) = self.virtual_directories_inodes.get(&ino).await {
            self.list_virtual_entries(v, ino).await
        } else {
            // We assume the inode points to a directory blob id.
            let blob_id = self
                .inode_to_blobid
                .get(&ino)
                .await
                .ok_or(Error::NotFound)?;
            self.list_entries(Query::default().and_parent(blob_id), ino)
                .await
        }
        .map_err(|e| {
            log::error!("client error: {}", e);
            Error::IOError
        })?
        .into_iter()
        // Enumerate BEFORE skipping so each entry keeps its absolute index;
        // the stored offset is index + 1, i.e. where the next listing resumes
        // (presumably the FUSE resume convention — confirm against the caller).
        .enumerate()
        .skip(offset as usize)
        .map(|(offset, (ino, kind, name))| ReadDirEntry {
            offset: (offset + 1) as i64,
            ino,
            kind,
            name: name.into(),
        })
        .collect::<Vec<ReadDirEntry>>();

        Ok(ReadDirReply { entries })
    }
}
use super::*;

// Sibling cases for names that ARE registered (presumably) live here.
mod registered;

// Property test: sending to a `{name, node}` destination whose `name` is not
// registered must fail with `badarg` carrying a descriptive message.
#[test]
fn unregistered_errors_badarg() {
    run!(
        |arc_process| {
            // Strategy: an arbitrary atom for `name` and an arbitrary term
            // for `message`, both tied to this process.
            (
                Just(arc_process.clone()),
                strategy::term::atom(),
                strategy::term(arc_process.clone()),
            )
        },
        |(arc_process, name, message)| {
            // Build the `{name, node()}` destination tuple for the local node.
            let destination = arc_process.tuple_from_slice(&[name, erlang::node_0::result()]);

            prop_assert_badarg!(
                result(&arc_process, destination, message),
                format!("name ({}) not registered", name)
            );

            Ok(())
        },
    );
}