text
stringlengths
8
4.13M
// Declares the `Topframe` RTB enum through the project's strict-enum macro:
// 0 = UnfriendlyOrUnknown (catch-all), 1 = Topframe.
// NOTE(review): `rtb_type_strict!` is defined elsewhere in the crate; the
// "strict" semantics (e.g. how unknown values are handled) are assumed from
// the name — confirm against the macro definition.
use crate::rtb_type_strict;

rtb_type_strict! { Topframe, UnfriendlyOrUnknown=0; Topframe = 1 }
//! How to extract subcommands' args into external structs. //! //! Running this example with --help prints this message: //! ----------------------------------------------------- //! classify 0.3.25 //! //! USAGE: //! enum_tuple <SUBCOMMAND> //! //! FLAGS: //! -h, --help Prints help information //! -V, --version Prints version information //! //! SUBCOMMANDS: //! foo //! help Prints this message or the help of the given subcommand(s) //! ----------------------------------------------------- use structopt::StructOpt; #[derive(Debug, StructOpt)] pub struct Foo { pub bar: Option<String>, } #[derive(Debug, StructOpt)] pub enum Command { #[structopt(name = "foo")] Foo(Foo), } #[derive(Debug, StructOpt)] #[structopt(name = "classify")] pub struct ApplicationArguments { #[structopt(subcommand)] pub command: Command, } fn main() { let opt = ApplicationArguments::from_args(); println!("{:?}", opt); }
/// One node of the buddy tree: a `Data` leaf is an undivided block (free or
/// used); a `Root` is a block that has been split into its two buddies.
enum BinaryTree {
    Data(Free),
    Root(Box<[BinaryTree; 2]>),
}

impl BinaryTree {
    /// Gets the first free block `traverse_levels` splits below this node,
    /// marks it used, and returns its index.
    ///
    /// The returned index encodes the left/right path to the leaf, with the
    /// least-significant bit being the choice made at this node
    /// (0 = left buddy, 1 = right buddy).
    pub fn get_first_free(&mut self, traverse_levels: usize) -> Option<usize> {
        match self {
            Self::Data(free) => {
                if traverse_levels == 0 {
                    // Block of exactly the requested size: claim it if free.
                    if *free == Free::Free {
                        *free = Free::Used;
                        Some(0)
                    } else {
                        None
                    }
                } else {
                    match free {
                        Free::Free => {
                            // Too large: split into two free buddies, then
                            // recurse into the left one (freshly created, so
                            // the recursive call cannot fail — hence unwrap).
                            *self = BinaryTree::Root(Box::new([
                                BinaryTree::Data(Free::Free),
                                BinaryTree::Data(Free::Free),
                            ]));
                            match self {
                                BinaryTree::Data(_) => panic!("impossible condition"),
                                BinaryTree::Root(data) => {
                                    Some(data[0].get_first_free(traverse_levels - 1).unwrap() << 1)
                                }
                            }
                        }
                        Free::Used => None,
                    }
                }
            }
            Self::Root(data) => {
                if traverse_levels > 0 {
                    // Prefer the left buddy; fall back to the right one and
                    // tag the path bit accordingly.
                    if let Some(first_try) = data[0].get_first_free(traverse_levels - 1) {
                        Some(first_try << 1)
                    } else if let Some(second_try) = data[1].get_first_free(traverse_levels - 1) {
                        Some((second_try << 1) + 1)
                    } else {
                        None
                    }
                } else {
                    // An already-split block can never satisfy a request at
                    // its own level.
                    None
                }
            }
        }
    }

    /// Marks the block addressed by `index` (path-encoded as produced by
    /// [`BinaryTree::get_first_free`]) as free again. Buddies are not
    /// coalesced back into larger blocks.
    pub fn free(&mut self, index: usize) {
        match self {
            Self::Data(free) => *free = Free::Free,
            Self::Root(trees) => trees[index & 1].free(index >> 1),
        }
    }
}

/// Free/used marker for a leaf block.
#[derive(Copy, Clone, PartialEq)]
enum Free {
    Used,
    Free,
}

/// Toy buddy allocator that hands out pointers into an internal byte array.
pub struct BuddyAllocator {
    free_tree: BinaryTree,
    // NOTE(review): the arena is BLOCK_LEVELS * MIN_BLOCK_SIZE bytes, which
    // matches the maximum value `get_alloc_memory_index` can produce, not the
    // 2^BLOCK_LEVELS leaves of a textbook buddy layout — confirm intent.
    data: [u8; Self::BLOCK_LEVELS * Self::MIN_BLOCK_SIZE],
}

/// Handle returned by [`BuddyAllocator::alloc`]; pass it back to
/// [`BuddyAllocator::free`] to release the block.
#[derive(Clone)]
pub struct Allocation {
    pub data: *mut u8,
    alloc_index: usize,
}

impl BuddyAllocator {
    /// log2 of the smallest allocatable block size.
    const STARTING_BLOCK_POW: usize = 16;
    /// Smallest allocatable block size in bytes (64 KiB).
    const MIN_BLOCK_SIZE: usize = 1 << Self::STARTING_BLOCK_POW;
    /// Depth of the buddy tree.
    const BLOCK_LEVELS: usize = 8;

    /// Creates an allocator whose whole arena is a single free block.
    pub fn new() -> Self {
        Self {
            free_tree: BinaryTree::Data(Free::Free),
            data: [0; Self::BLOCK_LEVELS * Self::MIN_BLOCK_SIZE],
        }
    }

    /// Allocates a block big enough for `allocation_size` bytes, or `None`
    /// if the request is too large or no suitable free block remains.
    pub fn alloc(&mut self, allocation_size: usize) -> Option<Allocation> {
        let level = Self::get_block_level(allocation_size);
        // Fix: requests larger than the arena used to underflow the
        // subtraction below and panic; treat them as unsatisfiable instead.
        if level > Self::BLOCK_LEVELS {
            return None;
        }
        let depth_in_tree = Self::BLOCK_LEVELS - level;
        if let Some(alloc_index) = self.free_tree.get_first_free(depth_in_tree) {
            let mem_index = Self::get_alloc_memory_index(alloc_index);
            // SAFETY: `mem_index` is a sum of at most BLOCK_LEVELS terms of
            // MIN_BLOCK_SIZE each, so the offset stays within (or one past
            // the end of) `data`, as `offset` requires.
            let data = unsafe { self.data.as_ptr().offset(mem_index as isize) as *mut u8 };
            Some(Allocation { data, alloc_index })
        } else {
            None
        }
    }

    /// Returns `allocation`'s block to the free tree.
    pub fn free(&mut self, allocation: Allocation) {
        self.free_tree.free(allocation.alloc_index);
    }

    /// Maps a path-encoded tree index to a byte offset into `data`.
    fn get_alloc_memory_index(alloc_index: usize) -> usize {
        (0..Self::BLOCK_LEVELS)
            .map(|i| ((alloc_index >> i) & 1) << Self::STARTING_BLOCK_POW)
            .sum::<usize>()
    }

    /// Returns the buddy level (0 = MIN_BLOCK_SIZE) of the smallest
    /// power-of-two multiple of MIN_BLOCK_SIZE that can hold `alloc_size`.
    fn get_block_level(alloc_size: usize) -> usize {
        // Fix: any non-zero remainder needs an extra block (the original
        // only rounded up when the remainder was exactly 1 byte).
        let num_blocks = alloc_size / Self::MIN_BLOCK_SIZE
            + if alloc_size % Self::MIN_BLOCK_SIZE != 0 { 1 } else { 0 };
        // Fix: the level must *cover* num_blocks, i.e. ceil(log2(num_blocks)),
        // not the position of the highest set bit — 3 blocks need a
        // 4-block (level 2) allocation, not a 2-block one.
        let mut level = 0;
        while num_blocks > (1 << level) {
            level += 1;
        }
        level
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn build() {
        let _tree = BuddyAllocator::new();
    }

    #[test]
    fn allocate() {
        let mut tree = BuddyAllocator::new();
        let _alloc = tree.alloc(10).unwrap();
    }

    #[test]
    fn dealloc() {
        let mut tree = BuddyAllocator::new();
        let alloc = tree.alloc(10).unwrap();
        tree.free(alloc);
    }

    #[test]
    fn alloc_batches() {
        let alloc_list = [100, 2341, 213, 1234, 1234, 12];
        let mut tree = BuddyAllocator::new();
        let mut allocs = alloc_list
            .iter()
            .map(|i| tree.alloc(*i).unwrap())
            .collect::<Vec<_>>();
        for alloc in allocs.drain(..) {
            tree.free(alloc);
        }
    }

    #[test]
    fn test_block_level() {
        assert_eq!(BuddyAllocator::get_block_level(4), 0);
        assert_eq!(
            BuddyAllocator::get_block_level(BuddyAllocator::MIN_BLOCK_SIZE + 1),
            1
        );
        assert_eq!(
            BuddyAllocator::get_block_level(BuddyAllocator::MIN_BLOCK_SIZE * 3 + 1),
            2
        );
    }
}
/// Appends a period to `t`, mutating it in place.
fn append_dot(t: &mut String) {
    t.push('.');
}

fn main() {
    // Demonstrates passing a mutable borrow into a helper.
    let mut sentence = "Please add a dot".to_string();
    append_dot(&mut sentence);
    println!("s with dot = {}", sentence);
}
//! Module with various macros to make code less verbose

/// Macro for handling errors returned from the `rusqlite` crate
///
/// The argument of this macro invocation should be a `Result<T, rusqlite::Error>`.
/// On `Ok(t)` the macro evaluates to `t`; on `Err(e)` it returns early from the
/// enclosing function with `($crate::Error::DatabaseError(e), line, file)` so the
/// failure site can be reported later (see `handle_err!`).
#[macro_export]
macro_rules! unwrap_db_err {
    ($expression:expr) => {
        match $expression {
            Ok(t) => t,
            // Capture the call-site location for later diagnostics.
            Err(e) => return Err(($crate::Error::DatabaseError(e), std::line!(), std::file!()))
        }
    }
}

/// Macro for handling errors returned from the `reqwest` crate
///
/// The argument of this macro invocation should be a `Result<T, reqwest::Error>`.
/// Behaves like `unwrap_db_err!` but wraps the error in `$crate::Error::RequestError`.
#[macro_export]
macro_rules! unwrap_req_err {
    ($expression:expr) => {
        match $expression {
            Ok(t) => t,
            Err(e) => return Err(($crate::Error::RequestError(e), std::line!(), std::file!()))
        }
    }
}

/// Macro for handling errors that fit into no category
///
/// The argument of this macro invocation should be a `Result<T, P: ToString>`;
/// the error is stringified into `$crate::Error::Other`.
#[macro_export]
macro_rules! unwrap_other_err {
    ($expression:expr) => {
        match $expression {
            Ok(t) => t,
            Err(e) => return Err(($crate::Error::Other(e.to_string()), std::line!(), std::file!()))
        }
    }
}

/// Handle a Result<T, crate::Error>
///
/// When the passed in Result is `Ok`, this macro will return `T`.
/// When the passed in Result is `Err`, this macro will print out the Error in a
/// nice way to stderr and exit with exit code 1
#[macro_export]
macro_rules! handle_err {
    ($expression:expr) => {
        match $expression {
            Ok(t) => t,
            Err((e, line, file)) => {
                // `line`/`file` are the failure site recorded by the
                // unwrap_* macros above, not the location of this handler.
                match e {
                    $crate::Error::DatabaseError(e) => eprintln!("Error: An error occurred while processing or handling database data: {:?} (line {} in {})", e, line, file),
                    $crate::Error::RequestError(e) => eprintln!("Error: An error occurred while sending a HTTP request: {:?} (line {} in {})", e, line, file),
                    $crate::Error::GoogleError(e) => eprintln!("Error: The Google API returned an error: {:?} (line {} in {})", e, line, file),
                    $crate::Error::Other(e) => eprintln!("Error: An error occurred: {:?} (line {} in {})", e, line, file)
                }
                eprintln!("This is a fatal error. Exiting!");
                std::process::exit(1);
            }
        }
    }
}

/// This macro is used for dealing with responses from the Google API
///
/// The struct passed in as the argument should be of type `GoogleResponse<T>`.
/// If `.error` is set the macro returns early with `$crate::Error::GoogleError`;
/// otherwise it evaluates to `.data.unwrap()`.
///
/// ## Example:
/// ```ignore
/// let response: GoogleResponse<Foo> = some_request();
/// // `foo` is of type `Foo`; the macro returns early from the enclosing
/// // function if the response carried an error.
/// let foo = unwrap_google_err!(response);
/// ```
///
/// NOTE(review): `$expression` is expanded up to three times in the body, so an
/// argument with side effects would be evaluated repeatedly — confirm callers
/// only pass plain bindings.
#[macro_export]
macro_rules! unwrap_google_err {
    ($expression:expr) => {
        if $expression.error.is_some() {
            return Err(($crate::Error::GoogleError($expression.error.unwrap()), std::line!(), std::file!()));
        } else {
            $expression.data.unwrap()
        }
    }
}
use std::borrow::Cow;
use std::sync::Arc;

use command_data_derive::CommandData;
use discorsd::{async_trait, BotState};
use discorsd::commands::*;
use discorsd::errors::BotError;
use discorsd::http::ClientResult;
use discorsd::model::ids::*;
use discorsd::model::interaction_response::message;

use crate::Bot;
use crate::games::GameType;

/// The `/addme` slash command: adds the invoking (or a specified) user to a game lobby.
#[derive(Clone, Debug)]
pub struct AddMeCommand;

#[async_trait]
impl SlashCommand for AddMeCommand {
    type Bot = Bot;
    type Data = AddMeData;
    type Use = Used;
    const NAME: &'static str = "addme";

    fn description(&self) -> Cow<'static, str> {
        "Add yourself to a game".into()
    }

    /// Dispatches on the selected game type.
    async fn run(&self,
                 state: Arc<BotState<Bot>>,
                 interaction: InteractionUse<AppCommandData, Unused>,
                 data: AddMeData,
    ) -> Result<InteractionUse<AppCommandData, Self::Use>, BotError> {
        // Default to the invoking user unless a target player was supplied.
        let id = data.player.unwrap_or_else(|| interaction.user().id());
        match data.game {
            GameType::Avalon => avalon(&*state, interaction, id).await,
            // NOTE(review): Coup/Hangman are wired to `unreachable!()` —
            // presumably filtered out before this command runs; confirm that
            // the command registration actually excludes them.
            GameType::Coup => unreachable!(),
            GameType::Hangman => unreachable!(),
            GameType::Kittens => {
                interaction.respond(&state.client, format!(r#""added" to {:?}"#, data.game)).await
            }
        }.map_err(|e| e.into())
    }
}

/// Options accepted by `/addme`.
#[derive(CommandData)]
pub struct AddMeData {
    #[command(default, desc = "The game to add you to, or Avalon if not specified")]
    game: GameType,
    #[command(desc = "Forcibly add someone else to the game")]
    player: Option<UserId>,
}

/// Toggles `user`'s membership in the guild's Avalon lobby: removes them if
/// already present, otherwise adds them (capped at 10 players), then defers
/// the interaction and deletes the deferred response.
async fn avalon(
    state: &BotState<Bot>,
    interaction: InteractionUse<AppCommandData, Unused>,
    user: UserId,
) -> ClientResult<InteractionUse<AppCommandData, Used>> {
    let guild = interaction.guild().unwrap();
    let mut games = state.bot.avalon_games.write().await;
    let game = games.entry(guild).or_default();
    let config = game.config_mut();
    // track which guilds this user is in a game in
    let deferred = {
        let mut users = state.bot.user_games.write().await;
        let guilds = users.entry(user).or_default();
        if config.players.iter().any(|m| m.id() == user) {
            // remove player
            config.players.retain(|m| m.id() != user);
            guilds.remove(&guild);
            interaction.defer(&state).await?
        } else {
            // add player
            if config.players.len() == 10 {
                return interaction.respond(&state.client, message(|m| {
                    m.content("There can be a maximum of 10 people playing Avalon");
                    m.ephemeral();
                })).await;
            }
            // Testing shortcut: the configured owner, in the configured test
            // channel, pads the lobby up to five copies of themselves.
            if interaction.channel == state.bot.config.channel && user == state.bot.config.owner {
                for _ in 0..5_usize.saturating_sub(config.players.len()) {
                    config.players.push(interaction.member().unwrap().clone());
                };
            } else if let Some(member) = state.cache.member(guild, user).await {
                // Cheap cache lookup first, then a fetch from Discord.
                config.players.push(member);
            } else if let Ok(member) = state.cache_guild_member(guild, user).await {
                config.players.push(member);
            } else {
                return interaction.respond(&state, message(|m| {
                    m.content("Could not find that user in this guild!");
                    m.ephemeral();
                })).await;
            }
            guilds.insert(guild);
            interaction.defer(&state).await?
        }
    };
    // let guard = state.slash_commands.read().await;
    // let commands = guard.get(&guild).unwrap().write().await;
    // config.start_command(state, commands, config.startable(), guild).await?;
    // config.update_embed(state, &deferred).await?;
    // Nothing further to show yet: drop the deferred "thinking" response.
    deferred.delete(&state).await
}
pub mod karkkainen;

/// Strategy trait for LCP-array computation over a suffix array.
pub trait Compute<T> {
    /// Computes the (permuted) LCP array for `text` given its suffix array `sa`.
    ///
    /// NOTE(review): `pLcp` and `Error` are not declared in this file —
    /// presumably defined elsewhere in the crate. `pLcp` violates
    /// UpperCamelCase naming, but renaming it here would break the public
    /// interface; consider fixing at the definition site.
    fn compute(text: String, sa: Vec<T>) -> Result<pLcp<T>, Error>;
}
// Auto-generated (svd2rust-style) read accessors for the DDRCTRL_CRCPARSTAT
// register; `crate::R` wraps the raw register value.
#[doc = "Reader of register DDRCTRL_CRCPARSTAT"]
pub type R = crate::R<u32, super::DDRCTRL_CRCPARSTAT>;
#[doc = "Reader of field `DFI_ALERT_ERR_CNT`"]
pub type DFI_ALERT_ERR_CNT_R = crate::R<u16, u16>;
#[doc = "Reader of field `DFI_ALERT_ERR_INT`"]
pub type DFI_ALERT_ERR_INT_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bits 0:15 - DFI_ALERT_ERR_CNT"]
    #[inline(always)]
    pub fn dfi_alert_err_cnt(&self) -> DFI_ALERT_ERR_CNT_R {
        // The low 16 bits hold the DFI alert error counter.
        DFI_ALERT_ERR_CNT_R::new((self.bits & 0xffff) as u16)
    }
    #[doc = "Bit 16 - DFI_ALERT_ERR_INT"]
    #[inline(always)]
    pub fn dfi_alert_err_int(&self) -> DFI_ALERT_ERR_INT_R {
        // Bit 16 is the DFI alert error interrupt flag.
        DFI_ALERT_ERR_INT_R::new(((self.bits >> 16) & 0x01) != 0)
    }
}
#![cfg_attr(not(feature = "std"), no_std)]
//! Shares pallet: fungible, reservable share balances keyed by a hash id,
//! with wrapping/unwrapping of the native currency into shares.

use codec::{Decode, Encode};
use core::cmp;
use frame_support::{
    decl_error, decl_event, decl_module, decl_storage, ensure,
    traits::{Currency, ExistenceRequirement, Get, ReservableCurrency},
};
use frame_system::ensure_signed;
use sp_runtime::{
    traits::{
        AccountIdConversion, CheckedAdd, CheckedSub, SaturatedConversion, StaticLookup, Zero,
    },
    DispatchError, DispatchResult, ModuleId, RuntimeDebug,
};
use zrml_traits::shares::{ReservableShares, Shares, WrapperShares};

#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;

/// Balance type of the configured native currency.
type BalanceOf<T> =
    <<T as Trait>::Currency as Currency<<T as frame_system::Trait>::AccountId>>::Balance;

/// Per-account share holdings, split into spendable and reserved portions.
#[derive(Encode, Decode, Clone, PartialEq, Eq, Default, RuntimeDebug)]
pub struct AccountShares<Balance> {
    pub free: Balance,
    pub reserved: Balance,
}

pub trait Trait: frame_system::Trait {
    /// Native currency that can be wrapped into shares.
    type Currency: ReservableCurrency<Self::AccountId>;
    type Event: From<Event<Self>> + Into<<Self as frame_system::Trait>::Event>;
    /// Id whose derived account holds wrapped native currency.
    type ModuleId: Get<ModuleId>;
}

decl_storage! {
    trait Store for Module<T: Trait> as Shares {
        /// A double map that is keyed by (share_id, account). The reason to make the `share_id` the prefix
        /// key is so that we can efficiently wipe out shares.
        pub Accounts get(fn accounts):
            double_map hasher (identity) T::Hash, hasher (blake2_128_concat) T::AccountId => AccountShares<BalanceOf<T>>;
        pub TotalSupply get(fn total_supply): map hasher (identity) T::Hash => BalanceOf<T>;
    }
}

decl_event!(
    pub enum Event<T>
    where
        AccountId = <T as frame_system::Trait>::AccountId,
        Hash = <T as frame_system::Trait>::Hash,
        Balance = BalanceOf<T>,
    {
        /// Some shares have been transferred. [shares_id, from, to, amount]
        Transferred(Hash, AccountId, AccountId, Balance),
        /// Some shares have been reserved. [shares_id, who, amount]
        Reserved(Hash, AccountId, Balance),
        /// Shares have been unreserved. [shares_id, who, amount]
        Unreserved(Hash, AccountId, Balance),
    }
);

decl_error! {
    pub enum Error for Module<T: Trait> {
        TotalIssuanceOverflow,
        BalanceTooLow,
        Underflow,
        Overflow,
    }
}

decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
        type Error = Error<T>;

        fn deposit_event() = default;

        // Moves `amount` shares of `share_id` from the signer to `dest`.
        #[weight = 0]
        pub fn transfer(
            origin,
            dest: <T::Lookup as StaticLookup>::Source,
            share_id: T::Hash,
            #[compact] amount: BalanceOf<T>,
        ) {
            let from = ensure_signed(origin)?;
            let to = T::Lookup::lookup(dest)?;
            <Self as Shares<T::AccountId, T::Hash>>::transfer(share_id, &from, &to, amount)?;
            Self::deposit_event(RawEvent::Transferred(share_id, from, to, amount));
        }

        /// Wraps the native currency into a "share" so that it can be used as if it was part of this
        /// pallet.
        #[weight = 0]
        pub fn wrap_native_currency(origin, amount: BalanceOf<T>) {
            let sender = ensure_signed(origin)?;
            Self::do_wrap_native_currency(sender, amount)?;
        }

        // Inverse of `wrap_native_currency`: burns shares, releases currency.
        #[weight = 0]
        pub fn unwrap_native_currency(origin, amount: BalanceOf<T>) {
            let sender = ensure_signed(origin)?;
            Self::do_unwrap_native_currency(sender, amount)?;
        }
    }
}

impl<T: Trait> Module<T> {
    // NOTE(review): both setters overwrite unconditionally and always return
    // Ok; callers are responsible for any overflow/underflow checks.
    pub fn set_balance(
        share_id: T::Hash,
        who: &T::AccountId,
        balance: BalanceOf<T>,
    ) -> DispatchResult {
        <Accounts<T>>::mutate(share_id, who, |data| data.free = balance);
        Ok(())
    }

    pub fn set_reserved(
        share_id: T::Hash,
        who: &T::AccountId,
        reserved: BalanceOf<T>,
    ) -> DispatchResult {
        <Accounts<T>>::mutate(share_id, who, |data| data.reserved = reserved);
        Ok(())
    }

    /// Account derived from the module id; holds wrapped native currency.
    #[inline]
    fn get_module_id() -> T::AccountId {
        T::ModuleId::get().into_account()
    }
}

impl<T: Trait> Shares<T::AccountId, T::Hash> for Module<T> {
    type Balance = BalanceOf<T>;

    fn free_balance(share_id: T::Hash, who: &T::AccountId) -> BalanceOf<T> {
        Self::accounts(share_id, who).free
    }

    fn total_supply(share_id: T::Hash) -> BalanceOf<T> {
        <TotalSupply<T>>::get(share_id)
    }

    /// Burns `amount` shares from `from`, shrinking total supply.
    fn destroy(share_id: T::Hash, from: &T::AccountId, amount: BalanceOf<T>) -> DispatchResult {
        if amount.is_zero() {
            return Ok(());
        }
        Self::ensure_can_withdraw(share_id, from, amount)?;
        // The withdraw check above guarantees free >= amount, so the raw
        // subtractions below cannot underflow.
        <TotalSupply<T>>::mutate(share_id, |am| *am -= amount);
        Self::set_balance(share_id, from, Self::free_balance(share_id, from) - amount)?;
        Ok(())
    }

    /// Wipes every account's holdings and the supply for `share_id`.
    fn destroy_all(share_id: T::Hash) -> DispatchResult {
        <Accounts<T>>::remove_prefix(share_id);
        <TotalSupply<T>>::remove(share_id);
        Ok(())
    }

    /// Fails with `BalanceTooLow` unless `who` can part with `amount`.
    fn ensure_can_withdraw(
        share_id: T::Hash,
        who: &T::AccountId,
        amount: BalanceOf<T>,
    ) -> DispatchResult {
        if amount.is_zero() {
            return Ok(());
        }
        let _new_balance = Self::free_balance(share_id, who)
            .checked_sub(&amount)
            .ok_or(Error::<T>::BalanceTooLow)?;
        Ok(())
    }

    /// Mints `amount` new shares to `to`, growing total supply.
    fn generate(share_id: T::Hash, to: &T::AccountId, amount: BalanceOf<T>) -> DispatchResult {
        if amount.is_zero() {
            return Ok(());
        }
        let new_total = Self::total_supply(share_id)
            .checked_add(&amount)
            .ok_or(Error::<T>::TotalIssuanceOverflow)?;
        <TotalSupply<T>>::insert(share_id, new_total);
        Self::set_balance(share_id, to, Self::free_balance(share_id, to) + amount)?;
        Ok(())
    }

    fn transfer(
        share_id: T::Hash,
        from: &T::AccountId,
        to: &T::AccountId,
        amount: BalanceOf<T>,
    ) -> DispatchResult {
        // Self-transfers and zero amounts are no-ops.
        if amount.is_zero() || from == to {
            return Ok(());
        }
        Self::ensure_can_withdraw(share_id, from, amount)?;
        let from_balance = Self::free_balance(share_id, from);
        let to_balance = Self::free_balance(share_id, to);
        Self::set_balance(share_id, from, from_balance - amount)?;
        Self::set_balance(share_id, to, to_balance + amount)?;
        Ok(())
    }
}

impl<T: Trait> ReservableShares<T::AccountId, T::Hash> for Module<T> {
    fn can_reserve(share_id: T::Hash, who: &T::AccountId, value: BalanceOf<T>) -> bool {
        if value.is_zero() {
            return true;
        }
        Self::free_balance(share_id, who)
            .checked_sub(&value)
            .map_or(false, |new_balance| {
                Self::ensure_can_withdraw(share_id, who, new_balance).is_ok()
            })
    }

    fn reserved_balance(share_id: T::Hash, who: &T::AccountId) -> BalanceOf<T> {
        Self::accounts(share_id, who).reserved
    }

    /// Moves `value` from free into reserved.
    fn reserve(share_id: T::Hash, who: &T::AccountId, value: BalanceOf<T>) -> DispatchResult {
        if value.is_zero() {
            return Ok(());
        }
        let free = Self::free_balance(share_id, who);
        let reserved = Self::reserved_balance(share_id, who);
        let new_free = free.checked_sub(&value).ok_or(Error::<T>::Underflow)?;
        let new_reserved = reserved.checked_add(&value).ok_or(Error::<T>::Overflow)?;
        Self::set_balance(share_id, who, new_free)?;
        Self::set_reserved(share_id, who, new_reserved)?;
        Self::deposit_event(RawEvent::Reserved(share_id, who.clone(), value));
        Ok(())
    }

    /// Moves up to `value` back from reserved into free; returns the amount
    /// actually unreserved.
    fn unreserve(
        share_id: T::Hash,
        who: &T::AccountId,
        value: BalanceOf<T>,
    ) -> Result<BalanceOf<T>, DispatchError> {
        if value.is_zero() {
            return Ok(BalanceOf::<T>::zero());
        }
        let free = Self::free_balance(share_id, who);
        let reserved = Self::reserved_balance(share_id, who);
        // Never unreserve more than is actually reserved.
        let actual = cmp::min(reserved, value);
        let new_free = free + actual;
        let new_reserved = reserved - actual;
        Self::set_balance(share_id, who, new_free)?;
        Self::set_reserved(share_id, who, new_reserved)?;
        Self::deposit_event(RawEvent::Unreserved(share_id, who.clone(), actual));
        Ok(actual)
    }
}

impl<T: Trait> WrapperShares<T::AccountId, T::Hash> for Module<T> {
    fn get_native_currency_id() -> T::Hash {
        // The all-zero hash is reserved as the id of wrapped native currency.
        let mut h = T::Hash::default();
        h.as_mut().iter_mut().for_each(|byte| *byte = 00);
        h
    }

    fn do_wrap_native_currency(who: T::AccountId, amount: BalanceOf<T>) -> DispatchResult {
        ensure!(
            T::Currency::free_balance(&who) >= amount,
            Error::<T>::BalanceTooLow
        );
        let id = Self::get_native_currency_id();
        // Move the native funds into the module account, then mint matching shares.
        T::Currency::transfer(
            &who,
            &Self::get_module_id(),
            amount,
            ExistenceRequirement::KeepAlive,
        )?;
        Self::generate(id, &who, amount.saturated_into().saturated_into())
    }

    fn do_unwrap_native_currency(who: T::AccountId, amount: BalanceOf<T>) -> DispatchResult {
        let id = Self::get_native_currency_id();
        ensure!(
            Self::free_balance(id, &who) >= amount.saturated_into().saturated_into(),
            Error::<T>::BalanceTooLow
        );
        // Burn the shares first, then release native funds from the module account.
        Self::destroy(id, &who, amount.saturated_into().saturated_into())?;
        T::Currency::transfer(
            &Self::get_module_id(),
            &who,
            amount,
            ExistenceRequirement::AllowDeath,
        )
    }
}
/*
 * Datadog API V1 Collection
 *
 * Collection of all Datadog Public endpoints.
 *
 * The version of the OpenAPI document: 1.0
 * Contact: support@datadoghq.com
 * Generated by: https://openapi-generator.tech
 */

// NOTE(review): generated OpenAPI model — field/attribute order affects the
// serialized output; do not reorder. Relies on `Serialize`/`Deserialize`
// being in scope at the crate level (serde derives).

/// ImageWidgetDefinition : The image widget allows you to embed an image on your dashboard. An image can be a PNG, JPG, or animated GIF. Only available on FREE layout dashboards.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageWidgetDefinition {
    /// Whether to display a background or not.
    #[serde(rename = "has_background", skip_serializing_if = "Option::is_none")]
    pub has_background: Option<bool>,
    /// Whether to display a border or not.
    #[serde(rename = "has_border", skip_serializing_if = "Option::is_none")]
    pub has_border: Option<bool>,
    #[serde(rename = "horizontal_align", skip_serializing_if = "Option::is_none")]
    pub horizontal_align: Option<crate::models::WidgetHorizontalAlign>,
    #[serde(rename = "margin", skip_serializing_if = "Option::is_none")]
    pub margin: Option<crate::models::WidgetMargin>,
    #[serde(rename = "sizing", skip_serializing_if = "Option::is_none")]
    pub sizing: Option<crate::models::WidgetImageSizing>,
    #[serde(rename = "type")]
    pub _type: crate::models::ImageWidgetDefinitionType,
    /// URL of the image.
    #[serde(rename = "url")]
    pub url: String,
    /// URL of the image in dark mode.
    #[serde(rename = "url_dark_theme", skip_serializing_if = "Option::is_none")]
    pub url_dark_theme: Option<String>,
    #[serde(rename = "vertical_align", skip_serializing_if = "Option::is_none")]
    pub vertical_align: Option<crate::models::WidgetVerticalAlign>,
}

impl ImageWidgetDefinition {
    /// The image widget allows you to embed an image on your dashboard. An image can be a PNG, JPG, or animated GIF. Only available on FREE layout dashboards.
    pub fn new(_type: crate::models::ImageWidgetDefinitionType, url: String) -> ImageWidgetDefinition {
        // Required fields come from the arguments; every optional field starts unset.
        ImageWidgetDefinition {
            has_background: None,
            has_border: None,
            horizontal_align: None,
            margin: None,
            sizing: None,
            _type,
            url,
            url_dark_theme: None,
            vertical_align: None,
        }
    }
}
// Submodules implementing the individual CLI subcommands.
mod apply;
mod bridge;
mod checkpoint;
mod connection;
mod error;
mod ip;
mod profile;
mod show;

// Only a subset is re-exported at this level; `bridge`, `error`, `ip` and
// `profile` are not re-exported here — presumably referenced by full module
// path elsewhere in the crate; confirm before pruning.
pub(crate) use apply::*;
pub(crate) use checkpoint::*;
pub(crate) use connection::nm_gen_conf;
pub(crate) use show::*;
use crate::{Status, TypeMeta};
use serde::{Deserialize, Deserializer, Serialize};
use thiserror::Error;

/// The `kind` field in [`TypeMeta`]
pub const META_KIND: &str = "ConversionReview";
/// The `api_version` field in [`TypeMeta`] on the v1 version
pub const META_API_VERSION_V1: &str = "apiextensions.k8s.io/v1";

#[derive(Debug, Error)]
#[error("request missing in ConversionReview")]
/// Returned when `ConversionReview` cannot be converted into `ConversionRequest`
pub struct ConvertConversionReviewError;

/// Struct that describes both request and response
#[derive(Serialize, Deserialize)]
pub struct ConversionReview {
    /// Contains the API version and type of the request
    #[serde(flatten)]
    pub types: TypeMeta,
    /// Contains conversion request
    #[serde(skip_serializing_if = "Option::is_none")]
    pub request: Option<ConversionRequest>,
    /// Contains conversion response
    #[serde(skip_serializing_if = "Option::is_none")]
    #[serde(default)]
    pub response: Option<ConversionResponse>,
}

/// Part of ConversionReview which is set on input (i.e. generated by apiserver)
#[derive(Serialize, Deserialize)]
pub struct ConversionRequest {
    /// [`TypeMeta`] of the [`ConversionReview`] this response was created from
    ///
    /// This field is copied from the corresponding [`ConversionReview`].
    /// It is not part of the Kubernetes API, it's consumed only by `kube`.
    #[serde(skip)]
    pub types: Option<TypeMeta>,
    /// Random uid uniquely identifying this conversion call
    pub uid: String,
    /// The API group and version the objects should be converted to
    #[serde(rename = "desiredAPIVersion")]
    pub desired_api_version: String,
    /// The list of objects to convert
    ///
    /// Note that list may contain one or more objects, in one or more versions.
    // This field uses raw Value instead of Object/DynamicObject to simplify
    // further downcasting.
    pub objects: Vec<serde_json::Value>,
}

impl ConversionRequest {
    /// Extracts request from the [`ConversionReview`]
    pub fn from_review(review: ConversionReview) -> Result<Self, ConvertConversionReviewError> {
        ConversionRequest::try_from(review)
    }
}

impl TryFrom<ConversionReview> for ConversionRequest {
    type Error = ConvertConversionReviewError;

    fn try_from(review: ConversionReview) -> Result<Self, Self::Error> {
        match review.request {
            Some(mut req) => {
                // Carry the review's TypeMeta along so the eventual response
                // can echo the same apiVersion/kind back.
                req.types = Some(review.types);
                Ok(req)
            }
            None => Err(ConvertConversionReviewError),
        }
    }
}

/// Part of ConversionReview which is set on output (i.e. generated by conversion webhook)
#[derive(Serialize, Deserialize)]
pub struct ConversionResponse {
    /// [`TypeMeta`] of the [`ConversionReview`] this response was derived from
    ///
    /// This field is copied from the corresponding [`ConversionRequest`].
    /// It is not part of the Kubernetes API, it's consumed only by `kube`.
    #[serde(skip)]
    pub types: Option<TypeMeta>,
    /// Copy of .request.uid
    pub uid: String,
    /// Outcome of the conversion operation
    ///
    /// Success: all objects were successfully converted
    /// Failure: at least one object could not be converted.
    /// It is recommended that conversion fails as rare as possible.
    pub result: Status,
    /// Converted objects
    ///
    /// This field should contain objects in the same order as in the request
    /// Should be empty if conversion failed.
    #[serde(rename = "convertedObjects")]
    #[serde(deserialize_with = "parse_converted_objects")]
    pub converted_objects: Vec<serde_json::Value>,
}

// Accepts either a JSON list or JSON null for `convertedObjects`, mapping
// null to an empty vector (the apiserver may omit the list on failure).
fn parse_converted_objects<'de, D>(de: D) -> Result<Vec<serde_json::Value>, D::Error>
where
    D: Deserializer<'de>,
{
    #[derive(Deserialize)]
    #[serde(untagged)]
    enum Helper {
        List(Vec<serde_json::Value>),
        Null(()),
    }
    let h: Helper = Helper::deserialize(de)?;
    let res = match h {
        Helper::List(l) => l,
        Helper::Null(()) => Vec::new(),
    };
    Ok(res)
}

impl ConversionResponse {
    /// Creates a new response, matching provided request
    ///
    /// This response must be finalized with one of:
    /// - [`ConversionResponse::success`] when conversion succeeded
    /// - [`ConversionResponse::failure`] when conversion failed
    pub fn for_request(request: ConversionRequest) -> Self {
        ConversionResponse::from(request)
    }

    /// Creates successful conversion response
    ///
    /// `converted_objects` must specify objects in the exact same order as on input.
    pub fn success(mut self, converted_objects: Vec<serde_json::Value>) -> Self {
        self.result = Status::success();
        self.converted_objects = converted_objects;
        self
    }

    /// Creates failed conversion response (discouraged)
    ///
    /// The given `status` (including its message and reason) will be returned
    /// to the apiserver; the response keeps the `uid` it was created with.
    pub fn failure(mut self, status: Status) -> Self {
        self.result = status;
        self
    }

    /// Creates failed conversion response, not matched with any request
    ///
    /// You should only call this function when request couldn't be parsed into [`ConversionRequest`].
    /// Otherwise use `error`.
    pub fn invalid(status: Status) -> Self {
        ConversionResponse {
            types: None,
            uid: String::new(),
            result: status,
            converted_objects: Vec::new(),
        }
    }

    /// Converts response into a [`ConversionReview`] value, ready to be sent as a response
    pub fn into_review(self) -> ConversionReview {
        self.into()
    }
}

impl From<ConversionRequest> for ConversionResponse {
    fn from(request: ConversionRequest) -> Self {
        ConversionResponse {
            types: request.types,
            uid: request.uid,
            // Placeholder status: the caller must finalize via success/failure.
            result: Status {
                status: None,
                code: 0,
                message: String::new(),
                reason: String::new(),
                details: None,
            },
            converted_objects: Vec::new(),
        }
    }
}

impl From<ConversionResponse> for ConversionReview {
    fn from(mut response: ConversionResponse) -> Self {
        ConversionReview {
            types: response.types.take().unwrap_or_else(|| {
                // we don't know which uid, apiVersion and kind to use, let's just use something
                TypeMeta {
                    api_version: META_API_VERSION_V1.to_string(),
                    kind: META_KIND.to_string(),
                }
            }),
            request: None,
            response: Some(response),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::{ConversionRequest, ConversionResponse};

    #[test]
    fn simple_request_parses() {
        // this file contains dump of real request generated by kubernetes v1.22
        let data = include_str!("./test_data/simple.json");
        // check that we can parse this review, and the whole conversion chain works
        let review = serde_json::from_str(data).unwrap();
        let req = ConversionRequest::from_review(review).unwrap();
        let res = ConversionResponse::for_request(req);
        let _ = res.into_review();
    }
}
extern crate cocoa;

use self::cocoa::base::{id, nil, selector, NO};
use self::cocoa::foundation::{NSAutoreleasePool, NSPoint, NSRect, NSSize, NSString, NSUInteger};
use self::cocoa::appkit::{self, NSApp, NSApplication, NSMenu, NSMenuItem, NSRunningApplication,
                          NSWindow};

use super::common;
use super::super::pane::Pane;
use super::super::project::Project;

mod items;
mod panes;
mod util;

use self::panes::PanesComponent;

/// Wraps the shared `NSApplication` instance plus the process-wide
/// autorelease pool. All `id` fields are raw Objective-C object pointers.
pub struct Application {
    pool: id, // NSAutoreleasePool
    app: id, // NSApplication
}

impl Application {
    /// Creates the Cocoa application object and installs its menu bar.
    pub fn new() -> Application {
        // An autorelease pool must exist before any Cocoa objects are created.
        let pool = unsafe { NSAutoreleasePool::new(nil) };
        let app: id;
        unsafe {
            app = NSApp();
            // Regular policy: the app gets a Dock icon and a menu bar.
            app.setActivationPolicy_(appkit::NSApplicationActivationPolicyRegular);
        }
        let application = Application {
            pool: pool,
            app: app,
        };
        unsafe {
            application.setup_menu();
        }
        application
    }

    // Boilerplate menu setup. Cribbed from:
    // https://github.com/servo/cocoa-rs/blob/master/examples/hello_world.rs
    // Builds an application menu containing a single Cmd-Q "Quit" item.
    unsafe fn setup_menu(&self) {
        let main_menu = NSMenu::new(nil).autorelease();
        self.app.setMainMenu_(main_menu);
        let app_menu_item = NSMenuItem::new(nil).autorelease();
        main_menu.addItem_(app_menu_item);
        let app_menu = NSMenu::new(nil).autorelease();
        app_menu_item.setSubmenu_(app_menu);
        let quit_title = NSString::alloc(nil).init_str("Quit");
        let quit_action = selector("terminate:");
        let quit_key = NSString::alloc(nil).init_str("q");
        let quit_item = NSMenuItem::alloc(nil).initWithTitle_action_keyEquivalent_(
            quit_title,
            quit_action,
            quit_key
        ).autorelease();
        app_menu.addItem_(quit_item);
    }

    /// Bring the application to the front (as we've just been launched) and
    /// start the event loop
    pub fn run(&self) {
        use self::cocoa::appkit::NSApplicationActivateIgnoringOtherApps;
        unsafe {
            let current_app = NSRunningApplication::currentApplication(nil);
            current_app.activateWithOptions_(NSApplicationActivateIgnoringOtherApps);
            // Blocks until the application terminates.
            self.app.run();
        }
    }
}

/// Platform-side menu model backing the cross-platform `common::Menu` trait.
pub struct Menu {
    title: String,
    items: Vec<MenuItem>,
}

impl common::Menu for Menu {
    type MenuItem = MenuItem;

    fn new(title: String, items: Option<Vec<MenuItem>>) -> Menu {
        Menu {
            title: title,
            items: items.unwrap_or(vec![]),
        }
    }
}

pub struct MenuItem {
    title: String,
}

/// An `NSWindow` hosting the panes component as its content view.
pub struct Window {
    window: id, // NSWindow
    panes: PanesComponent,
}

impl Window {
    /// Creates a small titled, closable, resizable window and shows it.
    pub fn new() -> Window {
        use self::cocoa::appkit::*;
        // Window setup stuff cribbed from:
        // https://github.com/servo/cocoa-rs/blob/5f5eece/examples/hello_world.rs#L42-L55
        let style_mask = (NSTitledWindowMask as NSUInteger) |
                         (NSClosableWindowMask as NSUInteger) |
                         (NSMiniaturizableWindowMask as NSUInteger) |
                         (NSResizableWindowMask as NSUInteger);
        let window = unsafe {
            NSWindow::alloc(nil).initWithContentRect_styleMask_backing_defer_(
                NSRect::new(NSPoint::new(0., 0.), NSSize::new(200., 200.)),
                style_mask,
                NSBackingStoreBuffered,
                NO
            ).autorelease()
        };
        let panes = PanesComponent::new();
        unsafe {
            window.setContentView_(panes.view);
            window.cascadeTopLeftFromPoint_(NSPoint::new(20., 20.));
            window.makeKeyAndOrderFront_(nil);
        };
        Window {
            window: window,
            panes: panes,
        }
    }

    /// Retitles the window after the project directory and re-lays-out the
    /// pane views to fill the window's content rect.
    pub fn render(&mut self, project: Project, panes: Vec<Box<Pane>>) {
        if let Some(name) = project.directory.file_name().and_then(|s| s.to_str()) {
            self.set_title(name)
        }
        let content_view = self.panes.view;
        let mut bounds: NSRect = unsafe { self.window.frame() };
        bounds.origin = NSPoint::new(0., 0.);
        let contect_rect: NSRect = unsafe { self.window.contentRectForFrameRect_(bounds) };
        // NOTE(review): `msg_send!` comes from the `objc` crate — presumably
        // imported with `#[macro_use]` at the crate root; confirm.
        unsafe { msg_send![ content_view, setFrame:contect_rect ] };
        self.panes.render(panes)
    }

    pub fn set_title<T: AsRef<str>>(&self, title: T) {
        unsafe {
            let title = NSString::alloc(nil).init_str(title.as_ref());
            self.window.setTitle_(title);
        }
    }
}
#![allow(proc_macro_derive_resolution_fallback)]

use diesel;
use diesel::prelude::*;

use crate::schema::posts;
use crate::posts::Post;

/// Loads every post from the `posts` table.
pub fn all(connection: &PgConnection) -> QueryResult<Vec<Post>> {
    posts::table.load::<Post>(&*connection)
}

/// Fetches a single post by primary key.
pub fn get(id: i32, connection: &PgConnection) -> QueryResult<Post> {
    posts::table.find(id).get_result::<Post>(connection)
}

/// Inserts `post` (minus any database-generated fields) and returns the
/// stored row.
pub fn insert(post: Post, connection: &PgConnection) -> QueryResult<Post> {
    diesel::insert_into(posts::table)
        .values(&InsertablePost::from_post(post))
        .get_result(connection)
}

/// Overwrites the post identified by `id` with `post`'s fields.
/// NOTE(review): relies on `Post` deriving `AsChangeset` elsewhere — confirm.
pub fn update(id: i32, post: Post, connection: &PgConnection) -> QueryResult<Post> {
    diesel::update(posts::table.find(id))
        .set(&post)
        .get_result(connection)
}

/// Deletes the post by id, returning the number of rows removed.
pub fn delete(id: i32, connection: &PgConnection) -> QueryResult<usize> {
    diesel::delete(posts::table.find(id))
        .execute(connection)
}

// Insertion-only projection of `Post`: omits the id so Postgres can assign it.
#[derive(Insertable)]
#[table_name = "posts"]
struct InsertablePost {
    title: String,
    body: String,
    published: bool
}

impl InsertablePost {
    // Strips the id off a full `Post` for insertion.
    fn from_post(post: Post) -> InsertablePost {
        InsertablePost {
            title: post.title,
            body: post.body,
            published: post.published,
        }
    }
}
/// Example of nested modules and calling a function through its full path.
mod school_member {
    /// Teacher-related functionality.
    pub mod teacher {
        /// Prints the salary label to stdout.
        pub fn get_salary() {
            println!("Salary");
        }
    }
}

fn main() {
    // Shorten the path with a scoped `use` instead of spelling it out fully.
    use school_member::teacher;
    teacher::get_salary();
}
use std::collections::BTreeMap; use serde::{Deserialize, Serialize}; use crate::models; use super::common::DBError; use super::engine; const GAME_DB_FILE_NAME: &'static str = "db/game.csv"; const MAP_DB_FILE_NAME: &'static str = "db/map.csv"; const TILES_DB_FILE_NAME: &'static str = "db/tiles.csv"; const CHARACTER_DB_FILE_NAME: &'static str = "db/characters.csv"; const ALL_DB_FILE_NAMES: &'static [&'static str] = &[ GAME_DB_FILE_NAME, MAP_DB_FILE_NAME, TILES_DB_FILE_NAME, CHARACTER_DB_FILE_NAME, ]; #[derive(Serialize, Deserialize, Clone)] struct DBGame { id: u32, map_id: u32, cursor_x: u32, cursor_y: u32, } #[derive(Serialize, Deserialize, Clone)] struct DBMap { id: u32, default_terrain: models::Terrain, hint_max_x: u32, hint_max_y: u32, } #[derive(Serialize, Deserialize, Clone)] struct DBTileLine { id: u32, map_id: u32, terrain: models::Terrain, x: u32, y: u32, } #[derive(Serialize, Deserialize, Clone)] struct DBCharacter { id: u32, game_id: u32, character: models::Character, x: u32, y: u32, } pub struct DB { engine: engine::Engine, } impl DB { pub fn new() -> Self { DB { engine: engine::Engine::new(ALL_DB_FILE_NAMES), } } pub fn get_games(&self) -> Result<Vec<models::Game>, DBError> { Ok(self .read_db_games()? .into_iter() .map(|x| { let map = self.get_db_map(x.map_id)?; let tiles = self.read_db_tile_lines_for_map_id(map.id)?; let characters = self.read_db_characters_for_game_id(x.id)?; Ok(game_model_from_db(x, map, tiles, characters)) }) .collect::<Result<Vec<models::Game>, DBError>>()?) 
} pub fn get_game(&self, game_id: u32) -> Result<models::Game, DBError> { get_single_result(self.get_games()?, |game| game.id == game_id, "games") } fn read_db_games(&self) -> Result<Vec<DBGame>, DBError> { self.engine.read_db_records(GAME_DB_FILE_NAME) } pub fn add_game(&self) -> Result<(), DBError> { let map = self.add_db_map()?; let mut records = self.read_db_games()?; let max_id = records .iter() .fold(0, |acc, game| std::cmp::max(acc, game.id)); records.push(DBGame { id: max_id + 1, map_id: map.id, cursor_x: 0, cursor_y: 0, }); self.engine .write_replace_records(GAME_DB_FILE_NAME, records) } pub fn get_maps(&self) -> Result<Vec<models::Map>, DBError> { Ok(self .read_db_maps()? .into_iter() .map(|map| { let tiles = self.read_db_tile_lines_for_map_id(map.id)?; Ok(map_model_from_db(map, tiles)) }) .collect::<Result<Vec<models::Map>, DBError>>()?) } pub fn get_map(&self, map_id: u32) -> Result<models::Map, DBError> { get_single_result(self.get_maps()?, |record| record.id == map_id, "maps") } fn add_db_map(&self) -> Result<DBMap, DBError> { let mut records = self.read_db_maps()?; let max_id = records .iter() .fold(0, |acc, record| std::cmp::max(acc, record.id)); let new_record = DBMap { id: max_id + 1, default_terrain: models::Terrain::Grass, hint_max_x: 15, hint_max_y: 12, }; records.push(new_record.clone()); self.engine .write_replace_records(MAP_DB_FILE_NAME, records)?; Ok(new_record) } fn get_db_map(&self, map_id: u32) -> Result<DBMap, DBError> { get_single_result(self.read_db_maps()?, |record| record.id == map_id, "maps") } fn read_db_maps(&self) -> Result<Vec<DBMap>, DBError> { self.engine.read_db_records(MAP_DB_FILE_NAME) } pub fn update_game_cursor(&self, id: u32, cursor: (u32, u32)) -> Result<(), DBError> { let records = self .read_db_games()? 
.into_iter() .map(|mut record| { if record.id == id { record.cursor_x = cursor.0; record.cursor_y = cursor.1; record } else { record } }) .collect(); self.engine .write_replace_records(GAME_DB_FILE_NAME, records) } fn read_db_tile_lines_for_map_id(&self, map_id: u32) -> Result<Vec<DBTileLine>, DBError> { Ok(self .read_db_tile_lines()? .into_iter() .filter(|record| record.map_id == map_id) .collect()) } fn read_db_tile_lines(&self) -> Result<Vec<DBTileLine>, DBError> { self.engine.read_db_records(TILES_DB_FILE_NAME) } fn read_db_characters_for_game_id(&self, game_id: u32) -> Result<Vec<DBCharacter>, DBError> { Ok(self .read_db_characters()? .into_iter() .filter(|record| record.game_id == game_id) .collect()) } fn read_db_characters(&self) -> Result<Vec<DBCharacter>, DBError> { self.engine.read_db_records(CHARACTER_DB_FILE_NAME) } pub fn update_game_terrain( &self, game_id: u32, terrain: models::Terrain, ) -> Result<(), DBError> { let mut records = self.read_db_tile_lines()?; let max_id = records .iter() .fold(0, |acc, record| std::cmp::max(acc, record.id)); let game = self.get_game(game_id)?; let new_record = DBTileLine { id: max_id + 1, map_id: game.map.id, terrain: terrain, x: game.current_selection.0, y: game.current_selection.1, }; records.push(new_record.clone()); records = records .into_iter() .map(|record| { ( (record.map_id, record.x, record.y), (record.id, record.terrain), ) }) .collect::<BTreeMap<_, _>>() .into_iter() .map(|(key, value)| DBTileLine { id: value.0, map_id: key.0, terrain: value.1, x: key.1, y: key.2, }) .collect::<Vec<DBTileLine>>(); self.engine .write_replace_records(TILES_DB_FILE_NAME, records)?; Ok(()) } pub fn update_game_character( &self, game_id: u32, character: models::Character, ) -> Result<(), DBError> { let mut records = self.read_db_characters()?; let max_id = records .iter() .fold(0, |acc, record| std::cmp::max(acc, record.id)); let game = self.get_game(game_id)?; let new_record = DBCharacter { id: max_id + 1, game_id: 
game.map.id, character: character, x: game.current_selection.0, y: game.current_selection.1, }; records.push(new_record.clone()); records = records .into_iter() .map(|record| { ( (record.game_id, record.x, record.y), (record.id, record.character), ) }) .collect::<BTreeMap<_, _>>() .into_iter() .map(|(key, value)| DBCharacter { id: value.0, game_id: key.0, character: value.1, x: key.1, y: key.2, }) .collect::<Vec<DBCharacter>>(); self.engine .write_replace_records(CHARACTER_DB_FILE_NAME, records)?; Ok(()) } pub fn unset_game_terrain(&self, game_id: u32) -> Result<(), DBError> { let mut records = self.read_db_tile_lines()?; let game = self.get_game(game_id)?; records = records .into_iter() .filter(|record| { !((record.map_id == game.map.id) && (record.x == game.current_selection.0) && (record.y == game.current_selection.1)) }) .collect(); self.engine .write_replace_records(TILES_DB_FILE_NAME, records)?; Ok(()) } pub fn unset_game_character(&self, game_id: u32) -> Result<(), DBError> { let mut records = self.read_db_characters()?; let game = self.get_game(game_id)?; records = records .into_iter() .filter(|record| { !((record.game_id == game.id) && (record.x == game.current_selection.0) && (record.y == game.current_selection.1)) }) .collect(); self.engine .write_replace_records(CHARACTER_DB_FILE_NAME, records)?; Ok(()) } } fn game_model_from_db( g: DBGame, m: DBMap, tiles: Vec<DBTileLine>, characters: Vec<DBCharacter>, ) -> models::Game { models::Game { id: g.id, map: map_model_from_db(m, tiles), characters: characters .into_iter() .map(|character| ((character.x, character.y), character.character)) .collect::<BTreeMap<_, _>>(), current_selection: (g.cursor_x, g.cursor_y), } } fn map_model_from_db(m: DBMap, tiles: Vec<DBTileLine>) -> models::Map { models::Map { id: m.id, default_terrain: m.default_terrain, specified_terrain: tiles .into_iter() .map(|tile| ((tile.x, tile.y), tile.terrain)) .collect::<BTreeMap<_, _>>(), hint_max_x: m.hint_max_x, hint_max_y: m.hint_max_y, 
} } pub fn get_single_result<T, F: Fn(&T) -> bool>( results: Vec<T>, cmp: F, name: &'static str, ) -> Result<T, DBError> { for result in results.into_iter() { if cmp(&result) { return Ok(result); } } return Err(DBError::FindingRecord(name.into())); }
use wasm_bindgen::{prelude::*, JsCast}; use wasm_bindgen_futures::JsFuture; use web_sys::{console, Document, HtmlElement, MediaDeviceInfo, MediaStream, MediaStreamConstraints, Navigator, Window}; pub fn set_panic_hook() { // When the `console_error_panic_hook` feature is enabled, we can call the // `set_panic_hook` function at least once during initialization, and then // we will get better error messages if our code ever panics. // // For more details see // https://github.com/rustwasm/console_error_panic_hook#readme #[cfg(feature = "console_error_panic_hook")] console_error_panic_hook::set_once(); } #[wasm_bindgen] pub fn get_window() -> Window { let window = web_sys::window().expect("Did not have a global window object"); window } #[wasm_bindgen] pub fn get_document() -> Document { let window = get_window(); window .document() .expect("window should have a document object") } #[wasm_bindgen] pub fn get_body() -> HtmlElement { let doc = get_document(); let body = doc.body().expect("document must have a body"); body } /// Returns a Navigator object #[wasm_bindgen] pub fn get_navigator() -> Navigator { let window = get_window(); let nav = window.navigator(); nav } /// Retrieves MediaStream from the navigator /// /// Since get_user_media() returns a Result<Promise, JsValue>, we extract it from the Result and /// then use JsFuture::from() to turn it into a rust Future. To pull out the data from the Future /// we use the await to wait until it has resolved. 
#[wasm_bindgen] pub async fn get_media_stream() -> Result<MediaStream, JsValue> { let navigator = get_navigator(); let media_devs = navigator.media_devices()?; let mut constraints = MediaStreamConstraints::new(); constraints.video(&js_sys::Boolean::from(true)); match media_devs.get_user_media_with_constraints(&constraints) { Ok(dev) => { let fut = JsFuture::from(dev).await?; let media_stream = MediaStream::from(fut); Ok(media_stream) } Err(e) => Err(e), } } #[wasm_bindgen] pub async fn list_media_devices() -> Result<js_sys::Array, JsValue> { let navigator = get_navigator(); let media_devs = navigator.media_devices()?; // Unfortunately this is all a little bit of magic. enumerate_devices() returns a Promise of an // array of MediaDevice. However, all rust knows is that it is a Promise, but not for example // that is is a Promise<Array<MediaDeviceInfo>> let devices = js_sys::Array::new(); match media_devs.enumerate_devices() { Ok(devs) => { // devs is a Promise<Array<MediaDeviceInfo>>, so we use rust's async await to wait on // the js Promise let media_device_info_arr = JsFuture::from(devs).await?; let iterator = js_sys::try_iter(&media_device_info_arr)? .ok_or_else(|| { console::log_1(&"Could not convert to iterator".into()); }) .expect("Unable to convert to array"); for device in iterator { let device = device?; let device_info = device.dyn_into::<MediaDeviceInfo>()?; let stringified = js_sys::JSON::stringify(&device_info.to_json()).unwrap_or("".into()); console::log_1(&stringified); devices.push(&device_info); } Ok(devices) } Err(e) => Err(e), } }
/// Prints the feature marker for this crate variant to stdout.
pub fn lib_function() {
    let marker = "featureA";
    println!("{}", marker);
}
use std::rc::{Rc, Weak}; use std::cell::{RefCell}; use std::mem; use std::option::Option::{None, Some}; use std::fmt::{Display, Formatter, Debug}; use std::borrow::Borrow; #[derive(Debug,Default)] struct Node<E>{ value:E, pre:Option<Weak<RefCell<Node<E>>>>, next:Option<Rc<RefCell<Node<E>>>> } #[derive(Debug)] pub struct LinkedList<E>{ size:usize, start:Option<Rc<RefCell<Node<E>>>>, end:Option<Rc<RefCell<Node<E>>>> } impl <E> LinkedList<E> where E:Default{ pub fn new() -> LinkedList<E>{ LinkedList{ size:0, start:None, end:None } } pub fn add_last(&mut self,value:E){ self.size+=1; let new_node = Node{ pre:None, next:None, value }; let old_end = mem::replace(&mut self.end, Some(Rc::new(RefCell::new(new_node)))); match old_end { Some(node) => { node.borrow_mut().next = Some(self.end.as_ref().unwrap().clone()); self.end.as_ref().unwrap().borrow_mut().pre = Some(Rc::downgrade(&node.clone())); }, None => { self.start = Some(self.end.as_ref().unwrap().clone()); } } } pub fn add_first(&mut self,value:E){ self.size+=1; let new_node = Node{ pre:None, next:None, value }; let old_start = mem::replace(&mut self.start, Some(Rc::new(RefCell::new(new_node)))); match old_start { Some(node) => { self.start.as_ref().unwrap().borrow_mut().next = Some(node.clone()); node.borrow_mut().pre = Some(Rc::downgrade(&self.start.as_ref().unwrap().clone())); }, None => { self.end = Some(self.start.as_ref().unwrap().clone()); } } } pub fn get(&self, index:usize) -> Option<&E> { if self.size == 0 || index >= self.size{ return None; } let mut count = 0; for value in self.iter() { if count == index { return Some(value); } count += 1; } None } pub fn remove(&mut self,index:usize) -> Option<E>{ if self.size == 0 || index >= self.size{ return None; } self.size -= 1; let mut temp = &self.start; let mut count = 0; while let Some(node) = temp { if count == index { break; } unsafe { temp = &(*node.as_ptr()).next; } count += 1; } let mut node = temp.as_ref().unwrap().take(); let pre = mem::replace(&mut 
node.pre,Option::None); let next = mem::replace(&mut node.next,Option::None); if let Some (node) = pre { node.upgrade().unwrap().borrow_mut().next = next; if let Some(next_node) = &node.upgrade().unwrap().borrow_mut().next { next_node.borrow_mut().pre = Some(node); }else { self.end = Some(node.upgrade().unwrap().clone()); } }else { if let Some(next_node) = next { unsafe { (*next_node.as_ptr()).pre = None; } self.start = Some(next_node.clone()); }else { self.start = None; self.end = None; } } return Some(node.value); } pub fn iter(&self) -> Iter<E>{ Iter{next:self.start.as_ref()} } } impl <T> Display for LinkedList<T> where T:Display+Default { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { let mut result = Vec::with_capacity(self.size); self.iter().for_each(|value|{ result.push(format!("{}",value)); }); write!(f, "{:?}", result) } } pub struct Iter<'a,T>{ next:Option<&'a Rc<RefCell<Node<T>>>> } impl <'a,T> Iterator for Iter<'a,T>{ type Item = &'a T; fn next(&mut self) -> Option<Self::Item> { self.next.map(|node|{ unsafe { self.next = (*(node.as_ptr())).next.as_ref(); (*(node.as_ptr())).value.borrow() } }) } } #[allow(unused_must_use)] impl <T> Drop for LinkedList<T> { fn drop(&mut self) { let mut temp = mem::replace(&mut self.start,None); while let Some(node) = temp{ temp = mem::replace(&mut node.as_ref().borrow_mut().next,None); } mem::replace(&mut self.end,None); } } #[cfg(test)] mod test{ use super::*; #[test] fn test_add() { let mut linked_list = LinkedList::new(); linked_list.add_last(1); linked_list.add_last(2); linked_list.add_last(3); linked_list.add_last(4); linked_list.add_last(5); linked_list.add_first(6); linked_list.add_first(7); linked_list.add_first(8); linked_list.add_first(9); linked_list.add_first(10); println!("{}",linked_list); } #[test] fn test_get() { let mut linked_list = LinkedList::new(); linked_list.add_last(1); linked_list.add_last(2); linked_list.add_last(3); linked_list.add_last(4); linked_list.add_last(5); 
linked_list.add_first(6); linked_list.add_first(7); linked_list.add_first(8); linked_list.add_first(9); linked_list.add_first(10); assert_eq!(9,*linked_list.get(1).unwrap()) } #[test] fn test_remove() { let mut linked_list = LinkedList::new(); linked_list.add_last(1); linked_list.add_last(2); linked_list.add_last(3); linked_list.add_last(4); linked_list.add_last(5); linked_list.add_first(6); linked_list.add_first(7); linked_list.add_first(8); linked_list.add_first(9); linked_list.add_first(10); assert_eq!(2,linked_list.remove(1).unwrap()) } #[test] fn test_drop() { let mut linked_list = LinkedList::new(); linked_list.add_last(1); linked_list.add_last(2); linked_list.add_last(3); linked_list.add_last(4); linked_list.add_last(5); linked_list.add_first(6); linked_list.add_first(7); linked_list.add_first(8); linked_list.add_first(9); linked_list.add_first(10); mem::drop(linked_list); } }
use crate::float::Float; use crate::matrix::{FloatMatrix, FromVectors, IntoVectors, Matrix, M4}; use crate::numeric::Numeric; use crate::vector::{Vector, V4}; use std::ops::{Add, Deref, DerefMut, Div, Mul, Sub}; impl<T> Deref for M4<T> where T: Numeric, { type Target = [[T; 4]; 4]; fn deref(&self) -> &Self::Target { &self.0 } } impl<T> DerefMut for M4<T> where T: Numeric, { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl<T> IntoVectors<(V4<T>, V4<T>, V4<T>, V4<T>)> for M4<T> where T: Numeric, { fn into_cols(&self) -> (V4<T>, V4<T>, V4<T>, V4<T>) { ( V4([self[0][0], self[1][0], self[2][0], self[3][0]]), V4([self[0][1], self[1][1], self[2][1], self[3][1]]), V4([self[0][2], self[1][2], self[2][2], self[3][2]]), V4([self[0][3], self[1][3], self[2][3], self[3][3]]), ) } fn into_rows(&self) -> (V4<T>, V4<T>, V4<T>, V4<T>) { (V4(self[0]), V4(self[1]), V4(self[2]), V4(self[3])) } } impl<T> FromVectors<(V4<T>, V4<T>, V4<T>, V4<T>)> for M4<T> where T: Numeric, { fn from_cols(v: (V4<T>, V4<T>, V4<T>, V4<T>)) -> Self { let (r1, r2, r3, r4) = v; M4([ [r1[0], r2[0], r3[0], r4[0]], [r1[1], r2[1], r3[1], r4[1]], [r1[2], r2[2], r3[2], r4[2]], [r1[3], r2[3], r3[3], r4[3]], ]) } fn from_rows(v: (V4<T>, V4<T>, V4<T>, V4<T>)) -> Self { let (V4(c1), V4(c2), V4(c3), V4(c4)) = v; M4([c1, c2, c3, c4]) } } impl<T> Matrix for M4<T> where T: Numeric, { fn transpose(&mut self) { unsafe { (&mut self[0][1] as *mut T).swap(&mut self[1][0]); (&mut self[0][2] as *mut T).swap(&mut self[2][0]); (&mut self[0][3] as *mut T).swap(&mut self[3][0]); (&mut self[1][2] as *mut T).swap(&mut self[2][1]); (&mut self[1][3] as *mut T).swap(&mut self[3][1]); (&mut self[2][3] as *mut T).swap(&mut self[3][2]); } } } impl<F> FloatMatrix<F> for M4<F> where F: Float, { fn determinant(&self) -> F { self[0][3] * self[1][2] * self[2][1] * self[3][0] - self[0][2] * self[1][3] * self[2][1] * self[3][0] - self[0][3] * self[1][1] * self[2][2] * self[3][0] + self[0][1] * self[1][3] * self[2][2] * self[3][0] 
+ self[0][2] * self[1][1] * self[2][3] * self[3][0] - self[0][1] * self[1][2] * self[2][3] * self[3][0] - self[0][3] * self[1][2] * self[2][0] * self[3][1] + self[0][2] * self[1][3] * self[2][0] * self[3][1] + self[0][3] * self[1][0] * self[2][2] * self[3][1] - self[0][0] * self[1][3] * self[2][2] * self[3][1] - self[0][2] * self[1][0] * self[2][3] * self[3][1] + self[0][0] * self[1][2] * self[2][3] * self[3][1] + self[0][3] * self[1][1] * self[2][0] * self[3][2] - self[0][1] * self[1][3] * self[2][0] * self[3][2] - self[0][3] * self[1][0] * self[2][1] * self[3][2] + self[0][0] * self[1][3] * self[2][1] * self[3][2] + self[0][1] * self[1][0] * self[2][3] * self[3][2] - self[0][0] * self[1][1] * self[2][3] * self[3][2] - self[0][2] * self[1][1] * self[2][0] * self[3][3] + self[0][1] * self[1][2] * self[2][0] * self[3][3] + self[0][2] * self[1][0] * self[2][1] * self[3][3] - self[0][0] * self[1][2] * self[2][1] * self[3][3] - self[0][1] * self[1][0] * self[2][2] * self[3][3] + self[0][0] * self[1][1] * self[2][2] * self[3][3] } fn cofactor(&self) -> Self { M4([ [ self[1][2] * self[2][3] * self[3][1] - self[1][3] * self[2][2] * self[3][1] + self[1][3] * self[2][1] * self[3][2] - self[1][1] * self[2][3] * self[3][2] - self[1][2] * self[2][1] * self[3][3] + self[1][1] * self[2][2] * self[3][3], self[0][3] * self[2][2] * self[3][1] - self[0][2] * self[2][3] * self[3][1] - self[0][3] * self[2][1] * self[3][2] + self[0][1] * self[2][3] * self[3][2] + self[0][2] * self[2][1] * self[3][3] - self[0][1] * self[2][2] * self[3][3], self[0][2] * self[1][3] * self[3][1] - self[0][3] * self[1][2] * self[3][1] + self[0][3] * self[1][1] * self[3][2] - self[0][1] * self[1][3] * self[3][2] - self[0][2] * self[1][1] * self[3][3] + self[0][1] * self[1][2] * self[3][3], self[0][3] * self[1][2] * self[2][1] - self[0][2] * self[1][3] * self[2][1] - self[0][3] * self[1][1] * self[2][2] + self[0][1] * self[1][3] * self[2][2] + self[0][2] * self[1][1] * self[2][3] - self[0][1] * self[1][2] * 
self[2][3], ], [ self[1][3] * self[2][2] * self[3][0] - self[1][2] * self[2][3] * self[3][0] - self[1][3] * self[2][0] * self[3][2] + self[1][0] * self[2][3] * self[3][2] + self[1][2] * self[2][0] * self[3][3] - self[1][0] * self[2][2] * self[3][3], self[0][2] * self[2][3] * self[3][0] - self[0][3] * self[2][2] * self[3][0] + self[0][3] * self[2][0] * self[3][2] - self[0][0] * self[2][3] * self[3][2] - self[0][2] * self[2][0] * self[3][3] + self[0][0] * self[2][2] * self[3][3], self[0][3] * self[1][2] * self[3][0] - self[0][2] * self[1][3] * self[3][0] - self[0][3] * self[1][0] * self[3][2] + self[0][0] * self[1][3] * self[3][2] + self[0][2] * self[1][0] * self[3][3] - self[0][0] * self[1][2] * self[3][3], self[0][2] * self[1][3] * self[2][0] - self[0][3] * self[1][2] * self[2][0] + self[0][3] * self[1][0] * self[2][2] - self[0][0] * self[1][3] * self[2][2] - self[0][2] * self[1][0] * self[2][3] + self[0][0] * self[1][2] * self[2][3], ], [ self[1][1] * self[2][3] * self[3][0] - self[1][3] * self[2][1] * self[3][0] + self[1][3] * self[2][0] * self[3][1] - self[1][0] * self[2][3] * self[3][1] - self[1][1] * self[2][0] * self[3][3] + self[1][0] * self[2][1] * self[3][3], self[0][3] * self[2][1] * self[3][0] - self[0][1] * self[2][3] * self[3][0] - self[0][3] * self[2][0] * self[3][1] + self[0][0] * self[2][3] * self[3][1] + self[0][1] * self[2][0] * self[3][3] - self[0][0] * self[2][1] * self[3][3], self[0][1] * self[1][3] * self[3][0] - self[0][3] * self[1][1] * self[3][0] + self[0][3] * self[1][0] * self[3][1] - self[0][0] * self[1][3] * self[3][1] - self[0][1] * self[1][0] * self[3][3] + self[0][0] * self[1][1] * self[3][3], self[0][3] * self[1][1] * self[2][0] - self[0][1] * self[1][3] * self[2][0] - self[0][3] * self[1][0] * self[2][1] + self[0][0] * self[1][3] * self[2][1] + self[0][1] * self[1][0] * self[2][3] - self[0][0] * self[1][1] * self[2][3], ], [ self[1][2] * self[2][1] * self[3][0] - self[1][1] * self[2][2] * self[3][0] - self[1][2] * self[2][0] * 
self[3][1] + self[1][0] * self[2][2] * self[3][1] + self[1][1] * self[2][0] * self[3][2] - self[1][0] * self[2][1] * self[3][2], self[0][1] * self[2][2] * self[3][0] - self[0][2] * self[2][1] * self[3][0] + self[0][2] * self[2][0] * self[3][1] - self[0][0] * self[2][2] * self[3][1] - self[0][1] * self[2][0] * self[3][2] + self[0][0] * self[2][1] * self[3][2], self[0][2] * self[1][1] * self[3][0] - self[0][1] * self[1][2] * self[3][0] - self[0][2] * self[1][0] * self[3][1] + self[0][0] * self[1][2] * self[3][1] + self[0][1] * self[1][0] * self[3][2] - self[0][0] * self[1][1] * self[3][2], self[0][1] * self[1][2] * self[2][0] - self[0][2] * self[1][1] * self[2][0] + self[0][2] * self[1][0] * self[2][1] - self[0][0] * self[1][2] * self[2][1] - self[0][1] * self[1][0] * self[2][2] + self[0][0] * self[1][1] * self[2][2], ], ]) } } impl<T> Add for M4<T> where T: Numeric, { type Output = M4<T>; fn add(self, rhs: Self) -> Self::Output { M4([ [ self[0][0] + rhs[0][0], self[0][1] + rhs[0][1], self[0][2] + rhs[0][2], self[0][3] + rhs[0][3], ], [ self[1][0] + rhs[1][0], self[1][1] + rhs[1][1], self[1][2] + rhs[1][2], self[1][3] + rhs[1][3], ], [ self[2][0] + rhs[2][0], self[2][1] + rhs[2][1], self[2][2] + rhs[2][2], self[2][3] + rhs[2][3], ], [ self[3][0] + rhs[3][0], self[3][1] + rhs[3][1], self[3][2] + rhs[3][2], self[3][3] + rhs[3][3], ], ]) } } impl<T> Sub for M4<T> where T: Numeric, { type Output = M4<T>; fn sub(self, rhs: Self) -> Self::Output { M4([ [ self[0][0] - rhs[0][0], self[0][1] - rhs[0][1], self[0][2] - rhs[0][2], self[0][3] - rhs[0][3], ], [ self[1][0] - rhs[1][0], self[1][1] - rhs[1][1], self[1][2] - rhs[1][2], self[1][3] - rhs[1][3], ], [ self[2][0] - rhs[2][0], self[2][1] - rhs[2][1], self[2][2] - rhs[2][2], self[2][3] - rhs[2][3], ], [ self[3][0] - rhs[3][0], self[3][1] - rhs[3][1], self[3][2] - rhs[3][2], self[3][3] - rhs[3][3], ], ]) } } impl<T> Mul for M4<T> where T: Numeric, { type Output = M4<T>; fn mul(self, rhs: Self) -> Self::Output { let (c1, c2, 
c3, c4) = self.into_rows(); let (r1, r2, r3, r4) = rhs.into_cols(); M4([ [c1.dot(r1), c1.dot(r2), c1.dot(r3), c1.dot(r4)], [c2.dot(r1), c2.dot(r2), c2.dot(r3), c2.dot(r4)], [c3.dot(r1), c3.dot(r2), c3.dot(r3), c3.dot(r4)], [c4.dot(r1), c4.dot(r2), c4.dot(r3), c4.dot(r4)], ]) } } impl<T> Mul<V4<T>> for M4<T> where T: Numeric, { type Output = V4<T>; fn mul(self, rhs: V4<T>) -> Self::Output { let (c1, c2, c3, c4) = self.into_rows(); V4([c1.dot(rhs), c2.dot(rhs), c3.dot(rhs), c4.dot(rhs)]) } } impl<T> Div<T> for M4<T> where T: Numeric, { type Output = M4<T>; fn div(self, rhs: T) -> Self::Output { M4([ [ self[0][0] / rhs, self[0][1] / rhs, self[0][2] / rhs, self[0][3] / rhs, ], [ self[1][0] / rhs, self[1][1] / rhs, self[1][2] / rhs, self[1][3] / rhs, ], [ self[2][0] / rhs, self[2][1] / rhs, self[2][2] / rhs, self[2][3] / rhs, ], [ self[3][0] / rhs, self[3][1] / rhs, self[3][2] / rhs, self[3][3] / rhs, ], ]) } }
/// Compile-time switch; `true` selects the Windows-style PATH separator below.
pub const DEBUG: bool = true;

/// PATH-entry separator: ";" in DEBUG builds, ":" otherwise.
pub fn get_path_separator() -> String {
    let separator = if DEBUG { ";" } else { ":" };
    separator.to_string()
}

/// Project root; currently unresolved, so the empty string is returned.
pub fn get_project_root() -> String {
    String::new()
}

/// Public asset path; currently unresolved, so the empty string is returned.
pub fn get_public_path() -> String {
    String::new()
}
use rand::distributions::{Distribution, Uniform}; use crate::scorecard::{Scorecard, new_scorecard, get_score_by_index, YAHTZEE_BONUS, get_highest_scores, YAHTZEE, set_score_by_index}; use std::collections::HashSet; #[derive(Debug)] pub struct YahtzeeGame{ pub roll: Vec<u8>, pub scorecard: Scorecard, pub scored_categories: HashSet<u8>, pub roll_num: u8 } pub fn new_game() -> YahtzeeGame{ let roll: Vec<u8> = vec![0, 0, 0, 0, 0]; let game = YahtzeeGame { roll, scorecard: new_scorecard(), scored_categories: HashSet::new(), roll_num: 0 }; game } pub fn roll(game: &mut YahtzeeGame){ if game.roll_num != 0{ panic!("Roll already in progress. Roll # {}. Score current roll before starting a new turn", game.roll_num); } game.roll_num = 1; let mut rng = rand::thread_rng(); let die_roller = Uniform::from(1..7); for i in 0..game.roll.len() { game.roll[i] = die_roller.sample(&mut rng); } } pub fn re_roll(game: &mut YahtzeeGame, hold_die: [bool; 5]){ game.roll_num += 1; if game.roll_num > 3 { panic!("Cannot re-roll. 
Attempting roll # {}", game.roll_num); } let mut rng = rand::thread_rng(); let die_roller = Uniform::from(1..7); for i in 0..hold_die.len() { if !hold_die[i]{ game.roll[i] = die_roller.sample(&mut rng); } } } pub fn record_score(game: &mut YahtzeeGame, roll_score: &Scorecard, score_index:u8) { if score_index == YAHTZEE_BONUS{ game.roll_num = 0; game.scorecard.yahtzee_bonus_count += 1; record_highest_non_yahtzee_score(game, roll_score); return; } if !game.scored_categories.insert(score_index) { panic!("Already scored for specified score_index {}", score_index); } game.roll_num = 0; let score = get_score_by_index(roll_score, score_index); set_score_by_index(&mut game.scorecard, score, score_index); } pub fn record_highest_non_yahtzee_score(game: &mut YahtzeeGame, roll_score: &Scorecard){ let sorted_scores = get_highest_scores(roll_score); for i in 0..sorted_scores.len(){ if sorted_scores[i].0 == YAHTZEE{ continue; } if !game.scored_categories.contains(&sorted_scores[i].0){ record_score(game, roll_score, sorted_scores[i].0); return; } } panic!("Unable to score highest"); } pub fn is_game_over(game: &mut YahtzeeGame) -> bool{ game.scored_categories.len() == 13 }
pub mod traits;
//! Tests auto-converted from "sass-spec/spec/non_conformant/scss/media" #[allow(unused)] use super::rsass; // From "sass-spec/spec/non_conformant/scss/media/interpolated.hrx" #[test] fn interpolated() { assert_eq!( rsass( "// You can interpolate into a media type.\ \n@media bar#{12} {x {y: z}}\ \n\ \n// Media queries should be reparsed after interpolation is resolved.\ \n@media #{\"only screen\"} and\ \n #{\"(min-width: 700px)\"} and\ \n #{\"(max-width: \"+\"1920px)\"} {\ \n x {y: z}\ \n}\ \n\ \n// Queries don\'t have to fully parse before interpolation is resolved.\ \n@media scr#{\"een, pri\"}nt a#{\"nd (max-width: 300px)\"} {x {y: z}}\ \n\ \n\ \n\ \n" ) .unwrap(), "@media bar12 {\ \n x {\ \n y: z;\ \n }\ \n}\ \n@media only screen and (min-width: 700px) and (max-width: 1920px) {\ \n x {\ \n y: z;\ \n }\ \n}\ \n@media screen, print and (max-width: 300px) {\ \n x {\ \n y: z;\ \n }\ \n}\ \n" ); } mod nesting; // From "sass-spec/spec/non_conformant/scss/media/script_features.hrx" #[test] fn script_features() { assert_eq!( rsass( "$foo: 3;\ \n$bar: 4;\ \n// Media features are special-cased to allow raw script without interpolation.\ \n@media only screen and (max-width: $foo) and (min-width: $bar) {x {y: z}}\ \n\ \n// Not just variables, but full script\ \n$vals: 1 2 3;\ \n@media screen and (max-width: 1 + 2) and (min-width: 5 + 6 + nth($vals, 2)) {x {y: z}}\ \n\ \n" ) .unwrap(), "@media only screen and (max-width: 3) and (min-width: 4) {\ \n x {\ \n y: z;\ \n }\ \n}\ \n@media screen and (max-width: 3) and (min-width: 13) {\ \n x {\ \n y: z;\ \n }\ \n}\ \n" ); }
//! Gets temperature data via sysinfo. use super::{is_temp_filtered, temp_vec_sort, TempHarvest, TemperatureType}; use crate::app::Filter; pub async fn get_temperature_data( sys: &sysinfo::System, temp_type: &TemperatureType, actually_get: bool, filter: &Option<Filter>, ) -> crate::utils::error::Result<Option<Vec<TempHarvest>>> { use sysinfo::{ComponentExt, SystemExt}; if !actually_get { return Ok(None); } fn convert_celsius_to_kelvin(celsius: f32) -> f32 { celsius + 273.15 } fn convert_celsius_to_fahrenheit(celsius: f32) -> f32 { (celsius * (9.0 / 5.0)) + 32.0 } let mut temperature_vec: Vec<TempHarvest> = Vec::new(); let sensor_data = sys.get_components(); for component in sensor_data { let name = component.get_label().to_string(); if is_temp_filtered(filter, &name) { temperature_vec.push(TempHarvest { name, temperature: match temp_type { TemperatureType::Celsius => component.get_temperature(), TemperatureType::Kelvin => { convert_celsius_to_kelvin(component.get_temperature()) } TemperatureType::Fahrenheit => { convert_celsius_to_fahrenheit(component.get_temperature()) } }, }); } } temp_vec_sort(&mut temperature_vec); Ok(Some(temperature_vec)) }
mod blog;

#[cfg(test)]
mod tests {
    use crate::blog::Post;

    // Helper: a fresh post with `text` already added (still unpublished,
    // so `content()` returns "" until the approval flow completes).
    fn sample_post(text: &str) -> Post {
        let mut post = Post::new();
        post.add_text(text);
        post
    }

    #[test]
    fn test_flow() {
        let mut post = sample_post("salad");
        // Content stays hidden through review and the first approval;
        // only the second approve() makes it visible.
        assert_eq!("", post.content());
        post.request_review();
        assert_eq!("", post.content());
        post.approve();
        assert_eq!("", post.content());
        post.approve();
        assert_eq!("salad", post.content());
    }

    #[test]
    fn test_reject() {
        let mut post = sample_post("salad");
        post.request_review();
        // A rejected post keeps its content hidden.
        post.reject();
        assert_eq!("", post.content());
    }
}
//! Crate root: external dependencies and the public module layout.

extern crate bigint;
#[macro_use]
extern crate failure;
extern crate rlp;
extern crate tiny_keccak;

pub mod asm;
pub mod errors;
pub mod vm;

#[cfg(test)]
mod tests {
    // Sanity check that the test harness itself runs.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
use std::sync::Arc;

use crate::prelude::*;
use crate::bxdf::TransportMode;
use super::Primitive;
use crate::interaction::SurfaceInteraction;
use crate::light::Light;
use crate::material::Material;
use crate::math::AnimatedTransform;

/// A primitive positioned in the scene by an animated (time-varying)
/// primitive-to-world transform. Intersections are computed by transforming
/// the ray into the wrapped primitive's space and mapping the hit back out.
///
/// Material/area-light queries are deliberately unsupported here and panic;
/// callers are expected to resolve them on the wrapped primitive instead.
#[derive(Clone, Debug)]
pub struct TransformedPrimitive {
    pub primitive: Arc<dyn Primitive + Send + Sync>,
    pub primitive_to_world: Arc<AnimatedTransform>,
}

impl Primitive for TransformedPrimitive {
    // NOTE(review): `&'a self` without declaring `'a` relies on a lifetime
    // supplied by the trait or the nightly `in_band_lifetimes` feature —
    // confirm the crate enables it.
    fn intersect(&'a self, ray: &mut Ray) -> Option<SurfaceInteraction<'a>> {
        // Evaluate the transform at the ray's time, then trace in the
        // primitive's local space.
        let interpolated = self.primitive_to_world.interpolate(ray.time);
        let mut i_ray = interpolated.inverse().transform_ray(*ray);
        if let Some(mut isect) = self.primitive.intersect(&mut i_ray) {
            // Propagate the (possibly shortened) hit distance back to the
            // caller's ray.
            ray.max = i_ray.max;
            isect.primitive = Some(&*self.primitive);
            if !interpolated.is_identity() {
                // Map the interaction back to world space; the shading normal
                // must remain in the same hemisphere as the geometric normal.
                isect = interpolated.transform_surface_interaction(&isect);
                assert!(isect.n.unwrap().dot(isect.shading.n) >= 0.0);
            }
            Some(isect)
        } else {
            None
        }
    }

    // Occlusion-only variant: no interaction is constructed.
    fn intersect_p(&self, ray: &Ray) -> bool {
        let interpolated = self.primitive_to_world.interpolate(ray.time);
        let ray = interpolated.inverse().transform_ray(*ray);
        self.primitive.intersect_p(&ray)
    }

    // World-space bound of the object-space bound swept over the whole
    // animation interval.
    fn world_bound(&self) -> Bounds3<Float> {
        self.primitive_to_world.motion_bounds(self.primitive.world_bound())
    }

    fn get_area_light(&self) -> Option<Arc<dyn Light + Send + Sync>> {
        panic!("TransformedPrimitive::get_area_light should never be called")
    }

    fn get_material(&self) -> Option<&(dyn Material + Send + Sync)> {
        panic!("TransformedPrimitive::get_material should never be called")
    }

    fn compute_scattering_functions(&'a self, _: SurfaceInteraction<'a>, _: &(), _: TransportMode, _: bool) -> SurfaceInteraction<'a> {
        panic!("TransformedPrimitive::compute_scattering_functions should never be called");
    }
}
use std::io; use std::env::var_os as env_var; use super::{Vars, VarsError}; #[derive(Debug)] pub enum ExprInternalError { UnexpectedEof, UnknownExpressionType, UnknownEnv(String), } pub enum ExprError { Vars(VarsError), Input(io::Error), Output(io::Error), Internal(ExprInternalError), } impl From<VarsError> for ExprError { fn from(from: VarsError) -> ExprError { ExprError::Vars(from) } } impl From<ExprInternalError> for ExprError { fn from(from: ExprInternalError) -> ExprError { ExprError::Internal(from) } } enum ExprType { Env, Var, } pub fn translate_expr<R: Iterator<Item = io::Result<char>>, W: io::Write>( input_chars: &mut R, output: &mut W, vars: &Box<dyn Vars>, ) -> Result<(), ExprError> { let mut s = String::new(); loop { match input_chars.next() { None => { return Err(ExprInternalError::UnexpectedEof)? } Some(Err(e)) => return Err(ExprError::Input(e)), Some(Ok(ch)) if ch == '}' => break, Some(Ok(ch)) => s.push(ch), } } if s.bytes().len() < 4 { return Err(ExprInternalError::UnknownExpressionType)?; } let (expr_type, expr_path) = if &s[0..4] == "env " { (ExprType::Env, &s[4..]) } else if &s[0..4] == "var " { (ExprType::Var, &s[4..]) } else { return Err(ExprInternalError::UnknownExpressionType)?; }; match expr_type { ExprType::Var => { output.write(vars.get(expr_path)?.as_bytes()).map_err(ExprError::Output)?; Ok(()) }, ExprType::Env => match env_var(expr_path) { Some(value) => { use std::os::unix::ffi::OsStrExt; // linux only for now ... sorry output.write(value.as_bytes()).map_err(ExprError::Output)?; Ok(()) } None => Err(ExprInternalError::UnknownEnv(expr_path.into()))?, }, } }
#![no_std]
#![feature(maybe_uninit_uninit_array)]
#![feature(maybe_uninit_slice)]

mod device;
pub mod events;
mod gpio;
mod timer;

use core::cmp::Ordering;
use core::mem::MaybeUninit;
use device::TimerID;
use gpio::InputPin;
use gpio::OutputPin;
use once_cell::unsync::OnceCell;

use device::Pin;
use device::Port;
use gpio::GpioError;
use gpio::GpioID;
use gpio::InputGpio;
use gpio::OutputGpio;
use timer::GeneralPurposeTimer;
use timer::Timer;

/// Errors surfaced by the component registry and its builder.
#[non_exhaustive]
pub enum ComponentError {
    LateInitAction,
    EarlyAccessAction,
    NotFound,
    OOM,
    GpioError(GpioError),
    NotEnoughMemory,
    ConversionError,
}

/// A single registered hardware resource (a GPIO pin or a timer).
#[derive(PartialEq, Eq)]
pub enum Component {
    InputGpio(InputGpio),
    OutputGpio(OutputGpio),
    Timer(GeneralPurposeTimer),
}

// Index into the component array. NOTE(review): appears unused within this
// file — confirm whether other modules rely on it before removing.
#[derive(PartialEq, Eq, Debug, Copy, Clone, PartialOrd, Ord, Hash)]
struct ComponentIndex(pub(crate) u8);

/// Global, sorted registry of all components.
///
/// Functions are unsafe because no concurrency safeties are guaranteed.
/// It's your responsibility to synchronize component access.
#[repr(transparent)]
pub struct Components(&'static mut [Component]);

/// Fills a statically allocated `MaybeUninit` array (back to front) and, once
/// full, sorts it and installs it as the global `Components` registry.
pub struct ComponentsBuilder<const COMPONENT_COUNT: usize> {
    array: &'static mut [MaybeUninit<Component>; COMPONENT_COUNT],
    // Number of slots not yet written; doubles as the index of the next write.
    free_space: usize,
}

impl Component {
    /// The Ordering: Gpios sort before Timers (see `compare_with_gpio_id` /
    /// `compare_with_timer_id` below). `Components::search_array` relies on
    /// this ordering for its binary search.
    /// Gpios are ordered as the Gpio type without respect to Gpio-Kind (In, Out, etc.)
    fn comparator(&self, other: &Self) -> Ordering {
        match other {
            Component::InputGpio(other) => {
                Component::compare_with_gpio_id(self, &other.0.to_gpio())
            }
            Component::OutputGpio(other) => {
                Component::compare_with_gpio_id(self, &other.0.to_gpio())
            }
            Component::Timer(other) => {
                Component::compare_with_timer_id(&self, &other.0.to_timer_id())
            }
        }
    }
    /// Compares `self` against a GPIO id; non-GPIO components sort after GPIOs.
    #[inline]
    fn compare_with_gpio_id(&self, other: &GpioID) -> Ordering {
        match self.to_gpio_id() {
            Ok(gpio) => gpio.cmp(other),
            Err(_) => Ordering::Greater,
        }
    }
    /// Compares `self` against a timer id; GPIO components sort before timers.
    #[inline]
    fn compare_with_timer_id(&self, other: &TimerID) -> Ordering {
        match self {
            Component::InputGpio(_) => Ordering::Less,
            Component::OutputGpio(_) => Ordering::Less,
            Component::Timer(timer) => timer.0.to_timer_id().cmp(&other),
            _ => Ordering::Greater,
        }
    }
    /// Extracts the GPIO id of a GPIO component, `ConversionError` otherwise.
    #[inline]
    fn to_gpio_id(&self) -> Result<GpioID, ComponentError> {
        match self {
            Component::InputGpio(gpio) => Ok(gpio.0.to_gpio()),
            Component::OutputGpio(gpio) => Ok(gpio.0.to_gpio()),
            _ => Err(ComponentError::ConversionError),
        }
    }
    /// Extracts the timer id of a timer component, `ConversionError` otherwise.
    #[inline]
    fn to_timer_id(&self) -> Result<TimerID, ComponentError> {
        match self {
            Component::Timer(timer) => Ok(timer.0.to_timer_id()),
            _ => Err(ComponentError::ConversionError),
        }
    }
}

impl<const COMPONENT_COUNT: usize> ComponentsBuilder<COMPONENT_COUNT> {
    /// Creates the uninitialized backing storage for a builder.
    pub const fn allocate_array() -> [MaybeUninit<Component>; COMPONENT_COUNT] {
        MaybeUninit::uninit_array()
    }
    pub fn new(array: &'static mut [MaybeUninit<Component>; COMPONENT_COUNT]) -> Self {
        Self {
            array,
            free_space: COMPONENT_COUNT,
        }
    }
    /// Writes `component` into the next free slot (filling back to front),
    /// or `OOM` when the array is already full.
    fn add_component(&mut self, component: Component) -> Result<(), ComponentError> {
        if self.free_space > 0 {
            self.free_space -= 1;
            self.array[self.free_space].write(component);
            Ok(())
        } else {
            Err(ComponentError::OOM)
        }
    }
    pub fn add_input_pin(&mut self, gpio: &'static mut dyn InputPin) -> Result<(), ComponentError> {
        self.add_component(Component::InputGpio(InputGpio(gpio)))
    }
    pub fn add_output_pin(
        &mut self,
        gpio: &'static mut dyn OutputPin,
    ) -> Result<(), ComponentError> {
        self.add_component(Component::OutputGpio(OutputGpio(gpio)))
    }
    pub fn add_timer(&mut self, timer: &'static mut dyn Timer) -> Result<(), ComponentError> {
        self.add_component(Component::Timer(GeneralPurposeTimer(timer)))
    }
    /// Sorts the (fully initialized) array and installs it as the global
    /// registry. Errs when slots are still uninitialized; panics if the
    /// registry was already installed once.
    pub unsafe fn finalize(self) -> Result<&'static Components, ()> {
        if self.free_space > 0 {
            // the array has to be initialized completely
            return Err(());
        }
        // Safe only because free_space == 0 proves every slot was written.
        let array = MaybeUninit::slice_assume_init_mut(self.array);
        // Sorted order is required by Components::search_array's binary search.
        array.sort_unstable_by(|this, other| Component::comparator(this, other));
        Components::static_array()
            .set(array)
            .map_err(|_| Err::<(), ()>(()))
            .expect("Multiple Component initialization");
        Ok(Components::get())
    }
}

impl Components {
    // Single-threaded access assumed (unsync::OnceCell in a static mut);
    // see the struct docs — callers must provide synchronization.
    unsafe fn static_array() -> &'static mut OnceCell<&'static mut [Component]> {
        static mut ARRAY: OnceCell<&mut [Component]> = OnceCell::new();
        &mut ARRAY
    }
    unsafe fn get() -> &'static mut Self {
        let array = Self::static_array()
            .get_mut()
            .expect("Tried to access uninitialized Components");
        // its the same pointer as the array pointer since the type representation of Self is 'transparent'
        // https://doc.rust-lang.org/1.41.1/reference/type-layout.html#representations
        &mut *(*array as *mut [Component] as *mut Self)
    }
    /// Looks up the input pin registered for `pin`/`port`, or `NotFound`.
    pub unsafe fn get_input_pin(
        pin: Pin,
        port: Port,
    ) -> Result<&'static mut dyn InputPin, ComponentError> {
        let gpio = GpioID { pin, port };
        let index = Self::search_array(&gpio, Component::compare_with_gpio_id)?;
        // check if the gpio kind actually matches
        match &mut Self::get().0[index] {
            Component::InputGpio(gpio) => Ok(gpio.0),
            _ => Err(ComponentError::NotFound),
        }
    }
    /// Looks up the output pin registered for `pin`/`port`, or `NotFound`.
    pub unsafe fn get_output_pin(
        pin: Pin,
        port: Port,
    ) -> Result<&'static mut dyn OutputPin, ComponentError> {
        let gpio = GpioID { pin, port };
        let index = Self::search_array(&gpio, Component::compare_with_gpio_id)?;
        // check if the gpio kind actually matches
        match &mut Self::get().0[index] {
            Component::OutputGpio(gpio) => Ok(gpio.0),
            _ => Err(ComponentError::NotFound),
        }
    }
    /// Looks up the timer registered under `id`, or `NotFound`.
    pub unsafe fn get_timer(id: TimerID) -> Result<&'static mut dyn Timer, ComponentError> {
        let index = Self::search_array(&id, Component::compare_with_timer_id)?;
        if let Component::Timer(timer) = &mut Self::get().0[index] {
            Ok(timer.0)
        } else {
            Err(ComponentError::NotFound)
        }
    }
    /// Search for a key in the component array, with a function f that can compare the key with components.
    /// Binary search: correctness depends on the array being sorted by
    /// `Component::comparator` (done in `finalize`).
    unsafe fn search_array<K>(
        key: &K,
        f: fn(&Component, &K) -> Ordering,
    ) -> Result<usize, ComponentError> {
        Self::get()
            .0
            .binary_search_by(|value| f(value, key))
            .map_err(|_| ComponentError::NotFound)
    }
}
// NOTE(review): this file has the exact shape of svd2rust-generated register
// accessor code (R/W proxies over `crate::R`/`crate::W`). If it is generated,
// fix the SVD description and regenerate rather than hand-editing — confirm
// provenance.
//
// SPI_CTRLR0 field layout, as established by the masks/shifts below:
//   [31:24] XIP_CMD, [18] SPI_RXDS_EN, [17] INST_DDR_EN, [16] SPI_DDR_EN,
//   [15:11] WAIT_CYCLES, [9:8] INST_L, [5:2] ADDR_L, [1:0] TRANS_TYPE.
#[doc = "Reader of register SPI_CTRLR0"]
pub type R = crate::R<u32, super::SPI_CTRLR0>;
#[doc = "Writer for register SPI_CTRLR0"]
pub type W = crate::W<u32, super::SPI_CTRLR0>;
#[doc = "Register SPI_CTRLR0 `reset()`'s with value 0x0300_0000"]
impl crate::ResetValue for super::SPI_CTRLR0 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0300_0000
    }
}
#[doc = "Reader of field `XIP_CMD`"]
pub type XIP_CMD_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `XIP_CMD`"]
pub struct XIP_CMD_W<'a> {
    w: &'a mut W,
}
impl<'a> XIP_CMD_W<'a> {
    // `unsafe`: the field is not enumerated, so any raw 8-bit value may be written.
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0xff << 24)) | (((value as u32) & 0xff) << 24);
        self.w
    }
}
#[doc = "Reader of field `SPI_RXDS_EN`"]
pub type SPI_RXDS_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SPI_RXDS_EN`"]
pub struct SPI_RXDS_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> SPI_RXDS_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
        self.w
    }
}
#[doc = "Reader of field `INST_DDR_EN`"]
pub type INST_DDR_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INST_DDR_EN`"]
pub struct INST_DDR_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> INST_DDR_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
        self.w
    }
}
#[doc = "Reader of field `SPI_DDR_EN`"]
pub type SPI_DDR_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SPI_DDR_EN`"]
pub struct SPI_DDR_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> SPI_DDR_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
#[doc = "Reader of field `WAIT_CYCLES`"]
pub type WAIT_CYCLES_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WAIT_CYCLES`"]
pub struct WAIT_CYCLES_W<'a> {
    w: &'a mut W,
}
impl<'a> WAIT_CYCLES_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x1f << 11)) | (((value as u32) & 0x1f) << 11);
        self.w
    }
}
#[doc = "Instruction length (0/4/8/16b)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum INST_L_A {
    #[doc = "0: No instruction"]
    NONE = 0,
    #[doc = "1: 4-bit instruction"]
    _4B = 1,
    #[doc = "2: 8-bit instruction"]
    _8B = 2,
    #[doc = "3: 16-bit instruction"]
    _16B = 3,
}
impl From<INST_L_A> for u8 {
    #[inline(always)]
    fn from(variant: INST_L_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `INST_L`"]
pub type INST_L_R = crate::R<u8, INST_L_A>;
impl INST_L_R {
    // All four 2-bit patterns are enumerated, so `unreachable!()` below is
    // genuinely unreachable (the reader masks the field to 2 bits).
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> INST_L_A {
        match self.bits {
            0 => INST_L_A::NONE,
            1 => INST_L_A::_4B,
            2 => INST_L_A::_8B,
            3 => INST_L_A::_16B,
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `NONE`"]
    #[inline(always)]
    pub fn is_none(&self) -> bool {
        *self == INST_L_A::NONE
    }
    #[doc = "Checks if the value of the field is `_4B`"]
    #[inline(always)]
    pub fn is_4b(&self) -> bool {
        *self == INST_L_A::_4B
    }
    #[doc = "Checks if the value of the field is `_8B`"]
    #[inline(always)]
    pub fn is_8b(&self) -> bool {
        *self == INST_L_A::_8B
    }
    #[doc = "Checks if the value of the field is `_16B`"]
    #[inline(always)]
    pub fn is_16b(&self) -> bool {
        *self == INST_L_A::_16B
    }
}
#[doc = "Write proxy for field `INST_L`"]
pub struct INST_L_W<'a> {
    w: &'a mut W,
}
impl<'a> INST_L_W<'a> {
    // `bits` is safe here (no `unsafe`) because the enum covers every value.
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: INST_L_A) -> &'a mut W {
        {
            self.bits(variant.into())
        }
    }
    #[doc = "No instruction"]
    #[inline(always)]
    pub fn none(self) -> &'a mut W {
        self.variant(INST_L_A::NONE)
    }
    #[doc = "4-bit instruction"]
    #[inline(always)]
    pub fn _4b(self) -> &'a mut W {
        self.variant(INST_L_A::_4B)
    }
    #[doc = "8-bit instruction"]
    #[inline(always)]
    pub fn _8b(self) -> &'a mut W {
        self.variant(INST_L_A::_8B)
    }
    #[doc = "16-bit instruction"]
    #[inline(always)]
    pub fn _16b(self) -> &'a mut W {
        self.variant(INST_L_A::_16B)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);
        self.w
    }
}
#[doc = "Reader of field `ADDR_L`"]
pub type ADDR_L_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ADDR_L`"]
pub struct ADDR_L_W<'a> {
    w: &'a mut W,
}
impl<'a> ADDR_L_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0f << 2)) | (((value as u32) & 0x0f) << 2);
        self.w
    }
}
#[doc = "Address and instruction transfer format\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum TRANS_TYPE_A {
    #[doc = "0: Command and address both in standard SPI frame format"]
    _1C1A = 0,
    #[doc = "1: Command in standard SPI format, address in format specified by FRF"]
    _1C2A = 1,
    #[doc = "2: Command and address both in format specified by FRF (e.g. Dual-SPI)"]
    _2C2A = 2,
}
impl From<TRANS_TYPE_A> for u8 {
    #[inline(always)]
    fn from(variant: TRANS_TYPE_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `TRANS_TYPE`"]
pub type TRANS_TYPE_R = crate::R<u8, TRANS_TYPE_A>;
impl TRANS_TYPE_R {
    // Value 3 is not enumerated, so the reader returns `Variant::Res(3)`
    // (a reserved value) instead of panicking.
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, TRANS_TYPE_A> {
        use crate::Variant::*;
        match self.bits {
            0 => Val(TRANS_TYPE_A::_1C1A),
            1 => Val(TRANS_TYPE_A::_1C2A),
            2 => Val(TRANS_TYPE_A::_2C2A),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `_1C1A`"]
    #[inline(always)]
    pub fn is_1c1a(&self) -> bool {
        *self == TRANS_TYPE_A::_1C1A
    }
    #[doc = "Checks if the value of the field is `_1C2A`"]
    #[inline(always)]
    pub fn is_1c2a(&self) -> bool {
        *self == TRANS_TYPE_A::_1C2A
    }
    #[doc = "Checks if the value of the field is `_2C2A`"]
    #[inline(always)]
    pub fn is_2c2a(&self) -> bool {
        *self == TRANS_TYPE_A::_2C2A
    }
}
#[doc = "Write proxy for field `TRANS_TYPE`"]
pub struct TRANS_TYPE_W<'a> {
    w: &'a mut W,
}
impl<'a> TRANS_TYPE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: TRANS_TYPE_A) -> &'a mut W {
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "Command and address both in standard SPI frame format"]
    #[inline(always)]
    pub fn _1c1a(self) -> &'a mut W {
        self.variant(TRANS_TYPE_A::_1C1A)
    }
    #[doc = "Command in standard SPI format, address in format specified by FRF"]
    #[inline(always)]
    pub fn _1c2a(self) -> &'a mut W {
        self.variant(TRANS_TYPE_A::_1C2A)
    }
    #[doc = "Command and address both in format specified by FRF (e.g. Dual-SPI)"]
    #[inline(always)]
    pub fn _2c2a(self) -> &'a mut W {
        self.variant(TRANS_TYPE_A::_2C2A)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03);
        self.w
    }
}
impl R {
    #[doc = "Bits 24:31 - SPI Command to send in XIP mode (INST_L = 8-bit) or to append to Address (INST_L = 0-bit)"]
    #[inline(always)]
    pub fn xip_cmd(&self) -> XIP_CMD_R {
        XIP_CMD_R::new(((self.bits >> 24) & 0xff) as u8)
    }
    #[doc = "Bit 18 - Read data strobe enable"]
    #[inline(always)]
    pub fn spi_rxds_en(&self) -> SPI_RXDS_EN_R {
        SPI_RXDS_EN_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 17 - Instruction DDR transfer enable"]
    #[inline(always)]
    pub fn inst_ddr_en(&self) -> INST_DDR_EN_R {
        INST_DDR_EN_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bit 16 - SPI DDR transfer enable"]
    #[inline(always)]
    pub fn spi_ddr_en(&self) -> SPI_DDR_EN_R {
        SPI_DDR_EN_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bits 11:15 - Wait cycles between control frame transmit and data reception (in SCLK cycles)"]
    #[inline(always)]
    pub fn wait_cycles(&self) -> WAIT_CYCLES_R {
        WAIT_CYCLES_R::new(((self.bits >> 11) & 0x1f) as u8)
    }
    #[doc = "Bits 8:9 - Instruction length (0/4/8/16b)"]
    #[inline(always)]
    pub fn inst_l(&self) -> INST_L_R {
        INST_L_R::new(((self.bits >> 8) & 0x03) as u8)
    }
    #[doc = "Bits 2:5 - Address length (0b-60b in 4b increments)"]
    #[inline(always)]
    pub fn addr_l(&self) -> ADDR_L_R {
        ADDR_L_R::new(((self.bits >> 2) & 0x0f) as u8)
    }
    #[doc = "Bits 0:1 - Address and instruction transfer format"]
    #[inline(always)]
    pub fn trans_type(&self) -> TRANS_TYPE_R {
        TRANS_TYPE_R::new((self.bits & 0x03) as u8)
    }
}
impl W {
    #[doc = "Bits 24:31 - SPI Command to send in XIP mode (INST_L = 8-bit) or to append to Address (INST_L = 0-bit)"]
    #[inline(always)]
    pub fn xip_cmd(&mut self) -> XIP_CMD_W {
        XIP_CMD_W { w: self }
    }
    #[doc = "Bit 18 - Read data strobe enable"]
    #[inline(always)]
    pub fn spi_rxds_en(&mut self) -> SPI_RXDS_EN_W {
        SPI_RXDS_EN_W { w: self }
    }
    #[doc = "Bit 17 - Instruction DDR transfer enable"]
    #[inline(always)]
    pub fn inst_ddr_en(&mut self) -> INST_DDR_EN_W {
        INST_DDR_EN_W { w: self }
    }
    #[doc = "Bit 16 - SPI DDR transfer enable"]
    #[inline(always)]
    pub fn spi_ddr_en(&mut self) -> SPI_DDR_EN_W {
        SPI_DDR_EN_W { w: self }
    }
    #[doc = "Bits 11:15 - Wait cycles between control frame transmit and data reception (in SCLK cycles)"]
    #[inline(always)]
    pub fn wait_cycles(&mut self) -> WAIT_CYCLES_W {
        WAIT_CYCLES_W { w: self }
    }
    #[doc = "Bits 8:9 - Instruction length (0/4/8/16b)"]
    #[inline(always)]
    pub fn inst_l(&mut self) -> INST_L_W {
        INST_L_W { w: self }
    }
    #[doc = "Bits 2:5 - Address length (0b-60b in 4b increments)"]
    #[inline(always)]
    pub fn addr_l(&mut self) -> ADDR_L_W {
        ADDR_L_W { w: self }
    }
    #[doc = "Bits 0:1 - Address and instruction transfer format"]
    #[inline(always)]
    pub fn trans_type(&mut self) -> TRANS_TYPE_W {
        TRANS_TYPE_W { w: self }
    }
}
// https://adventofcode.com/2017/day/15

/// Modulus shared by both generators (the Mersenne prime 2^31 - 1).
const MODULUS: u64 = 2147483647;
/// Multiplication factor of generator A.
const GEN_A_FACTOR: u64 = 16807;
/// Multiplication factor of generator B.
const GEN_B_FACTOR: u64 = 48271;

/// Infinite stream of generator values: each value is the previous one times
/// `factor`, modulo `MODULUS`. Only values divisible by `multiple` are
/// yielded (pass 1 to keep every value). The seed itself is not yielded.
///
/// `prev * factor` cannot overflow: both operands are < 2^31 and < 2^17,
/// so the product fits comfortably in a `u64`.
fn generator(seed: u64, factor: u64, multiple: u64) -> impl Iterator<Item = u64> {
    std::iter::successors(Some(seed), move |&prev| Some(prev * factor % MODULUS))
        .skip(1)
        .filter(move |value| value % multiple == 0)
}

/// Counts how many of the first `pairs` pairs drawn from the two generators
/// agree in their lowest 16 bits (the puzzle's "judge").
fn count_matches(
    gen_a: impl Iterator<Item = u64>,
    gen_b: impl Iterator<Item = u64>,
    pairs: usize,
) -> usize {
    gen_a
        .zip(gen_b)
        .take(pairs)
        .filter(|(a, b)| a & 0xFFFF == b & 0xFFFF)
        .count()
}

fn main() {
    // First star: judge 40 million raw pairs.
    let matches = count_matches(
        generator(512, GEN_A_FACTOR, 1),
        generator(191, GEN_B_FACTOR, 1),
        40_000_000,
    );
    // Assert to facilitate further tweaks
    assert_eq!(567, matches);
    println!("{} out of 40 million pairs match", matches);

    // Second star: generator A only hands multiples of 4 to the judge and
    // generator B only multiples of 8; judge 5 million such pairs.
    let matches = count_matches(
        generator(512, GEN_A_FACTOR, 4),
        generator(191, GEN_B_FACTOR, 8),
        5_000_000,
    );
    // Assert to facilitate further tweaks
    assert_eq!(323, matches);
    println!("{} out of 5 million pairs match", matches);
}
// Module wiring: the implementation lives in `string.rs`; its `join` function
// is re-exported here so callers can use it from this module's root.
mod string;
pub use string::join;
use clap::{crate_version, App, Arg};
use nix::sched::{unshare, CloneFlags};
use nix::unistd::execvp;
use std::ffi::{CStr, CString};

/// Minimal `unshare(1)` clone: detaches the selected namespaces via
/// `unshare(2)`, then exec()s the given command.
fn main() {
    // One row per supported namespace:
    // (CLI name, short flag, long flag, help text, clone(2) flag).
    let namespaces: &[(&str, &str, &str, &str, CloneFlags)] = &[
        ("ipc", "i", "ipc", "unshare IPC namespace", CloneFlags::CLONE_NEWIPC),
        ("mount", "m", "mount", "unshare mount namespace", CloneFlags::CLONE_NEWNS),
        ("net", "n", "net", "unshare network namespace", CloneFlags::CLONE_NEWNET),
        ("pid", "p", "pid", "unshare PID namespace", CloneFlags::CLONE_NEWPID),
        ("uts", "u", "uts", "unshare UTS namespace", CloneFlags::CLONE_NEWUTS),
        ("user", "U", "user", "unshare user namespace", CloneFlags::CLONE_NEWUSER),
    ];

    // Build the CLI definition from the table, then the positional arguments.
    let mut app = App::new("unshare").version(crate_version!());
    for &(name, short, long, help, _) in namespaces {
        app = app.arg(Arg::with_name(name).help(help).short(short).long(long));
    }
    let matches = app
        .arg(Arg::with_name("cmd").index(1).required(true))
        .arg(Arg::with_name("arg").multiple(true))
        .get_matches();

    // OR together the clone flags for every namespace the user selected.
    let mut flags = CloneFlags::empty();
    for &(name, _, _, _, flag) in namespaces {
        if matches.is_present(name) {
            flags.insert(flag);
        }
    }

    unshare(flags).expect("unshare() failed");

    // argv[0] is the command itself, followed by any trailing arguments.
    let cmd = matches.value_of("cmd").unwrap();
    let argv_owned: Vec<CString> = std::iter::once(cmd)
        .chain(matches.values_of("arg").into_iter().flatten())
        .map(|a| CString::new(a).unwrap())
        .collect();
    let argv: Vec<&CStr> = argv_owned.iter().map(CString::as_c_str).collect();
    execvp(&argv[0], &argv).expect("exec() failed");
}
// Example usage of the external `funtime` attribute macro.
//
// NOTE(review): `#[funtime::timed]` is a proc-macro from the `funtime`
// crate; presumably it wraps each function with timing instrumentation —
// confirm against the crate's documentation.
#[funtime::timed]
fn foo(y: i32) -> i32 {
    // Plain arithmetic: returns 1 + 1_000 + y.
    let mut x = 1;
    let d = 1_000;
    x += d;
    x += y;
    x
}

#[funtime::timed]
fn main() {
    foo(23);
}
// NOTE(review): this file has the shape of svd2rust-generated peripheral
// module code (`crate::Reg`, `#[doc]` offset annotations). If generated,
// regenerate from the SVD rather than hand-editing — confirm provenance.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    // Padding for the unmapped bytes at offset 0x00..0x04.
    _reserved0: [u8; 0x04],
    #[doc = "0x04..0x44 - Cluster CH%s, containing ?CR1, ?CR2, ?FRCR, ?SLOTR, ?IM, ?SR, ?CLRFR, ?DR"]
    pub ch: [CH; 2],
    #[doc = "0x44 - PDM control register"]
    pub pdmcr: PDMCR,
    #[doc = "0x48 - PDM delay register"]
    pub pdmdly: PDMDLY,
}
impl RegisterBlock {
    // Named accessors for the two channel clusters in `ch`.
    #[doc = "0x04..0x24 - Cluster CH%s, containing ?CR1, ?CR2, ?FRCR, ?SLOTR, ?IM, ?SR, ?CLRFR, ?DR"]
    #[inline(always)]
    pub fn cha(&self) -> &CH {
        &self.ch[0]
    }
    #[doc = "0x24..0x44 - Cluster CH%s, containing ?CR1, ?CR2, ?FRCR, ?SLOTR, ?IM, ?SR, ?CLRFR, ?DR"]
    #[inline(always)]
    pub fn chb(&self) -> &CH {
        &self.ch[1]
    }
}
#[doc = "Cluster CH%s, containing ?CR1, ?CR2, ?FRCR, ?SLOTR, ?IM, ?SR, ?CLRFR, ?DR"]
pub use self::ch::CH;
#[doc = r"Cluster"]
#[doc = "Cluster CH%s, containing ?CR1, ?CR2, ?FRCR, ?SLOTR, ?IM, ?SR, ?CLRFR, ?DR"]
pub mod ch;
#[doc = "PDMCR (rw) register accessor: PDM control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pdmcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pdmcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`pdmcr`] module"]
pub type PDMCR = crate::Reg<pdmcr::PDMCR_SPEC>;
#[doc = "PDM control register"]
pub mod pdmcr;
#[doc = "PDMDLY (rw) register accessor: PDM delay register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pdmdly::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pdmdly::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`pdmdly`] module"]
pub type PDMDLY = crate::Reg<pdmdly::PDMDLY_SPEC>;
#[doc = "PDM delay register"]
pub mod pdmdly;
// This file is part of Substrate.

// Copyright (C) 2018-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 	http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use codec::{Decode, Encode};
use sp_core::sandbox as sandbox_primitives;
use sp_io::sandbox;
use sp_std::{prelude::*, slice, marker, mem, vec, rc::Rc};
use super::{Error, Value, ReturnValue, HostFuncType};

mod ffi {
    use sp_std::mem;
    use super::HostFuncType;

    /// Index into the default table that points to a `HostFuncType`.
    pub type HostFuncIndex = usize;

    /// Coerce `HostFuncIndex` to a callable host function pointer.
    ///
    /// # Safety
    ///
    /// This function should be only called with a `HostFuncIndex` that was previously registered
    /// in the environment definition. Typically this should only
    /// be called with an argument received in `dispatch_thunk`.
    pub unsafe fn coerce_host_index_to_func<T>(idx: HostFuncIndex) -> HostFuncType<T> {
        // We need to ensure that sizes of a callable function pointer and host function index is
        // indeed equal.
        // We can't use `static_assertions` create because it makes compiler panic, fallback to runtime assert.
        // const_assert!(mem::size_of::<HostFuncIndex>() == mem::size_of::<HostFuncType<T>>(),);
        assert!(mem::size_of::<HostFuncIndex>() == mem::size_of::<HostFuncType<T>>());
        mem::transmute::<HostFuncIndex, HostFuncType<T>>(idx)
    }
}

// Owns a sandbox memory instance id and tears it down on drop.
struct MemoryHandle {
    memory_idx: u32,
}

impl Drop for MemoryHandle {
    fn drop(&mut self) {
        sandbox::memory_teardown(self.memory_idx);
    }
}

#[derive(Clone)]
pub struct Memory {
    // Handle to memory instance is wrapped to add reference-counting semantics
    // to `Memory`.
    handle: Rc<MemoryHandle>,
}

impl Memory {
    /// Allocates a new sandbox memory with `initial` pages and an optional
    /// `maximum` (unlimited when `None`). Errs with `Error::Module` when the
    /// runtime rejects the allocation.
    pub fn new(initial: u32, maximum: Option<u32>) -> Result<Memory, Error> {
        let maximum = if let Some(maximum) = maximum {
            maximum
        } else {
            sandbox_primitives::MEM_UNLIMITED
        };

        match sandbox::memory_new(initial, maximum) {
            sandbox_primitives::ERR_MODULE => Err(Error::Module),
            memory_idx => Ok(Memory {
                handle: Rc::new(MemoryHandle {
                    memory_idx,
                }),
            }),
        }
    }

    /// Copies sandbox memory at `offset` into `buf`.
    pub fn get(&self, offset: u32, buf: &mut [u8]) -> Result<(), Error> {
        let result = sandbox::memory_get(
            self.handle.memory_idx,
            offset,
            buf.as_mut_ptr(),
            buf.len() as u32,
        );
        match result {
            sandbox_primitives::ERR_OK => Ok(()),
            sandbox_primitives::ERR_OUT_OF_BOUNDS => Err(Error::OutOfBounds),
            _ => unreachable!(),
        }
    }

    /// Copies `val` into sandbox memory at `offset`.
    pub fn set(&self, offset: u32, val: &[u8]) -> Result<(), Error> {
        let result = sandbox::memory_set(
            self.handle.memory_idx,
            offset,
            val.as_ptr() as _,
            val.len() as u32,
        );
        match result {
            sandbox_primitives::ERR_OK => Ok(()),
            sandbox_primitives::ERR_OUT_OF_BOUNDS => Err(Error::OutOfBounds),
            _ => unreachable!(),
        }
    }
}

/// Accumulates the (module, field) -> entity import map for a sandbox
/// instance; memories added here are kept alive via `retained_memories`.
pub struct EnvironmentDefinitionBuilder<T> {
    env_def: sandbox_primitives::EnvironmentDefinition,
    retained_memories: Vec<Memory>,
    _marker: marker::PhantomData<T>,
}

impl<T> EnvironmentDefinitionBuilder<T> {
    pub fn new() -> EnvironmentDefinitionBuilder<T> {
        EnvironmentDefinitionBuilder {
            env_def: sandbox_primitives::EnvironmentDefinition {
                entries: Vec::new(),
            },
            retained_memories: Vec::new(),
            _marker: marker::PhantomData::<T>,
        }
    }

    fn add_entry<N1, N2>(
        &mut self,
        module: N1,
        field: N2,
        extern_entity: sandbox_primitives::ExternEntity,
    ) where
        N1: Into<Vec<u8>>,
        N2: Into<Vec<u8>>,
    {
        let entry = sandbox_primitives::Entry {
            module_name: module.into(),
            field_name: field.into(),
            entity: extern_entity,
        };
        self.env_def.entries.push(entry);
    }

    pub fn add_host_func<N1, N2>(&mut self, module: N1, field: N2, f: HostFuncType<T>)
    where
        N1: Into<Vec<u8>>,
        N2: Into<Vec<u8>>,
    {
        // The function pointer is shipped across the FFI boundary as a u32;
        // NOTE(review): this presumes pointers fit in 32 bits (wasm32 target)
        // — confirm the build target.
        let f = sandbox_primitives::ExternEntity::Function(f as u32);
        self.add_entry(module, field, f);
    }

    pub fn add_memory<N1, N2>(&mut self, module: N1, field: N2, mem: Memory)
    where
        N1: Into<Vec<u8>>,
        N2: Into<Vec<u8>>,
    {
        // We need to retain memory to keep it alive while the EnvironmentDefinitionBuilder alive.
        self.retained_memories.push(mem.clone());

        let mem = sandbox_primitives::ExternEntity::Memory(mem.handle.memory_idx as u32);
        self.add_entry(module, field, mem);
    }
}

/// An instantiated sandboxed wasm module; torn down on drop.
pub struct Instance<T> {
    instance_idx: u32,
    _retained_memories: Vec<Memory>,
    _marker: marker::PhantomData<T>,
}

/// The primary responsibility of this thunk is to deserialize arguments and
/// call the original function, specified by the index.
extern "C" fn dispatch_thunk<T>(
    serialized_args_ptr: *const u8,
    serialized_args_len: usize,
    state: usize,
    f: ffi::HostFuncIndex,
) -> u64 {
    let serialized_args = unsafe {
        if serialized_args_len == 0 {
            &[]
        } else {
            slice::from_raw_parts(serialized_args_ptr, serialized_args_len)
        }
    };
    let args = Vec::<Value>::decode(&mut &serialized_args[..]).expect(
        "serialized args should be provided by the runtime; correctly serialized data should be deserializable; qed",
    );

    unsafe {
        // This should be safe since `coerce_host_index_to_func` is called with an argument
        // received in an `dispatch_thunk` implementation, so `f` should point
        // on a valid host function.
        let f = ffi::coerce_host_index_to_func(f);
        // This should be safe since mutable reference to T is passed upon the invocation.
        let state = &mut *(state as *mut T);

        // Pass control flow to the designated function.
        let result = f(state, &args).encode();

        // Leak the result vector and return the pointer to return data.
        // The (ptr, len) pair is packed into a single u64: pointer in the
        // high 32 bits, length in the low 32 bits.
        let result_ptr = result.as_ptr() as u64;
        let result_len = result.len() as u64;
        mem::forget(result);

        (result_ptr << 32) | result_len
    }
}

impl<T> Instance<T> {
    /// Instantiates `code` against the imports in `env_def_builder`, giving
    /// host functions mutable access to `state` during instantiation.
    pub fn new(
        code: &[u8],
        env_def_builder: &EnvironmentDefinitionBuilder<T>,
        state: &mut T,
    ) -> Result<Instance<T>, Error> {
        let serialized_env_def: Vec<u8> = env_def_builder.env_def.encode();
        // It's very important to instantiate thunk with the right type.
        let dispatch_thunk = dispatch_thunk::<T>;
        let result = sandbox::instantiate(
            dispatch_thunk as u32,
            code,
            &serialized_env_def,
            state as *const T as _,
        );

        let instance_idx = match result {
            sandbox_primitives::ERR_MODULE => return Err(Error::Module),
            sandbox_primitives::ERR_EXECUTION => return Err(Error::Execution),
            instance_idx => instance_idx,
        };

        // We need to retain memories to keep them alive while the Instance is alive.
        let retained_memories = env_def_builder.retained_memories.clone();
        Ok(Instance {
            instance_idx,
            _retained_memories: retained_memories,
            _marker: marker::PhantomData::<T>,
        })
    }

    /// Invokes the exported function `name` with `args`, decoding the result
    /// from a fixed-size buffer the runtime fills in.
    pub fn invoke(
        &mut self,
        name: &str,
        args: &[Value],
        state: &mut T,
    ) -> Result<ReturnValue, Error> {
        let serialized_args = args.to_vec().encode();
        let mut return_val = vec![0u8; ReturnValue::ENCODED_MAX_SIZE];

        let result = sandbox::invoke(
            self.instance_idx,
            name,
            &serialized_args,
            return_val.as_mut_ptr() as _,
            return_val.len() as u32,
            state as *const T as _,
        );

        match result {
            sandbox_primitives::ERR_OK => {
                let return_val = ReturnValue::decode(&mut &return_val[..])
                    .map_err(|_| Error::Execution)?;
                Ok(return_val)
            }
            sandbox_primitives::ERR_EXECUTION => Err(Error::Execution),
            _ => unreachable!(),
        }
    }

    /// Reads an exported global's current value, `None` when it is absent.
    pub fn get_global_val(&self, name: &str) -> Option<Value> {
        sandbox::get_global_val(self.instance_idx, name)
    }
}

impl<T> Drop for Instance<T> {
    fn drop(&mut self) {
        sandbox::instance_teardown(self.instance_idx);
    }
}
use std::error::Error;

/// Cartridge container format.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum FileFormat {
    /// iNES
    INes,
    /// NES 2.0
    Nes20,
}

/// Nametable mirroring arrangement declared in the header.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Mirroring {
    Horizontal,
    Vertical,
    FourScreen,
}

/// Parsed contents of a 16-byte iNES / NES 2.0 header.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct Header {
    pub format: FileFormat,
    /// PRG ROM size in bytes.
    pub prg_rom_size: usize,
    /// CHR ROM size in bytes.
    pub chr_rom_size: usize,
    pub mapper_id: u16,
    pub mirroring: Mirroring,
    pub has_trainer: bool,
    pub has_battery: bool,
}

// Flags 6
const MIRRORING_VERTICAL_MASK: u8 = 0b0000_0001;
const MIRRORING_FOUR_SCREEN_MASK: u8 = 0b0000_1000;
const HAS_BATTERY_MASK: u8 = 0b0000_0010;
const HAS_TRAINER_MASK: u8 = 0b0000_0100;

/// Decodes a NES 2.0 ROM size field from its LSB byte and MSB nibble.
///
/// When the MSB nibble is 0xF the field uses exponent-multiplier notation:
/// size = 2^E * (2*M + 1) bytes, with M in LSB bits 0-1 and E in LSB bits 2-7.
/// Otherwise the combined 12-bit unit count is multiplied by `unit` bytes.
fn nes20_rom_size(size_lsb: u8, size_msb_nibble: u8, unit: usize) -> usize {
    let lsb = size_lsb as usize;
    let msb = (size_msb_nibble as usize) << 8;
    if msb == 0b1111_0000_0000 {
        let multiplier = lsb & 0b0000_0011;
        // BUGFIX: the exponent lives in bits 2-7 and must be shifted down,
        // and `2 ^ e` is XOR in Rust, not exponentiation — use a shift.
        let exponent = (lsb & 0b1111_1100) >> 2;
        (1usize << exponent) * (multiplier * 2 + 1)
    } else {
        (msb | lsb) * unit
    }
}

/// Parses the first 16 bytes of a ROM image into a [`Header`].
///
/// # Errors
/// Returns "bad format" if the buffer is shorter than 16 bytes or does not
/// start with the `NES\x1a` magic.
pub fn parse_header(header: &[u8]) -> Result<Header, Box<dyn Error>> {
    // Previously a short slice panicked on indexing; report it as an error.
    if header.len() < 16 {
        return Err("bad format".into());
    }
    let magic = &header[0..4];
    if magic != b"NES\x1a" {
        return Err("bad format".into());
    }

    // Bits 3-4 are "10" for NES 2.0
    let format = if header[7] & 0b0000_1100 == 0b0000_1000 {
        FileFormat::Nes20
    } else {
        FileFormat::INes
    };

    // PRG ROM: LSB in byte 4; NES 2.0 puts the MSB nibble in byte 9 bits 0-3.
    let prg_rom_size = match format {
        FileFormat::INes => header[4] as usize * 16 * 1024,
        FileFormat::Nes20 => nes20_rom_size(header[4], header[9] & 0b0000_1111, 16 * 1024),
    };

    // CHR ROM: LSB in byte 5; NES 2.0 puts the MSB nibble in byte 9 bits 4-7.
    let chr_rom_size = match format {
        FileFormat::INes => header[5] as usize * 8 * 1024,
        FileFormat::Nes20 => nes20_rom_size(header[5], header[9] >> 4, 8 * 1024),
    };

    // Four-screen overrides the horizontal/vertical bit.
    let mirroring = if header[6] & MIRRORING_FOUR_SCREEN_MASK != 0 {
        Mirroring::FourScreen
    } else if header[6] & MIRRORING_VERTICAL_MASK != 0 {
        Mirroring::Vertical
    } else {
        Mirroring::Horizontal
    };

    let has_battery = header[6] & HAS_BATTERY_MASK != 0;
    let has_trainer = header[6] & HAS_TRAINER_MASK != 0;

    // Mapper id: low nibble in flags 6 (bits 4-7), next nibble in flags 7;
    // NES 2.0 adds bits 8-11 in byte 8 plus a submapper nibble.
    let (mapper_id, _submapper_id) = match format {
        FileFormat::INes => {
            let bits_0_3 = (header[6] & 0b1111_0000) as u16 >> 4;
            let bits_4_7 = (header[7] & 0b1111_0000) as u16;
            (bits_4_7 | bits_0_3, 0)
        }
        FileFormat::Nes20 => {
            let bits_0_3 = ((header[6] & 0b1111_0000) as u16) >> 4;
            let bits_4_7 = (header[7] & 0b1111_0000) as u16;
            let bits_8_11 = ((header[8] & 0b0000_1111) as u16) << 8;
            let mapper_id = bits_8_11 | bits_4_7 | bits_0_3;
            let submapper_id = ((header[8] & 0b1111_0000) as u8) >> 4;
            (mapper_id, submapper_id)
        }
    };

    Ok(Header {
        format,
        prg_rom_size,
        chr_rom_size,
        mapper_id,
        mirroring,
        has_trainer,
        has_battery,
    })
}

#[cfg(test)]
mod tests {
    use super::*;
    // NOTE: tests build byte arrays directly instead of depending on the
    // `hex` crate — one fewer dev-dependency, same coverage.

    #[test]
    #[should_panic(expected = "bad format")]
    fn err_on_bad_header() {
        let header = [0u8; 16];
        parse_header(&header).unwrap();
    }

    #[test]
    fn ines_mapper_0_prg_16_chr_8_horz() {
        let mut header = [0u8; 16];
        header[..4].copy_from_slice(b"NES\x1a");
        header[4] = 1; // 1 x 16 kB PRG
        header[5] = 1; // 1 x 8 kB CHR
        let header = parse_header(&header).unwrap();
        assert_eq!(
            header,
            Header {
                format: FileFormat::INes,
                mapper_id: 0,
                prg_rom_size: 16 * 1024,
                chr_rom_size: 8 * 1024,
                mirroring: Mirroring::Horizontal,
                has_trainer: false,
                has_battery: false,
            }
        )
    }

    #[test]
    fn nes20_exponent_multiplier_prg() {
        let mut header = [0u8; 16];
        header[..4].copy_from_slice(b"NES\x1a");
        header[7] = 0b0000_1000; // NES 2.0
        header[9] = 0x0F; // PRG MSB nibble = 0xF -> exponent form
        header[4] = (10 << 2) | 1; // E = 10, M = 1 -> 2^10 * 3 = 3072
        let header = parse_header(&header).unwrap();
        assert_eq!(header.format, FileFormat::Nes20);
        assert_eq!(header.prg_rom_size, 3 * 1024);
    }
}
#[macro_use] extern crate lazy_static; use rustyline::error::ReadlineError; use rustyline::Editor; use clap::{AppSettings, Clap}; use std::fs; #[derive(Clap, Debug)] #[clap( version = "1.0", author = "author - Tanay D. Pingalkar <tanaydpingalkar@gmail.com>", about = "a functional programming language" )] #[clap(setting = AppSettings::ColoredHelp)] struct Opts { #[clap(subcommand)] subcommand: Subcommand, } #[derive(Clap, Debug)] #[clap(setting = AppSettings::ColoredHelp)] enum Subcommand { #[clap(about = "to run file, example : `tof run filename`")] Run(Run), #[clap(about = "to enter interactive mode")] Play, } #[derive(Clap, Debug)] #[clap(setting = AppSettings::ColoredHelp)] struct Run { #[clap(about = "file name of .tof extension , example `tof run filename`")] file: String, #[clap(long, about = "show generated tokens", short)] show_tokens: bool, } mod prelude; mod runtime; mod tokenizer; use runtime::Runtime; mod utils; use tokenizer::Tokenizer; fn main() { let matches: Opts = Opts::parse(); match matches.subcommand { Subcommand::Run(Run { file, show_tokens }) => { let string = fs::read_to_string(format!("{}.tof", file)).expect("file not found"); let mut tokenizer = Tokenizer::new(&string); tokenizer.start(); if show_tokens { println!("{:#?}", tokenizer.tokens); } let mut runtime = Runtime::new(); runtime.eval(tokenizer.tokens, 1, vec![], vec![], true); } Subcommand::Play => { let mut rl = Editor::<()>::new(); println!("welcome to interactive mode \npress : Ctrl-C to exit"); let mut runtime = Runtime::new(); let mut i: usize = 1; loop { let readline = rl.readline("-> "); match readline { Ok(line) => { rl.add_history_entry(line.as_str()); let mut lexer = Tokenizer::new(&line); lexer.start(); runtime.eval(lexer.tokens, i, vec![], vec![], false); i = i + 1; } Err(ReadlineError::Interrupted) => { println!("^C"); break; } Err(ReadlineError::Eof) => { println!("^D"); break; } Err(err) => { println!("Error: {:?}", err); break; } } } } } }
use crate::states::game::GameState; use oxygengine::prelude::*; #[derive(Default)] pub struct SplashState; impl State for SplashState { fn on_enter(&mut self, world: &mut World) { let token = world.read_resource::<AppLifeCycle>().current_state_token(); world .create_entity() .with(CompositeCamera::new(CompositeScalingMode::CenterAspect)) .with(CompositeTransform::scale(720.0.into())) .with(NonPersistent(token)) .build(); world .create_entity() .with(CompositeRenderable( Image::new("splash.png").align(0.5.into()).into(), )) .with(CompositeTransform::default()) .with(NonPersistent(token)) .build(); world .create_entity() .with(CompositeRenderable( Text::new("Verdana", "Click to play!") .color(Color::white()) .align(TextAlign::Center) .baseline(TextBaseLine::Bottom) .size(64.0) .into(), )) .with(CompositeTransform::translation([0.5, -32.0].into())) .with(CompositeCameraAlignment([0.5, 1.0].into())) .with(NonPersistent(token)) .build(); } fn on_process(&mut self, world: &mut World) -> StateChange { let input = world.read_resource::<InputController>(); // NOTE: web browsers require user input to be triggered before playing any audio. if input.trigger_or_default("mouse-left") == TriggerState::Pressed { return StateChange::Swap(Box::new(GameState::default())); } StateChange::None } }
impl Default for Config {
    fn default() -> Config {
        // The region split size drives the raft-log GC limits below, so derive
        // it from the coprocessor's configured split size.
        let split_size = ReadableSize::mb(coprocessor::config::SPLIT_SIZE_MB);
        Config {
            sync_log: true,
            prevote: true,
            // Empty path: the raftdb location is presumably derived from the
            // main data dir elsewhere -- TODO confirm.
            raftdb_path: String::new(),
            capacity: ReadableSize(0),
            // Raft tick/timeout tuning; tick counts are multiples of the base tick.
            raft_base_tick_interval: ReadableDuration::secs(1),
            raft_heartbeat_ticks: 2,
            raft_election_timeout_ticks: 10,
            // 0 presumably means "derive from raft_election_timeout_ticks" -- TODO confirm.
            raft_min_election_timeout_ticks: 0,
            raft_max_election_timeout_ticks: 0,
            raft_max_size_per_msg: ReadableSize::mb(1),
            raft_max_inflight_msgs: 256,
            raft_entry_max_size: ReadableSize::mb(8),
            // Raft log GC: keep the log below roughly 3/4 of a split-sized region.
            raft_log_gc_tick_interval: ReadableDuration::secs(10),
            raft_log_gc_threshold: 50,
            // Assume the average size of entries is 1k.
            raft_log_gc_count_limit: split_size * 3 / 4 / ReadableSize::kb(1),
            raft_log_gc_size_limit: split_size * 3 / 4,
            raft_entry_cache_life_time: ReadableDuration::secs(30),
            raft_reject_transfer_leader_duration: ReadableDuration::secs(3),
            // Region split / compaction checking.
            split_region_check_tick_interval: ReadableDuration::secs(10),
            region_split_check_diff: split_size / 16,
            clean_stale_peer_delay: ReadableDuration::minutes(10),
            region_compact_check_interval: ReadableDuration::minutes(5),
            region_compact_check_step: 100,
            region_compact_min_tombstones: 10000,
            region_compact_tombstones_percent: 30,
            // PD heartbeat cadence.
            pd_heartbeat_tick_interval: ReadableDuration::minutes(1),
            pd_store_heartbeat_tick_interval: ReadableDuration::secs(10),
            notify_capacity: 40960,
            // Snapshot lifecycle.
            snap_mgr_gc_tick_interval: ReadableDuration::minutes(1),
            snap_gc_timeout: ReadableDuration::hours(4),
            messages_per_tick: 4096,
            // Peer health / leader-missing thresholds.
            max_peer_down_duration: ReadableDuration::minutes(5),
            max_leader_missing_duration: ReadableDuration::hours(2),
            abnormal_leader_missing_duration: ReadableDuration::minutes(10),
            peer_stale_state_check_interval: ReadableDuration::minutes(5),
            leader_transfer_max_log_lag: 10,
            snap_apply_batch_size: ReadableSize::mb(10),
            // Lock CF compaction.
            lock_cf_compact_interval: ReadableDuration::minutes(10),
            lock_cf_compact_bytes_threshold: ReadableSize::mb(256),
            // Disable consistency check by default as it will hurt performance.
            // We should turn on this only in our tests.
            consistency_check_interval: ReadableDuration::secs(0),
            report_region_flow_interval: ReadableDuration::minutes(1),
            raft_store_max_leader_lease: ReadableDuration::secs(9),
            right_derive_when_split: true,
            allow_remove_leader: false,
            // Region merge.
            merge_max_log_gap: 10,
            merge_check_tick_interval: ReadableDuration::secs(10),
            use_delete_range: false,
            cleanup_import_sst_interval: ReadableDuration::minutes(10),
            local_read_batch_size: 1024,
            // They are preserved for compatibility check.
            region_max_size: ReadableSize(0),
            region_split_size: ReadableSize(0),
        }
    }
}
use crate::lib::environment::Environment;
use crate::lib::error::{DfxError, DfxResult};
use crate::lib::models::canister_id_store::CanisterIdStore;
use crate::lib::root_key::fetch_root_key_if_needed;
use crate::lib::waiter::waiter_with_exponential_backoff;
use crate::util::clap::validators;
use crate::util::print_idl_blob;
use anyhow::{anyhow, Context};
use clap::Clap;
use delay::Waiter;
use ic_agent::agent::{Replied, RequestStatusResponse};
use ic_agent::{AgentError, RequestId};
use ic_types::Principal;
use std::str::FromStr;

/// Requests the status of a specified call from a canister.
#[derive(Clap)]
pub struct RequestStatusOpts {
    /// Specifies the request identifier.
    /// The request identifier is an hexadecimal string starting with 0x.
    #[clap(validator(validators::is_request_id))]
    request_id: String,

    /// Specifies the name or id of the canister onto which the request was made.
    /// If the request was made to the Management canister, specify the id of the
    /// canister it is updating/querying.
    /// If the call was proxied by the wallet,
    /// i.e. a `dfx canister --wallet=<ID> call --async` flag,
    /// specify the wallet canister id.
    canister: String,

    /// Specifies the format for displaying the method's return result.
    #[clap(long, possible_values(&["idl", "raw", "pp"]))]
    output: Option<String>,
}

/// Polls the replica for the status of `request_id` until it is replied,
/// rejected, done-without-reply, or the backoff waiter times out; on success
/// the reply blob is printed in the requested output format.
pub async fn exec(env: &dyn Environment, opts: RequestStatusOpts) -> DfxResult {
    // Strip the mandatory "0x" prefix before decoding the hex request id
    // (the clap validator guarantees the prefix is present).
    let request_id =
        RequestId::from_str(&opts.request_id[2..]).context("Invalid argument: request_id")?;
    let agent = env
        .get_agent()
        .ok_or_else(|| anyhow!("Cannot get HTTP client from environment."))?;
    fetch_root_key_if_needed(env).await?;
    // Accept either a textual principal or a canister name known to the project.
    let callee_canister = opts.canister.as_str();
    let canister_id_store = CanisterIdStore::for_env(env)?;
    let canister_id = Principal::from_text(callee_canister)
        .or_else(|_| canister_id_store.get(callee_canister))?;
    let mut waiter = waiter_with_exponential_backoff();
    // `Replied::CallReplied` must be `Replied`'s only variant for this
    // irrefutable `let` pattern to compile.
    let Replied::CallReplied(blob) = async {
        waiter.start();
        let mut request_accepted = false;
        loop {
            match agent
                .request_status_raw(&request_id, canister_id.clone())
                .await?
            {
                RequestStatusResponse::Replied { reply } => return Ok(reply),
                RequestStatusResponse::Rejected {
                    reject_code,
                    reject_message,
                } => {
                    return Err(DfxError::new(AgentError::ReplicaError {
                        reject_code,
                        reject_message,
                    }))
                }
                // Unknown: keep polling until the waiter gives up.
                RequestStatusResponse::Unknown => (),
                RequestStatusResponse::Received | RequestStatusResponse::Processing => {
                    // The system will return Unknown until the request is accepted
                    // and we generally cannot know how long that will take.
                    // State transitions between Received and Processing may be
                    // instantaneous. Therefore, once we know the request is accepted,
                    // we restart the waiter so the request does not time out.
                    if !request_accepted {
                        waiter
                            .restart()
                            .map_err(|_| DfxError::new(AgentError::WaiterRestartError()))?;
                        request_accepted = true;
                    }
                }
                RequestStatusResponse::Done => {
                    return Err(DfxError::new(AgentError::RequestStatusDoneNoReply(
                        String::from(request_id),
                    )))
                }
            };
            // Sleep according to the exponential backoff; errors here mean the
            // overall deadline was exceeded.
            waiter
                .wait()
                .map_err(|_| DfxError::new(AgentError::TimeoutWaitingForResponse()))?;
        }
    }
    .await
    .map_err(DfxError::from)?;
    let output_type = opts.output.as_deref();
    print_idl_blob(&blob, output_type, &None).context("Invalid data: Invalid IDL blob.")?;
    Ok(())
}
use nu_engine::CallExt;
use nu_protocol::{
    ast::{Call, CellPath},
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Value,
};

/// `into filesize` — converts ints, floats, strings and nothing into
/// `Value::Filesize`.
#[derive(Clone)]
pub struct SubCommand;

impl Command for SubCommand {
    fn name(&self) -> &str {
        "into filesize"
    }

    fn signature(&self) -> Signature {
        Signature::build("into filesize")
            .rest(
                "rest",
                SyntaxShape::CellPath,
                "column paths to convert to filesize (for table input)",
            )
            .category(Category::Conversions)
    }

    fn usage(&self) -> &str {
        "Convert value to filesize"
    }

    fn search_terms(&self) -> Vec<&str> {
        vec!["convert", "number", "bytes"]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
        into_filesize(engine_state, stack, call, input)
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Convert string to filesize in table",
                example: "[[bytes]; ['5'] [3.2] [4] [2kb]] | into filesize bytes",
                result: None,
            },
            Example {
                description: "Convert string to filesize",
                example: "'2' | into filesize",
                result: Some(Value::Filesize {
                    val: 2,
                    span: Span::test_data(),
                }),
            },
            Example {
                description: "Convert decimal to filesize",
                example: "8.3 | into filesize",
                result: Some(Value::Filesize {
                    val: 8,
                    span: Span::test_data(),
                }),
            },
            Example {
                description: "Convert int to filesize",
                example: "5 | into filesize",
                result: Some(Value::Filesize {
                    val: 5,
                    span: Span::test_data(),
                }),
            },
            Example {
                description: "Convert file size to filesize",
                example: "4KB | into filesize",
                result: Some(Value::Filesize {
                    val: 4000,
                    span: Span::test_data(),
                }),
            },
        ]
    }
}

/// Applies `action` to the whole pipeline value or, when column paths were
/// supplied, only to those cells of each row.
fn into_filesize(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    input: PipelineData,
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
    let head = call.head;
    let column_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;

    input.map(
        move |v| {
            if column_paths.is_empty() {
                action(&v, head)
            } else {
                let mut ret = v;
                for path in &column_paths {
                    // A failing cell update turns the whole row value into an
                    // error value instead of aborting the pipeline.
                    let r = ret.update_cell_path(&path.members, Box::new(move |old| action(old, head)));
                    if let Err(error) = r {
                        return Value::Error { error };
                    }
                }
                ret
            }
        },
        engine_state.ctrlc.clone(),
    )
}

/// Converts one value to `Value::Filesize`.
///
/// Filesizes pass through unchanged; ints map directly; floats truncate
/// toward zero (`8.3` -> 8); strings are parsed as byte sizes (e.g. "2kb");
/// `Nothing` maps to 0; any other type yields an error value.
pub fn action(input: &Value, span: Span) -> Value {
    if let Ok(value_span) = input.span() {
        match input {
            Value::Filesize { .. } => input.clone(),
            Value::Int { val, .. } => Value::Filesize {
                val: *val,
                span: value_span,
            },
            Value::Float { val, .. } => Value::Filesize {
                // `as i64` truncates toward zero.
                val: *val as i64,
                span: value_span,
            },
            Value::String { val, .. } => match int_from_string(val, value_span) {
                Ok(val) => Value::Filesize {
                    val,
                    span: value_span,
                },
                Err(error) => Value::Error { error },
            },
            Value::Nothing { .. } => Value::Filesize {
                val: 0,
                span: value_span,
            },
            _ => Value::Error {
                error: ShellError::UnsupportedInput(
                    "'into filesize' for unsupported type".into(),
                    value_span,
                ),
            },
        }
    } else {
        // The input carries no span of its own; report at the call site span.
        Value::Error {
            error: ShellError::UnsupportedInput(
                "'into filesize' for unsupported type".into(),
                span,
            ),
        }
    }
}

/// Parses strings like "2" or "2kb" into a byte count via `bytesize`.
fn int_from_string(a_string: &str, span: Span) -> Result<i64, ShellError> {
    match a_string.trim().parse::<bytesize::ByteSize>() {
        Ok(n) => Ok(n.0 as i64),
        Err(_) => Err(ShellError::CantConvert(
            "int".into(),
            "string".into(),
            span,
            None,
        )),
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_examples() {
        use crate::test_examples;

        test_examples(SubCommand {})
    }
}
// Copyright 2015 Ted Mielczarek. See the COPYRIGHT // file at the top-level directory of this distribution. use std::env; use std::path::Path; use std::io::Write; extern crate minidump; use minidump::*; const USAGE : &'static str = "Usage: minidump_dump <minidump>"; fn print_minidump_dump(path : &Path) { match Minidump::read_path(path) { Ok(mut dump) => { let stdout = &mut std::io::stdout(); dump.print(stdout).unwrap(); if let Ok(thread_list) = dump.get_stream::<MinidumpThreadList>() { thread_list.print(stdout).unwrap(); } if let Ok(module_list) = dump.get_stream::<MinidumpModuleList>() { module_list.print(stdout).unwrap(); } // TODO: MemoryList if let Ok(exception) = dump.get_stream::<MinidumpException>() { exception.print(stdout).unwrap(); } // TODO: Assertion if let Ok(system_info) = dump.get_stream::<MinidumpSystemInfo>() { system_info.print(stdout).unwrap(); } if let Ok(misc_info) = dump.get_stream::<MinidumpMiscInfo>() { misc_info.print(stdout).unwrap(); } if let Ok(breakpad_info) = dump.get_stream::<MinidumpBreakpadInfo>() { breakpad_info.print(stdout).unwrap(); } // TODO: MemoryInfoList // TODO: raw Linux streams }, Err(err) => { let mut stderr = std::io::stderr(); writeln!(&mut stderr, "Error reading dump: {:?}", err).unwrap(); } } } #[cfg_attr(test, allow(dead_code))] fn main() { if let Some(dump_arg) = env::args().nth(1) { let path = Path::new(&dump_arg); print_minidump_dump(&path); } else { let mut stderr = std::io::stderr(); writeln!(&mut stderr, "{}", USAGE).unwrap(); } }
// NOTE(review): svd2rust-generated accessor API for the SDMMC `CMD` register.
// Prefer regenerating from the SVD description over hand-editing this file.
#[doc = "Register `CMD` reader"]
pub type R = crate::R<CMD_SPEC>;
#[doc = "Register `CMD` writer"]
pub type W = crate::W<CMD_SPEC>;
#[doc = "Field `CMDINDEX` reader - Command index. This bit can only be written by firmware when CPSM is disabled (CPSMEN = 0). The command index is sent to the card as part of a command message."]
pub type CMDINDEX_R = crate::FieldReader;
#[doc = "Field `CMDINDEX` writer - Command index. This bit can only be written by firmware when CPSM is disabled (CPSMEN = 0). The command index is sent to the card as part of a command message."]
pub type CMDINDEX_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 6, O>;
#[doc = "Field `WAITRESP` reader - Wait for response bits. This bit can only be written by firmware when CPSM is disabled (CPSMEN = 0). They are used to configure whether the CPSM is to wait for a response, and if yes, which kind of response."]
pub type WAITRESP_R = crate::FieldReader<WAITRESP_A>;
#[doc = "Wait for response bits. This bit can only be written by firmware when CPSM is disabled (CPSMEN = 0). They are used to configure whether the CPSM is to wait for a response, and if yes, which kind of response.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum WAITRESP_A {
    #[doc = "0: No response"]
    NoResponse = 0,
    #[doc = "1: Short response"]
    ShortResponse = 1,
    #[doc = "2: No response"]
    NoResponse2 = 2,
    #[doc = "3: Long response"]
    LongResponse = 3,
}
impl From<WAITRESP_A> for u8 {
    #[inline(always)]
    fn from(variant: WAITRESP_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for WAITRESP_A {
    type Ux = u8;
}
impl WAITRESP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> WAITRESP_A {
        // All four 2-bit patterns are covered, so `unreachable!` cannot fire.
        match self.bits {
            0 => WAITRESP_A::NoResponse,
            1 => WAITRESP_A::ShortResponse,
            2 => WAITRESP_A::NoResponse2,
            3 => WAITRESP_A::LongResponse,
            _ => unreachable!(),
        }
    }
    #[doc = "No response"]
    #[inline(always)]
    pub fn is_no_response(&self) -> bool {
        *self == WAITRESP_A::NoResponse
    }
    #[doc = "Short response"]
    #[inline(always)]
    pub fn is_short_response(&self) -> bool {
        *self == WAITRESP_A::ShortResponse
    }
    #[doc = "No response"]
    #[inline(always)]
    pub fn is_no_response2(&self) -> bool {
        *self == WAITRESP_A::NoResponse2
    }
    #[doc = "Long response"]
    #[inline(always)]
    pub fn is_long_response(&self) -> bool {
        *self == WAITRESP_A::LongResponse
    }
}
#[doc = "Field `WAITRESP` writer - Wait for response bits. This bit can only be written by firmware when CPSM is disabled (CPSMEN = 0). They are used to configure whether the CPSM is to wait for a response, and if yes, which kind of response."]
pub type WAITRESP_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, WAITRESP_A>;
impl<'a, REG, const O: u8> WAITRESP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "No response"]
    #[inline(always)]
    pub fn no_response(self) -> &'a mut crate::W<REG> {
        self.variant(WAITRESP_A::NoResponse)
    }
    #[doc = "Short response"]
    #[inline(always)]
    pub fn short_response(self) -> &'a mut crate::W<REG> {
        self.variant(WAITRESP_A::ShortResponse)
    }
    #[doc = "No response"]
    #[inline(always)]
    pub fn no_response2(self) -> &'a mut crate::W<REG> {
        self.variant(WAITRESP_A::NoResponse2)
    }
    #[doc = "Long response"]
    #[inline(always)]
    pub fn long_response(self) -> &'a mut crate::W<REG> {
        self.variant(WAITRESP_A::LongResponse)
    }
}
#[doc = "Field `WAITINT` reader - CPSM waits for interrupt request. If this bit is set, the CPSM disables command timeout and waits for an card interrupt request (Response). If this bit is cleared in the CPSM Wait state, will cause the abort of the interrupt mode."]
pub type WAITINT_R = crate::BitReader<WAITINT_A>;
#[doc = "CPSM waits for interrupt request. If this bit is set, the CPSM disables command timeout and waits for an card interrupt request (Response). If this bit is cleared in the CPSM Wait state, will cause the abort of the interrupt mode.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum WAITINT_A {
    #[doc = "0: Don't wait for interrupt request"]
    Disabled = 0,
    #[doc = "1: Wait for interrupt request"]
    Enabled = 1,
}
impl From<WAITINT_A> for bool {
    #[inline(always)]
    fn from(variant: WAITINT_A) -> Self {
        variant as u8 != 0
    }
}
impl WAITINT_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> WAITINT_A {
        match self.bits {
            false => WAITINT_A::Disabled,
            true => WAITINT_A::Enabled,
        }
    }
    #[doc = "Don't wait for interrupt request"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == WAITINT_A::Disabled
    }
    #[doc = "Wait for interrupt request"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == WAITINT_A::Enabled
    }
}
#[doc = "Field `WAITINT` writer - CPSM waits for interrupt request. If this bit is set, the CPSM disables command timeout and waits for an card interrupt request (Response). If this bit is cleared in the CPSM Wait state, will cause the abort of the interrupt mode."]
pub type WAITINT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, WAITINT_A>;
impl<'a, REG, const O: u8> WAITINT_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Don't wait for interrupt request"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(WAITINT_A::Disabled)
    }
    #[doc = "Wait for interrupt request"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(WAITINT_A::Enabled)
    }
}
#[doc = "Field `WAITPEND` reader - PSM Waits for ends of data transfer (CmdPend internal signal). If this bit is set, the CPSM waits for the end of data transfer before it starts sending a command. This feature is available only with Stream data transfer mode SDIO_DCTRL\\[2\\] = 1."]
pub type WAITPEND_R = crate::BitReader<WAITPEND_A>;
#[doc = "PSM Waits for ends of data transfer (CmdPend internal signal). If this bit is set, the CPSM waits for the end of data transfer before it starts sending a command. This feature is available only with Stream data transfer mode SDIO_DCTRL\\[2\\] = 1.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum WAITPEND_A {
    #[doc = "0: Don't wait for data end"]
    Disabled = 0,
    #[doc = "1: Wait for end of data transfer signal before sending command"]
    Enabled = 1,
}
impl From<WAITPEND_A> for bool {
    #[inline(always)]
    fn from(variant: WAITPEND_A) -> Self {
        variant as u8 != 0
    }
}
impl WAITPEND_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> WAITPEND_A {
        match self.bits {
            false => WAITPEND_A::Disabled,
            true => WAITPEND_A::Enabled,
        }
    }
    #[doc = "Don't wait for data end"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == WAITPEND_A::Disabled
    }
    #[doc = "Wait for end of data transfer signal before sending command"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == WAITPEND_A::Enabled
    }
}
#[doc = "Field `WAITPEND` writer - PSM Waits for ends of data transfer (CmdPend internal signal). If this bit is set, the CPSM waits for the end of data transfer before it starts sending a command. This feature is available only with Stream data transfer mode SDIO_DCTRL\\[2\\] = 1."]
pub type WAITPEND_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, WAITPEND_A>;
impl<'a, REG, const O: u8> WAITPEND_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Don't wait for data end"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(WAITPEND_A::Disabled)
    }
    #[doc = "Wait for end of data transfer signal before sending command"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(WAITPEND_A::Enabled)
    }
}
#[doc = "Field `CPSMEN` reader - Command path state machine (CPSM) Enable bit This bit is written 1 by firmware, and cleared by hardware when the CPSM enters the Idle state. If this bit is set, the CPSM is enabled. When DTEN = 1, no command will be transfered nor boot procedure will be started. CPSMEN is cleared to 0."]
pub type CPSMEN_R = crate::BitReader<CPSMEN_A>;
#[doc = "Command path state machine (CPSM) Enable bit This bit is written 1 by firmware, and cleared by hardware when the CPSM enters the Idle state. If this bit is set, the CPSM is enabled. When DTEN = 1, no command will be transfered nor boot procedure will be started. CPSMEN is cleared to 0.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CPSMEN_A {
    #[doc = "0: Command path state machine disabled"]
    Disabled = 0,
    #[doc = "1: Command path state machine enabled"]
    Enabled = 1,
}
impl From<CPSMEN_A> for bool {
    #[inline(always)]
    fn from(variant: CPSMEN_A) -> Self {
        variant as u8 != 0
    }
}
impl CPSMEN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CPSMEN_A {
        match self.bits {
            false => CPSMEN_A::Disabled,
            true => CPSMEN_A::Enabled,
        }
    }
    #[doc = "Command path state machine disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == CPSMEN_A::Disabled
    }
    #[doc = "Command path state machine enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == CPSMEN_A::Enabled
    }
}
#[doc = "Field `CPSMEN` writer - Command path state machine (CPSM) Enable bit This bit is written 1 by firmware, and cleared by hardware when the CPSM enters the Idle state. If this bit is set, the CPSM is enabled. When DTEN = 1, no command will be transfered nor boot procedure will be started. CPSMEN is cleared to 0."]
pub type CPSMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CPSMEN_A>;
impl<'a, REG, const O: u8> CPSMEN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Command path state machine disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(CPSMEN_A::Disabled)
    }
    #[doc = "Command path state machine enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(CPSMEN_A::Enabled)
    }
}
#[doc = "Field `SDIOSuspend` reader - SD I/O suspend command. If this bit is set, the command to be sent is a suspend command (to be used only with SDIO card)"]
pub type SDIOSUSPEND_R = crate::BitReader<SDIOSUSPEND_A>;
#[doc = "SD I/O suspend command. If this bit is set, the command to be sent is a suspend command (to be used only with SDIO card)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SDIOSUSPEND_A {
    #[doc = "0: Next command is not a SDIO suspend command"]
    Disabled = 0,
    #[doc = "1: Next command send is a SDIO suspend command"]
    Enabled = 1,
}
impl From<SDIOSUSPEND_A> for bool {
    #[inline(always)]
    fn from(variant: SDIOSUSPEND_A) -> Self {
        variant as u8 != 0
    }
}
impl SDIOSUSPEND_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SDIOSUSPEND_A {
        match self.bits {
            false => SDIOSUSPEND_A::Disabled,
            true => SDIOSUSPEND_A::Enabled,
        }
    }
    #[doc = "Next command is not a SDIO suspend command"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == SDIOSUSPEND_A::Disabled
    }
    #[doc = "Next command send is a SDIO suspend command"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == SDIOSUSPEND_A::Enabled
    }
}
#[doc = "Field `SDIOSuspend` writer - SD I/O suspend command. If this bit is set, the command to be sent is a suspend command (to be used only with SDIO card)"]
pub type SDIOSUSPEND_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SDIOSUSPEND_A>;
impl<'a, REG, const O: u8> SDIOSUSPEND_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Next command is not a SDIO suspend command"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(SDIOSUSPEND_A::Disabled)
    }
    #[doc = "Next command send is a SDIO suspend command"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(SDIOSUSPEND_A::Enabled)
    }
}
// Read accessors: each extracts its field from the cached 32-bit register value.
impl R {
    #[doc = "Bits 0:5 - Command index. This bit can only be written by firmware when CPSM is disabled (CPSMEN = 0). The command index is sent to the card as part of a command message."]
    #[inline(always)]
    pub fn cmdindex(&self) -> CMDINDEX_R {
        CMDINDEX_R::new((self.bits & 0x3f) as u8)
    }
    #[doc = "Bits 6:7 - Wait for response bits. This bit can only be written by firmware when CPSM is disabled (CPSMEN = 0). They are used to configure whether the CPSM is to wait for a response, and if yes, which kind of response."]
    #[inline(always)]
    pub fn waitresp(&self) -> WAITRESP_R {
        WAITRESP_R::new(((self.bits >> 6) & 3) as u8)
    }
    #[doc = "Bit 8 - CPSM waits for interrupt request. If this bit is set, the CPSM disables command timeout and waits for an card interrupt request (Response). If this bit is cleared in the CPSM Wait state, will cause the abort of the interrupt mode."]
    #[inline(always)]
    pub fn waitint(&self) -> WAITINT_R {
        WAITINT_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - PSM Waits for ends of data transfer (CmdPend internal signal). If this bit is set, the CPSM waits for the end of data transfer before it starts sending a command. This feature is available only with Stream data transfer mode SDIO_DCTRL\\[2\\] = 1."]
    #[inline(always)]
    pub fn waitpend(&self) -> WAITPEND_R {
        WAITPEND_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - Command path state machine (CPSM) Enable bit This bit is written 1 by firmware, and cleared by hardware when the CPSM enters the Idle state. If this bit is set, the CPSM is enabled. When DTEN = 1, no command will be transfered nor boot procedure will be started. CPSMEN is cleared to 0."]
    #[inline(always)]
    pub fn cpsmen(&self) -> CPSMEN_R {
        CPSMEN_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - SD I/O suspend command. If this bit is set, the command to be sent is a suspend command (to be used only with SDIO card)"]
    #[inline(always)]
    pub fn sdiosuspend(&self) -> SDIOSUSPEND_R {
        SDIOSUSPEND_R::new(((self.bits >> 11) & 1) != 0)
    }
}
// Write accessors: each returns a field-writer positioned at the field's offset.
impl W {
    #[doc = "Bits 0:5 - Command index. This bit can only be written by firmware when CPSM is disabled (CPSMEN = 0). The command index is sent to the card as part of a command message."]
    #[inline(always)]
    #[must_use]
    pub fn cmdindex(&mut self) -> CMDINDEX_W<CMD_SPEC, 0> {
        CMDINDEX_W::new(self)
    }
    #[doc = "Bits 6:7 - Wait for response bits. This bit can only be written by firmware when CPSM is disabled (CPSMEN = 0). They are used to configure whether the CPSM is to wait for a response, and if yes, which kind of response."]
    #[inline(always)]
    #[must_use]
    pub fn waitresp(&mut self) -> WAITRESP_W<CMD_SPEC, 6> {
        WAITRESP_W::new(self)
    }
    #[doc = "Bit 8 - CPSM waits for interrupt request. If this bit is set, the CPSM disables command timeout and waits for an card interrupt request (Response). If this bit is cleared in the CPSM Wait state, will cause the abort of the interrupt mode."]
    #[inline(always)]
    #[must_use]
    pub fn waitint(&mut self) -> WAITINT_W<CMD_SPEC, 8> {
        WAITINT_W::new(self)
    }
    #[doc = "Bit 9 - PSM Waits for ends of data transfer (CmdPend internal signal). If this bit is set, the CPSM waits for the end of data transfer before it starts sending a command. This feature is available only with Stream data transfer mode SDIO_DCTRL\\[2\\] = 1."]
    #[inline(always)]
    #[must_use]
    pub fn waitpend(&mut self) -> WAITPEND_W<CMD_SPEC, 9> {
        WAITPEND_W::new(self)
    }
    #[doc = "Bit 10 - Command path state machine (CPSM) Enable bit This bit is written 1 by firmware, and cleared by hardware when the CPSM enters the Idle state. If this bit is set, the CPSM is enabled. When DTEN = 1, no command will be transfered nor boot procedure will be started. CPSMEN is cleared to 0."]
    #[inline(always)]
    #[must_use]
    pub fn cpsmen(&mut self) -> CPSMEN_W<CMD_SPEC, 10> {
        CPSMEN_W::new(self)
    }
    #[doc = "Bit 11 - SD I/O suspend command. If this bit is set, the command to be sent is a suspend command (to be used only with SDIO card)"]
    #[inline(always)]
    #[must_use]
    pub fn sdiosuspend(&mut self) -> SDIOSUSPEND_W<CMD_SPEC, 11> {
        SDIOSUSPEND_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "The SDMMC_CMDR register contains the command index and command type bits. The command index is sent to a card as part of a command message. The command type bits control the command path state machine (CPSM).\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cmd::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cmd::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CMD_SPEC;
impl crate::RegisterSpec for CMD_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cmd::R`](R) reader structure"]
impl crate::Readable for CMD_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cmd::W`](W) writer structure"]
impl crate::Writable for CMD_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CMD to value 0"]
impl crate::Resettable for CMD_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use common::tokio::time::Instant;
use std::time::Duration;

use super::super::TIMER_L;

/// State entered when a session is accepted; records the entry instant so
/// the session can be timed out after `TIMER_L` milliseconds.
#[derive(Debug)]
pub struct Accepted {
    /// Moment this state was entered.
    pub entered_at: Instant,
}

impl Accepted {
    /// `true` once strictly more than `TIMER_L` milliseconds have elapsed
    /// since the state was entered.
    pub fn should_terminate(&self) -> bool {
        let limit = Duration::from_millis(TIMER_L);
        self.entered_at.elapsed() > limit
    }
}

impl Default for Accepted {
    /// A freshly-entered state stamped with the current instant.
    fn default() -> Self {
        Accepted {
            entered_at: Instant::now(),
        }
    }
}
extern crate proc_macro; use crate::proc_macro::TokenStream; use quote::quote; use syn::Data::Struct; use syn::Fields; use syn::Type::Path; fn impl_hello_macro(ast: &syn::DeriveInput) -> TokenStream { let name = &ast.ident; let data = &ast.data; // println!("{:#?}", data); let mut defenition = format!("Struct {}", name); if let Struct(def) = data { if let Fields::Named(fields) = &def.fields { for named in &fields.named { let ident = &named.ident; let ty = &named.ty; if let Some(id) = ident { // println!("{}", id); defenition = format!("{}\n {}:", defenition, id) }; if let Path(path) = ty { // println!("{}", path.path.segments[0].ident); defenition = format!("{} {}", defenition, path.path.segments[0].ident) } } } }; let gen = quote! { impl HelloMacro for #name { fn helpify() { println!(#defenition); } } }; gen.into() } #[proc_macro_derive(HelloMacro)] pub fn hello_macro_derive(input: TokenStream) -> TokenStream { let ast = syn::parse(input).unwrap(); impl_hello_macro(&ast) }
use std::collections::HashMap; // game_mode: 22 = role queue // win/loss = +- 20 MMR // Wins: 1061 Lose: 1125 = 2200 MMR #[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error>> { // steamid = "60374563" // dota // steamid = "76561198020640291" // steam let request_url = format!( "https://api.opendota.com/api/players/{steamid}/wl?game_mode=22", steamid = "60374563" ); // println!("{}", request_url); let response = reqwest::get(&request_url) .await? .json::<HashMap<String, i32>>() .await?; // println!("response: {:?}", response); // println!("lose: {:?}", lose); let wins = match response.get("win"){ Some(x) => *x, None => 0 as i32 }; let lose = match response.get("lose"){ Some(x) => *x, None => 0 as i32 }; let start_mmr = 2200; let diff_wins = 1061 - wins; let diff_lose = 1125 - lose; let curr_mmr = start_mmr + diff_wins * 20 - diff_lose * 20; println!("MMR: {}", curr_mmr); Ok(()) }
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0

// Builds, persists, loads and executes the genesis block for every
// supported chain network.

use anyhow::{ensure, Result};
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use starcoin_accumulator::node::{AccumulatorStoreType, ACCUMULATOR_PLACEHOLDER_HASH};
use starcoin_accumulator::{Accumulator, MerkleAccumulator};
use starcoin_config::ChainNetwork;
use starcoin_crypto::{hash::PlainCryptoHash, HashValue};
use starcoin_logger::prelude::*;
use starcoin_state_api::ChainState;
use starcoin_statedb::ChainStateDB;
use starcoin_storage::cache_storage::CacheStorage;
use starcoin_storage::storage::StorageInstance;
use starcoin_storage::{Storage, Store};
use starcoin_transaction_builder::{build_upgrade_package, StdLibOptions};
use starcoin_types::block::{BlockInfo, BlockState};
use starcoin_types::startup_info::StartupInfo;
use starcoin_types::transaction::TransactionInfo;
use starcoin_types::{
    accumulator_info::AccumulatorInfo, block::Block, transaction::Transaction,
    vm_error::StatusCode, U256,
};
use starcoin_vm_types::account_config::CORE_CODE_ADDRESS;
use starcoin_vm_types::transaction::{
    RawUserTransaction, SignedUserTransaction, TransactionPayload,
};
use std::collections::HashMap;
use std::convert::TryInto;
use std::fmt::Display;
use std::fs::{create_dir_all, File};
use std::io::{Read, Write};
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;

/// File name used when a genesis block is saved into a data directory.
pub static GENESIS_FILE_NAME: &str = "genesis";
/// Directory name holding the pre-generated genesis files.
pub static GENESIS_GENERATED_DIR: &str = "generated";

// Pre-generated genesis blobs compiled into the binary, one per network.
const DEV_GENESIS_BYTES: &[u8] = std::include_bytes!("../generated/dev/genesis");
const HALLEY_GENESIS_BYTES: &[u8] = std::include_bytes!("../generated/halley/genesis");
const PROXIMA_GENESIS_BYTES: &[u8] = std::include_bytes!("../generated/proxima/genesis");
const MAIN_GENESIS_BYTES: &[u8] = std::include_bytes!("../generated/main/genesis");

/// Per-network genesis deserialized lazily from the embedded bytes.
pub static GENERATED_GENESIS: Lazy<HashMap<ChainNetwork, Genesis>> = Lazy::new(|| {
    let mut genesis = HashMap::new();
    for net in ChainNetwork::networks() {
        genesis.insert(net, Genesis::load_generated(net));
    }
    genesis
});

/// Per-network genesis rebuilt from scratch; panics if building fails.
pub static FRESH_GENESIS: Lazy<HashMap<ChainNetwork, Genesis>> = Lazy::new(|| {
    let mut genesis = HashMap::new();
    for net in ChainNetwork::networks() {
        genesis.insert(
            net,
            Genesis::build(net)
                .unwrap_or_else(|e| panic!("build genesis for {} fail: {:?}", net, e)),
        );
    }
    genesis
});

/// Selects which genesis source to read from.
pub enum GenesisOpt {
    /// Load generated genesis
    Generated,
    /// Regenerate genesis
    Fresh,
}

/// A chain's genesis, wrapping the genesis [`Block`].
#[derive(Debug, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub struct Genesis {
    block: Block,
}

impl Display for Genesis {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Genesis {{")?;
        write!(f, "block: {:?}", self.block.header.id())?;
        write!(f, "}}")?;
        Ok(())
    }
}

impl Genesis {
    /// Fetches the cached genesis for `net` from the source chosen by `option`.
    ///
    /// Panics if no genesis exists for the network — both lazy caches cover
    /// every known network, so a miss indicates a configuration bug.
    pub fn load_by_opt(option: GenesisOpt, net: ChainNetwork) -> Result<Self> {
        let genesis = match option {
            GenesisOpt::Generated => (&GENERATED_GENESIS).get(&net),
            GenesisOpt::Fresh => (&FRESH_GENESIS).get(&net),
        };
        Ok(genesis
            .unwrap_or_else(|| panic!("Genesis for {} must exist.", net))
            .clone())
    }

    /// Load pre generated genesis.
    pub fn load(net: ChainNetwork) -> Result<Self> {
        Self::load_by_opt(GenesisOpt::Generated, net)
    }

    /// Build fresh genesis
    pub(crate) fn build(net: ChainNetwork) -> Result<Self> {
        debug!("Init genesis");
        let block = Self::build_genesis_block(net)?;
        // Genesis must always be block number 0.
        assert_eq!(block.header().number(), 0);
        debug!("Genesis block id : {:?}", block.header().id());
        let genesis = Self { block };
        Ok(genesis)
    }

    /// Executes the genesis transaction against a throwaway in-memory state
    /// DB and assembles the genesis [`Block`] from the resulting roots.
    fn build_genesis_block(net: ChainNetwork) -> Result<Block> {
        let chain_config = net.get_config();
        let txn = Self::build_genesis_transaction(net)?;
        // Cache-backed storage: only needed here to compute the roots.
        let storage = Arc::new(Storage::new(StorageInstance::new_cache_instance(
            CacheStorage::new(),
        ))?);
        let chain_state_db = ChainStateDB::new(storage.clone(), None);
        let transaction_info = Self::execute_genesis_txn(&chain_state_db, txn.clone())?;
        // Empty transaction accumulator seeded with the single genesis txn.
        let accumulator = MerkleAccumulator::new(
            *ACCUMULATOR_PLACEHOLDER_HASH,
            vec![],
            0,
            0,
            AccumulatorStoreType::Transaction,
            storage,
        )?;
        let txn_info_hash = transaction_info.crypto_hash();
        let (accumulator_root, _) = accumulator.append(vec![txn_info_hash].as_slice())?;
        accumulator.flush()?;
        Ok(Block::genesis_block(
            chain_config.parent_hash,
            chain_config.timestamp,
            accumulator_root,
            transaction_info.state_root_hash(),
            chain_config.difficulty,
            chain_config.consensus_header.clone(),
            txn,
        ))
    }

    /// Builds the stdlib-upgrade package transaction, signed with the
    /// network's well-known genesis key pair.
    pub fn build_genesis_transaction(net: ChainNetwork) -> Result<SignedUserTransaction> {
        let package = build_upgrade_package(net, StdLibOptions::Staged, true)?;
        let txn = RawUserTransaction::new(
            CORE_CODE_ADDRESS,
            0,
            TransactionPayload::Package(package),
            0,
            0,
            Duration::from_secs(0),
        );
        let (genesis_private_key, genesis_public_key) = ChainNetwork::genesis_key_pair();
        let sign_txn = txn.sign(&genesis_private_key, genesis_public_key)?;
        Ok(sign_txn.into_inner())
    }

    /// Executes `txn` on `chain_state`, then applies, commits and flushes the
    /// resulting write set, returning the [`TransactionInfo`].
    ///
    /// Fails unless the VM reports `EXECUTED`.
    pub fn execute_genesis_txn(
        chain_state: &dyn ChainState,
        txn: SignedUserTransaction,
    ) -> Result<TransactionInfo> {
        let txn = Transaction::UserTransaction(txn);
        let txn_hash = txn.id();
        let output = starcoin_executor::execute_transactions(chain_state.as_super(), vec![txn])?
            .pop()
            .expect("Execute output must exist.");
        let (write_set, events, gas_used, status) = output.into_inner();
        ensure!(
            status.vm_status().major_status == StatusCode::EXECUTED,
            "Genesis txn execute fail for: {:?}",
            status
        );
        // Order matters: apply the write set before committing and flushing.
        chain_state.apply_write_set(write_set)?;
        let state_root = chain_state.commit()?;
        chain_state.flush()?;
        Ok(TransactionInfo::new(
            txn_hash,
            state_root,
            //TODO genesis event.
            HashValue::zero(),
            events,
            gas_used,
            status.vm_status().major_status,
        ))
    }

    /// The wrapped genesis block.
    pub fn block(&self) -> &Block {
        &self.block
    }

    /// Deserializes a genesis previously written by [`Genesis::save`];
    /// returns `Ok(None)` when no genesis file exists in `data_dir`.
    pub fn load_from_dir<P>(data_dir: P) -> Result<Option<Self>>
    where
        P: AsRef<Path>,
    {
        let genesis_file_path = data_dir.as_ref().join(GENESIS_FILE_NAME);
        if !genesis_file_path.exists() {
            return Ok(None);
        }
        let mut genesis_file = File::open(genesis_file_path)?;
        let mut content = vec![];
        genesis_file.read_to_end(&mut content)?;
        let genesis = scs::from_bytes(&content)?;
        Ok(Some(genesis))
    }

    /// Embedded genesis bytes for `net`.
    fn genesis_bytes(net: ChainNetwork) -> &'static [u8] {
        match net {
            ChainNetwork::Dev => DEV_GENESIS_BYTES,
            ChainNetwork::Halley => HALLEY_GENESIS_BYTES,
            ChainNetwork::Proxima => PROXIMA_GENESIS_BYTES,
            ChainNetwork::Main => MAIN_GENESIS_BYTES,
        }
    }

    /// Deserializes the embedded pre-generated genesis for `net`.
    ///
    /// Panics on deserialization failure (corrupt embedded bytes).
    pub fn load_generated(net: ChainNetwork) -> Self {
        let bytes = Self::genesis_bytes(net);
        scs::from_bytes(bytes).expect("Deserialize genesis must ok.")
    }

    /// Executes the genesis block against `storage`, verifying the state and
    /// accumulator roots against the block header, then persists the block,
    /// its [`BlockInfo`] and the resulting [`StartupInfo`].
    pub fn execute(self, storage: Arc<dyn Store>) -> Result<StartupInfo> {
        let Genesis { block } = self;
        let (header, body) = block.clone().into_inner();
        let chain_state_db = ChainStateDB::new(storage.clone().into_super_arc(), None);
        let mut txns: Vec<SignedUserTransaction> = body.into();
        ensure!(
            txns.len() == 1,
            "Genesis block must only contains one genesis txn."
        );
        let genesis_txn = txns.pop().expect("Genesis txn must exist.");
        let transaction_info = Self::execute_genesis_txn(&chain_state_db, genesis_txn)?;
        // Re-executing the txn must reproduce the roots recorded in the header.
        ensure!(
            header.state_root() == transaction_info.state_root_hash(),
            "Genesis block state root mismatch."
        );
        let txn_accumulator = MerkleAccumulator::new(
            *ACCUMULATOR_PLACEHOLDER_HASH,
            vec![],
            0,
            0,
            AccumulatorStoreType::Transaction,
            storage.clone().into_super_arc(),
        )?;
        let txn_info_hash = transaction_info.crypto_hash();
        let (_, _) = txn_accumulator.append(vec![txn_info_hash].as_slice())?;
        txn_accumulator.flush()?;
        let txn_accumulator_info: AccumulatorInfo = (&txn_accumulator).try_into()?;
        ensure!(header.number() == 0, "Genesis block number must is 0.");
        debug!("Genesis block id : {:?}", header.id());
        ensure!(
            header.accumulator_root() == *txn_accumulator_info.get_accumulator_root(),
            "Genesis block accumulator root mismatch."
        );
        // Only after all checks pass does anything get persisted.
        storage.commit_block(block, BlockState::Executed)?;
        let startup_info = StartupInfo::new(header.id(), vec![]);
        let block_info = BlockInfo::new_with_accumulator_info(
            header.id(),
            txn_accumulator_info,
            Self::genesis_block_accumulator_info(header.id(), storage.clone())?,
            U256::zero(),
        );
        debug!("Genesis block_info: {:?}", block_info);
        storage.save_block_info(block_info)?;
        storage.save_startup_info(startup_info.clone())?;
        Ok(startup_info)
    }

    /// Serializes this genesis into `data_dir/genesis`, creating the
    /// directory if needed.
    pub fn save<P>(&self, data_dir: P) -> Result<()>
    where
        P: AsRef<Path>,
    {
        let data_dir = data_dir.as_ref();
        if !data_dir.exists() {
            create_dir_all(data_dir)?;
        }
        let genesis_file = data_dir.join(GENESIS_FILE_NAME);
        let mut file = File::create(genesis_file)?;
        let contents = scs::to_bytes(self)?;
        file.write_all(&contents)?;
        Ok(())
    }

    /// Builds the block accumulator seeded with only the genesis block id.
    fn genesis_block_accumulator_info(
        genesis_block_id: HashValue,
        storage: Arc<dyn Store>,
    ) -> Result<AccumulatorInfo> {
        let accumulator = MerkleAccumulator::new(
            *ACCUMULATOR_PLACEHOLDER_HASH,
            vec![],
            0,
            0,
            AccumulatorStoreType::Block,
            storage.clone().into_super_arc(),
        )?;
        let (_, _) = accumulator.append(vec![genesis_block_id].as_slice())?;
        accumulator.flush()?;
        (&accumulator).try_into()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use starcoin_state_api::AccountStateReader;
    use starcoin_storage::block_info::BlockInfoStore;
    use starcoin_storage::cache_storage::CacheStorage;
    use starcoin_storage::storage::StorageInstance;
    use starcoin_storage::{BlockStore, IntoSuper, Storage};
    use starcoin_vm_types::account_config::association_address;
    use starcoin_vm_types::on_chain_config::{RegisteredCurrencies, VMConfig, Version};

    /// The embedded pre-generated genesis must deserialize for every network.
    #[stest::test]
    pub fn test_genesis_load() -> Result<()> {
        for net in ChainNetwork::networks() {
            Genesis::load(net)?;
        }
        Ok(())
    }

    /// Full build/save/load/execute round-trip for every network.
    #[stest::test]
    pub fn test_genesis() -> Result<()> {
        for net in ChainNetwork::networks() {
            do_test_genesis(net)?;
        }
        Ok(())
    }

    /// Builds a fresh genesis, round-trips it through disk, executes both
    /// copies and checks the resulting on-chain state and accumulators.
    pub fn do_test_genesis(net: ChainNetwork) -> Result<()> {
        let temp_dir = starcoin_config::temp_path();
        let genesis = Genesis::build(net)?;
        debug!("build genesis {} for {:?}", genesis, net);
        genesis.save(temp_dir.as_ref())?;
        let genesis2 = Genesis::load_from_dir(temp_dir.as_ref())?;
        assert!(genesis2.is_some(), "load genesis fail.");
        let genesis2 = genesis2.unwrap();
        assert_eq!(genesis, genesis2, "genesis save and load different.");
        let storage = Arc::new(Storage::new(StorageInstance::new_cache_instance(
            CacheStorage::new(),
        ))?);
        let startup_info = genesis.execute(storage.clone())?;
        let storage2 = Arc::new(Storage::new(StorageInstance::new_cache_instance(
            CacheStorage::new(),
        ))?);
        let startup_info2 = genesis2.execute(storage2)?;
        assert_eq!(
            startup_info, startup_info2,
            "genesis execute startup info different."
        );
        let genesis_block = storage
            .get_block(startup_info.master)?
            .expect("Genesis block must exist.");
        let state_db = ChainStateDB::new(
            storage.clone().into_super_arc(),
            Some(genesis_block.header().state_root()),
        );
        let account_state_reader = AccountStateReader::new(&state_db);
        let account_resource = account_state_reader.get_account_resource(&association_address())?;
        assert!(
            account_resource.is_some(),
            "association account must exist in genesis state."
        );
        let currencies = account_state_reader.get_on_chain_config::<RegisteredCurrencies>();
        assert!(
            currencies.is_some(),
            "RegisteredCurrencies on_chain_config should exist."
        );
        assert!(
            !currencies.unwrap().currency_codes().is_empty(),
            "RegisteredCurrencies should not empty."
        );
        let vm_config = account_state_reader.get_on_chain_config::<VMConfig>();
        assert!(
            vm_config.is_some(),
            "VMConfig on_chain_config should exist."
        );
        let version = account_state_reader.get_on_chain_config::<Version>();
        assert!(version.is_some(), "Version on_chain_config should exist.");
        let block_info = storage
            .get_block_info(genesis_block.header().id())?
            .expect("Genesis block info must exist.");
        let txn_accumulator_info = block_info.get_txn_accumulator_info();
        let txn_accumulator = MerkleAccumulator::new(
            *txn_accumulator_info.get_accumulator_root(),
            txn_accumulator_info.get_frozen_subtree_roots().clone(),
            txn_accumulator_info.get_num_leaves(),
            txn_accumulator_info.get_num_nodes(),
            AccumulatorStoreType::Transaction,
            storage.clone().into_super_arc(),
        )?;
        //ensure block_accumulator can work.
        txn_accumulator.append(&[HashValue::random()])?;
        txn_accumulator.flush()?;
        let block_accumulator_info = block_info.get_block_accumulator_info();
        let block_accumulator = MerkleAccumulator::new(
            *block_accumulator_info.get_accumulator_root(),
            block_accumulator_info.get_frozen_subtree_roots().clone(),
            block_accumulator_info.get_num_leaves(),
            block_accumulator_info.get_num_nodes(),
            AccumulatorStoreType::Block,
            storage.into_super_arc(),
        )?;
        let hash = block_accumulator.get_leaf(0)?.expect("leaf 0 must exist.");
        assert_eq!(hash, block_info.block_id);
        //ensure block_accumulator can work.
        block_accumulator.append(&[HashValue::random()])?;
        block_accumulator.flush()?;
        Ok(())
    }
}
use std::collections::HashMap;
use std::fs;

// AoC 2019 day 6: the orbit map is a graph of "CENTER)ORBITER" lines.

/// Parses "A)B" lines into an adjacency list (center -> orbiters) plus a
/// name -> index map. Assumes every line has exactly one ')' separator.
fn gen_graph(input: String) -> (Vec<Vec<usize>>, HashMap<String, usize>) {
    input
        .split("\n")
        .map(|s| {
            s.split(")")
                .map(|z| String::from(z))
                .collect::<Vec<String>>()
        })
        .fold(
            (Vec::new(), HashMap::new()),
            |(mut orbiter_list, mut objects), list| {
                let c = String::from(&list[0]);
                let o = String::from(&list[1]);
                // First sighting of either object allocates its adjacency slot.
                if !objects.contains_key(&c) {
                    orbiter_list.push(Vec::new());
                    objects.insert(c, orbiter_list.len() - 1);
                }
                if !objects.contains_key(&o) {
                    orbiter_list.push(Vec::new());
                    objects.insert(o, orbiter_list.len() - 1);
                }
                // Edge: center -> orbiter.
                orbiter_list[*objects.get(&list[0]).unwrap()].push(*objects.get(&list[1]).unwrap());
                (orbiter_list, objects)
            },
        )
}

fn main() {
    let mut input = fs::read_to_string("resources/day6.input").unwrap();
    // Drop the trailing newline so split("\n") yields no empty last line.
    input.pop();
    let (orbiter_list, objects) = gen_graph(input);
    // println!("orbiter list : {:?}", orbiter_list);
    // println!("object : {:?}", objects);
    let total_edges = count_connections(&orbiter_list);
    println!("Total edges : {}", total_edges);
    let you = *objects.get("YOU").unwrap();
    let san = *objects.get("SAN").unwrap();
    let total_transfers = count_orbital_transfers(&orbiter_list, you, san);
    println!("Total transfers : {}", total_transfers);
}

// DFS result protocol for part 2.
// NOTE(review): non-idiomatic lowercase type name; kept as-is since both
// part-2 functions reference it.
enum moves {
    None,                // subtree contains neither YOU nor SAN
    You(i32),            // distance from this node down to YOU
    San(i32),            // distance from this node down to SAN
    Transfers(i32),      // answer found: YOU-to-SAN transfer count
}

/// Part 2: minimum orbital transfers between the objects YOU and SAN orbit.
/// Returns -1 when no common ancestor connects them.
fn count_orbital_transfers(adj_list: &Vec<Vec<usize>>, you: usize, san: usize) -> i32 {
    /// DFS that bubbles distances to YOU/SAN upward; the first node seeing
    /// both children is the meeting point and sums the two distances.
    fn count_orbital_transfers_rec(
        adj_list: &Vec<Vec<usize>>,
        start: usize,
        visited: &mut Vec<bool>,
        you: usize,
        san: usize,
    ) -> moves {
        let list = &adj_list[start];
        if start == you {
            println!("YOU at {}", start);
            return moves::You(0);
        }
        if start == san {
            println!("SAN at {}", start);
            return moves::San(0);
        }
        let mut m = Vec::new();
        for &v in list {
            let x = count_orbital_transfers_rec(adj_list, v, visited, you, san);
            match x {
                moves::You(a) => {
                    println!("Y{} at {}", a, start);
                    m.push(x);
                }
                moves::San(a) => {
                    println!("S{} at {}", a, start);
                    m.push(x);
                }
                moves::Transfers(a) => {
                    // Answer already found deeper in the tree; pass it up.
                    println!("T{} at {}", a, start);
                    return x;
                }
                moves::None => (),
            }
            // Both YOU and SAN seen below this node: it is the common
            // ancestor, so the transfer count is the sum of both depths.
            if m.len() == 2 {
                println!("Found S & Y at {}", start);
                return moves::Transfers(
                    (match m.pop().unwrap() {
                        moves::You(t) => t,
                        moves::San(t) => t,
                        _ => 0,
                    }) + (match m.pop().unwrap() {
                        moves::You(t) => t,
                        moves::San(t) => t,
                        _ => 0,
                    }),
                );
            }
        }
        visited[start] = true;
        // Only one of YOU/SAN found below: bump its distance by one edge.
        if let Some(x) = m.pop() {
            match x {
                moves::San(a) => moves::San(a + 1),
                moves::You(a) => moves::You(a + 1),
                _ => moves::None,
            }
        } else {
            moves::None
        }
    }
    let mut visited = vec![false; adj_list.len()];
    // The graph may not be rooted at index 0; try every unvisited start.
    for i in 0..adj_list.len() {
        if !visited[i] {
            match count_orbital_transfers_rec(adj_list, i, &mut visited, you, san) {
                moves::Transfers(x) => return x,
                _ => (),
            }
        }
    }
    -1
}

/// Part 1: counts total direct + indirect orbits.
/// Each node contributes one edge per ancestor, accumulated via `nodes_under`.
fn count_connections(adj_list: &Vec<Vec<usize>>) -> i32 {
    /// Returns (nodes in this subtree, orbit count within it).
    /// NOTE(review): visited nodes are still recursed into (without adding
    /// their internal edges again) — presumably to handle multiple roots;
    /// on a tree-shaped AoC input each node has one parent anyway.
    fn count_connections_rec(
        adj_list: &Vec<Vec<usize>>,
        start: usize,
        visited: &mut Vec<bool>,
    ) -> (i32, i32) {
        let list = &adj_list[start];
        let mut nodes_under = 1;
        let mut total_edges = 0;
        for &v in list {
            if !visited[v] {
                let (n, t) = count_connections_rec(adj_list, v, visited);
                nodes_under = nodes_under + n;
                // t = orbits inside the subtree, n = one extra orbit per
                // subtree node for the edge start->v.
                total_edges = total_edges + t + n;
            } else {
                let (n, _) = count_connections_rec(adj_list, v, visited);
                nodes_under = nodes_under + n;
                total_edges = total_edges + n;
            }
        }
        visited[start] = true;
        (nodes_under, total_edges)
    }
    let mut visited = vec![false; adj_list.len()];
    let mut total_edges = 0;
    for i in 0..adj_list.len() {
        if !visited[i] {
            total_edges = total_edges + count_connections_rec(adj_list, i, &mut visited).1;
        }
    }
    total_edges
}
use valis_ds_macros::DebugWith;

/// Wrapper marking a value as sitting under a binder; pairs with
/// [`DebruijnIndex`] for variable-binding bookkeeping.
#[derive(Debug, DebugWith, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Binder<T>(T);

impl<T> Binder<T> {
    /// Wraps `value` under a binder.
    pub fn bind(value: T) -> Self {
        Binder(value)
    }

    /// Borrows the bound value.
    pub fn inner_ref(&self) -> &T {
        &self.0
    }

    /// Converts `&Binder<T>` into `Binder<&T>`.
    pub fn as_ref(&self) -> Binder<&T> {
        Binder(&self.0)
    }

    /// Converts `&mut Binder<T>` into `Binder<&mut T>`.
    pub fn as_mut(&mut self) -> Binder<&mut T> {
        Binder(&mut self.0)
    }

    /// Maps the bound value, consuming the binder.
    pub fn map<U, F>(self, mut func: F) -> Binder<U>
    where
        F: FnMut(T) -> U,
    {
        Binder(func(self.0))
    }

    /// Maps a borrow of the bound value into a new binder.
    pub fn map_ref<U, F>(&self, mut func: F) -> Binder<U>
    where
        F: FnMut(&T) -> U,
    {
        Binder(func(&self.0))
    }
}

// Debruijn indices represent the occurrences of a type variable
valis_ds::typed_index!(@base_no_salsa pub DebruijnIndex core::u32::MAX);

impl DebruijnIndex {
    /// Index referring to the innermost enclosing binder (0).
    // SAFETY-ish: 0 is always a valid index value for this typed index.
    pub const INNERMOST: DebruijnIndex = unsafe { DebruijnIndex::from_u32_unchecked(0) };

    /// Shifts the index inward, under `amount` additional binders.
    pub fn shifted_in(self, amount: u32) -> DebruijnIndex {
        DebruijnIndex::from_u32(self.as_u32() + amount)
    }

    /// Shifts the index outward, out of `amount` binders.
    /// NOTE(review): underflows if `amount > self.as_u32()` — assumes callers
    /// only shift out of binders the index is actually under; confirm.
    pub fn shifted_out(self, amount: u32) -> DebruijnIndex {
        DebruijnIndex::from_u32(self.as_u32() - amount)
    }

    /// Re-expresses the index relative to `to_binder`.
    pub fn shifted_out_to_binder(self, to_binder: DebruijnIndex) -> Self {
        self.shifted_out(to_binder.as_u32() - Self::INNERMOST.as_u32())
    }
}
use std::io::{self,Write};
use std::fs::File;
use std::io::{BufRead, BufReader};

// AoC 2019 day 13 part 2: plays the intcode breakout game to completion.
// Board dimensions of the game screen drawn by the program.
const MAP_ROW : usize = 40;
const MAP_COL : usize = 28;

/// Renders the board and score to stdout; returns the ball's position as it
/// is encountered while drawing. Tile codes: 0 empty, 1 wall, 2 block,
/// 3 paddle, 4 ball.
/// NOTE(review): the board is drawn transposed (outer loop over columns),
/// so the returned pair is (c, r) in this function's axes.
fn print_board(board : &[u8], score : usize) -> (usize, usize) {
    let mut ball_pos = (0, 0);
    for c in 0..MAP_COL {
        for r in 0..MAP_ROW {
            let cc = match board[r*MAP_COL + c] {
                0 => ' ',
                1 => if c == 0 { '_' } else { '|' }, // top wall drawn flat
                2 => { '#' },
                3 => 'T',
                4 => { ball_pos.0 = c; ball_pos.1 = r; '*' },
                _ => unreachable!()
            };
            print!("{}", cc);
        }
        println!();
    }
    println!("Score: {}", score);
    return ball_pos;
}

/// Game-side I/O state for the intcode machine: screen, score, and the last
/// known ball/paddle positions used by the auto-player.
struct Intcode {
    map : Vec<u8>,
    output_pos : (usize, usize),   // pending (x, y) of the 3-value output triplet
    output_count : usize,          // total outputs seen; phase = count % 3
    score_came : bool,             // true when the pending triplet is a score update
    score : usize,
    ball_pos : (usize, usize),
    paddle_pos : (usize, usize)
}

impl Intcode {
    fn new() -> Intcode {
        Intcode {
            map : vec![0u8; MAP_ROW * MAP_COL],
            output_pos : (0, 0),
            ball_pos : (0, 0),
            output_count : 0,
            score_came : false,
            score : 0,
            paddle_pos : (0, 0)
        }
    }

    /// Joystick input callback: -1 left, 0 neutral, 1 right.
    /// Auto-plays by steering the paddle toward the ball's predicted
    /// direction (diff_y is the ball's movement since the last frame).
    fn read_input(&mut self, _count : i64) -> i64 {
        let ball_pos = print_board(&self.map, self.score);
        let diff_y = ball_pos.1 as i32 - self.ball_pos.1 as i32;
        // Manual-play mode, kept for debugging:
        // print!("{} - Input: ", _count);
        // io::stdout().flush().ok().expect("Could not flush stdout");
        // println!("diff: {}, prev ball pos: {:?}, new_ball_pos: {:?}, paddle pos: {:?}", diff_y, self.ball_pos, ball_pos, self.paddle_pos);
        // let mut input_text = String::new();
        // io::stdin()
        //     .read_line(&mut input_text)
        //     .expect("failed to read from stdin");
        let m;
        if ball_pos.1 < self.paddle_pos.1 {
            // Ball left of paddle: chase it unless it is coming back.
            if diff_y > 0 { m = 0; } else { m = -1; }
        } else if ball_pos.1 > self.paddle_pos.1 {
            if diff_y < 0 { m = 0; } else { m = 1; }
        } else {
            // Same column: hold under the ball, nudge along its direction.
            if ball_pos.0 + 1 == self.paddle_pos.0 { m = 0; }
            else if diff_y > 0 { m = 1; }
            else if diff_y < 0 { m = -1; }
            else { m = 0; }
        }
        // println!("move: {}", m);
        self.ball_pos = ball_pos;
        return m;
    }

    /// Output callback. Outputs arrive as (x, y, tile) triplets; the special
    /// triplet (-1, 0, score) updates the score instead of the screen.
    fn write_output(&mut self, _count: i64, value : i64) {
        match self.output_count % 3 {
            0 => {
                if value == -1 { self.score_came = true; }
                else { self.output_pos.0 = value as usize; }
            },
            1 => {
                // Anything other than (-1, 0, ...) is a normal tile triplet.
                if !self.score_came || value != 0 {
                    self.output_pos.1 = value as usize;
                    self.score_came = false;
                }
            },
            2 => {
                if self.score_came {
                    self.score = value as usize;
                    self.score_came = false;
                } else {
                    // Board is stored with the axes swapped relative to the
                    // triplet's (x, y) — consistent with print_board above.
                    self.map[self.output_pos.0 * MAP_COL + self.output_pos.1] = value as u8;
                    if value == 3 {
                        self.paddle_pos.0 = self.output_pos.1;
                        self.paddle_pos.1 = self.output_pos.0;
                    }
                }
            },
            _ => unreachable!()
        }
        self.output_count += 1;
    }
}

/// Reads one intcode parameter. Modes: 0 position, 1 immediate, 2 relative.
/// Invalid modes are logged and read as 0.
fn get_parameter(commands: &Vec<i64>, ip : usize, mode : i64, relative_base : i64) -> i64 {
    match mode {
        0 => return commands[commands[ip] as usize],
        1 => return commands[ip],
        2 => return commands[(relative_base + commands[ip]) as usize],
        _ => println!("Invalid mode: {}, ip: {}, command: {}, commands: {:?}", mode, ip, commands[ip], commands),
    }
    return 0;
}

/// Writes an intcode parameter. Immediate mode (1) is invalid for writes
/// and is logged; the write is then skipped.
fn set_parameter(commands: &mut Vec<i64>, ip : usize, mode : i64, relative_base : i64, value : i64) {
    match mode {
        0 => {
            let input_idx = commands[ip] as usize;
            commands[input_idx] = value;
        },
        1 => println!("Invalid mode: {}, ip: {}, command: {}, commands: {:?}", mode, ip, commands[ip], commands),
        2 => {
            let input_idx = (relative_base + commands[ip]) as usize;
            commands[input_idx] = value;
        },
        _ => println!("Invalid mode: {}, ip: {}, command: {}, commands: {:?}", mode, ip, commands[ip], commands),
    };
}

/// Runs the intcode program in `commands`, delegating input/output to
/// `intcode`. Returns `commands[0]` on a clean halt (opcode 99), 0 otherwise.
fn emulate(commands: &mut Vec<i64>, intcode : &mut Intcode) -> i64 {
    let mut ip = 0;
    let mut count = 0;  // instruction counter, passed to I/O callbacks for tracing
    let mut relative_base = 0;
    loop {
        if ip >= commands.len() { println!("Outside memory"); break; }
        count += 1;
        // Opcode is the low two digits; parameter modes are the digits above.
        let full_opcode =commands[ip];
        let opcode = full_opcode % 100;
        let mode1 = (full_opcode / 100) % 10;
        let mode2 = (full_opcode / 1000) % 10;
        let mode3 = (full_opcode / 10000) % 10;
        match opcode {
            // add
            1 => {
                if ip + 4 > commands.len() { println!("Outside memory"); break; }
                let param1 = get_parameter(commands, ip + 1, mode1, relative_base);
                let param2 = get_parameter(commands, ip + 2, mode2, relative_base);
                let new_value = param1 + param2;
                set_parameter(commands, ip + 3, mode3, relative_base, new_value);
                ip += 4;
            },
            // multiply
            2 => {
                if ip + 4 > commands.len() { println!("Outside memory"); break; }
                let param1 = get_parameter(commands, ip + 1, mode1, relative_base);
                let param2 = get_parameter(commands, ip + 2, mode2, relative_base);
                let new_value = param1 * param2;
                set_parameter(commands, ip + 3, mode3, relative_base, new_value);
                ip += 4;
            },
            // input (joystick)
            3 => {
                if ip + 2 > commands.len() { println!("Outside memory"); break; }
                let value = intcode.read_input(count);
                set_parameter(commands, ip + 1, mode1, relative_base, value);
                ip += 2;
            },
            // output (screen/score)
            4 => {
                if ip + 2 > commands.len() { println!("Outside memory"); break; }
                let value = get_parameter(commands, ip + 1, mode1, relative_base);
                intcode.write_output(count, value);
                ip += 2;
            },
            // jump-if-true
            5 => {
                if ip + 3 > commands.len() { println!("Outside memory"); break; }
                let param1 = get_parameter(commands, ip + 1, mode1, relative_base);
                if param1 != 0 {
                    let value = get_parameter(commands, ip + 2, mode2, relative_base);
                    if value < 0 { println!("Invalid jump address {}", value); }
                    ip = value as usize;
                } else {
                    ip += 3;
                }
            },
            // jump-if-false
            6 => {
                if ip + 3 > commands.len() { println!("Outside memory"); break; }
                let param1 = get_parameter(commands, ip + 1, mode1, relative_base);
                if param1 == 0 {
                    let value = get_parameter(commands, ip + 2, mode2, relative_base);
                    if value < 0 { println!("Invalid jump address {}", value); }
                    ip = value as usize;
                } else {
                    ip += 3;
                }
            },
            // less-than
            7 => {
                if ip + 4 > commands.len() { println!("Outside memory"); break; }
                let param1 = get_parameter(commands, ip + 1, mode1, relative_base);
                let param2 = get_parameter(commands, ip + 2, mode2, relative_base);
                let new_value = if param1 < param2 { 1 } else { 0 };
                set_parameter(commands, ip + 3, mode3, relative_base, new_value);
                ip += 4;
            },
            // equals
            8 => {
                if ip + 4 > commands.len() { println!("Outside memory"); break; }
                let param1 = get_parameter(commands, ip + 1, mode1, relative_base);
                let param2 = get_parameter(commands, ip + 2, mode2, relative_base);
                let new_value = if param1 == param2 { 1 } else { 0 };
                set_parameter(commands, ip + 3, mode3, relative_base, new_value);
                ip += 4;
            },
            // adjust relative base
            9 => {
                if ip + 2 > commands.len() { println!("Outside memory"); break; }
                let param1 = get_parameter(commands, ip + 1, mode1, relative_base);
                relative_base += param1;
                ip += 2;
            },
            99 => { println!("{} - Halting", count); return commands[0]; },
            _ => println!("Invalid command: {}, ip: {}, commands: {:?}", commands[ip], ip, commands),
        }
    }
    return 0;
}

fn main() {
    let filename = "../part1/src/input";
    // Open the file in read-only mode (ignoring errors).
    let file = File::open(filename).unwrap();
    let reader = BufReader::new(file);
    let mut commands = Vec::new();
    // Read the file line by line using the lines() iterator from std::io::BufRead.
    // Only the first line holds the program.
    for (_, line) in reader.lines().enumerate() {
        let line = line.unwrap(); // Ignore errors.
        let commands_str = line.split(",");
        for comm_str in commands_str {
            let comm: i64 = comm_str.parse().unwrap();
            commands.push(comm);
        }
        break;
    }
    // Intcode programs may address memory past the program text.
    commands.resize(3000, 0);
    // Memory address 0 = 2 switches the game to free-play mode (no quarters).
    commands[0] = 2;
    let mut intcode = Intcode::new();
    emulate(&mut commands, &mut intcode);
    println!("Solution: {}", intcode.score);
}
use super::PointDataType; use crate::base::PointWriter; use anyhow::{Context, Result}; use pasture_core::containers::{UntypedPoint, UntypedPointSlice}; use pasture_core::layout::{attributes, PointLayout}; use pasture_core::nalgebra::Vector3; // combined trait to handle the PointWriter trait aswell as the AsciiFormat trait pub trait PointWriterFormatting: PointWriter + AsciiFormat {} pub trait AsciiFormat{ fn set_delimiter(&mut self, delimiter: &str); fn set_precision(&mut self, precision: usize); } pub(crate) struct RawAsciiWriter<T: std::io::Write + std::io::Seek> { writer: T, delimiter: String, precision: usize, parse_layout: Vec<PointDataType>, default_layout: PointLayout, } impl<T: std::io::Write + std::io::Seek> RawAsciiWriter<T> { pub fn from_write(write: T, format: &str) -> Result<Self> { Ok(Self { writer: write, delimiter: String::from(", "), precision: 5, parse_layout: PointDataType::get_parse_layout(format)?, default_layout: PointLayout::default(), }) } } impl<T: std::io::Write + std::io::Seek> AsciiFormat for RawAsciiWriter<T> { fn set_delimiter(&mut self, delimiter: &str) { self.delimiter = String::from(delimiter); } fn set_precision(&mut self, precision: usize) { self.precision = precision; } } impl<T: std::io::Write + std::io::Seek> PointWriterFormatting for RawAsciiWriter<T> { } impl<T: std::io::Write + std::io::Seek> PointWriter for RawAsciiWriter<T> { fn write(&mut self, points: &dyn pasture_core::containers::PointBuffer) -> anyhow::Result<()> { //let point = UntypedPointBuffer::new(&self.default_layout); let buffer_layout = points.point_layout(); // Similar to RawLASReader, write points in chunks of a fixed size to prevent overhead of // repeated virtual calls to 'dyn PointBuffer' let size_of_single_point = buffer_layout.size_of_point_entry() as usize; let num_points_in_chunk = 50_000; let num_chunks = (points.len() + (num_points_in_chunk - 1)) / num_points_in_chunk; let mut chunk_buffer: Vec<u8> = vec![0; num_points_in_chunk * 
size_of_single_point]; for chunk_index in 0..num_chunks { let points_in_cur_chunk = std::cmp::min( num_points_in_chunk, points.len() - (chunk_index * num_points_in_chunk), ); let start_point_index = chunk_index * num_points_in_chunk; points.get_raw_points( start_point_index..(start_point_index + points_in_cur_chunk), &mut chunk_buffer[..points_in_cur_chunk * size_of_single_point], ); //Iterate over each point for point_index_in_chunk in 0..points_in_cur_chunk { let start = point_index_in_chunk * size_of_single_point; let end = start + size_of_single_point; let point = UntypedPointSlice::new(buffer_layout, &mut chunk_buffer[start..end]); //write point for (index, format_literal) in self.parse_layout.iter().enumerate() { match format_literal { PointDataType::Skip => {} PointDataType::CoordinateX => { let pos = point.get_attribute::<Vector3<f64>>(&attributes::POSITION_3D)?; self.writer.write( trim_unnecessary_tailing_zeros(&format!( "{:.1$}", pos.x, self.precision )) .as_bytes(), )?; } PointDataType::CoordinateY => { let pos = point.get_attribute::<Vector3<f64>>(&attributes::POSITION_3D)?; self.writer.write( trim_unnecessary_tailing_zeros(&format!( "{:.1$}", pos.y, self.precision )) .as_bytes(), )?; } PointDataType::CoordinateZ => { let pos = point.get_attribute::<Vector3<f64>>(&attributes::POSITION_3D)?; self.writer.write( trim_unnecessary_tailing_zeros(&format!( "{:.1$}", pos.z, self.precision )) .as_bytes(), )?; } PointDataType::Intensity => { let intensity = point.get_attribute::<u64>(&attributes::INTENSITY)?; self.writer.write(intensity.to_string().as_bytes())?; } PointDataType::ReturnNumber => { let return_number = point.get_attribute::<u64>(&attributes::RETURN_NUMBER)?; self.writer.write(return_number.to_string().as_bytes())?; } PointDataType::NumberOfReturns => { let number_of_returns = point.get_attribute::<u64>(&attributes::NUMBER_OF_RETURNS)?; self.writer .write(number_of_returns.to_string().as_bytes())?; } PointDataType::Classification => { let 
classification = point.get_attribute::<u64>(&attributes::RETURN_NUMBER)?; self.writer.write(classification.to_string().as_bytes())?; } PointDataType::UserData => { let classification = point.get_attribute::<u64>(&attributes::RETURN_NUMBER)?; self.writer.write(classification.to_string().as_bytes())?; } PointDataType::ColorR => { let color = point.get_attribute::<Vector3<u16>>(&attributes::COLOR_RGB)?; self.writer.write(color[0].to_string().as_bytes())?; } PointDataType::ColorG => { let color = point.get_attribute::<Vector3<u16>>(&attributes::COLOR_RGB)?; self.writer.write(color[1].to_string().as_bytes())?; } PointDataType::ColorB => { let color = point.get_attribute::<Vector3<u16>>(&attributes::COLOR_RGB)?; self.writer.write(color[2].to_string().as_bytes())?; } PointDataType::GpsTime => { let gps_time = point.get_attribute::<f64>(&attributes::GPS_TIME)?; self.writer.write( trim_unnecessary_tailing_zeros(&format!( "{:.1$}", gps_time, self.precision )) .as_bytes(), )?; } PointDataType::PointSourceID => { let point_source_id = point.get_attribute::<u64>(&attributes::POINT_SOURCE_ID)?; self.writer.write(point_source_id.to_string().as_bytes())?; } PointDataType::EdgeOfFlightLine => { let edge_of_flight_line = point.get_attribute::<bool>(&attributes::EDGE_OF_FLIGHT_LINE)?; self.writer .write((if edge_of_flight_line { "1" } else { "0" }).as_bytes())?; } PointDataType::ScanDirectionFlag => { let scan_direction_flag = point.get_attribute::<bool>(&attributes::SCAN_DIRECTION_FLAG)?; self.writer .write((if scan_direction_flag { "1" } else { "0" }).as_bytes())?; } PointDataType::ScanAngleRank => { let scan_angle_rank = point.get_attribute::<i64>(&attributes::SCAN_ANGLE_RANK)?; self.writer.write(scan_angle_rank.to_string().as_bytes())?; } PointDataType::NIR => { let nir = point.get_attribute::<u64>(&attributes::NIR)?; self.writer.write(nir.to_string().as_bytes())?; } } if index != self.parse_layout.len() - 1 { self.writer.write(self.delimiter.as_bytes())?; } } 
self.writer.write(b"\n")?; } } Ok(()) } fn flush(&mut self) -> anyhow::Result<()> { self.writer.flush().context("Flush failed") } fn get_default_point_layout(&self) -> &PointLayout { &self.default_layout } } fn trim_unnecessary_tailing_zeros(slice: &str) -> &str { let start = 0; let mut end = slice.len(); while slice[start..end].ends_with('0') && !slice[start..end].ends_with(".0") { end -= 1; } &slice[start..end] } #[cfg(test)] mod tests { use std::{ fs::File, io::{BufRead, BufReader, BufWriter}, }; use crate::ascii::{get_test_file_path, test_data_buffer}; use super::*; use anyhow::Result; use itertools::Itertools; use pasture_core::containers::{ InterleavedVecPointStorage, PointBufferWriteable, UntypedPointBuffer, }; use scopeguard::defer; #[test] fn test_write() -> Result<()> { // create point buffer with one point let layout = PointLayout::from_attributes(&[attributes::POSITION_3D, attributes::INTENSITY]); let mut buffer = InterleavedVecPointStorage::new(layout.clone()); let mut point = UntypedPointBuffer::new(&layout); point.set_attribute( &attributes::POSITION_3D, &Vector3::<f32>::new(1.1, 2.2, 3.3), )?; point.set_attribute(&attributes::INTENSITY, &32_u16)?; buffer.push(&point.get_interleaved_point_view()); let out_path = "./test_ascii_writer.txt"; defer! { std::fs::remove_file(out_path).expect("Could not remove test file"); } let mut writer = RawAsciiWriter::from_write(BufWriter::new(File::create(&out_path)?), "ixyz")?; writer.write(&buffer)?; Ok(()) } #[test] fn test_write_all_attribute() -> Result<()> { let out_path = "./test_ascii_writer_attributes.txt"; defer! 
{ std::fs::remove_file(out_path).expect("Could not remove test file"); } let test_data = test_data_buffer()?; { let mut writer = RawAsciiWriter::from_write( BufWriter::new(File::create(&out_path)?), "xyzirncuRGBtpedaI", )?; writer.write(&*test_data)?; writer.flush()?; } //Check result file let result_file = BufReader::new(File::open(out_path)?); let reference_file = BufReader::new(File::open(get_test_file_path( "10_points_ascii_all_attributes.txt", ))?); for (line_first_file, line_second_file) in result_file.lines().zip_eq(reference_file.lines()) { assert_eq!(line_first_file?, line_second_file?); } Ok(()) } #[test] #[should_panic(expected = "FormatError can't interpret format literal")] fn test_error_format_unrecognized_literal() { let path = "./test_ascii_writer_format_error.txt"; defer! { std::fs::remove_file(path).expect("Could not remove test file"); } let writer = BufWriter::new(File::create(path).unwrap()); RawAsciiWriter::from_write(writer, "xyzQ").unwrap(); } #[test] #[should_panic(expected = "Cannot find attribute.")] fn test_attribute_not_found_error() { // create point buffer with one point let layout = PointLayout::from_attributes(&[attributes::POSITION_3D, attributes::INTENSITY]); let mut buffer = InterleavedVecPointStorage::new(layout.clone()); let mut point = UntypedPointBuffer::new(&layout); point.set_attribute(&attributes::INTENSITY, &32_u16).unwrap(); buffer.push(&point.get_interleaved_point_view()); let out_path = "./test_ascii_writer_attribute_error.txt"; defer! { std::fs::remove_file(out_path).expect("Could not remove test file"); } let mut writer = RawAsciiWriter::from_write(BufWriter::new(File::create(&out_path).unwrap()), "e").unwrap(); writer.write(&buffer).unwrap(); } }
use core::pin::Pin;
use futures_core::ready;
use futures_core::stream::{FusedStream, Stream, TryStream};
use futures_core::task::{Context, Poll};
use pin_project::{pin_project, project};

// Blanket impl: every `TryStream` gets the extension methods for free.
impl<S: ?Sized + TryStream> TryStreamExt for S {}

/// An extension trait for Streams that provides a variety of convenient combinator functions.
pub trait TryStreamExt: TryStream {
    /// Flattens a stream of iterators into one continuous stream.
    ///
    /// Errors from the underlying stream are passed through unchanged; the
    /// items of each successful `Ok` iterator are yielded one by one.
    fn try_flatten_iters(self) -> TryFlattenIters<Self>
    where
        Self::Ok: IntoIterator,
        <Self::Ok as IntoIterator>::IntoIter: Unpin,
        Self: Sized,
    {
        TryFlattenIters::new(self)
    }
}

/// Stream for the [`try_flatten_iters`](super::TryStreamExt::try_flatten_iters) method.
#[pin_project]
// #[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct TryFlattenIters<St>
where
    St: TryStream,
    St::Ok: IntoIterator,
{
    #[pin]
    stream: St,
    // Iterator currently being drained; `None` while waiting on `stream`
    // for the next `Ok` item (or after the inner iterator is exhausted).
    #[pin]
    next: Option<<St::Ok as IntoIterator>::IntoIter>,
}

impl<St> TryFlattenIters<St>
where
    St: TryStream,
    St::Ok: IntoIterator,
    <St::Ok as IntoIterator>::IntoIter: Unpin,
{
    pub(crate) fn new(stream: St) -> Self {
        Self { stream, next: None }
    }

    /// Acquires a reference to the underlying sink or stream that this combinator is
    /// pulling from.
    pub fn get_ref(&self) -> &St {
        &self.stream
    }

    /// Acquires a mutable reference to the underlying sink or stream that this
    /// combinator is pulling from.
    ///
    /// Note that care must be taken to avoid tampering with the state of the
    /// sink or stream which may otherwise confuse this combinator.
    pub fn get_mut(&mut self) -> &mut St {
        &mut self.stream
    }

    /// Acquires a pinned mutable reference to the underlying sink or stream that this
    /// combinator is pulling from.
    ///
    /// Note that care must be taken to avoid tampering with the state of the
    /// sink or stream which may otherwise confuse this combinator.
    pub fn get_pin_mut(self: core::pin::Pin<&mut Self>) -> core::pin::Pin<&mut St> {
        self.project().stream
    }

    /// Consumes this combinator, returning the underlying sink or stream.
    ///
    /// Note that this may discard intermediate state of this combinator, so
    /// care should be taken to avoid losing resources when this is called.
    pub fn into_inner(self) -> St {
        self.stream
    }
}

impl<St> FusedStream for TryFlattenIters<St>
where
    St: TryStream + FusedStream,
    St::Ok: IntoIterator,
    <St::Ok as IntoIterator>::IntoIter: Unpin,
{
    // Terminated only when there is no partially-drained iterator left AND
    // the inner stream reports itself terminated.
    fn is_terminated(&self) -> bool {
        self.next.is_none() && self.stream.is_terminated()
    }
}

impl<St> Stream for TryFlattenIters<St>
where
    St: TryStream,
    St::Ok: IntoIterator,
    <St::Ok as IntoIterator>::IntoIter: Unpin,
{
    type Item = Result<<St::Ok as IntoIterator>::Item, St::Error>;

    #[project]
    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        #[project]
        let TryFlattenIters {
            mut stream,
            mut next,
        } = self.project();
        Poll::Ready(loop {
            // Drain the current iterator first; only poll the inner stream
            // once it is exhausted. Errors propagate via the `?` on
            // `try_poll_next` below.
            if let Some(mut s) = next.as_mut().as_pin_mut() {
                if let Some(item) = s.next() {
                    break Some(Ok(item));
                } else {
                    next.set(None);
                }
            } else if let Some(s) = ready!(stream.as_mut().try_poll_next(cx)?) {
                next.set(Some(s.into_iter()));
            } else {
                // Inner stream is done and no iterator is pending: end of stream.
                break None;
            }
        })
    }
}

#[cfg(test)]
mod tests {
    use super::TryStreamExt as _;
    use futures::stream::{iter, StreamExt, TryStreamExt};

    #[derive(Debug, Copy, Clone, Eq, PartialEq)]
    struct CustomError;

    // Items of every Ok(vec) come out flattened, in order; empty vecs are skipped.
    #[tokio::test]
    async fn test_basic() {
        let input: Vec<Result<Vec<usize>, CustomError>> = vec![
            Ok(vec![0_usize, 1, 2]),
            Ok(vec![3, 4]),
            Ok(vec![]),
            Ok(vec![5, 6, 7]),
        ];
        let mut stream = iter(input).try_flatten_iters();
        assert_eq!(stream.next().await, Some(Ok(0)));
        assert_eq!(stream.next().await, Some(Ok(1)));
        assert_eq!(stream.next().await, Some(Ok(2)));
        assert_eq!(stream.next().await, Some(Ok(3)));
        assert_eq!(stream.next().await, Some(Ok(4)));
        assert_eq!(stream.next().await, Some(Ok(5)));
        assert_eq!(stream.next().await, Some(Ok(6)));
        assert_eq!(stream.next().await, Some(Ok(7)));
        assert_eq!(stream.next().await, None);
    }

    // An Err item is yielded in place and the stream continues afterwards.
    #[tokio::test]
    async fn test_error() {
        let input: Vec<Result<Vec<usize>, CustomError>> = vec![
            Ok(vec![0_usize, 1, 2]),
            Err(CustomError),
            Ok(vec![]),
            Ok(vec![5, 6, 7]),
        ];
        let mut stream = iter(input).try_flatten_iters();
        assert_eq!(stream.next().await, Some(Ok(0)));
        assert_eq!(stream.next().await, Some(Ok(1)));
        assert_eq!(stream.next().await, Some(Ok(2)));
        assert_eq!(stream.next().await, Some(Err(CustomError)));
        assert_eq!(stream.next().await, Some(Ok(5)));
        assert_eq!(stream.next().await, Some(Ok(6)));
        assert_eq!(stream.next().await, Some(Ok(7)));
        assert_eq!(stream.next().await, None);
    }

    // `try_collect` succeeds when every item is Ok and short-circuits on Err.
    #[tokio::test]
    async fn test_error_with_collect() {
        let input: Vec<Result<Vec<usize>, CustomError>> = vec![
            Ok(vec![0_usize, 1, 2]),
            Ok(vec![3, 4]),
            Ok(vec![]),
            Ok(vec![5, 6, 7]),
        ];
        let result: Result<Vec<usize>, CustomError> =
            iter(input).try_flatten_iters().try_collect().await;
        assert_eq!(result, Ok(vec![0, 1, 2, 3, 4, 5, 6, 7]));
        let input: Vec<Result<Vec<usize>, CustomError>> = vec![
            Ok(vec![0_usize, 1, 2]),
            Err(CustomError),
            Ok(vec![]),
            Ok(vec![5, 6, 7]),
        ];
        let result: Result<Vec<usize>, CustomError> =
            iter(input).try_flatten_iters().try_collect().await;
        assert_eq!(result, Err(CustomError));
    }

    // An empty input stream terminates immediately.
    #[tokio::test]
    async fn test_empty() {
        let mut stream = iter(Vec::<Result<Vec<String>, CustomError>>::new()).try_flatten_iters();
        assert_eq!(stream.next().await, None);
    }
}
#![feature(custom_attribute, plugin)]
#![plugin(profile_ext)]

// Example exercising the nightly-only `profile_ext` compiler plugin: the
// custom `#[profile]` attribute is consumed by the plugin (presumably to
// instrument `foo` with timing/tracing -- the plugin itself is not visible here).
#[profile]
fn foo() {
    println!("foo");
}

// Calls the instrumented function between two plain log lines so the
// plugin's output can be observed around the call.
pub fn main() {
    println!("enter main function");
    foo();
    println!("exit main function");
}
use std::fmt::Debug; use std::path::PathBuf; use error::*; use source::Source; use data_backend::ReceivedAsset; #[derive(Debug, RustcDecodable, RustcEncodable)] pub struct Auth { pub name: String, pub key: String } #[derive(Debug, RustcDecodable, RustcEncodable)] pub struct ControlPayload { pub auth: Auth, pub command: String } #[derive(Debug, RustcDecodable, RustcEncodable)] pub struct UploadPayload { pub auth: Auth, pub uuid: String, pub md5: String, pub mime: String, pub name: Option<String>, pub tags: Option<String> } pub fn print_payload<T>(payload: &T) where T: Debug { println!("\n\nPAYLOAD {:?}\n", payload); } pub fn payload_to_recv_asset(pl: UploadPayload, path: PathBuf) -> CommandResult<ReceivedAsset> { path.to_str() .ok_or(CommandError::InvalidPathUnicode) .map(|s| s.to_string()) .and_then(Source::new) .map(move |src| ReceivedAsset { uuid: pl.uuid, md5: pl.md5, mime: pl.mime, name: pl.name, source: src, tags: pl.tags.map(|tags| tags.split(' ').map(|s| s.to_string()).collect()) }) }
//! Implements Forsyth–Edwards Notation parsing.

use regex::Regex;
use board::*;
use files::*;
use ranks::*;

/// Parses Forsyth–Edwards Notation (FEN).
///
/// Returns a tuple with the following elements: `0`) a board
/// instance, `1`) halfmove clock, `2`) fullmove number.
///
/// # Forsyth–Edwards Notation
///
/// A FEN string defines a particular position using only the ASCII
/// character set. A FEN string contains six fields separated by a
/// space. The fields are:
///
/// 1. Piece placement (from white's perspective). Each rank is
/// described, starting with rank 8 and ending with rank 1. Within
/// each rank, the contents of each square are described from file A
/// through file H. Following the Standard Algebraic Notation (SAN),
/// each piece is identified by a single letter taken from the
/// standard English names. White pieces are designated using
/// upper-case letters ("PNBRQK") whilst Black uses lowercase
/// ("pnbrqk"). Blank squares are noted using digits 1 through 8
/// (the number of blank squares), and "/" separates ranks.
///
/// 2. Active color. "w" means white moves next, "b" means black.
///
/// 3. Castling availability. If neither side can castle, this is
/// "-". Otherwise, this has one or more letters: "K" (White can
/// castle kingside), "Q" (White can castle queenside), "k" (Black
/// can castle kingside), and/or "q" (Black can castle queenside).
///
/// 4. En-passant target square (in algebraic notation). If there's no
/// en-passant target square, this is "-". If a pawn has just made
/// a 2-square move, this is the position "behind" the pawn. This
/// is recorded regardless of whether there is a pawn in position
/// to make an en-passant capture.
///
/// 5. Halfmove clock. This is the number of halfmoves since the last
/// pawn advance or capture. This is used to determine if a draw can
/// be claimed under the fifty-move rule.
///
/// 6. Fullmove number. The number of the full move. It starts at 1,
/// and is incremented after black's move.
///
/// ## Example:
/// The starting position: `rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w QKqk - 0 1`
pub fn parse_fen(s: &str) -> Result<(Board, u8, u16), IllegalBoard> {
    let fileds: Vec<_> = s.split_whitespace().collect();
    if fileds.len() == 6 {
        let pieces = try!(parse_fen_piece_placement(fileds[0]));
        let to_move = try!(parse_fen_active_color(fileds[1]));
        let castling_rights = try!(parse_fen_castling_rights(fileds[2]));
        // An en-passant square is only legal "behind" a pawn of the side
        // that just moved: rank 6 when white is to move, rank 3 when black
        // is. Anything else makes the whole position illegal.
        let enpassant_file = if let Some(x) = try!(parse_fen_enpassant_square(fileds[3])) {
            match to_move {
                WHITE if Board::rank(x) == RANK_6 => Board::file(x),
                BLACK if Board::rank(x) == RANK_3 => Board::file(x),
                _ => return Err(IllegalBoard),
            }
        } else {
            // 8 is outside the 0..=7 file range -- apparently the
            // "no en-passant file" sentinel. TODO confirm against `Board`.
            8
        };
        let halfmove_clock = try!(fileds[4].parse::<u8>().map_err(|_| IllegalBoard));
        let fullmove_number = try!(fileds[5].parse::<u16>().map_err(|_| IllegalBoard));
        // Fullmove numbers start at 1; 9000 acts as an arbitrary sanity cap
        // (note the test below rejects a fullmove number of 0).
        if let 1...9000 = fullmove_number {
            return Ok((Board {
                occupied: pieces.color[WHITE] | pieces.color[BLACK],
                pieces: pieces,
                to_move: to_move,
                castling_rights: castling_rights,
                enpassant_file: enpassant_file,
            },
            halfmove_clock,
            fullmove_number));
        }
    }
    Err(IllegalBoard)
}

/// Parses square's algebraic notation (lowercase only).
pub fn parse_square(s: &str) -> Result<Square, IllegalBoard> {
    lazy_static! {
        static ref RE: Regex = Regex::new(r"^[a-h][1-8]$").unwrap();
    }
    if RE.is_match(s) {
        let mut chars = s.chars();
        // Radix-18 digit values map 'a'..='h' to 10..=17; subtracting 10
        // gives the 0-based file index.
        let file = (chars.next().unwrap().to_digit(18).unwrap() - 10) as usize;
        // Radix 9 accepts '1'..='8'; subtracting 1 gives the 0-based rank.
        let rank = (chars.next().unwrap().to_digit(9).unwrap() - 1) as usize;
        Ok(Board::square(file, rank))
    } else {
        Err(IllegalBoard)
    }
}

/// Parses the first FEN field (piece placement) into per-piece-type and
/// per-color bitboards, validating that every square is described exactly once.
fn parse_fen_piece_placement(s: &str) -> Result<PiecesPlacement, IllegalBoard> {
    // These are the possible productions in the grammar.
    enum Token {
        Piece(Color, PieceType),
        EmptySquares(u32),
        Separator,
    }

    // FEN describes the board starting from A8 and going toward H1.
    let mut file = FILE_A;
    let mut rank = RANK_8;

    // We start with an empty board.
    let mut pieces = PiecesPlacement {
        piece_type: [0u64; 6],
        color: [0u64; 2],
    };

    // Then we read `s` character by character, updating `pieces`.
    for c in s.chars() {
        let token = match c {
            'K' => Token::Piece(WHITE, KING),
            'Q' => Token::Piece(WHITE, QUEEN),
            'R' => Token::Piece(WHITE, ROOK),
            'B' => Token::Piece(WHITE, BISHOP),
            'N' => Token::Piece(WHITE, KNIGHT),
            'P' => Token::Piece(WHITE, PAWN),
            'k' => Token::Piece(BLACK, KING),
            'q' => Token::Piece(BLACK, QUEEN),
            'r' => Token::Piece(BLACK, ROOK),
            'b' => Token::Piece(BLACK, BISHOP),
            'n' => Token::Piece(BLACK, KNIGHT),
            'p' => Token::Piece(BLACK, PAWN),
            // Radix 9 so that exactly '1'..='8' are valid digit counts.
            n @ '1'...'8' => Token::EmptySquares(n.to_digit(9).unwrap()),
            '/' => Token::Separator,
            _ => return Err(IllegalBoard),
        };
        match token {
            Token::Piece(color, piece_type) => {
                // A rank may not describe more than 8 squares.
                if file > 7 {
                    return Err(IllegalBoard);
                }
                let mask = 1 << Board::square(file, rank);
                pieces.piece_type[piece_type] |= mask;
                pieces.color[color] |= mask;
                file += 1;
            }
            Token::EmptySquares(n) => {
                file += n as usize;
                if file > 8 {
                    return Err(IllegalBoard);
                }
            }
            Token::Separator => {
                // '/' is only legal after a complete rank, and never after rank 1.
                if file == 8 && rank > 0 {
                    file = 0;
                    rank -= 1;
                } else {
                    return Err(IllegalBoard);
                }
            }
        }
    }

    // Make sure that all squares were initialized.
    if file != 8 || rank != 0 {
        return Err(IllegalBoard);
    }
    Ok(pieces)
}

/// Parses the second FEN field: "w" or "b".
fn parse_fen_active_color(s: &str) -> Result<Color, IllegalBoard> {
    match s {
        "w" => Ok(WHITE),
        "b" => Ok(BLACK),
        _ => Err(IllegalBoard),
    }
}

/// Parses the third FEN field; duplicate letters are rejected because
/// `grant` returns `false` for an already-granted right.
fn parse_fen_castling_rights(s: &str) -> Result<CastlingRights, IllegalBoard> {
    let mut rights = CastlingRights::new(0);
    if s != "-" {
        for c in s.chars() {
            let (color, side) = match c {
                'K' => (WHITE, KINGSIDE),
                'Q' => (WHITE, QUEENSIDE),
                'k' => (BLACK, KINGSIDE),
                'q' => (BLACK, QUEENSIDE),
                _ => return Err(IllegalBoard),
            };
            if !rights.grant(color, side) {
                return Err(IllegalBoard);
            }
        }
    }
    Ok(rights)
}

/// Parses the fourth FEN field: "-" or an algebraic square like "e3".
fn parse_fen_enpassant_square(s: &str) -> Result<Option<Square>, IllegalBoard> {
    if s == "-" {
        Ok(None)
    } else {
        parse_square(s).map(|x| Some(x))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_fen_string() {
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1").is_ok());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1").is_err());
        assert!(parse_fen("nbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1").is_err());
        assert!(parse_fen("rnbqkbnr1/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1").is_err());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBN b KQkq e3 0 1").is_err());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR/ b KQkq e3 0 1").is_err());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNRR b KQkq e3 0 1").is_err());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPP01PPP/RNBQKBNR b KQkq e3 0 1").is_err());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPP91PPP/RNBQKBNR b KQkq e3 0 1").is_err());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPP*1PPP/RNBQKBNR b KQkq e3 0 1").is_err());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 * 1").is_err());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 *").is_err());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b - e3 0 1").is_ok());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1").is_ok());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b Kkq e3 0 1").is_ok());
        assert!(parse_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b kq - 0 1").is_ok());
        assert!(parse_fen("k7/8/8/8/8/8/8/7K w - - 0 1").is_ok());
        assert!(parse_fen("k7/pppppppp/8/8/8/8/PPPPPPPP/7K w - - 0 1").is_ok());
        assert!(parse_fen("k7/8/8/8/7P/8/8/7K w - h3 0 1").is_err());
        assert!(parse_fen("k7/8/8/7P/8/8/8/7K b - h4 0 1").is_err());
        assert!(parse_fen("8/8/8/6k1/7P/8/8/6RK b - h3 0 1").is_ok());
        assert!(parse_fen("8/8/8/6k1/7P/8/8/7K b - h3 0 0").is_err());
    }
}
/// Partially-specified character: every field remains `None` until the
/// corresponding piece of the character has been chosen.
#[derive(Serialize,Deserialize,Debug)]
pub struct CharacterBuilder {
    name: Option<String>,
    race: Option<Race>,
    class: Option<Class>,
    base_abilities: Option<AbilityScores>,
}

/// One value of type `T` per ability score (strength, dexterity,
/// constitution, intelligence, wisdom, charisma).
#[derive(Serialize,Deserialize,Debug)]
pub struct AbilityValues<T> {
    strength: T,
    dexterity: T,
    constitution: T,
    intelligence: T,
    wisdom: T,
    charisma: T,
}
//! Azure OAuth2 helper crate for the unofficial Microsoft Azure SDK for Rust. This crate is part of a collection of crates: for more information please refer to [https://github.com/azure/azure-sdk-for-rust](https://github.com/azure/azure-sdk-for-rust). //! This crate provides mechanisms for several ways to authenticate against Azure //! //! For example, to authenticate using the client credential flow, you can do the following: //! //! ```no_run //! use azure_identity::client_credentials_flow; //! use oauth2::{ClientId, ClientSecret}; //! use url::Url; //! //! use std::env; //! use std::error::Error; //! //! #[tokio::main] //! async fn main() -> Result<(), Box<dyn Error>> { //! let client_id = //! ClientId::new(env::var("CLIENT_ID").expect("Missing CLIENT_ID environment variable.")); //! let client_secret = ClientSecret::new( //! env::var("CLIENT_SECRET").expect("Missing CLIENT_SECRET environment variable."), //! ); //! let tenant_id = env::var("TENANT_ID").expect("Missing TENANT_ID environment variable."); //! let subscription_id = //! env::var("SUBSCRIPTION_ID").expect("Missing SUBSCRIPTION_ID environment variable."); //! //! let client = reqwest::Client::new(); //! // This will give you the final token to use in authorization. //! let token = client_credentials_flow::perform( //! client, //! &client_id, //! &client_secret, //! &["https://management.azure.com/"], //! &tenant_id, //! ) //! .await?; //! Ok(()) //! } //! ``` //! //! The supported authentication flows are: //! * [Authorization code flow](https://docs.microsoft.com/azure/active-directory/develop/v2-oauth2-auth-code-flow). //! * [Client credentials flow](https://docs.microsoft.com/azure/active-directory/develop/v2-oauth2-client-creds-grant-flow). //! * [Device code flow](https://docs.microsoft.com/azure/active-directory/develop/v2-oauth2-device-code). //! //! This crate also includes utilities for handling refresh tokens and accessing token credentials from many different sources. 
pub mod authorization_code_flow; pub mod client_credentials_flow; #[cfg(feature = "development")] pub mod development; pub mod device_code_flow; mod errors; pub use errors::Error; pub mod refresh_token; pub mod token_credentials; mod traits; pub use traits::{BearerToken, ExtExpiresIn, RefreshToken};
// Integration tests for Liquid's `{%- ... -%}` whitespace-control markers.
// NOTE: the `…` characters in the literals below are placeholders that
// `compare!` rewrites into literal spaces before parsing, so significant
// whitespace stays visible in the test source.
extern crate liquid; use liquid::LiquidOptions; use liquid::Renderable; use liquid::Context; use liquid::parse; use std::default::Default; macro_rules! compare { ($input:expr, $output:expr) => { let input = $input.replace("…", " "); let expected = $output.replace("…", " "); let options: LiquidOptions = Default::default(); let template = parse(&input, options).unwrap(); let mut data = Context::new(); let output = template.render(&mut data); assert_eq!(output.unwrap(), Some(expected)); } } #[test] pub fn no_whitespace_control() { compare!( " topic1 ……{% assign foo = \"bar\" %} ……{% if foo %} …………-……{{ foo }} ……{% endif %} ", " topic1 …… …… …………-……bar …… " ); } #[test] pub fn simple_whitespace_control() { compare!( " topic1 ……{% assign foo = \"bar\" -%} ……{% if foo -%} …………-……{{- foo }} ……{%- endif %} ", " topic1 ……-bar " ); } #[test] pub fn double_sided_whitespace_control() { compare!( " topic1 ……{%- assign foo = \"bar\" -%} ……-……{{- foo -}}…… ", " topic1-bar\ " ); }
use std::collections::HashMap;

#[allow(dead_code)]
/// The `KvStore` uses a hashmap to store key-value pairs in memory.
pub struct KvStore {
    map: HashMap<String, String>,
}

// `new()` takes no configuration, so an empty store is also the natural
// `Default` (clippy: new_without_default).
impl Default for KvStore {
    fn default() -> Self {
        Self::new()
    }
}

impl KvStore {
    /// This method used to create a KvStore
    ///
    /// # Example
    ///
    /// ```rust
    /// use kvs::KvStore;
    ///
    /// let mut _kvstore = KvStore::new();
    /// ```
    pub fn new() -> KvStore {
        KvStore {
            map: HashMap::new(),
        }
    }

    /// This method used to set a new key-value pair,
    /// It can also be used to update the value of a key
    ///
    /// # Example
    ///
    /// ```rust
    /// use kvs::KvStore;
    ///
    /// let mut kvstore = KvStore::new();
    /// kvstore.set("key01".to_owned(), "value01".to_owned());
    /// assert_eq!(kvstore.get("key01".to_owned()), Some("value01".to_owned()));
    /// kvstore.set("key01".to_owned(), "value02".to_owned());
    /// assert_eq!(kvstore.get("key01".to_owned()), Some("value02".to_owned()));
    /// ```
    pub fn set(&mut self, key: String, value: String) {
        self.map.insert(key, value);
    }

    /// This method used to get a value of the key in the Option.
    /// Key not been set will return None
    ///
    /// # Example
    ///
    /// ```rust
    /// use kvs::KvStore;
    ///
    /// let mut kvstore = KvStore::new();
    /// kvstore.set("key01".to_owned(), "value01".to_owned());
    /// assert_eq!(kvstore.get("key01".to_owned()), Some("value01".to_owned()));
    /// assert_eq!(kvstore.get("key02".to_owned()), None);
    /// ```
    pub fn get(&self, key: String) -> Option<String> {
        self.map.get(&key).cloned()
    }

    /// This method used to remove a key-value pair.
    /// Removing an absent key is a no-op.
    ///
    /// # Example
    ///
    /// ```rust
    /// use kvs::KvStore;
    ///
    /// let mut kvstore = KvStore::new();
    /// kvstore.set("key01".to_owned(), "value01".to_owned());
    /// assert_eq!(kvstore.get("key01".to_owned()), Some("value01".to_owned()));
    /// kvstore.remove("key01".to_owned());
    /// assert_eq!(kvstore.get("key01".to_owned()), None);
    /// ```
    pub fn remove(&mut self, key: String) {
        self.map.remove(&key);
    }
}
// Auto-generated register API (svd2rust style) for the DMA channel receive
// control register (`DMACRxCR`). Do not edit by hand; regenerate from the
// device SVD description instead.
#[doc = "Register `DMACRxCR` reader"] pub type R = crate::R<DMACRX_CR_SPEC>; #[doc = "Register `DMACRxCR` writer"] pub type W = crate::W<DMACRX_CR_SPEC>; #[doc = "Field `SR` reader - Start or Stop Receive Command"] pub type SR_R = crate::BitReader; #[doc = "Field `SR` writer - Start or Stop Receive Command"] pub type SR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `RBSZ` reader - Receive Buffer size"] pub type RBSZ_R = crate::FieldReader<u16>; #[doc = "Field `RBSZ` writer - Receive Buffer size"] pub type RBSZ_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 14, O, u16>; #[doc = "Field `RXPBL` reader - RXPBL"] pub type RXPBL_R = crate::FieldReader; #[doc = "Field `RXPBL` writer - RXPBL"] pub type RXPBL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 6, O>; #[doc = "Field `RPF` reader - DMA Rx Channel Packet Flush"] pub type RPF_R = crate::BitReader; #[doc = "Field `RPF` writer - DMA Rx Channel Packet Flush"] pub type RPF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Field accessors for reading the register value.
impl R { #[doc = "Bit 0 - Start or Stop Receive Command"] #[inline(always)] pub fn sr(&self) -> SR_R { SR_R::new((self.bits & 1) != 0) } #[doc = "Bits 1:14 - Receive Buffer size"] #[inline(always)] pub fn rbsz(&self) -> RBSZ_R { RBSZ_R::new(((self.bits >> 1) & 0x3fff) as u16) } #[doc = "Bits 16:21 - RXPBL"] #[inline(always)] pub fn rxpbl(&self) -> RXPBL_R { RXPBL_R::new(((self.bits >> 16) & 0x3f) as u8) } #[doc = "Bit 31 - DMA Rx Channel Packet Flush"] #[inline(always)] pub fn rpf(&self) -> RPF_R { RPF_R::new(((self.bits >> 31) & 1) != 0) } }
// Field proxies for building a value to write; the const generic is the
// field's bit offset within the register.
impl W { #[doc = "Bit 0 - Start or Stop Receive Command"] #[inline(always)] #[must_use] pub fn sr(&mut self) -> SR_W<DMACRX_CR_SPEC, 0> { SR_W::new(self) } #[doc = "Bits 1:14 - Receive Buffer size"] #[inline(always)] #[must_use] pub fn rbsz(&mut self) -> RBSZ_W<DMACRX_CR_SPEC, 1> { RBSZ_W::new(self) } #[doc = "Bits 16:21 - RXPBL"] #[inline(always)] #[must_use] pub fn rxpbl(&mut self) -> RXPBL_W<DMACRX_CR_SPEC, 16> { RXPBL_W::new(self) } #[doc = "Bit 31 - DMA Rx Channel Packet Flush"] #[inline(always)] #[must_use] pub fn rpf(&mut self) -> RPF_W<DMACRX_CR_SPEC, 31> { RPF_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } }
#[doc = "Channel receive control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dmacrx_cr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dmacrx_cr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct DMACRX_CR_SPEC; impl crate::RegisterSpec for DMACRX_CR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`dmacrx_cr::R`](R) reader structure"] impl crate::Readable for DMACRX_CR_SPEC {} #[doc = "`write(|w| ..)` method takes [`dmacrx_cr::W`](W) writer structure"] impl crate::Writable for DMACRX_CR_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets DMACRxCR to value 0"] impl crate::Resettable for DMACRX_CR_SPEC { const RESET_VALUE: Self::Ux = 0; }
// 使用pub修饰就可以消除未被使用的警告了 pub mod client; pub mod network; #[cfg(test)] mod tests { use super::client; #[test] fn it_works() { // 从跟模块开始 // ::client::connect(); // 或者直接使用super上移到当前模块的父模块 // super::client::connect(); // 或者直接使用use super::client client::connect(); assert_eq!(2 + 2, 4); } } // 最后总结一下模块文件系统的规则 // 1.如果一个叫foo的模块没有子模块,应该将foo的声明放入叫做foo.rs的文件中。 // 2.如果一个叫foo的模块有子模块,应该将foo的声明放入叫做foo/mod.rs的文件中。 // 私有性规则 // 1.如果一个项是公有的,它能被任何父模块访问 // 2.如果一个项是私有的,它能被其直接父模块及其任何子模块访问
//! This is an implementation of the GFF3 spec.
//!
//! https://github.com/The-Sequence-Ontology/Specifications/blob/master/gff3.md

// todo: This implementation doesn't implement any of the string escaping rules.
// todo: If and when it's extended to support that, consider using a wrapper
// around string to enforce escaping
// todo: Handle comments

use std::fmt::Formatter;
use std::fmt::Display;
use std::collections::HashMap;
use std::str::FromStr;

// Fields use `String` rather than `&str` so that a record can live independently
// of a parse.
//
// However, it may make sense to refactor this.
/// One GFF3 feature line: nine tab-separated columns.
#[derive(Debug)]
pub struct GffRecord {
    pub seq_id: String,
    pub source: String,
    pub feature_type: String,
    pub start: OneBased,
    pub end: OneBased,
    pub score: Score,
    pub strand: Strand,
    pub phase: Phase,
    pub attributes: Attributes,
}

impl FromStr for GffRecord {
    type Err = GffParseError;

    /// Parses a single tab-separated GFF3 record line.
    ///
    /// The line is trimmed first; each column is then delegated to the
    /// `FromStr` impl of its field type. A missing column produces an error
    /// naming that column.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut columns = s.trim().split('\t');
        let seq_id = columns.next()
            .ok_or_else(|| GffParseError::new(format!("No seqID column in {}", s)))
            .map(ToString::to_string)?;
        let source = columns.next()
            // Fixed message: previously read "No source columnin {}".
            .ok_or_else(|| GffParseError::new(format!("No source column in {}", s)))
            .map(ToString::to_string)?;
        let feature_type = columns.next()
            .ok_or_else(|| GffParseError::new(format!("No type column in {}", s)))
            .map(ToString::to_string)?;
        let start = columns.next()
            .ok_or_else(|| GffParseError::new(format!("No start column in {}", s)))
            .and_then(FromStr::from_str)?;
        let end = columns.next()
            // Fixed copy-paste bug: this error previously claimed the *seqID*
            // column was missing.
            .ok_or_else(|| GffParseError::new(format!("No end column in {}", s)))
            .and_then(FromStr::from_str)?;
        let score = columns.next()
            .ok_or_else(|| GffParseError::new(format!("No score column in {}", s)))
            .and_then(FromStr::from_str)?;
        let strand = columns.next()
            .ok_or_else(|| GffParseError::new(format!("No strand column in {}", s)))
            .and_then(FromStr::from_str)?;
        let phase = columns.next()
            .ok_or_else(|| GffParseError::new(format!("No phase column in {}", s)))
            .and_then(FromStr::from_str)?;
        let attributes = columns.next()
            .ok_or_else(|| GffParseError::new(format!("No attributes column in {}", s)))
            .and_then(FromStr::from_str)?;
        Ok(GffRecord {
            seq_id,
            source,
            feature_type,
            start,
            end,
            score,
            strand,
            phase,
            attributes
        })
    }
}

// Index counted from 1 rather than 0
#[derive(Debug, PartialEq)]
pub struct OneBased(u64);

impl OneBased {
    /// Wraps a raw 1-based position.
    pub fn new(at: u64) -> OneBased {
        OneBased(at)
    }

    /// Returns the raw 1-based position.
    pub fn at(&self) -> u64 {
        self.0
    }
}

impl FromStr for OneBased {
    type Err = GffParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        u64::from_str(s).map(OneBased).map_err(|e| GffParseError::because(s, e))
    }
}

/// Feature score; `.` in the file means "no score" and parses to `None`.
#[derive(Debug)]
pub struct Score(Option<f64>);

impl FromStr for Score {
    type Err = GffParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "." => Ok(Score(None)),
            d => f64::from_str(d).map(|s| Score(Some(s))).map_err(|e| GffParseError::because(s, e))
        }
    }
}

/// Strand of the feature: `+`, `-`, `.` (not stranded) or `?` (unknown).
#[derive(Debug)]
pub enum Strand {
    Positive,
    Negative,
    NoStrand,
    Unknown
}

impl FromStr for Strand {
    type Err = GffParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "+" => Ok(Strand::Positive),
            "-" => Ok(Strand::Negative),
            "." => Ok(Strand::NoStrand),
            "?" => Ok(Strand::Unknown),
            e => Err(GffParseError::new(format!("Cannot parse `{}` as a strand", e)))
        }
    }
}

// 0, 1, 2
/// CDS phase; `.` in the file means "no phase" and parses to `None`.
#[derive(Debug)]
pub struct Phase(Option<u8>);

impl FromStr for Phase {
    type Err = GffParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s == "." {
            Ok(Phase(None))
        } else {
            u8::from_str(s).map(|p| Phase(Some(p))).map_err(|e| GffParseError::because(s, e))
        }
    }
}

/// The `;`-separated `<tag>=<value>` pairs of the ninth column.
#[derive(Debug)]
pub struct Attributes(HashMap<String, String>);

impl FromStr for Attributes {
    type Err = GffParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // NOTE: `flat_map` over `Result` silently drops malformed pairs
        // rather than failing the whole parse -- kept as-is for
        // backward compatibility.
        let tvs: HashMap<String, String> = s.split(';').flat_map(|p| {
            let mut tv = p.split('=');
            match (tv.next(), tv.next()) {
                (Some(t), Some(v)) => Ok((t.to_string(), v.to_string())),
                _ => Err(
                    GffParseError::new(
                        format!("Expected <tag>=<value> but got: {}", p))),
            }
        }
        ).collect();
        Ok(Attributes(tvs))
    }
}

/// Error produced by any stage of GFF3 record parsing.
#[derive(Debug)]
pub struct GffParseError(String);

impl GffParseError {
    /// Creates an error from a ready-made message.
    pub fn new(msg: String) -> GffParseError {
        GffParseError(msg)
    }

    /// Creates an error wrapping the offending input and an underlying cause.
    pub fn because<E : Display>(msg: &str, e: E) -> GffParseError {
        GffParseError(format!("in input `{}` because {}", msg, e))
    }
}

impl Display for GffParseError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "Unable to parse GFF3 record because: {}", self.0)
    }
}
use num_derive::FromPrimitive; use num_enum::IntoPrimitive; use num_traits::FromPrimitive; use bytes::{ Bytes, Buf, BytesMut, BufMut }; #[derive(FromPrimitive, IntoPrimitive, Debug, PartialEq, Copy, Clone)] #[repr(u8)] pub enum Status { LearnReady = 0x1, NodeFound = 0x2, AddingSlave = 0x3, AddingController = 0x4, ProtocolDone = 0x5, Done = 0x6, Failed = 0x7, } #[derive(Debug, PartialEq, Clone)] pub struct ProtocolDoneData { pub node_id: u8, } impl ProtocolDoneData { pub fn encode(&self, dst: &mut BytesMut) { dst.put_u8(self.node_id); } pub fn decode(src: &mut Bytes) -> ProtocolDoneData { let node_id = src.get_u8(); ProtocolDoneData { node_id } } } #[derive(Debug, PartialEq, Clone)] pub enum AddNodeToNetworkRequestChip { LearnReady, NodeFound, AddingController, AddingSlave, ProtocolDone(ProtocolDoneData), Done, Failed, } impl AddNodeToNetworkRequestChip { pub fn encode(&self, dst: &mut BytesMut) { dst.put_u8(0x0); // Unknown match self { AddNodeToNetworkRequestChip::LearnReady => { dst.put_u8(Status::LearnReady.into()); }, AddNodeToNetworkRequestChip::NodeFound => { dst.put_u8(Status::NodeFound.into()); }, AddNodeToNetworkRequestChip::AddingController => { dst.put_u8(Status::AddingController.into()); }, AddNodeToNetworkRequestChip::AddingSlave => { dst.put_u8(Status::AddingSlave.into()); }, AddNodeToNetworkRequestChip::ProtocolDone( data ) => { dst.put_u8(Status::ProtocolDone.into()); data.encode(dst); }, AddNodeToNetworkRequestChip::Done => { dst.put_u8(Status::Done.into()); }, AddNodeToNetworkRequestChip::Failed => { dst.put_u8(Status::Failed.into()); }, } } pub fn decode(src: &mut Bytes) -> AddNodeToNetworkRequestChip { src.advance(1); // skip let status : Status = FromPrimitive::from_u8(src.get_u8()).unwrap(); match status { Status::LearnReady => AddNodeToNetworkRequestChip::LearnReady, Status::NodeFound => AddNodeToNetworkRequestChip::NodeFound, Status::AddingController => AddNodeToNetworkRequestChip::AddingController, Status::AddingSlave => 
AddNodeToNetworkRequestChip::AddingSlave, Status::ProtocolDone => AddNodeToNetworkRequestChip::ProtocolDone(ProtocolDoneData::decode(src)), Status::Done => AddNodeToNetworkRequestChip::Done, Status::Failed => AddNodeToNetworkRequestChip::Failed, } } }
use std::{
    env,
    process::{exit, Command},
};

/// Thin launcher around Neovim: if the first argument is `--gui` it starts
/// `nvim-qt`, otherwise `nvim`; all remaining arguments are forwarded
/// verbatim. The wrapper mirrors the child's exit status.
fn main() {
    let mut args = env::args().peekable();
    // Drop our own executable name.
    args.next();

    // `--gui` selects the Qt frontend and is consumed; everything else is
    // passed through untouched.
    let prog_name = match args.peek() {
        Some(opt) if opt == "--gui" => {
            args.next();
            "nvim-qt"
        }
        _ => "nvim",
    };

    // LC_CTYPE is removed before spawning -- presumably because an inherited
    // value confuses the editor; TODO confirm this is still required.
    let status = Command::new(prog_name)
        .args(args)
        .env_remove("LC_CTYPE")
        .spawn()
        .expect("failed to spawn binary")
        .wait()
        .expect("failed running binary");

    if !status.success() {
        // Propagate the child's exit code. When the child was killed by a
        // signal there is no code; previously the wrapper fell through and
        // exited 0 (reporting success for a failed child) -- report a
        // generic failure instead.
        exit(status.code().unwrap_or(1));
    }
}
#![warn(clippy::all)]
use actix_web::{App, HttpServer};
use dotenv::dotenv;
use tracing::{error, info, Level};
use tracing_error::ErrorLayer;
use tracing_subscriber::prelude::*;
extern crate vaas_server;
use std::env;
use vaas_server::{db, server};

/// Entry point: loads `.env`/environment configuration, connects the
/// Postgres pool, registers the actor system, installs tracing and
/// color-eyre, then serves HTTP on `ADDRESS` (default `127.0.0.1:8080`).
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    dotenv().ok();
    let database_url = env::var("DATABASE_URL")
        .expect("DATABASE_URL must be set. e.g: postgres://postgres:postgres@localhost");
    let pool = db::new_pool(&database_url).await.unwrap();
    server::register_db_actor(pool);
    server::register_system_actors();

    // Global tracing subscriber
    // NOTE(review): installed *after* the pool/actor setup above, so anything
    // those steps emit via `tracing` is not captured -- confirm intended.
    let subscriber = tracing_subscriber::fmt()
        .with_max_level(Level::DEBUG)
        .finish()
        .with(ErrorLayer::default());
    tracing::subscriber::set_global_default(subscriber).unwrap();

    // A failed color-eyre install is non-fatal; log and continue.
    if let Err(err) = color_eyre::install() {
        error!("Failed to install eyre {:#?}", err);
    }

    let address = env::var("ADDRESS").unwrap_or_else(|_| "127.0.0.1:8080".to_string());

    // Create Http server with websocket support
    info!("Starting server on {}", address);
    HttpServer::new(move || App::new().configure(|app| server::configure(app)))
        .bind(address)?
        .run()
        .await
}
//! Highest Response Ratio Next use keyed_priority_queue::KeyedPriorityQueue; use crate::scheduling::{Os, PId, Scheduler}; /// In this scheduling, processes with highest response ratio is scheduled. /// This algorithm avoids starvation. /// Mode: Non-Preemptive /// `Response Ratio = (Waiting Time + Burst time) / Burst time` #[derive(Default, Clone)] pub struct HighestResponseRatioNextScheduler { ready_queue: KeyedPriorityQueue<PId, u64> } impl HighestResponseRatioNextScheduler { pub fn new() -> Self { Self::default() } } impl Scheduler for HighestResponseRatioNextScheduler { fn on_process_ready(&mut self, os: &mut Os, pid: usize) { if let Some(process) = os.get_process(pid) { self.ready_queue.push(pid, process.job.response_ratio()); } } fn switch_process(&mut self, os: &mut Os) { os.switch_process(self.ready_queue.pop().map(|(pid, _)| pid)); } fn desc(&self) -> &'static str { "Highest Response Ratio Next; Non-Preemptive; for Job" } }
// Copyright 2017 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// NOTE(review): this is pre-2018-edition Rust (`async` used as a crate name,
// futures 0.2-era combinators); it will not build on modern toolchains as-is.
#![deny(warnings)]

extern crate failure;
extern crate fidl;
extern crate fuchsia_app as component;
extern crate fuchsia_async as async;
extern crate fuchsia_zircon as zx;
extern crate futures;
extern crate fidl_fuchsia_net_oldhttp as http;

use failure::{Error, ResultExt};
use futures::prelude::*;
use futures::io::AllowStdIo;

/// Prints the response status line (if any) and all headers to stdout.
fn print_headers(resp: &http::UrlResponse) {
    println!(">>> Headers <<<");
    if let Some(status) = &resp.status_line {
        println!(" {}", status);
    }
    if let Some(hdrs) = &resp.headers {
        for hdr in hdrs {
            println!(" {}={}", hdr.name, hdr.value);
        }
    }
}

/// Entry point: delegates to `main_res` and reports any error on stdout.
fn main() {
    if let Err(e) = main_res() {
        println!("Error: {:?}", e);
    }
}

/// Connects to the http service, sends a url request, and prints the response.
fn main_res() -> Result<(), Error> {
    // First CLI argument is the URL; default to an http:// scheme when the
    // argument has no "://" separator. No argument prints usage and exits ok.
    let url = match std::env::args().nth(1) {
        Some(url) => {
            if url.find("://").is_none() {
                ["http://", &url].concat()
            } else {
                url
            }
        }
        None => {
            println!("usage: {} <url>", std::env::args().nth(0).unwrap());
            return Ok(());
        }
    };

    // Set up async executor
    let mut exec = async::Executor::new()?;

    // Connect to the http service
    let net = component::client::connect_to_service::<http::HttpServiceMarker>()?;

    // Create a UrlLoader instance: one end of the channel goes to the service,
    // the other becomes our local proxy.
    let (s, p) = zx::Channel::create().context("failed to create zx channel")?;
    let proxy = async::Channel::from_channel(p).context("failed to make async channel")?;
    let loader_server = fidl::endpoints2::ServerEnd::<http::UrlLoaderMarker>::new(s);
    net.create_url_loader(loader_server)?;

    // Send the UrlRequest to fetch the webpage
    let mut req = http::UrlRequest {
        url: url,
        method: String::from("GET"),
        headers: None,
        body: None,
        response_body_buffer_size: 0,
        auto_follow_redirects: true,
        cache_mode: http::CacheMode::Default,
        response_body_mode: http::ResponseBodyMode::Stream,
    };

    let loader_proxy = http::UrlLoaderProxy::new(proxy);
    // NOTE(review): the closures below return `Option<future>`; this relies on
    // the old futures `IntoFuture`-style combinators where `None` short-circuits
    // the chain — confirm against the futures version pinned by this tree.
    let fut = loader_proxy.start(&mut req).err_into().and_then(|resp| {
        if let Some(e) = resp.error {
            let code = e.code;
            println!("Got error: {} ({})", code, e.description.unwrap_or("".into()));
            return None;
        }
        print_headers(&resp);
        // Only a streaming body is supported; buffered bodies are ignored.
        match resp.body.map(|x| *x) {
            Some(http::UrlBody::Stream(s)) => {
                Some(async::Socket::from_socket(s)
                    .into_future()
                    .err_into())
            }
            Some(http::UrlBody::Buffer(_)) |
            Some(http::UrlBody::SizedBuffer(_)) |
            None => None,
        }
    }).and_then(|socket_opt| {
        socket_opt.map(|socket| {
            // stdout is blocking, but we'll pretend it's okay
            println!(">>> Body <<<");

            // Copy the bytes from the socket to stdout
            socket.copy_into(AllowStdIo::new(::std::io::stdout()))
                .map(|_| println!("\n>>> EOF <<<"))
                .err_into()
        })
    }).map(|_| ());

    //// Run the future to completion
    exec.run_singlethreaded(fut)
}
use std::convert::TryInto;
use sdl2::{pixels::Color, render::*, video::{self, WindowContext}, VideoSubsystem};

/// Everything the rest of the program needs from SDL, bundled together so it
/// can be created once and passed around.
pub struct SdlContext {
    pub ttf_context: sdl2::ttf::Sdl2TtfContext,
    pub canvas: Canvas<video::Window>,
    pub event_pump: sdl2::EventPump,
    pub texture_creator: TextureCreator<WindowContext>,
    pub video: VideoSubsystem,
}

/// Initializes SDL2 and its TTF subsystem, opens a resizable, vsync'd window
/// titled "Lith" of `width` x `height` pixels, and returns the bundled context.
///
/// Errors from SDL initialization are returned as `String`s; window/canvas
/// build failures currently panic via `unwrap`.
pub fn setup_sdl(width: usize, height: usize) -> Result<SdlContext, String> {
    let sdl_context = sdl2::init()?;
    let ttf_context = sdl2::ttf::init().map_err(|e| e.to_string())?;
    let video = sdl_context
        .video()?;

    let sdl_window = video
        .window("Lith", width as u32, height as u32)
        // .opengl()
        .resizable()
        .build()
        .unwrap();

    let canvas: Canvas<video::Window> = sdl_window
        .into_canvas()
        .present_vsync()
        .build()
        .unwrap();

    let event_pump = sdl_context.event_pump()?;
    let texture_creator = canvas.texture_creator();

    Ok(SdlContext {
        ttf_context,
        canvas,
        event_pump,
        texture_creator,
        video,
    })
}

/// Renders the printable ASCII range (33..127) into a single white texture
/// atlas and returns `(texture, glyph_width, glyph_height)`.
///
/// The per-glyph width assumes a monospace font (total width divided by the
/// number of glyphs).
pub fn draw_font_texture(texture_creator: &TextureCreator<WindowContext>, ttf_context: sdl2::ttf::Sdl2TtfContext) -> Result<(Texture<>, usize, usize), String> {
    // NOTE(review): machine-specific absolute font path — consider making
    // this configurable or bundling the font with the application.
    let font_path = "/Users/jimmyhmiller/Library/Fonts/Pico-8 mono.ttf";
    let font = ttf_context.load_font(font_path, 12)?;

    // One string containing every printable ASCII glyph, rendered in a row.
    let mut text = String::new();
    for i in 33..127 {
        text.push(i as u8 as char);
    }

    let surface = font
        .render(text.as_str())
        // This needs to be 255 if I want to change colors
        .blended(Color::RGBA(255, 255, 255, 255))
        .map_err(|e| e.to_string())?;

    let texture = texture_creator
        .create_texture_from_surface(&surface)
        .map_err(|e| e.to_string())?;

    let TextureQuery { width, height, .. } = texture.query();
    // Monospace assumption: every glyph occupies an equal slice of the atlas.
    let width = (width / text.len() as u32).try_into().unwrap();
    Ok((texture, width, height.try_into().unwrap()))
}
use crate::client::cover_traffic_stream::LoopCoverTrafficStream;
use crate::client::mix_traffic::{MixMessageReceiver, MixMessageSender, MixTrafficController};
use crate::client::provider_poller::{PolledMessagesReceiver, PolledMessagesSender};
use crate::client::received_buffer::{
    ReceivedBufferRequestReceiver, ReceivedBufferRequestSender, ReceivedMessagesBufferController,
};
use crate::client::topology_control::{
    TopologyAccessor, TopologyRefresher, TopologyRefresherConfig,
};
use crate::config::persistence::pathfinder::ClientPathfinder;
use crate::config::{Config, SocketType};
use crate::sockets::{tcp, websocket};
use crypto::identity::MixIdentityKeyPair;
use directory_client::presence;
use futures::channel::mpsc;
use log::*;
use pemstore::pemstore::PemStore;
use sfw_provider_requests::AuthToken;
use sphinx::route::Destination;
use std::net::SocketAddr;
use tokio::runtime::Runtime;
use topology::NymTopology;

mod cover_traffic_stream;
mod mix_traffic;
mod provider_poller;
mod real_traffic_stream;
pub(crate) mod received_buffer;
pub(crate) mod topology_control;

// Channel halves used to feed user-supplied ("real") messages into the client.
pub(crate) type InputMessageSender = mpsc::UnboundedSender<InputMessage>;
pub(crate) type InputMessageReceiver = mpsc::UnboundedReceiver<InputMessage>;

/// Top-level mixnet client: owns the tokio runtime and wires together the
/// topology refresher, provider poller, cover/real traffic streams, the
/// received-messages buffer and an optional client-facing socket.
pub struct NymClient {
    config: Config,
    runtime: Runtime,
    identity_keypair: MixIdentityKeyPair,

    // to be used by "send" function or socket, etc
    input_tx: Option<InputMessageSender>,
}

// A message headed for the mixnet: (recipient destination, payload bytes).
#[derive(Debug)]
// TODO: make fields private
pub(crate) struct InputMessage(pub Destination, pub Vec<u8>);

impl NymClient {
    /// Loads the client identity keypair from the PEM files referenced by
    /// `config_file`, printing the public key. Panics if the files are
    /// missing or unreadable.
    fn load_identity_keys(config_file: &Config) -> MixIdentityKeyPair {
        let identity_keypair = PemStore::new(ClientPathfinder::new_from_config(&config_file))
            .read_identity()
            .expect("Failed to read stored identity key files");
        println!(
            "Public identity key: {}\n",
            identity_keypair.public_key.to_base58_string()
        );
        identity_keypair
    }

    /// Creates a client (and its tokio runtime) from `config`, loading the
    /// identity keys from disk. Nothing is started until `start` is called.
    pub fn new(config: Config) -> Self {
        let identity_keypair = Self::load_identity_keys(&config);

        NymClient {
            runtime: Runtime::new().unwrap(),
            config,
            identity_keypair,
            input_tx: None,
        }
    }

    /// This client's own sphinx destination, derived from its identity key.
    pub fn as_mix_destination(&self) -> Destination {
        Destination::new(
            self.identity_keypair.public_key().derive_address(),
            // TODO: what with SURBs?
            Default::default(),
        )
    }

    /// Looks up the socket address of the provider with `provider_id` in the
    /// current topology. Panics if the topology is empty or the provider is
    /// no longer present.
    async fn get_provider_socket_address<T: NymTopology>(
        provider_id: String,
        mut topology_accessor: TopologyAccessor<T>,
    ) -> SocketAddr {
        topology_accessor.get_current_topology_clone().await.as_ref().expect("The current network topology is empty - are you using correct directory server?")
            .providers()
            .iter()
            .find(|provider| provider.pub_key == provider_id)
            .unwrap_or_else(
                || panic!("Could not find provider with id {:?} - are you sure it is still online? Perhaps try to run `nym-client init` again to obtain a new provider", provider_id))
            .client_listener
    }

    // future constantly pumping loop cover traffic at some specified average rate
    // the pumped traffic goes to the MixTrafficController
    fn start_cover_traffic_stream<T: 'static + NymTopology>(
        &self,
        topology_accessor: TopologyAccessor<T>,
        mix_tx: MixMessageSender,
    ) {
        info!("Starting loop cover traffic stream...");
        // we need to explicitly enter runtime due to "next_delay: time::delay_for(Default::default())"
        // set in the constructor which HAS TO be called within context of a tokio runtime
        self.runtime
            .enter(|| {
                LoopCoverTrafficStream::new(
                    mix_tx,
                    self.as_mix_destination(),
                    topology_accessor,
                    self.config.get_loop_cover_traffic_average_delay(),
                    self.config.get_average_packet_delay(),
                )
            })
            .start(self.runtime.handle());
    }

    /// Spawns the stream that drains `input_rx` (real user messages) and
    /// forwards sphinx packets to the mix traffic controller via `mix_tx`.
    fn start_real_traffic_stream<T: 'static + NymTopology>(
        &self,
        topology_accessor: TopologyAccessor<T>,
        mix_tx: MixMessageSender,
        input_rx: InputMessageReceiver,
    ) {
        info!("Starting real traffic stream...");
        // we need to explicitly enter runtime due to "next_delay: time::delay_for(Default::default())"
        // set in the constructor which HAS TO be called within context of a tokio runtime
        self.runtime
            .enter(|| {
                real_traffic_stream::OutQueueControl::new(
                    mix_tx,
                    input_rx,
                    self.as_mix_destination(),
                    topology_accessor,
                    self.config.get_average_packet_delay(),
                    self.config.get_message_sending_average_delay(),
                )
            })
            .start(self.runtime.handle());
    }

    // buffer controlling all messages fetched from provider
    // required so that other components would be able to use them (say the websocket)
    fn start_received_messages_buffer_controller(
        &self,
        query_receiver: ReceivedBufferRequestReceiver,
        poller_receiver: PolledMessagesReceiver,
    ) {
        info!("Starting 'received messages buffer controller'...");
        ReceivedMessagesBufferController::new(query_receiver, poller_receiver)
            .start(self.runtime.handle())
    }

    // future constantly trying to fetch any received messages from the provider
    // the received messages are sent to ReceivedMessagesBuffer to be available to rest of the system
    fn start_provider_poller<T: NymTopology>(
        &mut self,
        topology_accessor: TopologyAccessor<T>,
        poller_input_tx: PolledMessagesSender,
    ) {
        info!("Starting provider poller...");
        // we already have our provider written in the config
        let provider_id = self.config.get_provider_id();

        // Blocks the runtime: the poller cannot be built without knowing the
        // provider's socket address from the current topology.
        let provider_client_listener_address = self.runtime.block_on(
            Self::get_provider_socket_address(provider_id, topology_accessor),
        );

        let mut provider_poller = provider_poller::ProviderPoller::new(
            poller_input_tx,
            provider_client_listener_address,
            self.identity_keypair.public_key().derive_address(),
            // An unparsable stored token is treated the same as no token.
            self.config
                .get_provider_auth_token()
                .map(|str_token| AuthToken::try_from_base58_string(str_token).ok())
                .unwrap_or(None),
            self.config.get_fetch_message_delay(),
        );

        if !provider_poller.is_registered() {
            info!("Trying to perform initial provider registration...");
            self.runtime
                .block_on(provider_poller.perform_initial_registration())
                .expect("Failed to perform initial provider registration");
        }
        provider_poller.start(self.runtime.handle());
    }

    // future responsible for periodically polling directory server and updating
    // the current global view of topology
    fn start_topology_refresher<T: 'static + NymTopology>(
        &mut self,
        topology_accessor: TopologyAccessor<T>,
    ) {
        let healthcheck_keys = MixIdentityKeyPair::new();

        let topology_refresher_config = TopologyRefresherConfig::new(
            self.config.get_directory_server(),
            self.config.get_topology_refresh_rate(),
            healthcheck_keys,
            self.config.get_topology_resolution_timeout(),
            self.config.get_number_of_healthcheck_test_packets() as usize,
            self.config.get_node_score_threshold(),
        );
        let mut topology_refresher =
            TopologyRefresher::new(topology_refresher_config, topology_accessor);
        // before returning, block entire runtime to refresh the current network view so that any
        // components depending on topology would see a non-empty view
        info!(
            "Obtaining initial network topology from {}",
            self.config.get_directory_server()
        );
        self.runtime.block_on(topology_refresher.refresh());
        info!("Starting topology refresher...");
        topology_refresher.start(self.runtime.handle());
    }

    // controller for sending sphinx packets to mixnet (either real traffic or cover traffic)
    fn start_mix_traffic_controller(&mut self, mix_rx: MixMessageReceiver) {
        info!("Starting mix trafic controller...");
        self.runtime
            .enter(|| {
                MixTrafficController::new(
                    self.config.get_packet_forwarding_initial_backoff(),
                    self.config.get_packet_forwarding_maximum_backoff(),
                    mix_rx,
                )
            })
            .start(self.runtime.handle());
    }

    /// Starts the client-facing socket (websocket or raw TCP) selected by the
    /// config; `SocketType::None` starts nothing.
    fn start_socket_listener<T: 'static + NymTopology>(
        &self,
        topology_accessor: TopologyAccessor<T>,
        received_messages_buffer_output_tx: ReceivedBufferRequestSender,
        input_tx: InputMessageSender,
    ) {
        match self.config.get_socket_type() {
            SocketType::WebSocket => {
                websocket::listener::run(
                    self.runtime.handle(),
                    self.config.get_listening_port(),
                    input_tx,
                    received_messages_buffer_output_tx,
                    self.identity_keypair.public_key().derive_address(),
                    topology_accessor,
                );
            }
            SocketType::TCP => {
                tcp::start_tcpsocket(
                    self.runtime.handle(),
                    self.config.get_listening_port(),
                    input_tx,
                    received_messages_buffer_output_tx,
                    self.identity_keypair.public_key().derive_address(),
                    topology_accessor,
                );
            }
            SocketType::None => (),
        }
    }

    /// EXPERIMENTAL DIRECT RUST API
    /// It's entirely untested and there are absolutely no guarantees about it
    pub fn send_message(&self, destination: Destination, message: Vec<u8>) {
        self.input_tx
            .as_ref()
            .expect("start method was not called before!")
            .unbounded_send(InputMessage(destination, message))
            .unwrap()
    }

    /// blocking version of `start` method. Will run forever (or until SIGINT is sent)
    pub fn run_forever(&mut self) {
        self.start();
        if let Err(e) = self.runtime.block_on(tokio::signal::ctrl_c()) {
            error!(
                "There was an error while capturing SIGINT - {:?}. We will terminate regardless",
                e
            );
        }

        println!(
            "Received SIGINT - the mixnode will terminate now (threads are not YET nicely stopped)"
        );
    }

    /// Wires up all inter-component channels and starts every subsystem.
    /// Non-blocking; components run on the owned tokio runtime.
    pub fn start(&mut self) {
        info!("Starting nym client");
        // channels for inter-component communication

        // mix_tx is the transmitter for any component generating sphinx packets that are to be sent to the mixnet
        // they are used by cover traffic stream and real traffic stream
        // mix_rx is the receiver used by MixTrafficController that sends the actual traffic
        let (mix_tx, mix_rx) = mpsc::unbounded();

        // poller_input_tx is the transmitter of messages fetched from the provider - used by ProviderPoller
        // poller_input_rx is the receiver for said messages - used by ReceivedMessagesBuffer
        let (poller_input_tx, poller_input_rx) = mpsc::unbounded();

        // received_messages_buffer_output_tx is the transmitter for *REQUESTS* for messages contained in ReceivedMessagesBuffer - used by sockets
        // the requests contain a oneshot channel to send a reply on
        // received_messages_buffer_output_rx is the received for the said requests - used by ReceivedMessagesBuffer
        let (received_messages_buffer_output_tx, received_messages_buffer_output_rx) =
            mpsc::unbounded();

        // channels responsible for controlling real messages
        let (input_tx, input_rx) = mpsc::unbounded::<InputMessage>();

        // TODO: when we switch to our graph topology, we need to remember to change 'presence::Topology' type
        let shared_topology_accessor = TopologyAccessor::<presence::Topology>::new();
        // the components are started in very specific order. Unless you know what you are doing,
        // do not change that.
        self.start_topology_refresher(shared_topology_accessor.clone());
        self.start_received_messages_buffer_controller(
            received_messages_buffer_output_rx,
            poller_input_rx,
        );
        self.start_provider_poller(shared_topology_accessor.clone(), poller_input_tx);
        self.start_mix_traffic_controller(mix_rx);
        self.start_cover_traffic_stream(shared_topology_accessor.clone(), mix_tx.clone());
        self.start_real_traffic_stream(shared_topology_accessor.clone(), mix_tx, input_rx);
        self.start_socket_listener(
            shared_topology_accessor,
            received_messages_buffer_output_tx,
            input_tx.clone(),
        );

        self.input_tx = Some(input_tx);
    }
}
use std::str::FromStr;

/// Runs the jump-offset machine (AoC 2017 day 5) until the instruction
/// pointer leaves `input`, mutating the offsets in place.
///
/// Part one increments every executed offset; part two (`part_two == true`)
/// instead decrements offsets that were `>= 3` and increments the rest.
///
/// Prints the step count (preserving the original CLI behavior) and also
/// returns it, so the logic is reusable and testable. Existing callers that
/// ignore the return value are unaffected.
fn solve(input: &mut [isize], part_two: bool) -> usize {
    let mut line = 0isize;
    let mut steps = 0usize;
    loop {
        steps += 1;
        {
            // Borrow the current cell, jump by its value, then adjust the
            // *old* cell — the borrow still points at it after `line` moves.
            let jump = &mut input[line as usize];
            line += *jump;
            if part_two && *jump >= 3 {
                *jump -= 1;
            } else {
                *jump += 1;
            }
        }
        if line < 0 || line as usize >= input.len() {
            break;
        }
    }
    println!("{}", steps);
    steps
}

/// Entry point for day 5: parses the bundled puzzle input (one offset per
/// line) and solves both parts on independent copies.
#[allow(unused)]
pub fn run() {
    let input = include_str!("../input/5");
    let mut input: Vec<_> = input.lines().map(|l| isize::from_str(l).unwrap()).collect();
    let mut input_2 = input.clone();

    solve(&mut input, false);
    solve(&mut input_2, true);
}
use std::str::FromStr; pub fn day_1_input() -> Vec<i64> { include_str!("../resources/day01part01.txt") .lines() .map(|s| i64::from_str(&s).unwrap()) .collect() } pub fn day_2_input() -> Vec<String> { include_str!("../resources/day02part01.txt") .lines() .map(|s| s.to_owned()) .collect() } pub fn day_3_input() -> Vec<String> { include_str!("../resources/day03part01.txt") .lines() .map(|s| s.to_owned()) .collect() } pub fn day_4_input() -> &'static str { include_str!("../resources/day04part01.txt") } pub fn day_5_input() -> &'static str { include_str!("../resources/day05part01.txt") }
use crate::schema::users;
use crate::schema::papers;
use chrono::NaiveDateTime;

/// A paper row as read from the `papers` table.
/// Field order must match the column order declared in the schema,
/// since `Queryable` maps columns positionally.
#[derive(Debug, Queryable)]
pub struct Paper {
    pub paper_id: i32,
    pub paper_title: String,
    pub paper_author: String,
    pub paper_year: i32,
    // Owning user; foreign key into `users`.
    pub user_id: i32,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
}

/// Insertable payload for creating a paper; id and timestamps are
/// assigned by the database. Deserializable directly from request bodies.
#[derive(Debug, Insertable, Deserialize)]
#[table_name = "papers"]
pub struct NewPaper {
    pub paper_title: String,
    pub paper_author: String,
    pub paper_year: i32,
    pub user_id: i32,
}

/// A user row as read from the `users` table (positional mapping, as above).
#[derive(Debug, Queryable)]
pub struct User {
    pub user_id: i32,
    pub user_name: String,
    pub user_email: String,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
}

/// Insertable payload for creating a user; id and timestamps are
/// assigned by the database.
#[derive(Insertable)]
#[table_name = "users"]
pub struct NewUser {
    pub user_name: String,
    pub user_email: String,
}
use bson::{doc, Document, Timestamp};
use serde::Deserialize;

use crate::{
    client::ClusterTime,
    cmap::{RawCommandResponse, StreamDescription},
    error::{Result, TRANSIENT_TRANSACTION_ERROR},
    operation::{CommandErrorBody, CommandResponse, Operation},
    options::{ReadPreference, SelectionCriteria},
};

/// Test helper: feeds `response_doc` through `op`'s response handler as if it
/// had come back from a server with a default testing stream description.
pub(crate) fn handle_response_test<T: Operation>(op: &T, response_doc: Document) -> Result<T::O> {
    let raw = RawCommandResponse::with_document(response_doc).unwrap();
    op.handle_response(raw, &StreamDescription::new_testing())
}

/// Test helper: asserts that an operation built by `constructor` reports
/// exactly the `SelectionCriteria` it was constructed with (both the `None`
/// and `Some` cases).
pub(crate) fn op_selection_criteria<F, T>(constructor: F)
where
    T: Operation,
    F: Fn(Option<SelectionCriteria>) -> T,
{
    let op = constructor(None);
    assert_eq!(op.selection_criteria(), None);

    let read_pref: SelectionCriteria = ReadPreference::Secondary {
        options: Default::default(),
    }
    .into();
    let op = constructor(Some(read_pref.clone()));
    assert_eq!(op.selection_criteria(), Some(&read_pref));
}

/// A successful (`ok: 1`) response should parse as a success, expose the
/// `$clusterTime`, and deserialize its body both as a raw `Document`
/// (with control fields stripped) and as a typed struct.
#[test]
fn response_success() {
    let cluster_timestamp = Timestamp {
        time: 123,
        increment: 345,
    };
    let doc = doc! {
        "ok": 1,
        "some": "field",
        "other": true,
        "$clusterTime": {
            "clusterTime": cluster_timestamp,
            "signature": {}
        }
    };
    let raw = RawCommandResponse::with_document(doc.clone()).unwrap();

    // Untyped body: `ok` and `$clusterTime` are consumed, payload remains.
    let response: CommandResponse<Document> = raw.body().unwrap();
    assert!(response.is_success());
    assert_eq!(
        response.cluster_time(),
        Some(&ClusterTime {
            cluster_time: cluster_timestamp,
            signature: doc! {},
        })
    );
    assert_eq!(response.body, doc! { "some": "field", "other": true });

    // Typed body: field renaming and defaulting are applied by serde.
    #[derive(Deserialize, Debug, PartialEq)]
    struct Body {
        some: String,
        #[serde(rename = "other")]
        o: bool,
        #[serde(default)]
        default: Option<i32>,
    }

    let raw = RawCommandResponse::with_document(doc).unwrap();
    let response: CommandResponse<Body> = raw.body().unwrap();
    assert!(response.is_success());
    assert_eq!(
        response.cluster_time(),
        Some(&ClusterTime {
            cluster_time: cluster_timestamp,
            signature: doc! {},
        })
    );
    assert_eq!(
        response.body,
        Body {
            some: "field".to_string(),
            o: true,
            default: None,
        }
    );
}

/// A failed (`ok: 0`) response should parse as a failure while still exposing
/// the `$clusterTime`, and its error details should deserialize both as a raw
/// `Document` and as a typed `CommandErrorBody`.
#[test]
fn response_failure() {
    let cluster_timestamp = Timestamp {
        time: 123,
        increment: 345,
    };
    let doc = doc! {
        "ok": 0,
        "code": 123,
        "codeName": "name",
        "errmsg": "some message",
        "errorLabels": [TRANSIENT_TRANSACTION_ERROR],
        "$clusterTime": {
            "clusterTime": cluster_timestamp,
            "signature": {}
        }
    };
    let raw = RawCommandResponse::with_document(doc.clone()).unwrap();

    // Untyped body keeps every error field except the control fields.
    let response: CommandResponse<Document> = raw.body().unwrap();
    assert!(!response.is_success());
    assert_eq!(
        response.cluster_time(),
        Some(&ClusterTime {
            cluster_time: cluster_timestamp,
            signature: doc! {},
        })
    );
    assert_eq!(
        response.body,
        doc! {
            "code": 123,
            "codeName": "name",
            "errmsg": "some message",
            "errorLabels": [TRANSIENT_TRANSACTION_ERROR],
        }
    );

    // Typed error body: code/codeName/errmsg/errorLabels are structured.
    let raw = RawCommandResponse::with_document(doc).unwrap();
    let response: CommandResponse<CommandErrorBody> = raw.body().unwrap();
    assert!(!response.is_success());
    assert_eq!(
        response.cluster_time(),
        Some(&ClusterTime {
            cluster_time: cluster_timestamp,
            signature: doc! {},
        })
    );
    let command_error = response.body;
    assert_eq!(command_error.command_error.code, 123);
    assert_eq!(command_error.command_error.code_name, "name");
    assert_eq!(command_error.command_error.message, "some message");
    assert_eq!(
        command_error.error_labels,
        Some(vec![TRANSIENT_TRANSACTION_ERROR.to_string()])
    );
}
pub mod bitwarden;

use std::error::Error;

/// Abstraction over credential backends (e.g. the `bitwarden` submodule).
pub trait Authenticator {
    /// Builds an authenticator, unlocking the backend with `master_password`.
    /// Returns an error if the backend cannot be opened or the password is
    /// rejected (exact failure modes depend on the implementation).
    fn new(master_password: &str) -> Result<Self, Box<dyn Error>>
    where
        Self: Sized;

    /// Looks up the stored secret for `user` at `hostname`.
    /// The returned `&str` borrows from `self`, so implementations must keep
    /// the credential in memory for the authenticator's lifetime.
    fn get(&self, hostname: &str, user: &str) -> Result<&str, Box<dyn Error>>;
}
// Copyright (c) The Starcoin Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::error::AccountServiceError; use crate::{Wallet, WalletAccount}; use starcoin_types::account_address::AccountAddress; use starcoin_types::transaction::{RawUserTransaction, SignedUserTransaction}; pub type ServiceResult<T> = std::result::Result<T, AccountServiceError>; pub trait WalletService: Wallet {} #[async_trait::async_trait] pub trait WalletAsyncService: Clone + std::marker::Unpin + Send + Sync { async fn create_account(self, password: String) -> ServiceResult<WalletAccount>; async fn get_default_account(self) -> ServiceResult<Option<WalletAccount>>; async fn get_accounts(self) -> ServiceResult<Vec<WalletAccount>>; async fn get_account(self, address: AccountAddress) -> ServiceResult<Option<WalletAccount>>; async fn sign_txn( self, raw_txn: RawUserTransaction, signer_address: AccountAddress, ) -> ServiceResult<SignedUserTransaction>; async fn unlock_account( self, address: AccountAddress, password: String, duration: std::time::Duration, ) -> ServiceResult<()>; async fn import_account( self, address: AccountAddress, private_key: Vec<u8>, password: String, ) -> ServiceResult<WalletAccount>; /// Return the private key as bytes for `address` async fn export_account( self, address: AccountAddress, password: String, ) -> ServiceResult<Vec<u8>>; }
use std::{io, num::ParseIntError};

use grid::Grid;
use problem::{Problem, ProblemInput, solve};

/// A tile orientation: a quarter-turn count plus an optional reflection.
/// Used both to place tiles into the assembled image and to compose
/// neighbor orientations.
#[derive(Clone, Copy, Debug)]
struct Transform {
    // Number of counter-clockwise quarter turns (0..4).
    rotation: u8,
    reflection: bool,
}

impl Transform {
    /// Composes `self` after `other`.
    /// When `self` reflects, `other`'s rotation direction is inverted —
    /// reflection conjugates rotations.
    fn combine(&self, other: &Self) -> Self {
        if !self.reflection {
            Self {
                rotation: (self.rotation + other.rotation) % 4,
                reflection: other.reflection,
            }
        } else {
            Self {
                rotation: (self.rotation + 4 - other.rotation) % 4,
                reflection: self.reflection != other.reflection,
            }
        }
    }

    /// Applies this orientation to the vector `(x, y)` by table lookup:
    /// each rotation index selects the rotated coordinates; the reflected
    /// tables additionally swap/negate to mirror first.
    fn transform(&self, x: i32, y: i32) -> (i32, i32) {
        if !self.reflection {
            let tx = [x, -y, -x, y];
            let ty = [y, x, -y, -x];
            (tx[self.rotation as usize], ty[self.rotation as usize])
        } else {
            let tx = [y, x, -y, -x];
            let ty = [x, -y, -x, y];
            (tx[self.rotation as usize], ty[self.rotation as usize])
        }
    }
}

/// One 10x10 puzzle tile: its id, the four border rows/columns packed into
/// 10-bit integers (indexed right, top, left, bottom per the parser below),
/// and the 8x8 interior with borders stripped.
#[derive(Debug)]
struct Tile {
    id: u64,
    sides: [u16; 4],
    inner: Grid<bool>,
}

impl Tile {
    /// The four sides as they would read after flipping the tile:
    /// opposite horizontal/vertical sides swap and each bit pattern is
    /// reversed (`reverse_bits` on u16 then `>> 6` keeps the low 10 bits).
    fn reversed_sides(&self) -> [u16; 4] {
        [
            self.sides[1].reverse_bits() >> 6,
            self.sides[0].reverse_bits() >> 6,
            self.sides[3].reverse_bits() >> 6,
            self.sides[2].reverse_bits() >> 6,
        ]
    }
}

/// Parsed puzzle input: the full set of tiles.
struct Input {
    tiles: Vec<Tile>,
}

/// Errors that can occur while parsing the tile list.
#[derive(Debug)]
enum ParseInputError {
    ParseIntError(ParseIntError),
    IoError(io::Error),
    // The usize is the (reversed) row index that was missing.
    MissingTileLine(usize),
}

impl From<ParseIntError> for ParseInputError {
    fn from(e: ParseIntError) -> Self {
        Self::ParseIntError(e)
    }
}

impl From<io::Error> for ParseInputError {
    fn from(e: io::Error) -> Self {
        Self::IoError(e)
    }
}

impl ProblemInput for Input {
    type Error = ParseInputError;

    /// Parses blocks of the form "Tile NNNN:" followed by ten rows of
    /// '#'/'.' cells and a blank separator line. Rows are stored bottom-up
    /// (y reversed) so (0,0) is the bottom-left cell.
    fn parse<R: io::BufRead>(reader: R) -> Result<Self, Self::Error> {
        let mut lines = reader.lines();
        let mut tiles = Vec::new();
        while let Some(line) = lines.next() {
            // Characters 5..9 of the header hold the 4-digit tile id.
            let id = line?[5..9].parse()?;
            let mut grid = Grid::new(10, 10);
            for y in (0..10).rev() {
                let line = lines.next().ok_or(ParseInputError::MissingTileLine(y))??;
                for (x, c) in line.chars().enumerate() {
                    *grid.get_mut(x as i32, y as i32) = c == '#';
                }
            }
            // Pack the four borders into 10-bit integers:
            // sides[0] = right column (bottom→top), sides[1] = top row (right→left),
            // sides[2] = left column (top→bottom), sides[3] = bottom row (left→right),
            // i.e. each side is read in consistent counter-clockwise order.
            let mut sides = [0, 0, 0, 0];
            for i in 0..10 {
                if *grid.get(9, i) {
                    sides[0] |= 1 << i;
                }
                if *grid.get(9 - i, 9) {
                    sides[1] |= 1 << i;
                }
                if *grid.get(0, 9 - i) {
                    sides[2] |= 1 << i;
                }
                if *grid.get(i, 0) {
                    sides[3] |= 1 << i;
                }
            }
            tiles.push(Tile {
                id,
                sides,
                inner: grid.slice(1, 1, 8, 8),
            });
            // Skip the blank line between tile blocks.
            lines.next();
        }
        Ok(Self { tiles })
    }
}

/// Assembles the tiles into the full image.
/// Returns the stitched 8x8-per-tile pixel grid plus a grid of tile ids at
/// each position, or `None` if no consistent corner orientation is found.
fn reconstruct_image(tiles: &Vec<Tile>) -> Option<(Grid<bool>, Grid<u64>)> {
    // The image is square: size x size tiles.
    let size = (tiles.len() as f32).sqrt().floor() as usize;
    // For each tile and each of its four sides, find the unique matching
    // neighbor and the transform mapping the neighbor's frame onto this one.
    let mut neighbor_transforms = vec![[None, None, None, None]; tiles.len()];
    for (i, tile) in tiles.iter().enumerate() {
        for (s, side) in tile.sides.iter().enumerate() {
            // A touching neighbor shows this side's bit pattern reversed.
            let pair = side.reverse_bits() >> 6;
            for (j, other) in tiles.iter().enumerate().filter(|&(j, _)| j != i) {
                if let Some(r) = other.sides.iter().position(|&s| s == pair) {
                    // Direct match: neighbor is rotated, not reflected.
                    neighbor_transforms[i][s] = Some((j, Transform {
                        rotation: (s as u8 + 4 - r as u8 + 2) % 4,
                        reflection: false,
                    }));
                    break;
                } else if let Some(r) = other.reversed_sides().iter().position(|&s| s == pair) {
                    // Match against the flipped neighbor.
                    neighbor_transforms[i][s] = Some((j, Transform {
                        rotation: (4 - s as u8 + r as u8 + 2) % 4,
                        reflection: true,
                    }));
                    break;
                }
            }
        }
    }
    // A corner tile is the one with exactly two unmatched sides.
    if let Some(corner) = neighbor_transforms
        .iter()
        .position(|ns| ns.iter().filter(|n| n.is_none()).count() == 2)
    {
        // Orient the corner so its unmatched sides face left/down, making it
        // the bottom-left tile of the output image.
        if let Some(rotation) = match &neighbor_transforms[corner] {
            [_, _, None, None] => Some(0),
            [_, None, None, _] => Some(1),
            [None, None, _, _] => Some(2),
            [None, _, _, None] => Some(3),
            _ => None,
        } {
            let mut result_image = Grid::new(size * 8, size * 8);
            let mut result_ids = Grid::new(size, size);
            // Flood-fill outward from the corner, carrying each tile's
            // tile-to-world transform along.
            let mut queue = vec![((0, 0), corner, Transform { rotation, reflection: false })];
            while let Some(((x, y), index, tile_to_world)) = queue.pop() {
                // Unfilled positions have id 0.
                if *result_ids.get(x, y) == 0 {
                    let mut image = tiles[index].inner.clone();
                    match tile_to_world.rotation {
                        0 => (),
                        1 => image.rotate_ccw(),
                        2 => image.rotate_half(),
                        3 => image.rotate_cw(),
                        _ => unreachable!(),
                    }
                    if tile_to_world.reflection {
                        image.flip_vert();
                        image.rotate_ccw();
                    }
                    result_image.blit(x * 8, y * 8, &image);
                    *result_ids.get_mut(x, y) = tiles[index].id;
                    // Enqueue the four neighbors, transforming the step
                    // direction into world coordinates.
                    const DX: [i32; 4] = [1, 0, -1, 0];
                    const DY: [i32; 4] = [0, 1, 0, -1];
                    for d in 0..4 {
                        if let Some((nindex, neighbor_to_normal)) = &neighbor_transforms[index][d] {
                            let (dx, dy) = tile_to_world.transform(DX[d], DY[d]);
                            let target = neighbor_to_normal.combine(&tile_to_world);
                            queue.push(((x + dx, y + dy), *nindex, target));
                        }
                    }
                }
            }
            Some((result_image, result_ids))
        } else {
            None
        }
    } else {
        None
    }
}

/// Returns true when every set cell of `pattern` is also set in `grid` at
/// offset `(x, y)` (unset pattern cells are "don't care").
fn check_pattern(grid: &Grid<bool>, x: i32, y: i32, pattern: &Grid<bool>) -> bool {
    for (px, py) in pattern.enumerate() {
        if *pattern.get(px, py) && !grid.get(x + px, y + py) {
            return false;
        }
    }
    true
}

struct Day20;

impl Problem for Day20 {
    type Input = Input;
    type Part1Output = u64;
    type Part2Output = usize;
    type Error = ();

    /// Part 1: product of the four corner tile ids of the assembled image.
    fn part_1(input: &Self::Input) -> Result<Self::Part1Output, Self::Error> {
        if let Some((_, ids)) = reconstruct_image(&input.tiles) {
            Ok(
                ids.get(0, 0)
                    * ids.get(ids.width() as i32 - 1, 0)
                    * ids.get(0, ids.height() as i32 - 1)
                    * ids.get(ids.width() as i32 - 1, ids.height() as i32 - 1)
            )
        } else {
            Err(())
        }
    }

    /// Part 2: find the orientation in which sea monsters appear, then count
    /// the '#' cells that are not part of a monster (each monster covers 15).
    fn part_2(input: &Self::Input) -> Result<Self::Part2Output, Self::Error> {
        if let Some((mut image, _)) = reconstruct_image(&input.tiles) {
            // The sea-monster pattern, stored bottom-up to match grid rows.
            const LINES: [&'static str; 3] = [
                "                  # ",
                "#    ##    ##    ###",
                " #  #  #  #  #  #   ",
            ];
            let pattern = Grid::new_with(20, 3, |x, y| LINES[2 - y as usize].chars().nth(x as usize).unwrap() == '#');
            let mut pattern_count = 0;
            // Try all 8 orientations (4 rotations x optional flip); stop at
            // the first orientation containing at least one monster.
            'outer: for _ in 0..2 {
                for _ in 0..4 {
                    for x in 0..image.width() - pattern.width() {
                        for y in 0..image.height() - pattern.height() {
                            if check_pattern(&image, x as i32, y as i32, &pattern) {
                                pattern_count += 1;
                            }
                        }
                    }
                    if pattern_count > 0 {
                        break 'outer;
                    }
                    image.rotate_ccw();
                }
                image.flip_vert();
            }
            // NOTE(review): assumes monsters never overlap, so each one
            // removes exactly 15 cells from the roughness count.
            Ok(image.enumerate().filter(|&(x, y)| *image.get(x, y)).count() - pattern_count * 15)
        } else {
            Err(())
        }
    }
}

fn main() {
    solve::<Day20>("input").unwrap();
}
// NOTE(review): this is pre-1.0 Rust (`proc` closures, `int`, green threads,
// `std::comm`); it will not compile on any modern toolchain and is kept as-is.
extern crate green;
extern crate rustuv;

use std::comm::{sync_channel, Receiver, Sender};
use tcp::{start_tcp_handler, WorkerProcSender};
use tcp::{TcpEvent, ConnCreat, Read, Write, ConnClose};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// Boot the program on the libgreen (M:N) scheduler with the libuv event loop.
#[start]
fn start(argc: int, argv: *const *const u8) -> int {
    green::start(argc, argv, rustuv::event_loop, start_irc)
}

// Shared server state: channel registry and per-user outgoing senders,
// both behind Arc<Mutex<..>> so worker tasks can share them.
pub struct IRCConfig {
    channels: Arc<Mutex<HashMap<String, Channel>>>,
    users: Arc<Mutex<HashMap<String, Sender<TcpEvent>>>>
}

// A message target: either a channel or a single user.
pub enum Destination {
    DestChan( Channel ),
    DestUser( User )
}

impl IRCConfig {
    fn new() -> IRCConfig {
        IRCConfig {
            channels: Arc::new(Mutex::new(HashMap::new())),
            users: Arc::new(Mutex::new(HashMap::new()))
        }
    }

    // Unimplemented stub: routing of a message to `dest` always fails.
    pub fn send_msg<T: IRCSender>( &self, sender: T, dest: &[u8], msg: &[u8] ) -> Result<(), ()>{
        Err( () )
    }

    // Asks the user's write task to close its connection.
    pub fn kill_user( &self, user: User ) {
        user.send_msg( ConnClose );
    }
}

// Manual Clone: clones only the Arc handles, so all clones share state.
impl Clone for IRCConfig {
    fn clone(&self) -> IRCConfig {
        IRCConfig {
            channels: self.channels.clone(),
            users: self.users.clone()
        }
    }

    fn clone_from( &mut self, source: &IRCConfig ) {
        self.channels = source.channels.clone();
        self.users = source.users.clone();
    }
}

// Anything that can be named as the sender of a message.
trait IRCSender {
    fn get_sender( &self ) -> &str;
}

pub struct Channel {
    name: String,
    users: Vec<String>
}

impl IRCSender for Channel {
    // Placeholder implementation.
    fn get_sender<'a>( &self ) -> &'a str {
        "hi"
    }
}

pub struct User {
    a: int,
    nick: String,
    sender: Sender<TcpEvent>
}

impl User {
    // Forwards an event to this user's write task.
    pub fn send_msg( &self, msg: TcpEvent ) {
        self.sender.send( msg );
    }
}

impl IRCSender for User {
    // Placeholder implementation.
    fn get_sender<'a>( &self ) -> &'a str {
        "hi"
    }
}

// Real entry point (run inside the green scheduler): spawns the worker
// generator and hands its receiver to the TCP accept loop.
fn start_irc() {
    let irc_conf = IRCConfig::new();
    let (worker_send, worker_recv) = sync_channel(0);
    let irc_conf_c = irc_conf.clone();
    spawn( proc() irc_worker_gen( irc_conf_c, worker_send ) );
    start_tcp_handler( "127.0.0.1", 8787, worker_recv );
}

// Endlessly hands out per-connection worker procs; the rendezvous (size-0)
// channel blocks until the TCP handler needs one.
fn irc_worker_gen( irc_struct: IRCConfig, worker_send: WorkerProcSender ) {
    loop {
        let irc_struct_c = irc_struct.clone();
        worker_send.send( proc(a, b) {proc() irc_worker_pipe(irc_struct_c, a, b) } );
    }
}

// Per-connection worker: handshakes the connection, authenticates the user,
// then echoes/logs incoming lines until the connection closes.
fn irc_worker_pipe( irc_struct: IRCConfig, write_send: Sender<TcpEvent>, read_recv: Receiver<TcpEvent> ) {
    // Expect the connection-created event first; anything else aborts.
    match read_recv.recv() {
        ConnCreat => { write_send.send( ConnCreat ) },
        _ => {
            drop_conn( &write_send );
            return;
        }
    }

    // Authenticate user -> add writer to writer_map in irc_struct
    let user = match accept_conn( &read_recv, &write_send ) {
        Some(a) => a,
        None => {
            drop_conn( &write_send );
            return;
        }
    };

    write_send.send( Write( user ) );

    // loop over lines from line_reader
    // Find sense from the mess
    // If channel, get channel writer, and send message.
    // If a pm, get user writer directly, and send message.
    // If something else, either deal with directly, or make a master thread?
    // Sine all events should be fast or async, we can deal with timing here (though maybe move up).

    // If we are flooding, or something unsavoury happens, we should end this thread.
    // So we send a special message, and either end here, or have just have it after the loop.

    // End a connection here. We need to remove the writer from irc_struct, and such. Then
    // send ConnClose. The write task (hopefully) closes the reader for us. Need to do some testing.
    loop {
        let line = match read_recv.recv() {
            ConnCreat => {
                write_send.send( ConnCreat );
                continue;
            },
            Read(m) => format!("{}", m),
            ConnClose => {
                write_send.send( ConnClose );
                return;
            },
            Write(_) => {
                // We should never get write, close connection now.
                write_send.send( ConnClose );
                return;
            }
        };
        print!("{}", line);
    }
}

// Reads the first line and accepts it as a login when it looks like
// "nick...:<name>"; returns the nick, or None to reject the connection.
fn accept_conn( read_recv: &Receiver<TcpEvent>, write_send: &Sender<TcpEvent> ) -> Option<String> {
    let line = match (*read_recv).recv() {
        Read(a) => a,
        _ => return None
    };
    if line.as_slice().starts_with( "nick" ) {
        let offset = match line.as_slice().find(':') {
            Some(a) => a,
            None => return None
        };
        let nick = line.as_slice().slice_from( offset+1 );
        Some( nick.to_string() )
    } else {
        None
    }
}

// Notifies the peer and tells the write task to shut the connection down.
fn drop_conn( write_send: &Sender<TcpEvent> ) {
    (*write_send).send( Write( "Dropping Connection".to_string() ) );
    (*write_send).send( ConnClose );
    println!("A-OK");
}
extern crate bindgen;

use std::env;
use std::path::{Path, PathBuf};
use std::process::Command;

/// Build script: unpacks the platform-specific SpatialOS C worker SDK via the
/// `spatial` CLI, emits the link directives for its static libraries, and
/// generates Rust bindings for the C headers with bindgen.
fn main() {
    let target = env::var("TARGET").unwrap();
    let worker_package_dir = PathBuf::from(env::var("OUT_DIR").unwrap()).join("worker_sdk");

    // Pick the SDK package matching the compilation target triple.
    let package_name = if target.contains("windows") {
        "c-static-x86_64-msvc_md-win32"
    } else if target.contains("apple") {
        "c-static-x86_64-clang_libcpp-macos"
    } else if target.contains("linux") {
        "c-static-x86_64-gcc_libstdcpp-linux"
    } else {
        panic!("Unknown platform {}", target);
    };

    unpack_worker_package("worker_sdk", package_name, worker_package_dir.clone());

    println!(
        "cargo:rustc-link-search={}",
        worker_package_dir.join("lib").display()
    );

    // Static libraries bundled with the SDK; MSVC ships differently-named
    // protobuf/zlib artifacts than the unix toolchains.
    let libs = if target.contains("windows") {
        vec![
            "worker",
            "grpc++",
            "grpc",
            "gpr",
            "libprotobuf",
            "RakNetLibStatic",
            "ssl",
            "zlibstatic",
        ]
    } else {
        vec![
            "worker",
            "grpc++",
            "grpc",
            "gpr",
            "protobuf",
            "RakNetLibStatic",
            "ssl",
            "z",
        ]
    };

    for lib in libs {
        println!("cargo:rustc-link-lib=static={}", lib);
    }

    // The SDK is C++, so link the platform's C++ runtime dynamically.
    if target.contains("apple") {
        println!("cargo:rustc-link-lib=dylib=c++");
    } else if target.contains("linux") {
        println!("cargo:rustc-link-lib=dylib=stdc++");
    }

    // Generate bindings for both SDK headers into OUT_DIR/bindings.rs.
    let bindings = bindgen::Builder::default()
        .constified_enum_module(".*")
        .disable_untagged_union()
        .header(
            worker_package_dir
                .join("include/improbable/c_worker.h")
                .to_str()
                .unwrap(),
        )
        .header(
            worker_package_dir
                .join("include/improbable/c_schema.h")
                .to_str()
                .unwrap(),
        )
        .generate()
        .expect("Unable to generate bindings");

    let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
    bindings
        .write_to_file(out_path.join("bindings.rs"))
        .expect("Couldn't write bindings!");
}

/// Invokes `spatial worker_package unpack-to <type> <name> <dir>` inside
/// OUT_DIR, echoing its output and panicking if the command fails.
fn unpack_worker_package<P: AsRef<Path>>(package_type: &str, package_name: &str, directory: P) {
    let output = Command::new("spatial")
        .current_dir(env::var("OUT_DIR").unwrap())
        .args(&[
            "worker_package",
            "unpack-to",
            package_type,
            package_name,
            directory.as_ref().to_str().unwrap(),
        ])
        .output()
        .expect("failed to execute process");

    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    println!("{}", stdout);
    println!("{}", stderr);

    if !output.status.success() {
        panic!(
            "spatial worker_package unpack-to failed: {} {}",
            stdout, stderr
        );
    }
}
//! # Store
//!
//! A storage trait for Tendermock `Node`s.
//!
//! For now the only available storage is the `InMemoryStore`.
//! As its name implies, this resides in volatile memory. However, implementations of
//! persistent storage are possible without impacting the rest of the code base as it only relies
//! on the `Storage` trait, which may be implemented for new kinds of storage in the future.
//!
//! A storage has two jobs:
//! - persist the state of committed blocks, via the `grow` API.
//! - update the state of the pending block and access the state for any block,
//!   via a `get` and `set` API.
//!
//! A storage has two kinds of `Location`s:
//! 1. a pending location, which represents the current block being processed, but not yet
//!    committed;
//! 2. a stable location, which is versioned by height.
pub use memory::Memory;

mod memory;

/// Defines a location in a `Storage`.
#[derive(Clone, Copy, Debug)]
#[allow(dead_code)]
pub enum Location {
    /// Represents the pending location.
    /// This is the location being manipulated by the `set` method.
    Pending,
    /// Represents the location in the stable storage, for the last block.
    LatestStable,
    /// Represents the location in the stable storage, for an arbitrary block.
    Stable(u64),
}

/// A raw `(path, value)` pair, as returned by prefix queries on a `Storage`.
pub struct PathValue {
    /// Raw key bytes.
    pub path: Vec<u8>,
    /// Raw value bytes stored under `path`.
    pub value: Vec<u8>,
}

/// A concurrent storage for on-chain data, using interior mutability.
/// (All methods take `&self`; implementations are responsible for their own
/// internal synchronization.)
pub trait Storage: std::fmt::Debug {
    /// Set a value in the store at the `Pending` location.
    /// The storage starts up by having height 0 committed (or `Stable`); consequently the mutable
    /// `Pending` height in the beginning is 1.
    fn set(&self, path: Vec<u8>, value: Vec<u8>);

    /// Access the value at a given path and location.
    /// Returns `None` if nothing found.
    fn get(&self, loc: Location, path: &[u8]) -> Option<Vec<u8>>;

    /// Access the value(s) at a given path prefix and location.
    /// Returns an empty `Vec` if nothing matches the prefix.
    fn get_by_prefix(&self, loc: Location, prefix: &[u8]) -> Vec<PathValue>;

    /// Freeze the pending store by adding it to the committed chain, and create a new pending.
    fn grow(&self);
}
// ====================================================
// Netlyser Copyright(C) 2019 Furkan Türkal
// This program comes with ABSOLUTELY NO WARRANTY; This is free software,
// and you are welcome to redistribute it under certain conditions; See
// file LICENSE, which is part of this source code package, for details.
// ====================================================

// Host bookkeeping: diffs consecutive network scans, fires desktop
// notifications for connect/disconnect events, and logs them to SQLite.

use std::{
    collections::HashMap,
    net::{Ipv4Addr},
};

use pnet::util::{MacAddr};

use crate::net;
use crate::config;

use rusqlite::types::ToSql;
use rusqlite::{Connection, Result, NO_PARAMS};

use notify_rust::Notification;

use chrono::prelude::*;

/// A host paired with its connect/disconnect transition flags.
#[derive(Debug, Clone)]
pub struct NotifyInfo {
    pub host: Host,
    pub connected: bool,
    pub disconnected: bool,
}

/// A discovered network host. `PartialEq` compares all four fields, which is
/// what the diffing in `get_notifies` relies on.
#[derive(PartialEq, Debug, Clone)]
pub struct Host {
    pub ip: Ipv4Addr,
    pub mac: MacAddr,
    pub name: String,
    pub device_name: String,
}

impl Host {
    /// Creates a placeholder host (0.0.0.0, zero MAC, empty names).
    pub fn new() -> Host {
        Host {
            ip: Ipv4Addr::UNSPECIFIED,
            mac: MacAddr::zero(),
            name: String::new(),
            device_name: String::new(),
        }
    }
    pub fn set_ip(&mut self, ip: Ipv4Addr) {
        self.ip = ip
    }
    pub fn set_mac(&mut self, mac: MacAddr) {
        self.mac = mac;
    }
    /// Copies name/device name from the configured host info, substituting
    /// "Unknown" for blank entries.
    pub fn set_info(&mut self, info: &config::HostInfo) {
        if info.name.to_string().trim().is_empty() {
            self.name = "Unknown".to_string();
        } else {
            self.name = info.name.to_string();
        }
        if info.device_name.to_string().trim().is_empty() {
            self.device_name = "Unknown".to_string();
        } else {
            self.device_name = info.device_name.to_string();
        }
    }
    pub fn set_name(&mut self, name: String) {
        self.name = name;
    }
    pub fn set_device_name(&mut self, name: String) {
        self.device_name = name;
    }
}

/// Diffs the previous (`olds`) and current (`news`) host lists and dispatches
/// connect/disconnect handlers for hosts that appeared or vanished.
///
/// NOTE(review): the `olds.len() != news.len()` guard suppresses all
/// notifications when the same number of hosts connected and disconnected in
/// one scan (lengths equal but contents changed) — confirm this is intended;
/// `change` alone already tracks whether anything differed.
pub fn get_notifies(olds: &Vec<Host>, news: &Vec<Host>, conf: &config::Config, db: &String) {
    let mut rmvs: Vec<Host> = vec![];
    let mut adds: Vec<Host> = vec![];
    let mut change: bool = false;
    // Hosts present before but missing now -> disconnected.
    for old in olds.clone() {
        if !news.contains(&old) {
            rmvs.push(old);
            change = true;
        }
    }
    // Hosts present now but not before -> connected.
    for nev in news.clone() {
        if !olds.contains(&nev) {
            adds.push(nev);
            change = true;
        }
    }
    if change && (olds.len() != news.len()) {
        if rmvs.len() > 0 {
            on_hosts_disconnected(&db, rmvs, &conf);
        }
        if adds.len() > 0 {
            on_hosts_connected(&db, adds, &conf);
        }
    }
}

/// Notifies (if enabled and not running as root) and logs each newly
/// connected host. DB failures are logged, not propagated.
pub fn on_hosts_connected(db: &String, hosts: Vec<Host>, conf: &config::Config) {
    for h in hosts {
        if !conf.is_root && conf.general.notify_on_connect {
            notify(&h, true);
        }
        match add_to_db(&db, &h, true) {
            Ok(v) => {
                info!("[db::on_hosts_connected()]: 'add_to_db()' success: {:?}", v);
            }
            Err(e) => {
                // NOTE(review): "throwed" in this log string is a typo for
                // "thrown"; left untouched here since it is runtime output.
                warn!("[db::on_hosts_connected()]: error throwed when running 'add_to_db()' function. Err: {}, ", e);
            }
        }
    }
}

/// Notifies (if enabled and not running as root) and logs each disconnected
/// host. Mirrors `on_hosts_connected` with the opposite event type.
pub fn on_hosts_disconnected(db: &String, hosts: Vec<Host>, conf: &config::Config) {
    for h in hosts {
        if !conf.is_root && conf.general.notify_on_disconnect {
            notify(&h, false);
        }
        match add_to_db(&db, &h, false) {
            Ok(v) => {
                info!("[db::on_hosts_disconnected()]: 'add_to_db()' success: {:?}", v);
            }
            Err(e) => {
                warn!("[db::on_hosts_disconnected()]: error throwed when running 'add_to_db()' function. Err: {}, ", e);
            }
        }
    }
}

/// Shows a desktop notification for `host`; `con_or_dis` selects the
/// CONNECT vs DISCONNECT summary. Panics if the notification daemon is
/// unreachable (`.show().unwrap()`).
pub fn notify(host: &Host, con_or_dis: bool){
    let not: String = format!("Name: {}\nDevice: {}", host.name, host.device_name);
    if con_or_dis {
        Notification::new()
            .appname("Netlyser")
            .summary("CONNECT!")
            .body(&not)
            .timeout(5000)
            .show().unwrap();
    } else {
        Notification::new()
            .appname("Netlyser")
            .summary("DISCONNECT!")
            .body(&not)
            .timeout(5000)
            .show().unwrap();
    }
}

/// Appends a connect/disconnect row for `host` to the `logs` table,
/// creating the table on first use. Opens a fresh connection per call.
pub fn add_to_db(db: &String, host: &Host, con_or_dis: bool) -> Result<()>{
    let conn = Connection::open(db.to_string())?;

    let log_type = if con_or_dis { "connect".to_string() } else { "disconnect".to_string() };

    // `execute` returns the number of rows changed; CREATE TABLE affects 0.
    let res = conn.execute(
        "CREATE TABLE IF NOT EXISTS logs (
            log_id          INTEGER PRIMARY KEY AUTOINCREMENT
            ,log_name       TEXT NOT NULL
            ,log_device     TEXT NOT NULL
            ,log_ip         TEXT NOT NULL
            ,log_mac        TEXT NOT NULL
            ,log_type       TEXT NOT NULL
            ,log_time       INTEGER NOT NULL
        )",
        NO_PARAMS,
    )?;

    if res != 0 {
        warn!("[db::add_to_db()]: Unable to add to db. Code: {}, ", res);
    }

    info!("[db::add_to_db()]: create function exited with: {:?}", res);

    // Timestamp is seconds since epoch in local time.
    let exec = conn.execute(
        "INSERT INTO logs (log_name, log_device, log_ip, log_mac, log_type, log_time) VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
        &[&host.name as &ToSql, &host.device_name as &ToSql, &host.ip.to_string() as &ToSql, &host.mac.to_string() as &ToSql, &log_type, &Local::now().timestamp() as &ToSql],
    )?;

    if exec != 1 {
        warn!("[db::add_to_db()]: Unable to execute command. Code: {}, ", exec);
    }

    info!("[db::add_to_db()]: execute function exited with: {:?}", exec);

    Ok(())
}

/// Converts raw scan results into `Host` records: resolves MACs from the ARP
/// table, attaches configured names, skips this machine, and tags the
/// gateway. Hosts whose MAC cannot be resolved are still pushed (with the
/// zero placeholder MAC) after a console warning.
pub fn migrate_to_host_list(macmap: &HashMap<MacAddr, config::HostInfo>, gw: &net::Gateway, result: Vec<net::Host>, arps: HashMap<Ipv4Addr, MacAddr>, pc: Ipv4Addr) -> Vec<Host> {
    let mut hosts: Vec<Host> = vec![];

    info!("[db::migrate_to_host_list()]: migrate len: {:?}", result.len());

    for host in result {
        // NOTE(review): the Option wrapper is always Some, so the match can
        // never hit `continue`; a plain `let mut h = Host::new();` would do.
        let mut current: Option<Host> = Some(Host::new());
        let h = match &mut current {
            Some(x) => x,
            None => continue,
        };

        h.set_ip(host.address[0].addr.parse().unwrap());

        // Skip our own address.
        if h.ip.eq(&pc) {
            continue;
        }

        // Tag the gateway specially and move on.
        if h.ip.eq(&gw.ip){
            h.set_name("GATEWAY".to_string());
            h.set_device_name("GATEWAY".to_string());
            hosts.push(h.clone());
            continue;
        }

        match arps.get(&h.ip) {
            Some(&mac) => {
                h.set_mac(mac);
                match macmap.get(&mac) {
                    Some(n) => h.set_info(n),
                    None => {
                        h.set_name("Unknown".to_string());
                        h.set_device_name("Unknown".to_string())
                    }
                }
            }
            _ => println!("Can't find MAC for IP: {}", h.ip),
        }

        hosts.push(h.clone());
    }
    hosts
}
use std::env; use std::fs; use regex::{Regex}; fn main(){ let args : Vec<String> = env::args().collect(); if args.len() < 2 { return; } let lines = fs::read_to_string(&args[1]).unwrap(); const H : usize = 6; const W : usize = 50; let mut state = [[false; W]; H]; let re_rect = Regex::new(r"^rect (\d+)x(\d+)").unwrap(); let re_rrow = Regex::new(r"^rotate row y=(\d+) by (\d+)").unwrap(); let re_rcol = Regex::new(r"^rotate column x=(\d+) by (\d+)").unwrap(); for line in lines.split('\n') { if line == "" { continue; } match re_rect.captures(line) { None => {}, Some(i) => { let x : i32 = i.get(1).unwrap().as_str().parse().unwrap(); let y : i32 = i.get(2).unwrap().as_str().parse().unwrap(); for py in 0..y as usize { for px in 0..x as usize { state[py][px] = true; } } } } match re_rrow.captures(line) { None => {}, Some(i) => { let y : usize = i.get(1).unwrap().as_str().parse().unwrap(); let r : usize = i.get(2).unwrap().as_str().parse().unwrap(); for _ in 0..r { let tmp = state[y][W-1]; for x in (1..W as usize).rev() { state[y][x] = state[y][x-1]; } state[y][0] = tmp; } } } match re_rcol.captures(line) { None => {}, Some(i) => { let x : usize = i.get(1).unwrap().as_str().parse().unwrap(); let r : usize = i.get(2).unwrap().as_str().parse().unwrap(); for _ in 0..r { let tmp = state[H-1][x]; for y in (1..H as usize).rev() { state[y][x] = state[y-1][x]; } state[0][x] = tmp; } } } } let mut lit_count = 0; for y in 0..H as usize { for x in 0..W as usize { if state[y][x] { lit_count += 1; } } } println!("{}", lit_count); for y in 0..H as usize { for x in 0..W as usize { match state[y][x] { true => { print!("#") }, false => { print!(" ") } } } println!(); } }
//! Integration tests for the raw IPASIR C bindings (`ipasir-sys`), linked
//! here against CaDiCaL. Every call crosses an FFI boundary, hence the
//! pervasive `unsafe`; each module exercises exactly one IPASIR entry point.
//! IPASIR conventions: clauses are literal sequences terminated by 0,
//! `ipasir_solve` returns 10 for SAT and 20 for UNSAT.

use ipasir_sys::*;
use std::ffi::{CStr, c_void};

mod ipasir_signature {
    use super::*;

    #[test]
    fn it_returns_the_name_and_version_of_the_sat_solver() {
        // The solver identifies itself as a NUL-terminated C string.
        let c_buffer = unsafe { ipasir_signature() };
        let c_string = unsafe { CStr::from_ptr(c_buffer) };
        let signature = c_string.to_str().unwrap();

        // Pinned to the bundled solver build.
        assert_eq!(signature, "cadical-1.3.1");
    }
}

mod ipasir_init {
    use super::*;

    #[test]
    fn it_constructs_a_new_solver_and_returns_a_pointer_to_it() {
        // The solver handle is an opaque pointer threaded through all calls.
        let _solver: *mut c_void = unsafe { ipasir_init() };
    }
}

mod ipasir_release {
    use super::*;

    #[test]
    fn it_releases_the_solver_and_all_its_resources() {
        let solver = unsafe { ipasir_init() };
        unsafe { ipasir_release(solver) };
    }
}

mod ipasir_add {
    use super::*;

    #[test]
    fn it_adds_a_literal_to_the_current_clause_and_finalises_with_zero() {
        let solver = unsafe { ipasir_init() };

        // Builds the clause (1 OR NOT 2); the trailing 0 closes it.
        unsafe { ipasir_add(solver, 1) };
        unsafe { ipasir_add(solver, -2) };
        unsafe { ipasir_add(solver, 0) };
    }
}

mod ipasir_assume {
    use super::*;

    #[test]
    fn it_adds_an_assumption_for_the_next_sat_search() {
        let solver = unsafe { ipasir_init() };
        unsafe { ipasir_assume(solver, 1) };
    }
}

mod ipasir_solve {
    use super::*;

    #[test]
    fn it_solves_the_formula_and_returns_10_for_sat_and_20_for_unsat() {
        let solver = unsafe { ipasir_init() };

        // Unit clause (1): trivially satisfiable.
        unsafe { ipasir_add(solver, 1) };
        unsafe { ipasir_add(solver, 0) };

        let sat_status = unsafe { ipasir_solve(solver) };
        assert_eq!(sat_status, 10);

        // Adding the contradictory unit clause (-1) makes it unsatisfiable.
        unsafe { ipasir_add(solver, -1) };
        unsafe { ipasir_add(solver, 0) };

        let unsat_status = unsafe { ipasir_solve(solver) };
        assert_eq!(unsat_status, 20);
    }

    // TODO interrupted
}

mod ipasir_val {
    use super::*;

    #[test]
    fn it_gets_the_truth_value_of_a_literal_in_the_satisfying_assigment() {
        let solver = unsafe { ipasir_init() };

        // Force 1 true and 2 false via unit clauses.
        unsafe { ipasir_add(solver, 1) };
        unsafe { ipasir_add(solver, 0) };
        unsafe { ipasir_add(solver, -2) };
        unsafe { ipasir_add(solver, 0) };

        unsafe { ipasir_solve(solver) };

        // `ipasir_val(v)` returns v if true, -v if false in the model.
        let true_literal = unsafe { ipasir_val(solver, 1) };
        let false_literal = unsafe { ipasir_val(solver, 2) };
        assert_eq!(true_literal, 1);
        assert_eq!(false_literal, -2);
    }
}

mod ipasir_failed {
    use super::*;

    #[test]
    fn it_returns_1_if_the_assumption_caused_the_formula_to_be_unsat() {
        let solver = unsafe { ipasir_init() };

        unsafe { ipasir_add(solver, 1) };
        unsafe { ipasir_add(solver, 0) };
        unsafe { ipasir_assume(solver, -1) };

        let unsat_status = unsafe { ipasir_solve(solver) };
        assert_eq!(unsat_status, 20);

        let caused_unsat = unsafe { ipasir_failed(solver, -1) };
        assert_eq!(caused_unsat, 1);
    }

    #[test]
    fn it_returns_0_if_the_assumption_did_not_cause_the_formula_to_be_unsat() {
        let solver = unsafe { ipasir_init() };

        // The formula (1) AND (-1) is already UNSAT without the assumption.
        unsafe { ipasir_add(solver, 1) };
        unsafe { ipasir_add(solver, 0) };
        unsafe { ipasir_add(solver, -1) };
        unsafe { ipasir_add(solver, 0) };
        unsafe { ipasir_assume(solver, 2) };

        let unsat_status = unsafe { ipasir_solve(solver) };
        assert_eq!(unsat_status, 20);

        let caused_unsat = unsafe { ipasir_failed(solver, 2) };
        assert_eq!(caused_unsat, 0);
    }

    #[test]
    fn it_returns_0_if_the_polarity_of_the_literal_does_not_match_the_assumption() {
        // This case is slightly ambiguous
        // See: https://github.com/biotomas/ipasir/issues/9
        let solver = unsafe { ipasir_init() };

        unsafe { ipasir_add(solver, 1) };
        unsafe { ipasir_add(solver, 0) };
        unsafe { ipasir_assume(solver, -1) };

        let unsat_status = unsafe { ipasir_solve(solver) };
        assert_eq!(unsat_status, 20);

        let caused_unsat = unsafe { ipasir_failed(solver, 1) }; // not -1
        assert_eq!(caused_unsat, 0);
    }
}

mod ipasir_set_terminate {
    use super::*;

    #[test]
    fn it_sets_a_callback_that_determines_whether_the_solver_should_terminate() {
        let solver = unsafe { ipasir_init() };

        // This can be anything and is passed through to the callback.
        // It is nothing to do with the state machine in the IPASIR spec.
        let state = 123 as *mut c_void;

        unsafe { ipasir_set_terminate(solver, state, Some(callback)) };
    }

    // Returning 0 tells the solver to keep running.
    extern "C" fn callback(_state: *mut c_void) -> i32 {
        0
    }
}

// Cadical doesn't support this IPASIR function so disable the test.
#[cfg(feature = "ipasir_set_learn")] mod ipasir_set_learn { use super::*; #[test] fn it_sets_a_callback_that_receives_learned_clauses_up_to_a_given_length() { let solver = unsafe { ipasir_init() }; let state = 123 as *mut c_void; let max_length = 3; unsafe { ipasir_set_learn(solver, state, max_length, Some(callback)) }; } extern "C" fn callback(_state: *mut c_void, _clause: *mut i32) { } }
#[doc = "Register `GICD_ITARGETSR7` reader"] pub type R = crate::R<GICD_ITARGETSR7_SPEC>; #[doc = "Field `CPU_TARGETS0` reader - CPU_TARGETS0"] pub type CPU_TARGETS0_R = crate::FieldReader; #[doc = "Field `CPU_TARGETS1` reader - CPU_TARGETS1"] pub type CPU_TARGETS1_R = crate::FieldReader; #[doc = "Field `CPU_TARGETS2` reader - CPU_TARGETS2"] pub type CPU_TARGETS2_R = crate::FieldReader; #[doc = "Field `CPU_TARGETS3` reader - CPU_TARGETS3"] pub type CPU_TARGETS3_R = crate::FieldReader; impl R { #[doc = "Bits 0:1 - CPU_TARGETS0"] #[inline(always)] pub fn cpu_targets0(&self) -> CPU_TARGETS0_R { CPU_TARGETS0_R::new((self.bits & 3) as u8) } #[doc = "Bits 8:9 - CPU_TARGETS1"] #[inline(always)] pub fn cpu_targets1(&self) -> CPU_TARGETS1_R { CPU_TARGETS1_R::new(((self.bits >> 8) & 3) as u8) } #[doc = "Bits 16:17 - CPU_TARGETS2"] #[inline(always)] pub fn cpu_targets2(&self) -> CPU_TARGETS2_R { CPU_TARGETS2_R::new(((self.bits >> 16) & 3) as u8) } #[doc = "Bits 24:25 - CPU_TARGETS3"] #[inline(always)] pub fn cpu_targets3(&self) -> CPU_TARGETS3_R { CPU_TARGETS3_R::new(((self.bits >> 24) & 3) as u8) } } #[doc = "For existing SGIs and PPIs, read of CPU targets field returns the number of the processor performing the read.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gicd_itargetsr7::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct GICD_ITARGETSR7_SPEC; impl crate::RegisterSpec for GICD_ITARGETSR7_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`gicd_itargetsr7::R`](R) reader structure"] impl crate::Readable for GICD_ITARGETSR7_SPEC {} #[doc = "`reset()` method sets GICD_ITARGETSR7 to value 0"] impl crate::Resettable for GICD_ITARGETSR7_SPEC { const RESET_VALUE: Self::Ux = 0; }
use std::sync::mpsc; use std::sync::{Arc, Barrier}; use std::thread; use crossbeam_channel; fn main() { let barrier = Arc::new(Barrier::new(3)); let (snd, rcv) = mpsc::channel::<i32>(); { let b = barrier.clone(); thread::spawn(move || { b.wait(); snd.send(4); println!("{}", "a"); }); } { let b = barrier.clone(); thread::spawn(move || { b.wait(); rcv.recv().unwrap(); println!("{}", "b"); }); } barrier.wait(); println!("{}", "c"); }
extern crate hello; use hello::ThreadPool; use std::fs; use std::io::prelude::*; use std::net::TcpStream; use std::net::TcpListener; use std::thread; use std::time::Duration; /* Cargo Process- cargo new --bin hello cd hello/ -> both html files in root -> src/lib.rs -> src/bin/main.rs cargo run -> open window go to http://127.0.0.1:7878/ */ fn main() { //listen for TCP coneections let listener = TcpListener::bind("127.0.0.1:7878").unwrap(); let pool = ThreadPool::new(4); for stream in listener.incoming().take(2) { //terminate if stream has any errors let stream = stream.unwrap(); //println!("Connection established!"); pool.execute(|| { handle_connection(stream); }); } println!("Shutting down."); } //read data from the TCP stream and print it so we can see the data being sent from the browser fn handle_connection(mut stream: TcpStream) { let mut buffer = [0; 512]; //read bytes from the TCPStream and put them in the buffer stream.read(&mut buffer).unwrap(); let get = b"GET / HTTP/1.1\r\n"; let sleep = b"GET /sleep HTTP/1.1\r\n"; let (status_line, filename) = if buffer.starts_with(get) { ("HTTP/1.1 200 OK\r\n\r\n", "hello.html") } else if buffer.starts_with(sleep) { thread::sleep(Duration::from_secs(5)); ("HTTP/1.1 200 OK\r\n\r\n", "hello.html") } else { ("HTTP/1.1 404 NOT FOUND\r\n\r\n", "404.html") }; let contents = fs::read_to_string(filename).unwrap(); //holds the success message's data let response = format!("{}{}", status_line, contents); //as_bytes() - convert the string data to bytes //write - takes a &[u8] and sends those bytes directly down the connection. stream.write(response.as_bytes()).unwrap(); stream.flush().unwrap(); }
//! Helper methods for composing widget layouts.

use crate::geometry::{Bounds, BoundsMut, VAlign};

/// Bounds extension for placing widgets relative to others.
///
/// Every method mutates only this widget's position (never its size or the
/// other widget) and returns `&mut Self` so placements can be chained.
/// Widths/heights are `u32` and positions `i32`, hence the `as i32` casts.
pub trait Layout: BoundsMut {
    /// Places this widget to the left of `other`, separated by `spacing`.
    fn left_of<B: Bounds>(&mut self, other: &B, spacing: u32) -> &mut Self {
        let pos = self
            .get_position()
            .with_x(other.get_position().x - (self.get_size().w + spacing) as i32);
        self.set_position(pos);
        self
    }

    /// Places this widget to the right of `other`, separated by `spacing`.
    fn right_of<B: Bounds>(&mut self, other: &B, spacing: u32) -> &mut Self {
        let pos = self
            .get_position()
            .with_x(other.get_position().x + (other.get_size().w + spacing) as i32);
        self.set_position(pos);
        self
    }

    /// Places this widget above `other`, separated by `spacing`.
    fn above<B: Bounds>(&mut self, other: &B, spacing: u32) -> &mut Self {
        let pos = self
            .get_position()
            .with_y(other.get_position().y - (self.get_size().h + spacing) as i32);
        self.set_position(pos);
        self
    }

    /// Places this widget below `other`, separated by `spacing`.
    fn below<B: Bounds>(&mut self, other: &B, spacing: u32) -> &mut Self {
        let pos = self
            .get_position()
            .with_y(other.get_position().y + (other.get_size().h + spacing) as i32);
        self.set_position(pos);
        self
    }

    /// Aligns this widget's left edge with `other`'s left edge (+ `offset`).
    fn align_left<B: Bounds>(&mut self, other: &B, offset: i32) -> &mut Self {
        let pos = self.get_position().with_x(other.get_position().x + offset);
        self.set_position(pos);
        self
    }

    /// Aligns this widget's right edge with `other`'s right edge (+ `offset`).
    fn align_right<B: Bounds>(&mut self, other: &B, offset: i32) -> &mut Self {
        let pos = self
            .get_position()
            .with_x(other.get_position().x + other.get_size().w as i32 - self.get_size().w as i32 + offset);
        self.set_position(pos);
        self
    }

    /// Aligns this widget's top edge with `other`'s top edge (+ `offset`).
    fn align_top<B: Bounds>(&mut self, other: &B, offset: i32) -> &mut Self {
        let pos = self.get_position().with_y(other.get_position().y + offset);
        self.set_position(pos);
        self
    }

    /// Aligns this widget's bottom edge with `other`'s bottom edge (+ `offset`).
    fn align_bottom<B: Bounds>(&mut self, other: &B, offset: i32) -> &mut Self {
        let pos = self
            .get_position()
            .with_y(other.get_position().y + other.get_size().h as i32 - self.get_size().h as i32 + offset);
        self.set_position(pos);
        self
    }

    /// Centers this widget horizontally within `other` (+ `offset`).
    fn align_hcenter<B: Bounds>(&mut self, other: &B, offset: i32) -> &mut Self {
        let pos = self
            .get_position()
            .with_x(other.get_position().x + (other.get_size().w as i32 - self.get_size().w as i32) / 2 + offset);
        self.set_position(pos);
        self
    }

    /// Centers this widget vertically within `other` (+ `offset`).
    fn align_vcenter<B: Bounds>(&mut self, other: &B, offset: i32) -> &mut Self {
        let pos = self
            .get_position()
            .with_y(other.get_position().y + (other.get_size().h as i32 - self.get_size().h as i32) / 2 + offset);
        self.set_position(pos);
        self
    }

    /// Fractional horizontal alignment: `val` 0.0 = left-aligned,
    /// 0.5 = centered, 1.0 = right-aligned within `other` (+ `offset`).
    fn align_hf<B: Bounds>(&mut self, other: &B, val: f32, offset: i32) -> &mut Self {
        let dx = (other.get_size().w as i32 - self.get_size().w as i32) as f32 * val;
        let pos = self.get_position().with_x(other.get_position().x + dx as i32 + offset);
        self.set_position(pos);
        self
    }

    /// Fractional vertical alignment: `val` 0.0 = top, 0.5 = centered,
    /// 1.0 = bottom within `other` (+ `offset`).
    fn align_vf<B: Bounds>(&mut self, other: &B, val: f32, offset: i32) -> &mut Self {
        let dy = (other.get_size().h as i32 - self.get_size().h as i32) as f32 * val;
        let pos = self.get_position().with_y(other.get_position().y + dy as i32 + offset);
        self.set_position(pos);
        self
    }

    /// Centers this widget both horizontally and vertically inside `other`.
    fn center_inside<B: Bounds>(&mut self, other: &B) -> &mut Self {
        let b = other.get_bounds();
        self.set_position(b.pos + (b.size.as_point() - self.get_size().as_point()) / 2);
        self
    }

    /// Translates this widget by (`dx`, `dy`).
    fn offset(&mut self, dx: i32, dy: i32) -> &mut Self {
        self.set_position(self.get_position().offset(dx, dy));
        self
    }
}

// Every mutable-bounds type gets the layout helpers for free.
impl<T: BoundsMut> Layout for T {}

/// Runs a layout expression for a collection of widgets.
///
/// For each widget after the first, `f` is called as
/// `f(current, previous, first)`, and its return value becomes the
/// "previous" widget for the next iteration.
pub fn foreach<'a, T: Bounds + 'a, F>(items: impl IntoIterator<Item = &'a mut T>, mut f: F)
where
    F: FnMut(&'a mut T, &T, &T) -> &'a T,
{
    let mut iter = items.into_iter();
    if let Some(first) = iter.next() {
        // Reborrow immutably so `first` can be handed to `f` on every step.
        let first = &*first;
        iter.fold(first, |prev, item| f(item, &prev, first));
    }
}

/// Places a collection of widgets horizontally in sequence, starting a new row if necessary.
pub fn flow_horiz<'a, T, I>(items: I, valign: VAlign, max_width: u32, hspacing: u32, vspacing: u32) where T: BoundsMut + 'a, I: IntoIterator<Item = &'a mut T>, { let align_val = match valign { VAlign::Top => 0.0, VAlign::Center => 0.5, VAlign::Bottom => 1.0, }; let mut row_items = vec![]; let mut iter = items.into_iter(); if let Some(first_) = iter.next() { let mut first = first_.get_bounds(); row_items.push(first_); let mut prev = first; let mut row = first; for item in iter { // if we exceeded the max_width, then place this widget on a new row if row.size.w + item.get_size().w + hspacing > max_width { // check if we're overlapping the previous row let offset = first.pos.y - row.pos.y; if offset > 0 { row.pos.y += offset; // displace the previous widgets to the fixed row position for w in &mut row_items { w.offset(0, offset); } } // place this widget below the current row item.below(&row, vspacing).align_left(&first, 0); // start the next row first = item.get_bounds(); row_items.clear(); row_items.push(item); prev = first; row = first; } else { // place this widget next to the previous one item.right_of(&prev, hspacing).align_vf(&prev, align_val, 0); // expand the current row with this widget's bounds let item_bounds = item.get_bounds(); row_items.push(item); prev = item_bounds; row = row.merge(item_bounds); } } // fix the last row's position if needed let offset = first.pos.y - row.pos.y; if offset > 0 { for w in &mut row_items { w.offset(0, offset); } } } }
//给定 n 个非负整数 a1,a2,...,an,每个数代表坐标中的一个点 (i, ai) 。在坐标内画 n 条垂直线,垂直线 i 的两个端点分别为 (i, ai) 和 (i, 0)。找出其中的两条线,使得它们与 x 轴共同构成的容器可以容纳最多的水。 // //说明:你不能倾斜容器,且 n 的值至少为 2。 // // // //图中垂直线代表输入数组 [1,8,6,2,5,4,8,3,7]。在此情况下,容器能够容纳水(表示为蓝色部分)的最大值为 49。 // //  // //示例: // //输入: [1,8,6,2,5,4,8,3,7] //输出: 49 use std::cmp::min; fn abs_i32(mut num: i32) -> i32 { if num < 0 { num = -num; } num } fn max_area(num_vec: Vec<i32>) -> i32 { let mut max_ar = 0; let mut mid_num: Vec<(i32, i32)> = vec![]; for i in 0..num_vec.len() { mid_num.push((num_vec[i], i as i32)); } mid_num.sort_by(|(v1, _r1), (v2, _r2)| v2.cmp(v1)); let mut idx = 1; for (v, r) in mid_num.iter() { for i in idx..mid_num.len() { let cur_area = min(*v, mid_num[i].0) * abs_i32(r - mid_num[i].1); if cur_area > max_ar { max_ar = cur_area; } } idx += 1; } max_ar } fn main() { let test_array = vec![1,8,6,2,5,4,8,3,7]; println!("{}", max_area(test_array)); }
use crate::contract::Contract;
use crate::deck::Deck;
use crate::errors::TarotErrorKind;
use crate::game::Game;
use crate::game_started::GameStarted;
use crate::options::Options;
use crate::player::Player;
use crate::player_in_game::PlayerInGame;
use crate::role::Role;
use crate::team::Team;
use itertools::{Either, Itertools};
use std::fmt;
use strum::IntoEnumIterator;

/// A tarot game whose cards have been dealt: holds the dog (talon) and the
/// per-hand player state, borrowing the underlying `Game` mutably.
/// `MODE` is the number of players (const generic).
pub struct GameDistributed<'a, const MODE: usize> {
    game: &'a mut Game<MODE>,
    options: Options,
    /// The dog (talon) set aside during the deal.
    dog: Deck,
    /// Per-deal state (hand, team, role, ...) parallel to `game`'s players.
    players_in_game: [PlayerInGame; MODE],
}

impl<const MODE: usize> fmt::Display for GameDistributed<'_, MODE> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "Game {} with dog {}", self.game.mode(), self.dog)
    }
}

impl<'a, const MODE: usize> GameDistributed<'a, MODE> {
    /// Wraps an already-dealt game with its dog and per-player deal state.
    pub fn new(
        game: &'a mut Game<MODE>,
        dog: Deck,
        players_in_game: [PlayerInGame; MODE],
        options: Options,
    ) -> Self {
        Self { game, options, dog, players_in_game }
    }

    /// Mutable access to the underlying game.
    pub fn game(&mut self) -> &mut Game<MODE> {
        self.game
    }

    /// Splits borrows so callers can read players while mutating deal state.
    pub fn players_and_their_game_mut(&mut self) -> (&[Player; MODE], &mut [PlayerInGame; MODE]) {
        (self.game.players(), &mut self.players_in_game)
    }

    #[must_use]
    pub fn player(&self, index: usize) -> &Player {
        self.game.player(index)
    }

    #[must_use]
    pub fn player_and_his_game(&self, index: usize) -> (&Player, &PlayerInGame) {
        (self.game.player(index), &self.players_in_game[index])
    }

    pub fn player_and_his_game_mut(&mut self, index: usize) -> (&Player, &mut PlayerInGame) {
        (self.game.player(index), &mut self.players_in_game[index])
    }

    /// True once every player has played their final turn.
    pub fn finished(&self) -> bool {
        self.players_in_game.iter().all(PlayerInGame::last_turn)
    }

    /// Rotates both the deal state and the game's seating so the player at
    /// `index` becomes first.
    pub fn rotate_at(&mut self, index: usize) {
        self.players_in_game.rotate_left(index);
        self.game.rotate_at(index);
    }

    /// Runs the bidding round and the dog/discard phase.
    ///
    /// Returns `Ok(None)` when nobody bids (the deal is abandoned), otherwise
    /// the started game with the final taker and contract.
    pub fn bidding_and_discard(
        &'a mut self,
    ) -> Result<Option<GameStarted<'a, MODE>>, TarotErrorKind> {
        // Contracts still available; shrinks as players overbid each other.
        let mut contracts: Vec<Contract> = Contract::iter().collect();
        let mut slam_index: Option<usize> = None;
        let mut taker_index: Option<usize> = None;
        let mut contract: Option<Contract> = None;

        // Bidding: each player may pick a contract among those still allowed.
        for (current_player_index, current_player_in_game) in self.players_in_game.iter_mut().enumerate() {
            let current_player = &self.game.player(current_player_index);
            let player_contract = current_player_in_game.choose_contract_among(current_player, &contracts);
            match (contract, player_contract) {
                // Player passed: nothing changes.
                (None | Some(_), None) => {}
                // Player bid: they become the provisional taker.
                (None | Some(_), Some(player_contract)) => {
                    taker_index = Some(current_player_index);
                    if !self.options.quiet {
                        println!(
                            "Player {} has chosen contract {player_contract}",
                            current_player.name()
                        );
                    }
                    // Later bidders must strictly outbid this contract.
                    contracts.retain(|other_contract| {
                        other_contract.multiplier() > player_contract.multiplier()
                    });
                    if current_player_in_game.announce_slam()? {
                        if !self.options.quiet {
                            println!("Player {current_player} announced a slam");
                        }
                        slam_index = Some(current_player_index);
                    }
                    contract = Some(player_contract);
                }
            };
        }
        // Everyone passed: no game this deal.
        let Some(contract) = contract else {
            return Ok(None);
        };
        let Some(taker_index) = taker_index else {
            return Ok(None);
        };

        // RULE: player who slammed must start
        // NOTE(review): `rotate_at` reorders `players_in_game`, but
        // `taker_index` was computed before the rotation and is used to index
        // the array afterwards — verify the index is still the taker when a
        // slam is announced by a player other than the taker.
        if let Some(slammer) = slam_index {
            if !self.options.quiet {
                println!("Chelem announced so {slammer} must start.");
            }
            self.rotate_at(slammer);
        }

        // The taker calls a card; whoever holds it becomes their ally.
        let callee = self.players_in_game[taker_index].call();
        for (current_player_index, current_player) in self.players_in_game.iter_mut().enumerate() {
            current_player.set_callee(callee);
            // Default everyone to defense, then promote taker and ally.
            current_player.set_team(Team::Defense);
            current_player.set_role(Role::Defenser);
            if current_player_index == taker_index {
                current_player.set_team(Team::Attack);
                current_player.set_role(Role::Taker);
            } else if let Some(ref card) = callee {
                if current_player_index != taker_index && current_player.has(card) {
                    current_player.set_team(Team::Attack);
                    current_player.set_role(Role::Ally);
                }
            }
        }

        // Split seats into attack and defense indices.
        let (attackers, defensers): (Vec<_>, Vec<_>) = self
            .players_in_game
            .iter()
            .enumerate()
            .partition_map(|(i, player)| {
                if player.is_attack() {
                    Either::Left(i)
                } else {
                    Either::Right(i)
                }
            });

        // Dog handling depends on the contract; only the taker acts on it.
        for attacker_index in attackers {
            if !self.players_in_game[attacker_index].is_taker() {
                continue;
            }
            match contract {
                // GardeSans: dog scores for the attack, unseen.
                Contract::GardeSans => {
                    if !self.options.quiet {
                        println!("Attacker keeps dog because GardeSans");
                    }
                    self.players_in_game[attacker_index].set_discard(&self.dog);
                }
                // GardeContre: dog scores for the defense, unseen.
                Contract::GardeContre => {
                    if let Some(first_defenser_index) = defensers.first() {
                        if !self.options.quiet {
                            println!("Attacker gives dog to first defenser because GardeContre");
                        }
                        self.players_in_game[*first_defenser_index].set_discard(&self.dog);
                    }
                }
                // Petite/Garde: taker picks up the dog and discards.
                _ => {
                    if !self.options.quiet {
                        let taker_name = self.player(attacker_index).name();
                        println!("In the dog, there was : {}", self.dog);
                        println!("Taker {taker_name} received the dog");
                    }
                    self.players_in_game[attacker_index].extend_hand(&self.dog);
                    self.players_in_game[attacker_index].discard();
                }
            }
        }
        let game_started = GameStarted::new(self, taker_index, contract, self.options);
        Ok(Some(game_started))
    }
}
mod avx; mod camera; mod fallback; mod hit_record; mod hittable; mod material; mod object; mod object_list; mod ray; mod sphere; mod vec3; use camera::Camera; use material::{Dielectric, Lambertian, Metal}; use object_list::ObjectList; use rand::Rng; use ray::Ray; use sphere::Sphere; use vec3::{Color, Vec3}; fn ray_color(ray: &Ray, world: &ObjectList, depth: i32) -> Color { if depth <= 0 { return Vec3::new(0.0, 0.0, 0.0); } if let Some((obj, hr)) = world.hit(ray, 0.001, 1.0 / 0.0) { if let Some(sr) = obj.scatter(ray, &hr) { return sr.attenuation * ray_color(&sr.scattered_ray, world, depth - 1); } } let unit_direction = ray.direction.unit_vector(); let t = 0.5 * (unit_direction.y() + 1.0); // Normalize to [0.0, 1.0] let white = Vec3::new(1.0, 1.0, 1.0); let sky_blue = Vec3::new(0.5, 0.7, 1.0); return (1.0 - t) * white + t * sky_blue; } fn print_ppm_header(w: i32, h: i32) { println!("P3"); println!("{} {}", w, h); println!("{}", 255); } fn clamp(v: f64, min: f64, max: f64) -> f64 { if v < min { min } else if v > max { max } else { v } } fn print_ppm_pixel(color: &Color) { let (r, g, b) = avx::color::to_ppm_color(color); println!("{} {} {}", r, g, b) } fn main() { let aspect_ratio = 16.0 / 9.0; let w = 400; let h = ((w as f64) / aspect_ratio) as i32; let sphere1 = Sphere { center: Vec3::new(0.0, 0.0, -1.0), radius: 0.5, }; let sphere2 = Sphere { center: Vec3::new(0.0, -100.5, -1.0), radius: 100.0, }; let sphere_left = Sphere { center: Vec3::new(-1.0, 0.0, -1.0), radius: 0.5, }; let sphere_right = Sphere { center: Vec3::new(1.0, 0.0, -1.0), radius: 0.5, }; let material_ground = Lambertian { albedo: Vec3::new(0.8, 0.8, 0.0), }; let material = Lambertian { albedo: Vec3::new(0.7, 0.3, 0.3), }; let material_left = Dielectric { refraction_index: 1.5, }; let material_right = Metal { albedo: Vec3::new(0.8, 0.6, 0.2), fuzz: 0.0, }; let world = ObjectList { objects: vec![ Box::new(object::new(&sphere1, &material)), Box::new(object::new(&sphere2, &material_ground)), 
Box::new(object::new(&sphere_left, &material_left)), Box::new(object::new(&sphere_right, &material_right)), ], }; let look_from = Vec3::new(-2.0, 2.0, 1.0); let look_at = Vec3::new(0.0, 0.0, -1.0); let up = Vec3::new(0.0, 1.0, 0.0); let camera = Camera::new(look_from, look_at, up, 20.0, aspect_ratio); print_ppm_header(w, h); let mut rng = rand::thread_rng(); for j in (0..h).rev() { eprint!("\rScanlines remaining: {} ", j); for i in 0..w { let color = (0..100) .map(|_| { let u = (i as f64 + rng.gen::<f64>()) / ((w - 1) as f64); let v = (j as f64 + rng.gen::<f64>()) / ((h - 1) as f64); let ray = camera.get_ray(u, v); ray_color(&ray, &world, 50) }) .fold(Vec3::new(0.0, 0.0, 0.0), |a, b| a + b) / 100.0; print_ppm_pixel(&color); } } eprintln!("Done"); }
/// Cron represents a Cron task
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Cron {
    /// Presumably the number of times the task has run — confirm against the API schema.
    pub exec_times: Option<i64>,
    /// Task name.
    pub name: Option<String>,
    /// Next run, serialized as a string (format not visible here).
    pub next: Option<String>,
    /// Previous run, serialized as a string (format not visible here).
    pub prev: Option<String>,
    /// Schedule expression for the task.
    pub schedule: Option<String>,
}

impl Cron {
    /// Create a builder for this object.
    #[inline]
    pub fn builder() -> CronBuilder {
        CronBuilder {
            body: Default::default(),
        }
    }

    /// Create a builder for the `GET /admin/cron` (list cron tasks) operation.
    #[inline]
    pub fn admin_cron_list() -> CronGetBuilder {
        CronGetBuilder {
            param_page: None,
            param_limit: None,
        }
    }
}

// Implementing `From` (rather than a hand-written `Into`) gives callers
// `CronBuilder: Into<Cron>` for free via the blanket impl.
impl From<CronBuilder> for Cron {
    fn from(builder: CronBuilder) -> Self {
        builder.body
    }
}

/// Builder for [`Cron`](./struct.Cron.html) object.
#[derive(Debug, Clone)]
pub struct CronBuilder {
    body: self::Cron,
}

impl CronBuilder {
    /// Sets `exec_times`.
    #[inline]
    pub fn exec_times(mut self, value: impl Into<i64>) -> Self {
        self.body.exec_times = Some(value.into());
        self
    }

    /// Sets `name`.
    #[inline]
    pub fn name(mut self, value: impl Into<String>) -> Self {
        self.body.name = Some(value.into());
        self
    }

    /// Sets `next`.
    #[inline]
    pub fn next(mut self, value: impl Into<String>) -> Self {
        self.body.next = Some(value.into());
        self
    }

    /// Sets `prev`.
    #[inline]
    pub fn prev(mut self, value: impl Into<String>) -> Self {
        self.body.prev = Some(value.into());
        self
    }

    /// Sets `schedule`.
    #[inline]
    pub fn schedule(mut self, value: impl Into<String>) -> Self {
        self.body.schedule = Some(value.into());
        self
    }
}

/// Builder created by [`Cron::admin_cron_list`](./struct.Cron.html#method.admin_cron_list) method for a `GET` operation associated with `Cron`.
#[derive(Debug, Clone)]
pub struct CronGetBuilder {
    // Optional `page` query parameter (1-based).
    param_page: Option<i64>,
    // Optional `limit` query parameter (page size).
    param_limit: Option<i64>,
}

impl CronGetBuilder {
    /// page number of results to return (1-based)
    #[inline]
    pub fn page(mut self, value: impl Into<i64>) -> Self {
        self.param_page = Some(value.into());
        self
    }

    /// page size of results
    #[inline]
    pub fn limit(mut self, value: impl Into<i64>) -> Self {
        self.param_limit = Some(value.into());
        self
    }
}

// Wires the builder into the generic request pipeline: a `GET /admin/cron`
// that deserializes into `Vec<Cron>`.
impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for CronGetBuilder {
    type Output = Vec<Cron>;

    const METHOD: http::Method = http::Method::GET;

    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        "/admin/cron".into()
    }

    // Appends the optional `page`/`limit` query parameters; `None` values are
    // serialized as absent by the client's query encoder.
    fn modify(&self, req: Client::Request) -> Result<Client::Request, crate::client::ApiError<Client::Response>> {
        use crate::client::Request;

        Ok(req
        .query(&[
            ("page", self.param_page.as_ref().map(std::string::ToString::to_string)),
            ("limit", self.param_limit.as_ref().map(std::string::ToString::to_string))
        ]))
    }
}

// Typed accessors for response headers of this operation.
impl crate::client::ResponseWrapper<Vec<Cron>, CronGetBuilder> {
    /// Value of the `message` response header, if present and valid UTF-8.
    // `parse::<String>()` is infallible, so `ok()` only filters the header's absence.
    #[inline]
    pub fn message(&self) -> Option<String> {
        self.headers.get("message").and_then(|v| String::from_utf8_lossy(v.as_ref()).parse().ok())
    }

    /// Value of the `url` response header, if present and valid UTF-8.
    #[inline]
    pub fn url(&self) -> Option<String> {
        self.headers.get("url").and_then(|v| String::from_utf8_lossy(v.as_ref()).parse().ok())
    }
}
use file; pub fn run() { let inputs = file::read_inputs("Day9.txt"); println!("{:?}", solve(&"{{{},{},{{}}}}")); println!("{:?}", solve(&"{<a>,<a>,<a>,<a>}")); println!("{:?}", solve(&"{{<ab>},{<ab>},{<ab>},{<ab>}}")); println!("{:?}", solve(&"{{<!!>},{<!!>},{<!!>},{<!!>}}")); println!("{:?}", solve(&"{{<a!>},{<a!>},{<a!>},{<ab>}}")); println!("{:?}", solve(&inputs)); } enum State { Start, Garbage(usize, usize, usize), Group(usize, usize, usize), Cancelled(Box<State>), } fn solve(inputs: &str) -> (usize, usize) { let stream: Vec<char> = inputs.chars().collect(); let mut state = State::Start; for c in stream { match state { State::Start => { if c == '{' { state = State::Group(1, 1, 0); } else { panic!("Invalid data") } }, State::Garbage(nest_level, score, garbage_score) => { if c == '!' { state = State::Cancelled(Box::new(state)); } else if c == '>' { state = State::Group(nest_level, score, garbage_score); } else { state = State::Garbage(nest_level, score, garbage_score + 1); } }, State::Group(nest_level, score, garbage_score) => { if c == '{' { state = State::Group(nest_level + 1, score + nest_level + 1, garbage_score); } else if c == '}' { state = State::Group(nest_level - 1, score, garbage_score); } else if c == '<' { state = State::Garbage(nest_level, score, garbage_score); } else if c == '!' { state = State::Cancelled(Box::new(state)); } else if c == ',' { // just move on } else { panic!("Invalid data") } }, State::Cancelled(old_state) => { state = *old_state; }, } } match state { State::Group(_, score, garbage_score) => return (score, garbage_score), _ => panic!("Invalid state") } }
use serde::{Deserialize, Serialize}; #[repr(u8)] #[derive(Copy, Clone, Debug, Hash, PartialEq, Serialize, Deserialize)] pub enum OpCode { VOID = 0, PUSH = 1, LOOKUP = 2, IF = 3, JMP = 4, FUNC = 5, SCLOSURE = 6, ECLOSURE = 7, STRUCT = 8, POP = 9, BIND = 10, SDEF = 11, EDEF = 12, PASS = 13, PUSHCONST = 14, NDEFS = 15, EVAL = 16, PANIC = 17, CLEAR = 18, TAILCALL = 19, APPLY, SET, COLLECT, TRANSDUCE, READ, COLLECTTO, METALOOKUP, CALLCC, READLOCAL, SETLOCAL, READUPVALUE, SETUPVALUE, FILLUPVALUE, FILLLOCALUPVALUE, CLOSEUPVALUE, // Should be 1 for close, 0 for not TCOJMP, CALLGLOBAL, CALLGLOBALTAIL, LOADINT0, // Load const 0 LOADINT1, LOADINT2, CGLOCALCONST, INNERSTRUCT, }
use z80::Z80;

/*
** SBC A, $xx|(HL)|register
*/
/// Subtract-with-carry: `A <- A - operand - carry`, selecting the operand by
/// opcode (0xDE = immediate, 0x9E = (HL), 0x98–0x9F = registers), then sets
/// the subtract/carry/zero/half-carry flags and advances the clock.
pub fn sbc(z80: &mut Z80, op: u8) {
    // Resolve the operand, then fold the incoming carry into it so a single
    // subtraction below performs `A - operand - carry`.
    let sub = match op {
        // Immediate form: operand is the byte following the opcode.
        0xDE => {
            z80.r.pc += 1;
            z80.mmu.rb(z80.r.pc - 1)
        },
        0x9E => z80.mmu.rb(z80.r.get_hl()),
        0x9F => z80.r.a,
        0x98 => z80.r.b,
        0x99 => z80.r.c,
        0x9A => z80.r.d,
        0x9B => z80.r.e,
        0x9C => z80.r.h,
        0x9D => z80.r.l,
        // Unreachable for valid SBC opcodes; treated as subtracting 0.
        _ => 0,
    }.wrapping_add(z80.r.get_carry());
    // NOTE(review): if operand == 0xFF and carry == 1, `sub` wraps to 0 and
    // the borrow is silently lost (carry/zero/half-carry all computed from the
    // wrapped value) — verify against the CPU's documented SBC semantics.
    let val = z80.r.a;
    z80.r.clear_flags();
    z80.r.set_subtract(true);
    // Borrow out of bit 7 -> carry flag.
    if sub > val {
        z80.r.set_carry(true);
    }
    z80.r.a = val.wrapping_sub(sub);
    if z80.r.a == 0 {
        z80.r.set_zero(true);
    }
    // Half-carry: borrow from bit 4, derived from result ^ operands.
    // NOTE(review): `val + 1` overflows (panics in debug builds) when A was
    // 0xFF, and the usual formula is `result ^ val ^ sub` — the `+ 1` looks
    // suspicious; confirm against a reference implementation.
    if (z80.r.a ^ (val + 1) ^ sub) & 0x10 != 0 {
        z80.r.set_half_carry(true);
    }
    // One machine cycle for the register forms (memory/immediate timing is
    // handled elsewhere, if at all — not visible here).
    z80.set_register_clock(1);
}
pub mod grid;
pub mod board;
pub mod game;

// Re-export the board and game APIs at the crate root for convenience.
pub use board::*;
pub use game::*;

// Placeholder smoke test: only verifies the crate compiles and the test
// harness runs; it asserts nothing.
#[test]
fn it_works() {
}
use crate::core::Client;
use crate::queue::clients::QueueAccountClient;
use crate::queue::PopReceipt;
use crate::requests;
use crate::HasStorageClient;
use std::borrow::Cow;
use std::fmt::Debug;

/// Client scoped to a single named queue; all operations are exposed as
/// request builders.
#[derive(Debug, Clone)]
pub struct QueueClient<C>
where
    C: Client + Clone,
{
    queue_account_client: QueueAccountClient<C>,
    queue_name: String,
}

impl<C> HasStorageClient for QueueClient<C>
where
    C: Client + Clone,
{
    type StorageClient = C;

    /// Delegates to the owning account client's storage client.
    fn storage_client(&self) -> &C {
        self.queue_account_client.storage_client()
    }
}

impl<C> QueueClient<C>
where
    C: Client + Clone,
{
    pub(crate) fn new(queue_account_client: QueueAccountClient<C>, queue_name: String) -> Self {
        Self {
            queue_account_client,
            queue_name,
        }
    }

    /// Name of the queue this client targets.
    pub fn queue_name(&self) -> &str {
        self.queue_name.as_ref()
    }

    /// The account-level client this queue client was created from.
    pub fn queue_account_client(&self) -> &QueueAccountClient<C> {
        &self.queue_account_client
    }

    /// Builder that creates this queue.
    // Uses the imported `requests` path, consistent with the other builders.
    pub fn create_queue(&self) -> requests::CreateQueueBuilder<'_, C> {
        requests::CreateQueueBuilder::new(self)
    }

    /// Builder that deletes this queue.
    pub fn delete_queue(&self) -> requests::DeleteQueueBuilder<'_, C> {
        requests::DeleteQueueBuilder::new(self)
    }

    /// Builder that enqueues `message_body`.
    pub fn put_message<'a, MB>(&'a self, message_body: MB) -> requests::PutMessageBuilder<'a, C>
    where
        MB: Into<Cow<'a, str>>,
    {
        requests::PutMessageBuilder::new(self, message_body)
    }

    /// Builder that dequeues messages (they become invisible to other consumers).
    pub fn get_messages(&self) -> requests::GetMessagesBuilder<'_, C> {
        requests::GetMessagesBuilder::new(self)
    }

    /// Builder that peeks at messages without dequeuing them.
    pub fn peek_messages(&self) -> requests::PeekMessagesBuilder<'_, C> {
        requests::PeekMessagesBuilder::new(self)
    }

    /// Builder that deletes the message identified by `pop_receipt`.
    pub fn delete_message(
        &self,
        pop_receipt: Box<dyn PopReceipt>,
    ) -> requests::DeleteMessageBuilder<'_, C> {
        requests::DeleteMessageBuilder::new(self, pop_receipt)
    }

    /// Builder that deletes all messages in the queue.
    pub fn clear_messages(&self) -> requests::ClearMessagesBuilder<'_, C> {
        requests::ClearMessagesBuilder::new(self)
    }
}
// Copyright 2020 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use {anyhow::Result, ffx_core::ffx_plugin, ffx_preflight_args::PreflightCommand}; #[ffx_plugin()] pub async fn preflight_cmd(_cmd: PreflightCommand) -> Result<()> { println!("Hello, Fuchsia!"); Ok(()) } #[cfg(test)] mod test { use super::*; #[fuchsia_async::run_singlethreaded(test)] async fn test_preflight_cmd() -> Result<()> { let response = preflight_cmd(PreflightCommand {}).await; assert!(response.unwrap() == ()); Ok(()) } }
use std::collections::{HashMap, HashSet};
use std::ops::Range;

/// Returns `true` if `n` falls inside any range of any field.
fn matches_any_field(fields: &[Field], n: u64) -> bool {
    fields
        .iter()
        .any(|field| field.ranges.iter().any(|range| range.contains(&(n as usize))))
}

/// Advent of Code 2020, day 16.
///
/// Part 1 prints the "ticket scanning error rate" (sum of values valid for no
/// field). Part 2 deduces which field each ticket column holds and prints the
/// product of this ticket's `departure*` values.
fn main() -> std::io::Result<()> {
    let input = std::fs::read_to_string("examples/16/input.txt")?;
    // Input has three blank-line-separated sections:
    // field rules / "your ticket" / "nearby tickets".
    let mut parts = input.split("\n\n");
    let fields: Vec<_> = parts.next().unwrap().lines().map(Field::from).collect();
    let my_ticket = parts
        .next()
        .unwrap()
        .lines()
        .skip(1) // skip the "your ticket:" header
        .map(Ticket::from)
        .next()
        .unwrap();
    let nearby: Vec<_> = parts
        .next()
        .unwrap()
        .lines()
        .skip(1) // skip the "nearby tickets:" header
        .map(Ticket::from)
        .collect();

    // Part 1: sum every value that matches no field at all.
    let error_rate: u64 = nearby
        .iter()
        .flat_map(|ticket| ticket.numbers.iter())
        .filter(|&&n| !matches_any_field(&fields, n))
        .sum();
    println!("{}", error_rate);

    // Part 2: keep only tickets whose every value matches at least one field.
    let valid: Vec<_> = nearby
        .iter()
        .filter(|ticket| ticket.numbers.iter().all(|&n| matches_any_field(&fields, n)))
        .collect();

    // candidates[i] = set of field indices still possible for ticket column i.
    let mut candidates: Vec<HashSet<usize>> = (0..valid[0].numbers.len())
        .map(|_| ((0..fields.len()).collect()))
        .collect();
    for ticket in &valid {
        for (i, n) in ticket.numbers.iter().enumerate() {
            for (j, field) in fields.iter().enumerate() {
                if !field.ranges.iter().any(|range| range.contains(&(*n as usize))) {
                    candidates[i].remove(&j);
                }
            }
        }
    }

    // Resolve columns from most-constrained to least-constrained: at each
    // step exactly one unassigned field remains possible (stable sort keeps
    // the original tie order).
    let mut candidates = candidates.iter().enumerate().collect::<Vec<_>>();
    candidates.sort_by_key(|(_, set)| set.len());
    let mut map = HashMap::new();
    let mut found = HashSet::<usize>::new();
    for (i, s) in &candidates {
        let field = s.difference(&found).next().unwrap().clone();
        map.insert(i, &fields[field]);
        found.insert(field);
    }

    let solution = map
        .iter()
        .filter(|x| x.1.field.contains("departure"))
        .map(|x| my_ticket.numbers[**x.0])
        .product::<u64>();
    println!("{:?}", solution);
    Ok(())
}

/// A named rule with its valid (end-exclusive) value ranges.
#[derive(Debug)]
struct Field<'a> {
    field: &'a str,
    ranges: Vec<Range<usize>>,
}

impl<'a> From<&'a str> for Field<'a> {
    /// Parses `"name: a-b or c-d"`. The inclusive `a-b` bounds from the input
    /// are stored as end-exclusive `a..b+1` ranges.
    fn from(s: &'a str) -> Self {
        let mut it = s.split(':').map(|x| x.trim());
        let field = it.next().unwrap();
        let ranges = it
            .next()
            .unwrap()
            .split("or")
            .map(|x| x.trim())
            .map(|range| {
                let mut bounds = range.split('-');
                let start = bounds.next().unwrap().parse::<usize>().unwrap();
                let end = bounds.next().unwrap().parse::<usize>().unwrap();
                Range {
                    start,
                    end: end + 1,
                }
            })
            .collect();
        Field { field, ranges }
    }
}

/// A ticket: a comma-separated list of values.
#[derive(Debug)]
struct Ticket {
    numbers: Vec<u64>,
}

impl From<&str> for Ticket {
    fn from(s: &str) -> Self {
        Ticket {
            numbers: s.split(',').map(|x| x.parse::<u64>().unwrap()).collect(),
        }
    }
}
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
// NOTE(review): AutoRust-generated Azure Cost Management client. Each public
// function performs one REST operation; each sibling `mod` holds its snafu
// error enum (and, for create_or_update, the OK/Created response enum).
// Prefer regenerating over hand-editing.
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub mod report_config {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists report configs for a subscription.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
    ) -> std::result::Result<ReportConfigListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.CostManagement/reportconfigs",
            &operation_config.base_path, subscription_id
        );
        let mut req_builder = client.get(uri_str);
        // Bearer auth is attached only when a token credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ReportConfigListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Any non-200 is surfaced as DefaultResponse carrying the parsed error body.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Lists report configs for a resource group.
    pub async fn list_by_resource_group_name(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
    ) -> std::result::Result<ReportConfigListResult, list_by_resource_group_name::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.CostManagement/reportconfigs",
            &operation_config.base_path, subscription_id, resource_group_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_resource_group_name::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list_by_resource_group_name::BuildRequestError)?;
        let rsp = client
            .execute(req)
            .await
            .context(list_by_resource_group_name::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group_name::ResponseBytesError)?;
                let rsp_value: ReportConfigListResult =
                    serde_json::from_slice(&body).context(list_by_resource_group_name::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group_name::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(list_by_resource_group_name::DeserializeError { body })?;
                list_by_resource_group_name::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod list_by_resource_group_name {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Gets a single report config by name (subscription scope).
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        report_config_name: &str,
    ) -> std::result::Result<ReportConfig, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.CostManagement/reportconfigs/{}",
            &operation_config.base_path, subscription_id, report_config_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ReportConfig = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Creates or updates a report config (PUT); 200 = updated, 201 = created.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        report_config_name: &str,
        parameters: &ReportConfig,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.CostManagement/reportconfigs/{}",
            &operation_config.base_path, subscription_id, report_config_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ReportConfig = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            StatusCode::CREATED => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ReportConfig = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                create_or_update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200(ReportConfig),
            Created201(ReportConfig),
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Deletes a report config (subscription scope).
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        report_config_name: &str,
    ) -> std::result::Result<(), delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.CostManagement/reportconfigs/{}",
            &operation_config.base_path, subscription_id, report_config_name
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(()),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Gets a single report config by name (resource-group scope).
    pub async fn get_by_resource_group_name(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        report_config_name: &str,
    ) -> std::result::Result<ReportConfig, get_by_resource_group_name::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.CostManagement/reportconfigs/{}",
            &operation_config.base_path, subscription_id, resource_group_name, report_config_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get_by_resource_group_name::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get_by_resource_group_name::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get_by_resource_group_name::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get_by_resource_group_name::ResponseBytesError)?;
                let rsp_value: ReportConfig =
                    serde_json::from_slice(&body).context(get_by_resource_group_name::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get_by_resource_group_name::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(get_by_resource_group_name::DeserializeError { body })?;
                get_by_resource_group_name::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod get_by_resource_group_name {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Creates or updates a report config at resource-group scope (PUT);
    /// 200 = updated, 201 = created.
    pub async fn create_or_update_by_resource_group_name(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        report_config_name: &str,
        parameters: &ReportConfig,
    ) -> std::result::Result<create_or_update_by_resource_group_name::Response, create_or_update_by_resource_group_name::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.CostManagement/reportconfigs/{}",
            &operation_config.base_path, subscription_id, resource_group_name, report_config_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update_by_resource_group_name::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder
            .build()
            .context(create_or_update_by_resource_group_name::BuildRequestError)?;
        let rsp = client
            .execute(req)
            .await
            .context(create_or_update_by_resource_group_name::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp
                    .bytes()
                    .await
                    .context(create_or_update_by_resource_group_name::ResponseBytesError)?;
                let rsp_value: ReportConfig =
                    serde_json::from_slice(&body).context(create_or_update_by_resource_group_name::DeserializeError { body })?;
                Ok(create_or_update_by_resource_group_name::Response::Ok200(rsp_value))
            }
            StatusCode::CREATED => {
                let body: bytes::Bytes = rsp
                    .bytes()
                    .await
                    .context(create_or_update_by_resource_group_name::ResponseBytesError)?;
                let rsp_value: ReportConfig =
                    serde_json::from_slice(&body).context(create_or_update_by_resource_group_name::DeserializeError { body })?;
                Ok(create_or_update_by_resource_group_name::Response::Created201(rsp_value))
            }
            status_code => {
                let body: bytes::Bytes = rsp
                    .bytes()
                    .await
                    .context(create_or_update_by_resource_group_name::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(create_or_update_by_resource_group_name::DeserializeError { body })?;
                create_or_update_by_resource_group_name::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod create_or_update_by_resource_group_name {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200(ReportConfig),
            Created201(ReportConfig),
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Deletes a report config at resource-group scope.
    pub async fn delete_by_resource_group_name(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        report_config_name: &str,
    ) -> std::result::Result<(), delete_by_resource_group_name::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.CostManagement/reportconfigs/{}",
            &operation_config.base_path, subscription_id, resource_group_name, report_config_name
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete_by_resource_group_name::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete_by_resource_group_name::BuildRequestError)?;
        let rsp = client
            .execute(req)
            .await
            .context(delete_by_resource_group_name::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(()),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete_by_resource_group_name::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(delete_by_resource_group_name::DeserializeError { body })?;
                delete_by_resource_group_name::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod delete_by_resource_group_name {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod billing_account_dimensions {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists dimensions for a billing account; `$filter`/`$expand`/`$skiptoken`/`$top`
    /// are sent only when supplied.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        billing_account_id: &str,
        filter: Option<&str>,
        expand: Option<&str>,
        skiptoken: Option<&str>,
        top: Option<i64>,
    ) -> std::result::Result<DimensionsListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/providers/Microsoft.CostManagement/dimensions",
            &operation_config.base_path, billing_account_id
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(filter) = filter {
            req_builder = req_builder.query(&[("$filter", filter)]);
        }
        if let Some(expand) = expand {
            req_builder = req_builder.query(&[("$expand", expand)]);
        }
        if let Some(skiptoken) = skiptoken {
            req_builder = req_builder.query(&[("$skiptoken", skiptoken)]);
        }
        if let Some(top) = top {
            req_builder = req_builder.query(&[("$top", top)]);
        }
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: DimensionsListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod subscription_dimensions {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists dimensions at subscription scope; optional OData parameters as above.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        filter: Option<&str>,
        expand: Option<&str>,
        skiptoken: Option<&str>,
        top: Option<i64>,
    ) -> std::result::Result<DimensionsListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.CostManagement/dimensions",
            &operation_config.base_path, subscription_id
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(filter) = filter {
            req_builder = req_builder.query(&[("$filter", filter)]);
        }
        if let Some(expand) = expand {
            req_builder = req_builder.query(&[("$expand", expand)]);
        }
        if let Some(skiptoken) = skiptoken {
            req_builder = req_builder.query(&[("$skiptoken", skiptoken)]);
        }
        if let Some(top) = top {
            req_builder = req_builder.query(&[("$top", top)]);
        }
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: DimensionsListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod resource_group_dimensions {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists dimensions at resource-group scope; optional OData parameters as above.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        filter: Option<&str>,
        expand: Option<&str>,
        skiptoken: Option<&str>,
        top: Option<i64>,
    ) -> std::result::Result<DimensionsListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.CostManagement/dimensions",
            &operation_config.base_path, subscription_id, resource_group_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(filter) = filter {
            req_builder = req_builder.query(&[("$filter", filter)]);
        }
        if let Some(expand) = expand {
            req_builder = req_builder.query(&[("$expand", expand)]);
        }
        if let Some(skiptoken) = skiptoken {
            req_builder = req_builder.query(&[("$skiptoken", skiptoken)]);
        }
        if let Some(top) = top {
            req_builder = req_builder.query(&[("$top", top)]);
        }
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: DimensionsListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
// NOTE(review): the file is truncated in this view — `query_subscription`
// continues beyond the visible chunk.
pub async fn query_subscription(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    parameters: &ReportConfigDefinition,
) -> std::result::Result<QueryResult, query_subscription::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.CostManagement/Query", &operation_config.base_path, subscription_id ); let mut req_builder = client.post(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(query_subscription::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); req_builder = req_builder.json(parameters); let req = req_builder.build().context(query_subscription::BuildRequestError)?; let rsp = client.execute(req).await.context(query_subscription::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(query_subscription::ResponseBytesError)?; let rsp_value: QueryResult = serde_json::from_slice(&body).context(query_subscription::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(query_subscription::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(query_subscription::DeserializeError { body })?; query_subscription::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod query_subscription { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn query_resource_group( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, parameters: 
&ReportConfigDefinition, ) -> std::result::Result<QueryResult, query_resource_group::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.CostManagement/Query", &operation_config.base_path, subscription_id, resource_group_name ); let mut req_builder = client.post(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(query_resource_group::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); req_builder = req_builder.json(parameters); let req = req_builder.build().context(query_resource_group::BuildRequestError)?; let rsp = client.execute(req).await.context(query_resource_group::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(query_resource_group::ResponseBytesError)?; let rsp_value: QueryResult = serde_json::from_slice(&body).context(query_resource_group::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(query_resource_group::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(query_resource_group::DeserializeError { body })?; query_resource_group::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod query_resource_group { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, 
GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn query_billing_account( operation_config: &crate::OperationConfig, billing_account_id: &str, parameters: &ReportConfigDefinition, ) -> std::result::Result<QueryResult, query_billing_account::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/providers/Microsoft.Billing/billingAccounts/{}/providers/Microsoft.CostManagement/Query", &operation_config.base_path, billing_account_id ); let mut req_builder = client.post(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(query_billing_account::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); req_builder = req_builder.json(parameters); let req = req_builder.build().context(query_billing_account::BuildRequestError)?; let rsp = client.execute(req).await.context(query_billing_account::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(query_billing_account::ResponseBytesError)?; let rsp_value: QueryResult = serde_json::from_slice(&body).context(query_billing_account::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(query_billing_account::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(query_billing_account::DeserializeError { body })?; query_billing_account::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod query_billing_account { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: 
reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub mod operations { use crate::models::*; use reqwest::StatusCode; use snafu::{ResultExt, Snafu}; pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> { let client = &operation_config.client; let uri_str = &format!("{}/providers/Microsoft.CostManagement/operations", &operation_config.base_path,); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(list::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); let req = req_builder.build().context(list::BuildRequestError)?; let rsp = client.execute(req).await.context(list::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?; let rsp_value: OperationListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?; list::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod list { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: 
reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } }