text
stringlengths
8
4.13M
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

// NOTE(review): generated REST wire models (Azure Image Builder style API).
// `#[serde(rename = ...)]` preserves the service's JSON field casing;
// `#[serde(flatten)]` inlines the polymorphic "base type" fields into each
// concrete variant; `type_` fields carry the wire discriminator `type`.

/// One page of image templates plus the continuation link for paging.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ImageTemplate>,
    /// URL of the next page, absent on the last page.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

/// Base type for all image sources; `type_` is the JSON `type` discriminator.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateSource {
    #[serde(rename = "type")]
    pub type_: String,
}

/// Source describing an ISO download to install from.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateIsoSource {
    #[serde(flatten)]
    pub image_template_source: ImageTemplateSource,
    /// Download location of the ISO.
    #[serde(rename = "sourceURI")]
    pub source_uri: String,
    /// SHA-256 checksum used to verify the downloaded ISO.
    #[serde(rename = "sha256Checksum")]
    pub sha256_checksum: String,
}

/// Source based on a platform (marketplace) image reference.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplatePlatformImageSource {
    #[serde(flatten)]
    pub image_template_source: ImageTemplateSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub publisher: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub offer: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
}

/// Source based on an existing managed image, referenced by resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateManagedImageSource {
    #[serde(flatten)]
    pub image_template_source: ImageTemplateSource,
    #[serde(rename = "imageId")]
    pub image_id: String,
}

/// Base type for customization steps; `type_` is the wire discriminator.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateCustomizer {
    #[serde(rename = "type")]
    pub type_: String,
    /// Optional friendly name for the step.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
}

/// Shell-script customizer: either a script reference or inline commands.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateShellCustomizer {
    #[serde(flatten)]
    pub image_template_customizer: ImageTemplateCustomizer,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub script: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub inline: Vec<String>,
}

/// Customizer that restarts the build VM (Windows-style restart step).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateRestartCustomizer {
    #[serde(flatten)]
    pub image_template_customizer: ImageTemplateCustomizer,
    #[serde(rename = "restartCommand", default, skip_serializing_if = "Option::is_none")]
    pub restart_command: Option<String>,
    #[serde(rename = "restartCheckCommand", default, skip_serializing_if = "Option::is_none")]
    pub restart_check_command: Option<String>,
    #[serde(rename = "restartTimeout", default, skip_serializing_if = "Option::is_none")]
    pub restart_timeout: Option<String>,
}

/// PowerShell customizer: script or inline commands plus accepted exit codes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplatePowerShellCustomizer {
    #[serde(flatten)]
    pub image_template_customizer: ImageTemplateCustomizer,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub script: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub inline: Vec<String>,
    /// Exit codes treated as success for this step.
    #[serde(rename = "validExitCodes", default, skip_serializing_if = "Vec::is_empty")]
    pub valid_exit_codes: Vec<i64>,
}

/// Base type for distribution targets of the built image.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateDistributor {
    #[serde(rename = "type")]
    pub type_: String,
    /// Name used to key the corresponding run output.
    #[serde(rename = "runOutputName")]
    pub run_output_name: String,
    /// Free-form tag bag applied to the produced artifact.
    #[serde(rename = "artifactTags", default, skip_serializing_if = "Option::is_none")]
    pub artifact_tags: Option<serde_json::Value>,
}

/// Distributes the result as a managed image in a given location.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateManagedImageDistributor {
    #[serde(flatten)]
    pub image_template_distributor: ImageTemplateDistributor,
    #[serde(rename = "imageId")]
    pub image_id: String,
    pub location: String,
}

/// Distributes the result to a Shared Image Gallery with replication.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateSharedImageDistributor {
    #[serde(flatten)]
    pub image_template_distributor: ImageTemplateDistributor,
    #[serde(rename = "galleryImageId")]
    pub gallery_image_id: String,
    #[serde(rename = "replicationRegions")]
    pub replication_regions: Vec<String>,
}

/// Distributes the result as a VHD; no extra fields beyond the base type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateVhdDistributor {
    #[serde(flatten)]
    pub image_template_distributor: ImageTemplateDistributor,
}

/// Lifecycle state of the template resource itself.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
    Creating,
    Succeeded,
    Failed,
    Deleting,
}

/// Error detail reported when provisioning fails.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProvisioningError {
    #[serde(rename = "provisioningErrorCode", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_error_code: Option<provisioning_error::ProvisioningErrorCode>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}

pub mod provisioning_error {
    use super::*;
    /// Machine-readable cause of a provisioning failure.
    /// Variant renames keep the service's all-caps acronym spellings.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningErrorCode {
        BadSourceType,
        #[serde(rename = "BadPIRSource")]
        BadPirSource,
        #[serde(rename = "BadISOSource")]
        BadIsoSource,
        BadManagedImageSource,
        BadCustomizerType,
        UnsupportedCustomizerType,
        NoCustomizerScript,
        BadDistributeType,
        BadSharedImageDistribute,
        ServerError,
        Other,
    }
}

/// Status of the most recent image build run.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateLastRunStatus {
    // Timestamps are kept as raw strings (presumably ISO-8601 from the
    // service) rather than parsed date types — generated code convention.
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    #[serde(rename = "runState", default, skip_serializing_if = "Option::is_none")]
    pub run_state: Option<image_template_last_run_status::RunState>,
    #[serde(rename = "runSubState", default, skip_serializing_if = "Option::is_none")]
    pub run_sub_state: Option<image_template_last_run_status::RunSubState>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}

pub mod image_template_last_run_status {
    use super::*;
    /// Coarse state of the last run.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum RunState {
        Running,
        Succeeded,
        PartiallySucceeded,
        Failed,
    }
    /// Phase the run is in while `RunState` is `Running`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum RunSubState {
        Queued,
        Building,
        Customizing,
        Distributing,
    }
}

/// Core template definition: source, optional customizers, and distributors,
/// plus server-populated provisioning/run status.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateProperties {
    pub source: ImageTemplateSource,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub customize: Vec<ImageTemplateCustomizer>,
    pub distribute: Vec<ImageTemplateDistributor>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<ProvisioningState>,
    #[serde(rename = "provisioningError", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_error: Option<ProvisioningError>,
    #[serde(rename = "lastRunStatus", default, skip_serializing_if = "Option::is_none")]
    pub last_run_status: Option<ImageTemplateLastRunStatus>,
}

/// Properties of a single distribution output of a run.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RunOutputProperties {
    #[serde(rename = "artifactId", default, skip_serializing_if = "Option::is_none")]
    pub artifact_id: Option<String>,
    #[serde(rename = "artifactUri", default, skip_serializing_if = "Option::is_none")]
    pub artifact_uri: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<ProvisioningState>,
}

/// Top-level ARM resource wrapping [`ImageTemplateProperties`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplate {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ImageTemplateProperties>,
}

/// PATCH body for a template; only tags are updatable.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageTemplateUpdateParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}

/// Sub-resource holding one run output of a template build.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RunOutput {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<RunOutputProperties>,
}

/// One page of [`RunOutput`] results plus the continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RunOutputCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<RunOutput>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

/// Common ARM tracked-resource envelope (id/name/type/location/tags).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    /// Required: the Azure region of the resource.
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}

/// Common ARM child-resource envelope; `name` is required here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SubResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    pub name: String,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}

/// A REST API operation exposed by the provider (for operations listing).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}

pub mod operation {
    use super::*;
    /// Localized display strings for an [`Operation`].
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}

/// One page of [`Operation`]s plus the continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

/// One element of an error's `details` array.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiErrorBase {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}

/// Nested diagnostic information attached to an [`ApiError`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InnerError {
    #[serde(rename = "exceptionType", default, skip_serializing_if = "Option::is_none")]
    pub exception_type: Option<String>,
    #[serde(rename = "errorDetail", default, skip_serializing_if = "Option::is_none")]
    pub error_detail: Option<String>,
}

/// Top-level error body returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiError {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ApiErrorBase>,
    #[serde(rename = "innerError", default, skip_serializing_if = "Option::is_none")]
    pub inner_error: Option<InnerError>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
// This pallet use The Open Runtime Module Library (ORML) which is a community maintained collection of Substrate runtime modules.
// Thanks to all contributors of orml.
// https://github.com/open-web3-stack/open-runtime-module-library
#![cfg_attr(not(feature = "std"), no_std)]

use codec::{Decode, Encode};
use frame_support::{
    dispatch::{DispatchResult, DispatchResultWithPostInfo},
    ensure,
    pallet_prelude::*,
    traits::{Currency, ExistenceRequirement, Get, ReservableCurrency},
};
#[cfg(feature = "std")]
use serde::{Deserialize, Serialize};

use auction_manager::Auction;
use frame_system::pallet_prelude::*;
use orml_nft::Pallet as NftModule;
use primitives::{AssetId, GroupCollectionId};
use sp_runtime::RuntimeDebug;
use sp_runtime::{
    traits::{AccountIdConversion, One},
    DispatchError, ModuleId,
};
use sp_std::vec::Vec;

#[cfg(feature = "runtime-benchmarks")]
pub mod benchmarking;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;

pub use pallet::*;

pub mod default_weight;
pub use default_weight::WeightInfo;

/// Data stored per NFT group collection (a grouping of NFT classes).
#[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq)]
pub struct NftGroupCollectionData {
    pub name: Vec<u8>,
    // Metadata from ipfs
    pub properties: Vec<u8>,
}

/// Data stored per NFT class (one collection of assets).
#[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq)]
#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
pub struct NftClassData<Balance> {
    // Minimum balance to create a collection of Asset
    pub deposit: Balance,
    // Metadata from ipfs
    pub metadata: Vec<u8>,
    pub token_type: TokenType,
    pub collection_type: CollectionType,
    pub total_supply: u64,
    pub initial_supply: u64,
}

/// Data stored per minted NFT token.
#[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq)]
#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
pub struct NftAssetData<Balance> {
    // Deposit balance to create each token
    pub deposit: Balance,
    pub name: Vec<u8>,
    pub description: Vec<u8>,
    pub properties: Vec<u8>,
}

/// Transferability of tokens in a class.
#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)]
#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
pub enum TokenType {
    Transferable,
    BoundToAddress,
}

impl TokenType {
    /// True only for `Transferable`; `BoundToAddress` tokens cannot move.
    pub fn is_transferable(&self) -> bool {
        match *self {
            TokenType::Transferable => true,
            _ => false,
        }
    }
}

impl Default for TokenType {
    fn default() -> Self {
        TokenType::Transferable
    }
}

/// Category a class of NFTs belongs to.
#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)]
#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
pub enum CollectionType {
    Collectable,
    Wearable,
    Executable,
}

// Collection extension for fast retrieval
impl CollectionType {
    pub fn is_collectable(&self) -> bool {
        match *self {
            CollectionType::Collectable => true,
            _ => false,
        }
    }
    pub fn is_executable(&self) -> bool {
        match *self {
            CollectionType::Executable => true,
            _ => false,
        }
    }
    pub fn is_wearable(&self) -> bool {
        match *self {
            CollectionType::Wearable => true,
            _ => false,
        }
    }
}

impl Default for CollectionType {
    fn default() -> Self {
        CollectionType::Collectable
    }
}

// NOTE(review): duplicate of the `pub use pallet::*;` above — harmless for a
// glob re-export, but one of the two could be removed.
pub use pallet::*;

#[frame_support::pallet]
pub mod pallet {
    use super::*;

    #[pallet::pallet]
    pub struct Pallet<T>(PhantomData<T>);

    /// Pallet configuration. Requires the `orml_nft` pallet to be configured
    /// with this pallet's class/token data types.
    #[pallet::config]
    pub trait Config:
        frame_system::Config
        + orml_nft::Config<
            TokenData = NftAssetData<BalanceOf<Self>>,
            ClassData = NftClassData<BalanceOf<Self>>,
        >
    {
        type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;
        /// The minimum balance to create class
        #[pallet::constant]
        type CreateClassDeposit: Get<BalanceOf<Self>>;
        /// The minimum balance to create token
        #[pallet::constant]
        type CreateAssetDeposit: Get<BalanceOf<Self>>;
        /// Currency type for reserve/unreserve balance
        type Currency: Currency<Self::AccountId> + ReservableCurrency<Self::AccountId>;
        //NFT Module Id
        #[pallet::constant]
        type ModuleId: Get<ModuleId>;
        /// Weight info
        type WeightInfo: WeightInfo;
        /// Auction Handler
        type AuctionHandler: Auction<Self::AccountId, Self::BlockNumber>;
        type AssetsHandler: AssetHandler;
    }

    // Convenience aliases for the types pulled in from `orml_nft` / Currency.
    type ClassIdOf<T> = <T as orml_nft::Config>::ClassId;
    type TokenIdOf<T> = <T as orml_nft::Config>::TokenId;
    type BalanceOf<T> =
        <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance;

    /// AssetId -> (class, token) location of the asset inside `orml_nft`.
    #[pallet::storage]
    #[pallet::getter(fn get_asset)]
    pub(super) type Assets<T: Config> =
        StorageMap<_, Blake2_128Concat, AssetId, (ClassIdOf<T>, TokenIdOf<T>), OptionQuery>;

    /// Owner -> list of owned AssetIds (kept in sync on mint/transfer).
    #[pallet::storage]
    #[pallet::getter(fn get_assets_by_owner)]
    pub(super) type AssetsByOwner<T: Config> =
        StorageMap<_, Blake2_128Concat, T::AccountId, Vec<AssetId>, ValueQuery>;

    /// Group collection id -> its name/properties.
    #[pallet::storage]
    #[pallet::getter(fn get_group_collection)]
    pub(super) type GroupCollections<T: Config> =
        StorageMap<_, Blake2_128Concat, GroupCollectionId, NftGroupCollectionData, OptionQuery>;

    /// Class id -> the group collection it belongs to.
    #[pallet::storage]
    #[pallet::getter(fn get_class_collection)]
    pub(super) type ClassDataCollection<T: Config> =
        StorageMap<_, Blake2_128Concat, ClassIdOf<T>, GroupCollectionId, ValueQuery>;

    /// Monotonic id source for group collections.
    #[pallet::storage]
    #[pallet::getter(fn next_group_collection_id)]
    pub(super) type NextGroupCollectionId<T: Config> = StorageValue<_, u64, ValueQuery>;

    /// Running count of all group collections ever created.
    #[pallet::storage]
    #[pallet::getter(fn all_nft_collection_count)]
    pub(super) type AllNftGroupCollection<T: Config> = StorageValue<_, u64, ValueQuery>;

    /// Class id -> token type. NOTE(review): written nowhere in this file —
    /// confirm whether this map is still used.
    #[pallet::storage]
    #[pallet::getter(fn get_class_type)]
    pub(super) type ClassDataType<T: Config> =
        StorageMap<_, Blake2_128Concat, ClassIdOf<T>, TokenType, ValueQuery>;

    /// Monotonic id source for assets.
    #[pallet::storage]
    #[pallet::getter(fn next_asset_id)]
    pub(super) type NextAssetId<T: Config> = StorageValue<_, AssetId, ValueQuery>;

    /// Asset id -> accounts that have "signed"/supported it.
    #[pallet::storage]
    #[pallet::getter(fn get_asset_supporters)]
    pub(super) type AssetSupporters<T: Config> =
        StorageMap<_, Blake2_128Concat, AssetId, Vec<T::AccountId>, OptionQuery>;

    #[pallet::event]
    #[pallet::generate_deposit(pub (super) fn deposit_event)]
    #[pallet::metadata(
        <T as frame_system::Config>::AccountId = "AccountId",
        ClassIdOf<T> = "ClassId",
        TokenIdOf<T> = "TokenId",
    )]
    pub enum Event<T: Config> {
        /// New NFT Group Collection created
        NewNftCollectionCreated(GroupCollectionId),
        /// New NFT Collection/Class created
        NewNftClassCreated(<T as frame_system::Config>::AccountId, ClassIdOf<T>),
        /// Emit event when new nft minted - show the first and last asset mint
        NewNftMinted(
            AssetId,
            AssetId,
            <T as frame_system::Config>::AccountId,
            ClassIdOf<T>,
            u32,
            TokenIdOf<T>,
        ),
        /// Successfully transfer NFT
        TransferedNft(
            <T as frame_system::Config>::AccountId,
            <T as frame_system::Config>::AccountId,
            TokenIdOf<T>,
        ),
        /// Signed on NFT
        SignedNft(TokenIdOf<T>, <T as frame_system::Config>::AccountId),
    }

    #[pallet::error]
    pub enum Error<T> {
        /// Attempted to initialize the bitcountry after it had already been initialized.
        AlreadyInitialized,
        /// Asset Info not found
        AssetInfoNotFound,
        /// Asset Id not found
        AssetIdNotFound,
        /// No permission
        NoPermission,
        /// No available collection id
        NoAvailableCollectionId,
        /// Collection id is not exist
        CollectionIsNotExist,
        /// Class Id not found
        ClassIdNotFound,
        /// Non Transferable
        NonTransferable,
        /// Invalid quantity
        InvalidQuantity,
        /// No available asset id
        NoAvailableAssetId,
        /// Asset Id is already exist
        AssetIdAlreadyExist,
        /// Asset Id is currently in an auction
        AssetAlreadyInAuction,
        /// Sign your own Asset
        SignOwnAsset,
    }

    #[pallet::call]
    impl<T: Config> Pallet<T> {
        /// Root-only: create a new NFT group collection from name/properties.
        #[pallet::weight(10_000)]
        pub fn create_group(
            origin: OriginFor<T>,
            name: Vec<u8>,
            properties: Vec<u8>,
        ) -> DispatchResultWithPostInfo {
            ensure_root(origin)?;
            let next_group_collection_id =
                Self::do_create_group_collection(name.clone(), properties.clone())?;
            // NOTE(review): do_create_group_collection already inserted the
            // same data into GroupCollections; this second insert overwrites
            // it with identical content — redundant, confirm intent.
            let collection_data = NftGroupCollectionData { name, properties };
            GroupCollections::<T>::insert(next_group_collection_id, collection_data);
            let all_collection_count = Self::all_nft_collection_count();
            let new_all_nft_collection_count = all_collection_count
                .checked_add(One::one())
                .ok_or("Overflow adding a new collection to total collection")?;
            AllNftGroupCollection::<T>::set(new_all_nft_collection_count);
            Self::deposit_event(Event::<T>::NewNftCollectionCreated(
                next_group_collection_id,
            ));
            Ok(().into())
        }

        /// Create a new NFT class inside an existing group collection.
        /// Moves `CreateClassDeposit` from the caller into the class's
        /// sub-account and reserves it there.
        #[pallet::weight(10_000)]
        pub fn create_class(
            origin: OriginFor<T>,
            metadata: Vec<u8>,
            collection_id: GroupCollectionId,
            token_type: TokenType,
            collection_type: CollectionType,
        ) -> DispatchResultWithPostInfo {
            let sender = ensure_signed(origin)?;
            let next_class_id = NftModule::<T>::next_class_id();
            ensure!(
                GroupCollections::<T>::contains_key(collection_id),
                Error::<T>::CollectionIsNotExist
            );
            // Class fund
            let class_fund: T::AccountId = T::ModuleId::get().into_sub_account(next_class_id);
            // Secure deposit of token class owner -- TODO - support customise deposit
            let class_deposit = T::CreateClassDeposit::get();
            // Transfer fund to pot
            <T as Config>::Currency::transfer(
                &sender,
                &class_fund,
                class_deposit,
                ExistenceRequirement::KeepAlive,
            )?;
            // NOTE(review): this reserves the pot's entire free balance, not
            // just `class_deposit` — any pre-existing funds in the
            // sub-account get locked too. Confirm this is intentional.
            <T as Config>::Currency::reserve(
                &class_fund,
                <T as Config>::Currency::free_balance(&class_fund),
            )?;
            let class_data = NftClassData {
                deposit: class_deposit,
                token_type,
                collection_type,
                metadata: metadata.clone(),
                total_supply: Default::default(),
                initial_supply: Default::default(),
            };
            NftModule::<T>::create_class(&sender, metadata, class_data)?;
            ClassDataCollection::<T>::insert(next_class_id, collection_id);
            Self::deposit_event(Event::<T>::NewNftClassCreated(sender, next_class_id));
            Ok(().into())
        }

        /// Mint `quantity` tokens in a class owned by the caller, charging
        /// `CreateAssetDeposit` per token into the class fund.
        #[pallet::weight(< T as Config >::WeightInfo::mint(* quantity))]
        pub fn mint(
            origin: OriginFor<T>,
            class_id: ClassIdOf<T>,
            name: Vec<u8>,
            description: Vec<u8>,
            metadata: Vec<u8>,
            quantity: u32,
        ) -> DispatchResultWithPostInfo {
            let sender = ensure_signed(origin)?;
            ensure!(quantity >= 1, Error::<T>::InvalidQuantity);
            let class_info = NftModule::<T>::classes(class_id).ok_or(Error::<T>::ClassIdNotFound)?;
            // Only the class owner may mint into it.
            ensure!(sender == class_info.owner, Error::<T>::NoPermission);
            let deposit = T::CreateAssetDeposit::get();
            let class_fund: T::AccountId = T::ModuleId::get().into_sub_account(class_id);
            let total_deposit = deposit * Into::<BalanceOf<T>>::into(quantity);
            <T as Config>::Currency::transfer(
                &sender,
                &class_fund,
                total_deposit,
                ExistenceRequirement::KeepAlive,
            )?;
            <T as Config>::Currency::reserve(&class_fund, total_deposit)?;
            let new_nft_data = NftAssetData {
                deposit,
                name,
                description,
                properties: metadata.clone(),
            };
            let mut new_asset_ids: Vec<AssetId> = Vec::new();
            let mut last_token_id: TokenIdOf<T> = Default::default();
            for _ in 0..quantity {
                // Allocate the next global asset id.
                let asset_id = NextAssetId::<T>::try_mutate(|id| -> Result<AssetId, DispatchError> {
                    let current_id = *id;
                    *id = id
                        .checked_add(One::one())
                        .ok_or(Error::<T>::NoAvailableAssetId)?;
                    Ok(current_id)
                })?;
                new_asset_ids.push(asset_id);
                if AssetsByOwner::<T>::contains_key(&sender) {
                    AssetsByOwner::<T>::try_mutate(&sender, |asset_ids| -> DispatchResult {
                        // Check if the asset_id already in the owner
                        ensure!(
                            !asset_ids.iter().any(|i| asset_id == *i),
                            Error::<T>::AssetIdAlreadyExist
                        );
                        asset_ids.push(asset_id);
                        Ok(())
                    })?;
                } else {
                    let mut assets = Vec::<AssetId>::new();
                    assets.push(asset_id);
                    AssetsByOwner::<T>::insert(&sender, assets)
                }
                let token_id = NftModule::<T>::mint(
                    &sender,
                    class_id,
                    metadata.clone(),
                    new_nft_data.clone(),
                )?;
                Assets::<T>::insert(asset_id, (class_id, token_id));
                last_token_id = token_id;
            }
            // quantity >= 1 is enforced above, so first()/last() cannot be None.
            Self::deposit_event(Event::<T>::NewNftMinted(
                *new_asset_ids.first().unwrap(),
                *new_asset_ids.last().unwrap(),
                sender,
                class_id,
                quantity,
                last_token_id,
            ));
            Ok(().into())
        }

        /// Transfer a single asset to `to`; rejected while it is in auction.
        #[pallet::weight(10_000)]
        pub fn transfer(
            origin: OriginFor<T>,
            to: T::AccountId,
            asset_id: AssetId,
        ) -> DispatchResultWithPostInfo {
            let sender = ensure_signed(origin)?;
            ensure!(
                !T::AssetsHandler::check_item_in_auction(asset_id),
                Error::<T>::AssetAlreadyInAuction
            );
            let token_id = Self::do_transfer(&sender, &to, asset_id)?;
            Self::deposit_event(Event::<T>::TransferedNft(sender, to, token_id));
            Ok(().into())
        }

        /// Transfer several assets in one call; non-transferable tokens are
        /// silently skipped. NOTE(review): unlike `transfer`, this path does
        /// NOT check `check_item_in_auction` — confirm intended.
        #[pallet::weight(10_000)]
        pub fn transfer_batch(
            origin: OriginFor<T>,
            tos: Vec<(T::AccountId, AssetId)>,
        ) -> DispatchResultWithPostInfo {
            let sender = ensure_signed(origin)?;
            for (_i, x) in tos.iter().enumerate() {
                let item = &x;
                let owner = &sender.clone();
                let asset = Assets::<T>::get(item.1).ok_or(Error::<T>::AssetIdNotFound)?;
                let class_info =
                    NftModule::<T>::classes(asset.0).ok_or(Error::<T>::ClassIdNotFound)?;
                let data = class_info.data;
                match data.token_type {
                    TokenType::Transferable => {
                        let asset_info = NftModule::<T>::tokens(asset.0, asset.1)
                            .ok_or(Error::<T>::AssetInfoNotFound)?;
                        ensure!(owner.clone() == asset_info.owner, Error::<T>::NoPermission);
                        // NOTE(review): DispatchResult of this call is
                        // discarded — a failure in the ownership-index update
                        // would be silently ignored.
                        Self::handle_asset_ownership_transfer(&owner, &item.0, item.1);
                        NftModule::<T>::transfer(&owner, &item.0, (asset.0, asset.1))?;
                        Self::deposit_event(Event::<T>::TransferedNft(
                            owner.clone(),
                            item.0.clone(),
                            asset.1.clone(),
                        ));
                    }
                    _ => (),
                };
            }
            Ok(().into())
        }

        /// Record the caller as a supporter ("signer") of someone else's asset.
        #[pallet::weight(10_000)]
        pub fn sign_asset(origin: OriginFor<T>, asset_id: AssetId) -> DispatchResultWithPostInfo {
            let sender = ensure_signed(origin)?;
            let asset_by_owner: Vec<AssetId> = Self::get_assets_by_owner(&sender);
            // Owners may not sign their own assets.
            ensure!(
                !asset_by_owner.contains(&asset_id),
                Error::<T>::SignOwnAsset
            );
            if AssetSupporters::<T>::contains_key(&asset_id) {
                // NOTE(review): the Result of try_mutate is discarded, so a
                // failure inside the closure is silently swallowed; also no
                // duplicate-supporter check and no SignedNft event is emitted.
                AssetSupporters::<T>::try_mutate(asset_id, |supporters| -> DispatchResult {
                    let mut supporters = supporters.as_mut().ok_or("Empty supporters")?;
                    supporters.push(sender);
                    Ok(())
                });
                Ok(().into())
            } else {
                let mut new_supporters = Vec::new();
                new_supporters.push(sender);
                AssetSupporters::<T>::insert(asset_id, new_supporters);
                Ok(().into())
            }
        }
    }

    #[pallet::hooks]
    impl<T: Config> Hooks<T::BlockNumber> for Pallet<T> {}
}

impl<T: Config> Module<T> {
    /// Allocate the next group-collection id and store its data.
    fn do_create_group_collection(
        name: Vec<u8>,
        properties: Vec<u8>,
    ) -> Result<GroupCollectionId, DispatchError> {
        let next_group_collection_id = NextGroupCollectionId::<T>::try_mutate(
            |collection_id| -> Result<GroupCollectionId, DispatchError> {
                let current_id = *collection_id;
                *collection_id = collection_id
                    .checked_add(One::one())
                    .ok_or(Error::<T>::NoAvailableCollectionId)?;
                Ok(current_id)
            },
        )?;
        let collection_data = NftGroupCollectionData { name, properties };
        <GroupCollections<T>>::insert(next_group_collection_id, collection_data);
        Ok(next_group_collection_id)
    }

    /// Move `asset_id` from `sender`'s owned list to `to`'s owned list.
    fn handle_asset_ownership_transfer(
        sender: &T::AccountId,
        to: &T::AccountId,
        asset_id: AssetId,
    ) -> DispatchResult {
        //Remove asset from sender
        AssetsByOwner::<T>::try_mutate(&sender, |asset_ids| -> DispatchResult {
            // NOTE(review): unwrap() panics if the sender's index does not
            // actually contain asset_id — callers must check ownership first.
            let asset_index = asset_ids.iter().position(|x| *x == asset_id).unwrap();
            asset_ids.remove(asset_index);
            Ok(())
        })?;
        // Insert asset to recipient
        if AssetsByOwner::<T>::contains_key(to) {
            AssetsByOwner::<T>::try_mutate(&to, |asset_ids| -> DispatchResult {
                // Check if the asset_id already in the owner
                ensure!(
                    !asset_ids.iter().any(|i| asset_id == *i),
                    Error::<T>::AssetIdAlreadyExist
                );
                asset_ids.push(asset_id);
                Ok(())
            })?;
        } else {
            let mut asset_ids = Vec::<AssetId>::new();
            asset_ids.push(asset_id);
            AssetsByOwner::<T>::insert(&to, asset_ids);
        }
        Ok(())
    }

    /// Transfer an asset after checking type and ownership; returns the
    /// underlying orml_nft token id on success.
    pub fn do_transfer(
        sender: &T::AccountId,
        to: &T::AccountId,
        asset_id: AssetId,
    ) -> Result<<T as orml_nft::Config>::TokenId, DispatchError> {
        let asset = Assets::<T>::get(asset_id).ok_or(Error::<T>::AssetIdNotFound)?;
        let class_info = NftModule::<T>::classes(asset.0).ok_or(Error::<T>::ClassIdNotFound)?;
        let data = class_info.data;
        match data.token_type {
            TokenType::Transferable => {
                let check_ownership = Self::check_nft_ownership(&sender, &asset_id)?;
                ensure!(check_ownership, Error::<T>::NoPermission);
                // NOTE(review): DispatchResult discarded here as well — a
                // failed ownership-index update would be silently ignored.
                Self::handle_asset_ownership_transfer(&sender, &to, asset_id);
                NftModule::<T>::transfer(&sender, &to, asset.clone())?;
                Ok(asset.1)
            }
            TokenType::BoundToAddress => Err(Error::<T>::NonTransferable.into()),
        }
    }

    /// Whether `sender` owns the token backing `asset_id` in orml_nft.
    pub fn check_nft_ownership(
        sender: &T::AccountId,
        asset_id: &AssetId,
    ) -> Result<bool, DispatchError> {
        let asset = Assets::<T>::get(asset_id).ok_or(Error::<T>::AssetIdNotFound)?;
        let class_info = NftModule::<T>::classes(asset.0).ok_or(Error::<T>::ClassIdNotFound)?;
        // NOTE(review): `data` is never read in this function.
        let data = class_info.data;
        let asset_info =
            NftModule::<T>::tokens(asset.0, asset.1).ok_or(Error::<T>::AssetInfoNotFound)?;
        if sender == &asset_info.owner {
            return Ok(true);
        }
        return Ok(false);
    }
}

pub trait AssetHandler {
    /// Checks if item is already in an auction
    fn check_item_in_auction(asset_id: AssetId) -> bool;
}

impl<T: Config> AssetHandler for Module<T> {
    // Delegates straight to the configured auction pallet.
    fn check_item_in_auction(asset_id: AssetId) -> bool {
        return T::AuctionHandler::check_item_in_auction(asset_id);
    }
}
use std::env;
use std::thread;
use std::sync::mpsc::{Sender, channel};
use std::net::{IpAddr, TcpStream};
use std::str::FromStr;

/// Validated CLI arguments: `<program> <ip> <threads> <start_port> <end_port>`.
struct UserInput {
    ip_address: IpAddr,
    threads: u16,
    starting_port: u16, //starting and ending port form a range of ports to scan
    ending_port: u16,
}

impl UserInput {
    /// Parses and validates the raw argument list.
    ///
    /// BUG FIX: the old code indexed `args[1..=4]` without a length check and
    /// used `expect()` on each parse, so malformed input panicked even though
    /// this function returns a `Result`. It now reports every failure as an
    /// `Err`: missing arguments, a non-numeric / >65535 value, an invalid IP,
    /// a zero thread count, or an inverted port range.
    fn new(args: &[String]) -> Result<UserInput, &'static str> {
        if args.len() < 5 {
            return Err("Usage: <ip> <threads> <starting_port> <ending_port>");
        }
        // If IpAddr can't be constructed from user input then it's not a valid ipv4 or ipv6 address.
        let ip_address = IpAddr::from_str(&args[1]).map_err(|_| "Invalid IP")?;
        let threads = args[2]
            .parse::<u16>()
            .map_err(|_| "Error encountered while parsing number of threads")?;
        let starting_port = args[3]
            .parse::<u16>()
            .map_err(|_| "Error encountered while parsing starting port")?;
        let ending_port = args[4]
            .parse::<u16>()
            .map_err(|_| "Error encountered while parsing ending port")?;
        if threads == 0 {
            // A zero stride would make every worker spin forever on one port.
            return Err("Number of threads must be at least 1");
        }
        if starting_port > ending_port {
            return Err("Starting port must not be greater than ending port");
        }
        Ok(UserInput {
            ip_address,
            threads,
            starting_port,
            ending_port,
        })
    }
}

/// Scans `starting_port`, `starting_port + num_threads`, ... up to and
/// including `ending_port`, sending every open port through `tx`.
///
/// BUG FIX: the old exit test `(ending_port - port) <= num_threads` both
/// underflowed `u16` (panicking in debug builds) whenever `port` exceeded
/// `ending_port`, and broke out *before* scanning the last ports of the
/// range, so ports near `ending_port` were silently skipped. `port +=
/// num_threads` could also overflow near 65535. The loop below visits every
/// port of the stride exactly once and stops safely at the top of the range.
fn scan(tx: Sender<u16>, starting_port: u16, address: IpAddr, num_threads: u16, ending_port: u16) {
    let mut port: u16 = starting_port;
    while port <= ending_port {
        // A successful TCP connect means the port is open.
        if TcpStream::connect((address, port)).is_ok() {
            tx.send(port).unwrap();
        }
        // checked_add avoids u16 overflow when the stride passes 65535.
        match port.checked_add(num_threads) {
            Some(next) => port = next,
            None => break,
        }
    }
}

fn main() {
    let args: Vec<String> = env::args().collect();
    let arguments = UserInput::new(&args).expect("Problem parsing arguments");
    let threads = arguments.threads;
    let ip_address = arguments.ip_address;
    let starting_port = arguments.starting_port;
    let ending_port = arguments.ending_port;

    let (tx, rx) = channel();
    for i in 0..threads {
        let tx = tx.clone();
        // Each worker starts at its own offset and strides by `threads`.
        let first_port = match starting_port.checked_add(i) {
            Some(p) => p,
            None => break, // offsets beyond 65535 have nothing to scan
        };
        thread::spawn(move || {
            scan(tx, first_port, ip_address, threads, ending_port);
        });
    }
    // Drop the original sender so the receive loop terminates once every
    // worker has finished and dropped its clone.
    drop(tx);
    let mut output = Vec::new();
    for p in rx {
        output.push(p);
    }
    println!("");
    output.sort();
    for v in output {
        println!("{} is open", v);
    }
}
use std::error::Error;
use std::fmt;

/// Error that can happen when swapping buffers.
#[derive(Debug, Clone, PartialEq)]
pub enum SwapBuffersError {
    /// The corresponding context has been lost and needs to be recreated.
    ///
    /// All the objects associated to it (textures, buffers, programs, etc.)
    /// need to be recreated from scratch.
    ///
    /// Operations will have no effect. Functions that read textures, buffers, etc.
    /// will return uninitialized data instead.
    ContextLost,
    /// The buffers have already been swapped.
    ///
    /// This error can be returned when `swap_buffers` has been called multiple times
    /// without any modification in between.
    AlreadySwapped,
    /// Unknown error
    Unknown(u32),
}

impl fmt::Display for SwapBuffersError {
    // Display no longer routes through the deprecated `Error::description`,
    // and `Unknown` now reports its error code instead of discarding it.
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            SwapBuffersError::ContextLost => {
                write!(formatter, "The context has been lost, it needs to be recreated")
            }
            SwapBuffersError::AlreadySwapped => write!(
                formatter,
                "Buffers are already swapped, swap_buffers was called too many times"
            ),
            SwapBuffersError::Unknown(code) => {
                write!(formatter, "Unknown error occurred (code {})", code)
            }
        }
    }
}

impl Error for SwapBuffersError {
    // Kept (although deprecated) so existing callers of `.description()`
    // still get the same strings; new code should rely on `Display`.
    fn description(&self) -> &str {
        match *self {
            SwapBuffersError::ContextLost => "The context has been lost, it needs to be recreated",
            SwapBuffersError::AlreadySwapped => {
                "Buffers are already swapped, swap_buffers was called too many times"
            }
            SwapBuffersError::Unknown(_) => "Unknown error occurred",
        }
    }
    // The deprecated `cause` override returning `None` was removed: the
    // trait's default implementation already behaves identically.
}
//! Confidential token pallet: account balances are stored as ElGamal-style
//! ciphertexts (`CipherText<u128>`) and mutated through `EGICipher`
//! operations so raw amounts are never persisted in the clear.
#![cfg_attr(not(feature = "std"), no_std)]
pub use primitive_types::{U256};
pub mod balance;
use sp_runtime::{
    DispatchError,
    traits::{
        StaticLookup,
    },
};
use system::{
    ensure_signed,
    ensure_root,
};
use crate::cipher::{
    EGICipher,
    CipherFunctor,
};
use crate::proof::{
    CipherProof,
};
pub use crate::balance::{
    CipherText,
    CipherBalance,
};
use frame_support::{
    decl_storage, decl_module, decl_event, dispatch
};
use codec::{Encode, Decode};
mod primering;
mod cipher;
mod proof;

// Cipher parameters persisted in storage: (gamma, prime).
#[derive(Encode, Decode, Default, Clone, PartialEq)]
struct CipherInfo(u128, u128);

impl CipherInfo {
    // Rehydrates the stored tuple into a usable `EGICipher`.
    fn to_cipher(self) -> EGICipher<u128> {
        EGICipher {gamma:self.0, prime:self.1}
    }
}

/// The module's configuration trait.
pub trait Trait<I: Instance = DefaultInstance>: system::Trait {
    type Balance;
    type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;
}

/*
impl<T:Trait<I>, I: Instance> Trait<I> for T {
    type Event = Event<T>;
    type TransferHistory = TransferHistory;
    type OnNewAccount = OnNewAccount<Self::AccountId>;
}
*/

/* We need implement Balance trait as follows
 * One
 * Zero
 * CheckedMul
 * CheckedDiv
 * CheckedSquareRoot
 * CheckedAdd
 * CheckedSub
impl<T:Trait<I>, I: Instance> Currency<T::AccountId> for Module<T,I> where
    T::Balance: History<u128>
{
    type Balance = T::Balance;
    fn total_balance(who: &T::AccountId) -> Self::Balance {
        0
    }
}
*/

impl<T:Trait<I>, I: Instance> Module<T,I> {
    // Genesis hook invoked from `add_extra_genesis`; currently a no-op.
    fn initialize_primeset(prime: &u128) {
    }
}

decl_storage! {
    trait Store for Module<T:Trait<I>, I:Instance = DefaultInstance> as Token {
        // Prime parameter fixed at genesis from the chain spec.
        pub ProofSetting build(|config: &GenesisConfig| { config.primeset }): u128;
        pub Rel: u32;
        // Active ElGamal parameters (gamma, prime); see `CipherInfo`.
        pub Cipher: CipherInfo;
        // Per-account encrypted balance.
        BalanceMap get(balance_balance_getter): map hasher(blake2_256) T::AccountId => CipherText<u128>;
    }
    add_extra_genesis {
        config(primeset): u128;
        build(|config| Module::<T,I>::initialize_primeset(&config.primeset))
    }
}

decl_module! {
    pub struct Module<T: Trait<I>, I: Instance = DefaultInstance> for enum Call where origin: T::Origin {
        fn deposit_event() = default;
        /**
         * Standard transfer function, release the locked amount
         * and transfer it into the recv's account.
         */
        fn transfer(origin,
            amount:u128,
            recv: <T::Lookup as StaticLookup>::Source
        ) -> dispatch::DispatchResult {
            let cipher = Cipher::<I>::get().to_cipher();
            let src = ensure_signed(origin)?;
            let src_balance = <BalanceMap<T,I>>::get(src.clone());
            let dest = T::Lookup::lookup(recv)?;
            /*
             * Set the new balance for dest
             * Create an account if dest account does not exist.
             */
            if !<BalanceMap<T,I>>::contains_key(dest.clone()) {
                Err(DispatchError::Other("Account does not exists"))
            } else {
                let src_new = src_balance.release_locked(&cipher, amount)?;
                let dest_balance = <BalanceMap<T,I>>::get(dest.clone());
                let dest_new = dest_balance.increase(&cipher, amount);
                // once we reach this spot, no chance to raise exception
                <BalanceMap<T,I>>::insert(src, src_new);
                <BalanceMap<T,I>>::insert(dest, dest_new);
                Ok(())
            }
        }
        /**
         * To prevent frequent account creating attack,
         * we require a limit amount of balance is transfered into the new account.
         * The suter client might receive an error code from transfer of non-existence
         * account. In such case, please use the create account api and provide a
         * public key and relative r.
        fn create_account(origin,
            amount:u128,
            pubkey:u128,
            r:u128,
            recv: <T::Lookup as StaticLookup>::Source
        ) -> dispatch::DispatchResult {
            let cipher = Cipher::<I>::get().to_cipher();
            let src = ensure_signed(origin)?;
            let src_balance = <BalanceMap<T,I>>::get(src.clone());
            let dest = T::Lookup::lookup(recv)?;
            /*
             * Set the new balance for dest
             * Create an account if dest account does not exist.
             */
            let dest_new = if <BalanceMap<T,I>>::exists(dest) {
                Err(())
            } else {
                Self::new_account(dest, amount);
                let dest_balance = <BalanceMap<T,I>>::get(dest.clone());
                let dest_new = dest_balance.increase(&cipher, amount);
                let src_new = src_balance.release_locked(&cipher, amount)?;
                // once we reach this spot, no chance to raise exception
                <BalanceMap<T,I>>::insert(src, src_new);
                <BalanceMap<T,I>>::insert(dest, dest_new);
                Ok(())
            }
        }
        */
        /**
         * Before transfer, we need to lock enough balanced in
         * our account so that all the transfer transaction from
         * a particular account is well ordered
         */
        fn lock_balance(
            origin,
            amount:u128,
            s:u128,
            proof:[(u128,u128);4],
        ) {
            let who = ensure_signed(origin)?;
            let cipher = Cipher::<I>::get().to_cipher();
            let balance = <BalanceMap<T,I>>::get(who.clone());
            let delta = cipher.encode(balance.pubkey, amount, balance.rel);
            let remain_cipher = cipher.minus(balance.current, delta);
            /* TODO: need to port zkrp in ING
             * Currently we assume the highest bit of one is less
             * then 64, thus x < 2^64 - 1
             */
            // NOTE(review): the result of this range-proof check is ignored —
            // presumably it should gate the lock below; confirm.
            cipher.within_exp(s, s, remain_cipher, proof.to_vec());
            let who_new = balance.lock(&cipher, amount);
            <BalanceMap<T,I>>::insert(who, who_new);
        }
        // Overwrites the caller's encrypted balance with `amount`.
        fn reset_balance(
            origin,
            amount:u128,
        ) {
            let who = ensure_signed(origin)?;
            let cipher = Cipher::<I>::get().to_cipher();
            let who_balance = <BalanceMap<T,I>>::get(who.clone());
            let who_new = who_balance.set(&cipher, amount);
            <BalanceMap<T,I>>::insert(who, who_new);
        }
        // Re-encrypts the caller's balance under a new public key.
        fn set_pubkey(
            origin,
            key:u128,
        ) {
            let who = ensure_signed(origin)?;
            let cipher = Cipher::<I>::get().to_cipher();
            let who_balance = <BalanceMap<T,I>>::get(who.clone());
            let who_new = who_balance.switch(&cipher, key);
            <BalanceMap<T,I>>::insert(who, who_new);
        }
        // NOTE(review): non-snake_case dispatchable; renaming would change
        // the pallet's call encoding, so it is left as-is here.
        fn TestStorage(
            origin,
            amount:u32,
        ) {
            //let who = ensure_signed(origin)?;
            <Rel::<I>>::put(amount);
            //Self::deposit_event(RawEvent::TokenEvent(who));
        }
    }
}

decl_event!(
    pub enum Event<T> where
        <T as system::Trait>::AccountId,
    {
        TokenEvent(AccountId),
    }
);
//! svd2rust-style generated accessors for register `CM0_INT_CTL3`:
//! four 8-bit mux-select fields routing system interrupts to CM0 CPU
//! interrupt sources 12-15 (bits 0:7, 8:15, 16:23, 24:31 respectively).
//! Generated code — kept byte-identical; comments added for review only.
#[doc = "Reader of register CM0_INT_CTL3"]
pub type R = crate::R<u32, super::CM0_INT_CTL3>;
#[doc = "Writer for register CM0_INT_CTL3"]
pub type W = crate::W<u32, super::CM0_INT_CTL3>;
#[doc = "Register CM0_INT_CTL3 `reset()`'s with value 0xf0f0_f0f0"]
impl crate::ResetValue for super::CM0_INT_CTL3 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0xf0f0_f0f0
    }
}
#[doc = "Reader of field `MUX0_SEL`"]
pub type MUX0_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `MUX0_SEL`"]
pub struct MUX0_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> MUX0_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0:7 of the register.
        self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
        self.w
    }
}
#[doc = "Reader of field `MUX1_SEL`"]
pub type MUX1_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `MUX1_SEL`"]
pub struct MUX1_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> MUX1_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 8:15 of the register.
        self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8);
        self.w
    }
}
#[doc = "Reader of field `MUX2_SEL`"]
pub type MUX2_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `MUX2_SEL`"]
pub struct MUX2_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> MUX2_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 16:23 of the register.
        self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16);
        self.w
    }
}
#[doc = "Reader of field `MUX3_SEL`"]
pub type MUX3_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `MUX3_SEL`"]
pub struct MUX3_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> MUX3_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 24:31 of the register.
        self.w.bits = (self.w.bits & !(0xff << 24)) | (((value as u32) & 0xff) << 24);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:7 - System interrupt select for CPU interrupt source 12."]
    #[inline(always)]
    pub fn mux0_sel(&self) -> MUX0_SEL_R {
        MUX0_SEL_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:15 - System interrupt select for CPU interrupt source 13."]
    #[inline(always)]
    pub fn mux1_sel(&self) -> MUX1_SEL_R {
        MUX1_SEL_R::new(((self.bits >> 8) & 0xff) as u8)
    }
    #[doc = "Bits 16:23 - System interrupt select for CPU interrupt source 14."]
    #[inline(always)]
    pub fn mux2_sel(&self) -> MUX2_SEL_R {
        MUX2_SEL_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bits 24:31 - System interrupt select for CPU interrupt source 15."]
    #[inline(always)]
    pub fn mux3_sel(&self) -> MUX3_SEL_R {
        MUX3_SEL_R::new(((self.bits >> 24) & 0xff) as u8)
    }
}
impl W {
    #[doc = "Bits 0:7 - System interrupt select for CPU interrupt source 12."]
    #[inline(always)]
    pub fn mux0_sel(&mut self) -> MUX0_SEL_W {
        MUX0_SEL_W { w: self }
    }
    #[doc = "Bits 8:15 - System interrupt select for CPU interrupt source 13."]
    #[inline(always)]
    pub fn mux1_sel(&mut self) -> MUX1_SEL_W {
        MUX1_SEL_W { w: self }
    }
    #[doc = "Bits 16:23 - System interrupt select for CPU interrupt source 14."]
    #[inline(always)]
    pub fn mux2_sel(&mut self) -> MUX2_SEL_W {
        MUX2_SEL_W { w: self }
    }
    #[doc = "Bits 24:31 - System interrupt select for CPU interrupt source 15."]
    #[inline(always)]
    pub fn mux3_sel(&mut self) -> MUX3_SEL_W {
        MUX3_SEL_W { w: self }
    }
}
// Copyright 2018 Steven Bosnick // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE-2.0 or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms extern crate luther; use std::default; use luther::Lexer; use luther::spanned::StrExt; #[derive(Debug, PartialEq)] enum Tokens { Ab, Acc(String), } // Start luther-derive exemplar // dfa matches ["ab", "acc*"] #[derive(PartialEq, Debug, Clone, Copy)] enum TokensDfa { State0, // ["ab", "acc*"] State1, // [ null, null ] State2, // ["b", "cc*"] State3, // ["", null] State4, // [null, "c*"] } impl default::Default for TokensDfa { fn default() -> Self { TokensDfa::State0 } } impl luther::dfa::Dfa<Tokens> for TokensDfa { fn is_error(&self) -> bool { *self == TokensDfa::State1 } fn transition(&self, c: char) -> Self { use TokensDfa::*; match (*self, c) { (State0, 'a') => State2, (State2, 'b') => State3, (State2, 'c') => State4, (State4, 'c') => State4, (_, _) => State1, } } fn accept(&self, matched: &str) -> Option<Tokens> { use TokensDfa::*; match *self { State3 => Some(Tokens::Ab), State4 => Some(Tokens::Acc(matched.parse().unwrap_or_default())), _ => None, // COV_EXCL_LINE } } } impl luther::Lexer for Tokens { type Dfa = TokensDfa; } // End luther-dervie exemplar #[test] fn luther_matches_for_tokens_ab_and_accc() { let input = "abaccc".spanned_chars(); let sut = Tokens::lexer(input).map(|r| r.map(|s| s.into_inner().1)); let result: Result<Vec<_>, _> = sut.collect(); let result = result.expect("unexpected lexer error"); assert_eq!(result, vec![Tokens::Ab, Tokens::Acc("accc".to_string())]); }
use std::fmt;

/// Connection status; fieldless, so it can be cast to its discriminant.
#[allow(dead_code)]
#[derive(Debug)]
enum Status {
    Online,
    Offline,
}

/// A colour, either one of four presets or a custom hex string.
#[allow(dead_code)]
#[derive(Debug)]
enum Color {
    Red,
    Green,
    Blue,
    Orange,
    Custom(String),
}

impl fmt::Display for Color {
    /// Renders the colour as a `#RRGGBB` hex string.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Match ergonomics make the old `&*self` dance unnecessary.
        match self {
            Color::Red => write!(f, "#FF0000"),
            Color::Green => write!(f, "#008000"),
            Color::Blue => write!(f, "#0000FF"),
            Color::Orange => write!(f, "#FFA500"),
            Color::Custom(color) => write!(f, "{}", color),
        }
    }
}

#[allow(dead_code)]
#[derive(Debug)]
struct Point {
    x: i32,
    y: i32,
}

/// A movement direction carrying its point payload.
#[allow(dead_code)]
#[derive(Debug)]
enum Direction {
    Up(Point),
    Down(Point),
    Left(Point),
    Right(Point),
}

/// The WASD key associated with a direction.
#[allow(dead_code)]
#[derive(Debug)]
enum Keys {
    UpKey(String),
    DownKey(String),
    LeftKey(String),
    RightKey(String),
}

impl Direction {
    /// Maps a direction to the key press that produces it.
    fn match_direction(&self) -> Keys {
        match *self {
            Direction::Up(_) => Keys::UpKey(String::from("Pressed w")),
            Direction::Down(_) => Keys::DownKey(String::from("Pressed s")),
            Direction::Left(_) => Keys::LeftKey(String::from("Pressed a")),
            Direction::Right(_) => Keys::RightKey(String::from("Pressed d")),
        }
    }
}

impl Keys {
    /// Borrows the inner message. Returns `&str` (not `&String`), the
    /// idiomatic borrowed string type.
    fn destruct(&self) -> &str {
        match self {
            Keys::UpKey(s) => s,
            Keys::DownKey(s) => s,
            Keys::LeftKey(s) => s,
            Keys::RightKey(s) => s,
        }
    }
}

#[allow(dead_code)]
#[derive(Debug)]
enum Shape {
    Rectangle { width: u32, height: u32 },
    Square(u32),
    Circle(f64),
}

impl Shape {
    /// Area of the shape.
    ///
    /// Fixes: converts to `f64` *before* multiplying (the old
    /// `(width * height) as f64` could overflow `u32` first) and uses
    /// `std::f64::consts::PI` instead of the hard-coded 3.14.
    fn area(&self) -> f64 {
        match *self {
            Shape::Rectangle { width, height } => f64::from(width) * f64::from(height),
            Shape::Square(s) => f64::from(s) * f64::from(s),
            Shape::Circle(r) => std::f64::consts::PI * r * r,
        }
    }
}

fn main() {
    let status = Status::Online;
    println!("{:?}", status);
    println!("{:?}", status as i32);

    let favorite = Color::Red;
    println!("{}", favorite);
    let blue = Color::Blue;
    println!("{}", blue);
    let pink = Color::Custom("#FFC0CB".to_string());
    println!("{}", pink);

    let up = Direction::Up(Point { x: 0, y: 1 });
    let up_direction = up.match_direction();
    println!("{:?}", up_direction);
    let key = up_direction.destruct();
    println!("{:?}", key);

    let rectangle = Shape::Rectangle {
        width: 10,
        height: 70,
    };
    let square = Shape::Square(10);
    let circle = Shape::Circle(4.5);
    let rectangle_area = rectangle.area();
    println!("{}", rectangle_area);
    let square_area = square.area();
    println!("{}", square_area);
    let circle_area = circle.area();
    println!("{}", circle_area);
}
//! Newtype wrappers around `x25519_dalek` key types, adding the trait
//! implementations (Hash, Eq, FromStr, Debug, …) that the upstream
//! types do not provide.
use x25519_dalek;

use rand::RngCore;
use rand::CryptoRng;

use std::fmt;
use std::cmp::Eq;
use std::ops::Deref;
use std::str::FromStr;
use std::hash::{Hash, Hasher};
use std::convert::AsRef;

/// 32-byte pre-shared key, parseable from a 64-character hex string.
#[derive(Copy, Clone)]
pub struct PresharedSecret([u8; 32]);

impl FromStr for PresharedSecret {
    type Err = (); // TODO: better error type

    // Accepts exactly 64 hex characters (32 bytes); anything else errors.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match hex::decode(s) {
            Ok(v) => if v.len() == 32 {
                let mut psk = [0u8; 32];
                psk.copy_from_slice(&v);
                Ok(PresharedSecret(psk))
            } else {
                Err(())
            },
            Err(_) => Err(())
        }
    }
}

impl Default for PresharedSecret {
    // All-zero PSK — presumably means "no pre-shared key"; confirm with
    // the protocol layer.
    fn default() -> Self {
        Self([0u8; 32])
    }
}

impl AsRef<[u8]> for PresharedSecret {
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}

impl PresharedSecret {
    /// Borrows the raw 32-byte key material.
    pub fn as_bytes(&self) -> &[u8; 32] {
        &self.0
    }
}

// required traits for PublicKey
#[derive(Copy, Clone)]
pub struct PublicKey(x25519_dalek::PublicKey);

impl Hash for PublicKey {
    // Hash over the raw key bytes so equal keys hash equally.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_bytes().hash(state);
    }
}

impl PartialEq for PublicKey {
    // Byte-wise comparison. NOTE(review): not constant-time; fine for
    // public keys, but confirm nothing secret is compared this way.
    fn eq(&self, other: &Self) -> bool {
        self.as_bytes() == other.as_bytes()
    }
}

impl Eq for PublicKey {}

impl Deref for PublicKey {
    type Target = x25519_dalek::PublicKey;
    fn deref(&self) -> &x25519_dalek::PublicKey {
        return &self.0
    }
}

impl FromStr for PublicKey {
    type Err = (); // TODO: better error type

    // Same 64-hex-character format as `PresharedSecret`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match hex::decode(s) {
            Ok(v) => if v.len() == 32 {
                let mut pk = [0u8; 32];
                pk.copy_from_slice(&v);
                Ok(PublicKey(x25519_dalek::PublicKey::from(pk)))
            } else {
                Err(())
            },
            Err(_) => Err(())
        }
    }
}

impl fmt::Debug for PublicKey {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "PublicKey({})", hex::encode(self.as_bytes()))
    }
}

impl From<[u8; 32]> for PublicKey {
    fn from(pk : [u8; 32]) -> Self {
        PublicKey(x25519_dalek::PublicKey::from(pk))
    }
}

impl From<&StaticSecret> for PublicKey {
    // Derives the Curve25519 public key from the secret scalar.
    fn from(sk : &StaticSecret) -> Self {
        PublicKey(x25519_dalek::PublicKey::from(&sk.0))
    }
}

// required traits for StaticSecret
#[derive(Clone)]
pub struct StaticSecret(x25519_dalek::StaticSecret);

impl Deref for StaticSecret {
    type Target = x25519_dalek::StaticSecret;
    fn deref(&self) -> &Self::Target {
        return &self.0
    }
}

impl StaticSecret {
    /// Generates a fresh secret from the supplied CSPRNG.
    pub fn new<T>(csprng: &mut T) -> Self
    where
        T: RngCore + CryptoRng
    {
        StaticSecret(x25519_dalek::StaticSecret::new(csprng))
    }
}

impl From<[u8; 32]> for StaticSecret {
    fn from(v : [u8; 32]) -> Self {
        StaticSecret(x25519_dalek::StaticSecret::from(v))
    }
}

impl fmt::Debug for StaticSecret {
    // NOTE(review): this prints the raw secret key in hex — any Debug
    // logging will leak key material; consider redacting.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "StaticSecret({})", hex::encode(self.to_bytes()))
    }
}

impl FromStr for StaticSecret {
    type Err = (); // TODO: better error type

    // Same 64-hex-character format as the other key types.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match hex::decode(s) {
            Ok(v) => if v.len() == 32 {
                let mut sk = [0u8; 32];
                sk.copy_from_slice(&v);
                Ok(StaticSecret(x25519_dalek::StaticSecret::from(sk)))
            } else {
                Err(())
            },
            Err(_) => Err(())
        }
    }
}

// required traits for SharedSecret
pub type SharedSecret = x25519_dalek::SharedSecret;
//! Serialisable builders for the Slack Block Kit JSON layout format.
use serde::Serialize;

/// Slack "divider" layout block.
#[derive(Serialize)]
pub struct Divider {
    #[serde(rename = "type")]
    block_type: String,
}

/// Markdown text object (`type: mrkdwn`).
#[derive(Serialize)]
pub struct Text {
    #[serde(rename = "type")]
    block_type: String,
    text: String,
}

impl Text {
    /// Wraps `text` as a mrkdwn text object.
    pub fn new(text: String) -> Text {
        Text {
            block_type: "mrkdwn".to_owned(),
            text,
        }
    }
}

/// Plain-text object (`type: plain_text`).
#[derive(Serialize)]
pub struct PlainText {
    #[serde(rename = "type")]
    block_type: String,
    text: String,
}

impl PlainText {
    /// Wraps `text` as a plain_text object.
    pub fn new(text: String) -> PlainText {
        PlainText {
            block_type: "plain_text".to_owned(),
            text,
        }
    }
}

/// Image accessory attached to a section block.
#[derive(Serialize)]
pub struct AccessoryImage {
    #[serde(rename = "type")]
    block_type: String,
    image_url: String,
    alt_text: String,
}

impl AccessoryImage {
    /// Builds an image accessory for `url` with the default alt text.
    pub fn new(url: String) -> AccessoryImage {
        AccessoryImage {
            block_type: "image".to_owned(),
            image_url: url,
            alt_text: "irrelephant".to_owned(),
        }
    }
}

/// Section block: markdown text plus an image accessory.
#[derive(Serialize)]
pub struct Section {
    #[serde(rename = "type")]
    block_type: String,
    text: Text,
    accessory: AccessoryImage,
}

/// A single interactive element (e.g. a button) inside an actions block.
#[derive(Serialize)]
pub struct Action {
    #[serde(rename = "type")]
    pub block_type: String,
    pub action_id: String,
    pub value: String,
    pub text: PlainText,
}

/// Actions block: a row of interactive elements.
#[derive(Serialize)]
pub struct Actions {
    #[serde(rename = "type")]
    block_type: String,
    elements: Vec<Action>,
}

/// Any supported top-level block; serialised untagged so the inner
/// `type` field alone discriminates.
#[derive(Serialize)]
#[serde(untagged)]
pub enum Block {
    Divider(Divider),
    Section(Section),
    Actions(Actions),
}

/// Root payload: the ordered list of blocks in a message.
#[derive(Serialize)]
pub struct BlocksRoot {
    pub blocks: Vec<Block>,
}

// `new` takes no arguments, so provide the conventional `Default` too
// (clippy: new_without_default).
impl Default for BlocksRoot {
    fn default() -> Self {
        BlocksRoot::new()
    }
}

impl BlocksRoot {
    /// Creates an empty block list.
    pub fn new() -> BlocksRoot {
        BlocksRoot { blocks: Vec::new() }
    }

    /// Appends a divider block; returns `&mut Self` for chaining.
    pub fn add_divider(&mut self) -> &mut Self {
        let divider = Divider {
            block_type: "divider".to_owned(),
        };
        self.add_block(Block::Divider(divider));
        self
    }

    /// Appends a section block; returns `&mut Self` for chaining.
    pub fn add_section(&mut self, text: Text, accessory: AccessoryImage) -> &mut Self {
        let section = Section {
            block_type: "section".to_owned(),
            text,
            accessory,
        };
        self.add_block(Block::Section(section));
        self
    }

    /// Appends an actions block; returns `&mut Self` for chaining.
    pub fn add_action(&mut self, actions: Vec<Action>) -> &mut Self {
        let actions = Actions {
            block_type: "actions".to_owned(),
            elements: actions,
        };
        self.add_block(Block::Actions(actions));
        self
    }

    // Single append point shared by the add_* helpers.
    fn add_block(&mut self, block: Block) {
        self.blocks.push(block);
    }
}
//! Demo: parse `/etc/passwd` with csv, store records in a sled tree keyed
//! by bincode-serialised username, then read one record back.
extern crate bincode;
extern crate csv;
#[macro_use]
extern crate serde_derive;
extern crate sled;

use std::io;

use bincode::{deserialize, serialize};
use csv::ReaderBuilder;
use sled::{ConfigBuilder, Tree};

/// One `/etc/passwd` record (seven colon-delimited fields).
#[derive(Debug, Serialize, Deserialize)]
struct Passwd {
    username: String,
    password: String,
    uid: i32,
    gid: i32,
    gecos: String,
    home: String,
    shell: String,
}

/// Opens (or creates) the sled tree backing `./sled.db`.
fn open_sled() -> Result<Tree, io::Error> {
    let config = ConfigBuilder::new().path("./sled.db").build();
    let tree = Tree::start(config);
    match tree {
        Ok(t) => Ok(t),
        // sled's own error detail is collapsed into a generic io::Error.
        Err(_) => Err(io::Error::new(io::ErrorKind::Other, "Error Opening Tree")),
    }
}

/// Streams `/etc/passwd` (colon-delimited, headerless) and stores each
/// record under its bincode-serialised username.
fn read_passwd(tree: &Tree) -> Result<(), io::Error> {
    let mut rdr = ReaderBuilder::new()
        .delimiter(b':')
        .has_headers(false)
        .from_path("/etc/passwd")
        .unwrap();
    for row in rdr.deserialize() {
        let usr: Passwd = row?;
        println!("User: {:?}", usr);
        tree.set(serialize(&usr.username).unwrap(), serialize(&usr).unwrap())
            .unwrap();
    }
    Ok(())
}

fn main() {
    let tree = open_sled().expect("DB Error");
    match read_passwd(&tree) {
        Err(err) => println!("Error: {:?}", err),
        _ => {}
    };
    // Demo lookup. NOTE(review): "context" is presumably an account present
    // on the author's machine — confirm the intended key.
    let k = serialize(&"context").unwrap();
    let rec = tree.get(&k).unwrap();
    match rec {
        Some(r) => {
            let u: Passwd = deserialize(&r).unwrap();
            println!("Pulled User: {:?}", u);
        }
        None => println!("User not found"),
    }
}
use std::collections::HashMap;

/// Symbol table mapping Hack assembly symbols to memory addresses.
#[derive(Debug)]
pub struct SymbolTable(HashMap<String, usize>);

impl SymbolTable {
    /// Builds a table pre-populated with the predefined Hack symbols:
    /// the pointer registers, `R0`-`R15`, `SCREEN` and `KBD`.
    pub fn new() -> Self {
        let named = [
            ("SP", 0),
            ("LCL", 1),
            ("ARG", 2),
            ("THIS", 3),
            ("THAT", 4),
            ("SCREEN", 16384),
            ("KBD", 24576),
        ];
        let registers = (0..=15).map(|i| (format!("R{}", i), i));
        let table = named
            .iter()
            .map(|&(symbol, address)| (symbol.to_string(), address))
            .chain(registers)
            .collect();
        SymbolTable(table)
    }

    /// Inserts (or overwrites) the address bound to `s`.
    pub fn add_entry(&mut self, s: &str, n: usize) {
        self.0.insert(s.to_string(), n);
    }

    /// Returns true when `s` already has an address.
    pub fn contains(&self, s: &str) -> bool {
        self.0.contains_key(s)
    }

    /// Looks up the address of `s`; panics if the symbol is unknown,
    /// so call `contains` first.
    pub fn get_address(&self, s: &str) -> usize {
        *self.0.get(s).unwrap()
    }
}
impl Solution {
    /// LeetCode 1047: repeatedly removes adjacent equal characters until
    /// none remain, using a byte stack (each byte either cancels the top
    /// of the stack or is pushed).
    ///
    /// Fix: the previous version used `String::from_utf8_unchecked`, which
    /// is undefined behaviour if pair-removal ever splits a multibyte
    /// sequence. The problem guarantees lowercase ASCII, so the checked
    /// `from_utf8` costs one O(n) validation and removes the `unsafe`.
    pub fn remove_duplicates(s: String) -> String {
        // Pre-allocate: the stack never exceeds the input length.
        let mut stack: Vec<u8> = Vec::with_capacity(s.len());
        for byte in s.bytes() {
            if stack.last() == Some(&byte) {
                // Adjacent duplicate: cancel both.
                stack.pop();
            } else {
                stack.push(byte);
            }
        }
        String::from_utf8(stack).expect("input is guaranteed to be ASCII")
    }
}
use std::fs::File;

use super::data::{Data, Storage};
use errors::Result;

/// Accumulates bytes in memory and produces a memory-backed `Data`.
pub struct DataBuilder {
    buffer: Vec<u8>,
}

impl DataBuilder {
    /// Creates a builder with an empty buffer.
    pub fn new() -> Self {
        DataBuilder { buffer: Vec::new() }
    }

    /// Copies a blob's payload into the buffer. `Path`-backed storage is
    /// memory-mapped and then copied.
    pub fn write_blob(&mut self, data: &Data) -> Result<()> {
        // TODO: Assert that data is blob
        match data.storage() {
            &Storage::Memory(ref bytes) => self.write(&bytes[..]),
            &Storage::Path(ref path) => {
                // NOTE(review): Mmap::map is unsafe because the mapped file
                // must not be truncated/mutated concurrently — confirm
                // callers uphold that invariant.
                let mem = unsafe { ::memmap::Mmap::map(&File::open(&path.path)?) }?;
                self.write(&mem);
            }
        }
        Ok(())
    }

    /// Pre-reserves capacity for `size` upcoming bytes.
    pub fn set_size(&mut self, size: usize) {
        // If size bigger than a threshold, create directly a file
        self.buffer.reserve(size);
    }

    /// Appends raw bytes to the in-memory buffer.
    pub fn write(&mut self, data: &[u8]) {
        self.buffer.extend_from_slice(data);
    }

    /// Consumes the accumulated bytes — leaving the builder empty and
    /// reusable — and wraps them as memory-backed `Data`.
    pub fn build(&mut self) -> Data {
        Data::new(Storage::Memory(::std::mem::replace(
            &mut self.buffer,
            Vec::new(),
        )))
    }
}
//! MNIST training demo wired as a three-stage pipeline (input layer,
//! hidden layer, output layer). Each stage runs on its own thread pair:
//! forward activations flow left-to-right over crossbeam channels while
//! error gradients flow right-to-left over companion channels.
extern crate precod_compdag;
use precod_compdag::mat_mul;
use precod_compdag::BranchNode;
use precod_compdag::InputWithErrorBackProp;
use precod_compdag::LeafNode;
use precod_compdag::OutputWithErrorBackProp;
use precod_compdag::RootNode;
use rand;
use rand::prelude::*;
use rand_distr::Uniform;
use std::borrow::BorrowMut;
use std::fmt::Display;
use std::fmt::Formatter;
use std::fs::File;
use std::io::prelude::*;
use std::io::Read;
use std::ops::Mul;
use std::path::Path;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;
use thiserror::Error;

// MNIST geometry and training hyper-parameters.
const IMAGE_DIM: usize = 28;
const MAX_U8_AS_F64: f64 = 256.0;
const VEC_DIM: usize = 784; // IMAGE_DIM * IMAGE_DIM
const MNIST_CLASSES: usize = 10;
const HIDDEN_LAYERS: usize = 128;
const EMPTY_VEC: Vec<usize> = Vec::<usize>::new();
const TOL: f64 = 1.0e-7;
const ERR_LEARNING_RATE: f64 = 0.4;
const WEIGHT_LEARNING_RATE: f64 = 0.1;

// NOTE(review): currently unused in this file; presumably kept for future
// fallible accessors.
#[derive(Error, Debug)]
enum Error {
    #[error("No error at index")]
    NoData(usize),
}

/// Raw 28x28 grayscale image.
#[derive(Debug, Clone)]
struct MNISTImage(mat_mul::Matrix<u8, IMAGE_DIM, IMAGE_DIM>);

/// Image flattened to a 784-dimensional f64 vector.
#[derive(Debug, Clone)]
struct MNISTVector(mat_mul::Vector<f64, VEC_DIM>);

/// One-hot boolean class label.
#[derive(Debug, Clone)]
struct MNISTClass(mat_mul::Vector<bool, MNIST_CLASSES>);

/// Real-valued class vector (softmax outputs / gradients).
#[derive(Debug)]
struct MNISTClassSmoother(mat_mul::Vector<f64, MNIST_CLASSES>);

/// Hidden-layer activation vector.
#[derive(Debug)]
struct Hidden(mat_mul::Vector<f64, HIDDEN_LAYERS>);

impl MNISTClass {
    // One-hot bool vector -> 0.0/1.0 float vector.
    fn smooth(&self) -> MNISTClassSmoother {
        let MNISTClass(data) = self;
        MNISTClassSmoother(data.map(|x| (*x as i32) as f64))
    }

    // Index of the single `true` entry (the labelled digit).
    fn get_index(&self) -> usize {
        let MNISTClass(data) = self;
        data.iter()
            .enumerate()
            .find(|(_, val)| **val)
            .map(|(index, _)| index)
            .expect("Should've had one non-zero entry")
    }
}

impl Default for MNISTClass {
    // Defaults to class 0 so `get_index` always has a hit.
    fn default() -> Self {
        let mut data = [false; MNIST_CLASSES];
        data[0] = true; // 0 < MNIST_CLASSES
        MNISTClass(mat_mul::Vector::from_iter(data.into_iter().map(|x| *x as i8)).map(|x| *x > 0))
    }
}

impl MNISTImage {
    // Flattens the u8 image into an f64 vector. NOTE(review): no
    // normalisation is applied — MAX_U8_AS_F64 looks intended for scaling
    // here; confirm.
    fn to_f32_arr(&self) -> MNISTVector {
        let MNISTImage(data) = self;
        MNISTVector(mat_mul::Vector::from_iter(
            data.iter().map(|datum| *datum as f64),
        ))
    }
}

// Maps a byte to a two-character ASCII-art grayscale cell.
fn u8_to_grey(u: &u8) -> &str {
    match *u / 64 {
        3 => "▓▓",
        2 => "▒▒",
        1 => "░░",
        _ => "  ",
    }
}

impl Display for MNISTImage {
    // Renders the image as ASCII art, one terminal line per pixel row.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
        write!(f, "\n")?;
        self.0
            .row_iter()
            .map(|row| -> Result<(), std::fmt::Error> {
                row.iter()
                    .map(|col| -> Result<(), std::fmt::Error> {
                        let pixel = u8_to_grey(col);
                        write!(f, "{}", pixel)?;
                        Ok(())
                    })
                    .collect::<Result<Vec<()>, std::fmt::Error>>()?;
                write!(f, "\n")?;
                Ok(())
            })
            .collect::<Result<Vec<()>, std::fmt::Error>>()?;
        Ok(())
    }
}

fn main() {
    // Raw IDX files: 16-byte header for images, 8-byte header for labels.
    let path = Path::new("data/train-images-idx3-ubyte");
    let mut file = File::open(path).expect("Missing train set");
    let mut byte_vec = Vec::<u8>::new();
    file.read_to_end(&mut byte_vec).unwrap();
    let images = byte_vec[16..]
        .chunks(VEC_DIM)
        .map(|image_data| {
            let matrix_data: mat_mul::Matrix<u8, IMAGE_DIM, IMAGE_DIM> =
                mat_mul::Matrix::from_iter(image_data.iter().map(|x| *x));
            MNISTImage(matrix_data)
        })
        .collect::<Vec<_>>();
    let mut rng = rand::thread_rng();
    let test_label_path = Path::new("data/train-labels-idx1-ubyte");
    let file = File::open(test_label_path).expect("Missing train labels");
    let labels = file
        .bytes()
        .skip(8)
        .map(|b| {
            // Each label byte becomes a one-hot MNISTClass.
            let mut class: [bool; MNIST_CLASSES] = [false; MNIST_CLASSES];
            let index = b.unwrap();
            let value = class.get_mut(index as usize).unwrap();
            *value = true;
            MNISTClass(mat_mul::Vector::from_iter(class.iter().map(|x| *x as i8)).map(|x| *x > 0))
        })
        .collect::<Vec<_>>();
    println!("Hello, world! {:?}", &byte_vec[0..4]);
    // Print a few random samples as a sanity check.
    for _ in 0..4 {
        let index = (&mut rng).gen_range(0..images.len());
        println!("{}", &images[index]);
        println!("LABEL: {:?}", (&labels)[index].get_index());
    }
    // NOTE(review): sized VEC_DIM (784) although only MNIST_CLASSES (10)
    // slots are ever populated — likely should be [EMPTY_VEC; MNIST_CLASSES].
    let mut indexes_by_class = [EMPTY_VEC; VEC_DIM];
    for (i, class) in labels.iter().enumerate() {
        let class_index = class.get_index();
        let class_list = (&mut indexes_by_class)
            .get_mut(class_index)
            .expect("Def need to have an index less than VEC_DIM");
        class_list.push(i)
    }
    // Channel plumbing between the three pipeline stages: forward data
    // flows left-to-right, error gradients flow right-to-left.
    let (mnist_input_send, root_inp_rcv) = crossbeam::channel::unbounded::<MNISTVector>();
    let (hidden_err_sender, root_err_rcv) = crossbeam::channel::unbounded::<Hidden>();
    let (root_out_send, hidden_input_rcv) = crossbeam::channel::unbounded::<Hidden>();
    let (leaf_err_sender, hidde_err_rcv) = crossbeam::channel::unbounded::<MNISTClassSmoother>();
    let (hidden_output_sender, leaf_input_rcv) =
        crossbeam::channel::unbounded::<MNISTClassSmoother>();
    let (ground_truth_sender, leaf_ground_truth_rcv) = crossbeam::channel::unbounded::<MNISTClass>();
    let (progress_sender, progress_receiver) = crossbeam::channel::unbounded::<f64>();
    let mnist_input = RootNode {
        forward_input: root_inp_rcv,
        forward_output: OutputWithErrorBackProp {
            forward_data: root_out_send,
            reverse_error_prop: root_err_rcv,
        },
    };
    let mnist_hidden = BranchNode {
        input_channels: InputWithErrorBackProp {
            forward_data: hidden_input_rcv,
            reverse_error_prop: hidden_err_sender,
        },
        output_channels: OutputWithErrorBackProp {
            forward_data: hidden_output_sender,
            reverse_error_prop: hidde_err_rcv,
        },
    };
    let mnist_output = LeafNode {
        node_data: MNISTClassSmoother(mat_mul::Vector::new()),
        input_channels: InputWithErrorBackProp {
            forward_data: leaf_input_rcv,
            reverse_error_prop: leaf_err_sender,
        },
        ground_truth_channel: leaf_ground_truth_rcv,
    };
    // Input-layer stage: owns the VEC_DIM -> HIDDEN_LAYERS weights.
    let mnist_input_thr = thread::spawn(move || {
        let mut rng = thread_rng();
        let scale = (0.25 / ((VEC_DIM * HIDDEN_LAYERS) as i32 as f64)).sqrt();
        let RootNode {
            forward_input,
            forward_output:
                OutputWithErrorBackProp {
                    forward_data,
                    reverse_error_prop,
                },
        } = mnist_input;
        let input_layer_weights = mat_mul::Matrix::<f64, HIDDEN_LAYERS, VEC_DIM>::from_distribution(
            &mut rng,
            &Uniform::new(-scale, scale),
        );
        // State owned by the backprop half: weights + running predictions.
        struct BackwardOwned {
            weights: mat_mul::Matrix<f64, HIDDEN_LAYERS, VEC_DIM>,
            predictions: mat_mul::Vector<f64, VEC_DIM>,
        }
        // State owned by the forward half: latest input/activation/errors.
        struct ForwardOwned {
            input: mat_mul::Vector<f64, VEC_DIM>,
            activation: mat_mul::Vector<f64, HIDDEN_LAYERS>,
            prediction_errors: mat_mul::Vector<f64, VEC_DIM>,
        }
        let forward_owned_data = Arc::new(Mutex::new(ForwardOwned {
            input: mat_mul::Vector::new(),
            activation: mat_mul::Vector::new(),
            prediction_errors: mat_mul::Vector::new(),
        }));
        let read_forward_data_for_back = Arc::clone(&forward_owned_data);
        let backward_owned_data = Arc::new(Mutex::new(BackwardOwned {
            weights: input_layer_weights,
            predictions: mat_mul::Vector::new(),
        }));
        let read_backward_data_for_fore = Arc::clone(&backward_owned_data);
        // NOTE(review): the eight mutexes below are cloned but never read or
        // written afterwards — apparently superseded by the two structs above.
        let input_mutex = Arc::new(Mutex::new(mat_mul::Vector::<f64, VEC_DIM>::new()));
        let back_prop_input = Arc::clone(&input_mutex);
        let predictions_mutex = Arc::new(Mutex::new(mat_mul::Vector::<f64, VEC_DIM>::new()));
        let back_prop_predictions = Arc::clone(&predictions_mutex);
        let predictions_errors_mutex = Arc::new(Mutex::new(mat_mul::Vector::<f64, VEC_DIM>::new()));
        let back_prop_pred_err = Arc::clone(&predictions_errors_mutex);
        let activations_mutex = Arc::new(Mutex::new(mat_mul::Vector::<f64, HIDDEN_LAYERS>::new()));
        let back_prop_activations = Arc::clone(&activations_mutex);
        // backprop thread
        thread::spawn(move || {
            reverse_error_prop.iter().for_each(|error_from_next| {
                let Hidden(data) = error_from_next;
                let ForwardOwned {
                    input,
                    activation,
                    prediction_errors,
                } = &*read_forward_data_for_back.lock().unwrap();
                let BackwardOwned {
                    weights,
                    predictions,
                } = &mut *backward_owned_data.lock().unwrap();
                let backprop = weights.transpose()
                    * (activation.component_mul(&data)
                        + (&*activation * ((&*activation * &data) * -1.0)));
                // todo impl sub ops
                let delta = &*prediction_errors + (&backprop * (-1.0));
                delta
                    .iter()
                    .zip(prediction_errors.iter())
                    .zip(predictions.iter_mut())
                    .zip(backprop.iter())
                    .zip(weights.row_iter_mut())
                    .for_each(
                        |((((node_delta, prediction_error), prediction), backprop_row), row)| {
                            // Large surprise -> update prediction; otherwise
                            // nudge that row of the weight matrix.
                            if node_delta.abs() > 0.1 * prediction_error.abs() {
                                *prediction -= ERR_LEARNING_RATE * node_delta
                            } else {
                                let scale = -1.0
                                    * *backprop_row
                                    * WEIGHT_LEARNING_RATE
                                    * *prediction_error;
                                let boost = &*input * scale;
                                // println!("UPDATING WEIGHTS!");
                                // println!("{:?}, {:?}", row, boost);
                                *row += &boost;
                            }
                        },
                    );
            });
        });
        // Forward pass: exponentially-averaged input, tanh activation.
        forward_input.iter().for_each(|mnist_vec| {
            let ForwardOwned {
                input,
                activation,
                prediction_errors,
            } = &mut *forward_owned_data.lock().unwrap();
            let BackwardOwned {
                weights,
                predictions,
            } = &*read_backward_data_for_fore.lock().unwrap();
            let MNISTVector(data) = mnist_vec;
            *input += &data;
            *input *= 0.5;
            let logits = weights * &data;
            let output_vector = logits.map(|logit| logit.tanh());
            *prediction_errors = predictions + (&data * (-1.0));
            *activation = output_vector.clone();
            forward_data.send(Hidden(output_vector)).unwrap();
        });
    });
    // Hidden-layer stage: HIDDEN_LAYERS -> MNIST_CLASSES softmax layer.
    let hidden_layer_thr = thread::spawn(move || {
        let mut rng = thread_rng();
        println!("initialised next weights");
        let hidden_layer_data =
            mat_mul::Matrix::<f64, MNIST_CLASSES, HIDDEN_LAYERS>::from_distribution(
                &mut rng,
                &Uniform::new(0., 1.0 / ((HIDDEN_LAYERS * MNIST_CLASSES) as f64).sqrt()),
            );
        let hidden_layer_preds: mat_mul::Vector<f64, HIDDEN_LAYERS> =
            mat_mul::Vector::from_distribution(
                &mut rng,
                &Uniform::new(0., 1.0 / (HIDDEN_LAYERS as f64)),
            );
        // This should be doable with left_right. Probably need to implemnt two ops
        // ```
        // AddAssignWeights(mat_mul::Matrxic<f64, HIDDEN_LAYERS, MNIST_CLASSES>),
        // AddAssignPredictions(mat_mul::Vector<f64, HIDDEN_LAYERS>)
        // ```
        struct HiddenLayerBackOwned {
            weights: mat_mul::Matrix<f64, MNIST_CLASSES, HIDDEN_LAYERS>,
            predictions: mat_mul::Vector<f64, HIDDEN_LAYERS>,
        }
        struct HiddenLayerForwardOwned {
            input: mat_mul::Vector<f64, HIDDEN_LAYERS>,
            activation: mat_mul::Vector<f64, MNIST_CLASSES>,
            prediction_errors: mat_mul::Vector<f64, HIDDEN_LAYERS>,
        }
        let back_prop_data = Arc::new(Mutex::new(HiddenLayerBackOwned {
            weights: hidden_layer_data,
            predictions: hidden_layer_preds,
        }));
        let read_back_prop_for_forward = Arc::clone(&back_prop_data);
        let forward_data = Arc::new(Mutex::new(HiddenLayerForwardOwned {
            input: mat_mul::Vector::new(),
            activation: mat_mul::Vector::new(),
            prediction_errors: mat_mul::Vector::new(),
        }));
        let read_forward_data_for_back_prop = Arc::clone(&forward_data);
        let BranchNode {
            input_channels,
            output_channels,
        } = mnist_hidden;
        let OutputWithErrorBackProp {
            forward_data: output_forward,
            reverse_error_prop: output_backward,
        } = output_channels;
        let InputWithErrorBackProp {
            forward_data: input_forward,
            reverse_error_prop: input_backward,
        } = input_channels;
        // Error propagation thread
        thread::spawn(move || {
            output_backward.iter().for_each(|error_from_next| {
                let MNISTClassSmoother(data) = error_from_next;
                let HiddenLayerForwardOwned {
                    input,
                    activation,
                    prediction_errors,
                } = &*read_forward_data_for_back_prop.lock().unwrap();
                let HiddenLayerBackOwned {
                    weights,
                    predictions,
                } = &mut *back_prop_data.lock().unwrap();
                let backprop = weights.transpose()
                    * (activation.component_mul(&data)
                        + (&*activation * ((&*activation * &data) * -1.0)));
                // todo impl sub ops
                let delta = &*prediction_errors + (&backprop * (-1.0));
                delta
                    .iter()
                    .zip(prediction_errors.iter())
                    .zip(predictions.iter_mut())
                    .zip(backprop.iter())
                    .zip(weights.row_iter_mut())
                    .for_each(
                        |((((node_delta, prediction_error), prediction), backprop_row), row)| {
                            if node_delta.abs() > 0.1 * prediction_error.abs() {
                                *prediction -= ERR_LEARNING_RATE * node_delta
                            } else {
                                let scale = -1.0
                                    * *backprop_row
                                    * WEIGHT_LEARNING_RATE
                                    * *prediction_error;
                                let boost = &*input * scale;
                                // println!("UPDATING WEIGHTS!");
                                // println!("{:?}, {:?}", row, boost);
                                *row += &boost;
                            }
                        },
                    );
                // This is so bad
            });
        });
        // forward propagation
        input_forward.iter().for_each(|hidden_input| {
            let HiddenLayerForwardOwned {
                input,
                activation,
                prediction_errors,
            } = &mut *forward_data.lock().unwrap();
            let HiddenLayerBackOwned {
                weights,
                predictions,
            } = &*read_back_prop_for_forward.lock().unwrap();
            let Hidden(data) = hidden_input;
            *input += &data;
            // we exponentially average the stored input over time, to smooth out the weight updates
            *input *= 0.5;
            let new_prediction_errors = predictions + (&data * (-1.0));
            *prediction_errors = new_prediction_errors.clone();
            input_backward.send(Hidden(new_prediction_errors)).unwrap();
            // todo: implement vec * matrix to get transpose mult
            let logits = weights * &data;
            // Softmax over the logits. NOTE(review): numerically naive (no
            // max-subtraction), so large logits can overflow exp().
            let mut total = 0.0;
            let exps = logits.map(|logit| {
                let exp = logit.exp();
                total += exp;
                exp
            });
            let output_data = exps.map(|x| x / total);
            *activation = output_data.clone();
            output_forward
                .send(MNISTClassSmoother(output_data))
                .expect("Should do something");
        });
    });
    // Output stage: cross-entropy loss against the current ground truth.
    let mnist_output_thr = thread::spawn(move || {
        // NOTE(review): this rng is unused.
        let rng = thread_rng();
        let current_label = Arc::new(Mutex::new(MNISTClass::default()));
        let update_label = Arc::clone(&current_label);
        let LeafNode {
            input_channels,
            ground_truth_channel,
            node_data,
        } = mnist_output;
        // Label update thready .. updates from main training routine
        thread::spawn(move || {
            ground_truth_channel.iter().for_each(|new_ground_truth| {
                let mut updateable = update_label.lock().expect("can't update networkt output");
                *updateable = new_ground_truth;
            });
        });
        input_channels.forward_data.iter().for_each(|class_input| {
            let MNISTClassSmoother(data) = class_input;
            let guard = current_label.lock().expect("Couldn't get current_label");
            let MNISTClass(current_label_data) = &*guard;
            // Pick the prediction at the labelled (true) index.
            let (index, pred) = data
                .iter()
                .zip(current_label_data.iter())
                .enumerate()
                .find_map(|(index, (pred, bool_index))| {
                    if *bool_index {
                        Some((index, pred))
                    } else {
                        None
                    }
                })
                .unwrap();
            // Cross-entropy loss for the one-hot label.
            let loss = -pred.ln();
            //println!("LOSS: {:?}", loss);
            // println!("Index label: {:?}", index);
            // println!("prediction: {:?}", data);
            // println!("Updating progress");
            let mut back_prop_data = [0.0; MNIST_CLASSES];
            back_prop_data[index] = -1.0 / pred;
            // NOTE(review): this send's Result is discarded — a disconnected
            // error channel would be silently ignored.
            input_channels.reverse_error_prop.send(MNISTClassSmoother(
                mat_mul::Vector::from_iter(back_prop_data.iter().map(|x| *x)),
            ));
            progress_sender.send(loss).expect("Can't update progress");
        });
    });
    // Training loop: feed 40000 random samples of the digit "3".
    let threes = indexes_by_class.get(3).unwrap();
    let mut counter = 0;
    for _ in 0..40000 {
        let threes_index = (&mut rng).gen_range(0..threes.len());
        let index = *(&threes).get(threes_index).unwrap();
        mnist_input_send.send(images[index].to_f32_arr()).unwrap();
        ground_truth_sender.send(labels[index].clone()).unwrap();
        counter += 1;
        //println!("{}", &images[index]);
        //println!("LABEL: {:?}", (&labels)[index].get_index());
    }
    // NOTE(review): the JoinHandles (mnist_input_thr, hidden_layer_thr,
    // mnist_output_thr) are never joined; main exits after draining the
    // progress channel below.
    let mut processed = 0;
    progress_receiver.iter().take(counter).for_each(|loss| {
        processed += 1;
        if processed % 100 == 0 {
            println!("processed {:?}. Loss: {:?}", processed, loss);
        }
    });
}
extern crate libc;

use std::process::exit;
use std::env;
use common::utils::{ compile_file, do_file };

mod parser;
mod common;
mod lexer;
mod vm;

/// What the CLI was asked to do, as decoded from the argument list.
enum Exec {
    PrintBytecodeRecursive(String),
    PrintBytecode(String),
    DoFile(String),
    Exit
}

/// Turns an optional positional argument into an owned `String`,
/// or the given error message if it is absent.
fn get_name(name: Option<&String>, err: &str) -> Result<String, String> {
    name.map(|n| n.to_string()).ok_or_else(|| err.to_string())
}

/// Decodes `args` (argv-style, program name first) into an `Exec` action.
/// With no arguments at all, prints usage and returns `Exec::Exit`.
fn parse_args(args: Vec<String>) -> Result<Exec, String> {
    let first = match args.get(1) {
        Some(first) => first,
        None => {
            println!("{} [options] filename
Options:
    -l  print bytecode of main function
    -ll print bytecode of main function and all sub functions
", args.get(0).unwrap_or(&"moon".to_string()));
            return Ok(Exec::Exit);
        }
    };
    match first.trim() {
        "-l" => {
            let name = get_name(args.get(2), "expected file name")?;
            Ok(Exec::PrintBytecode(name.trim().to_string()))
        }
        "-ll" => {
            let name = get_name(args.get(2), "expected file name")?;
            Ok(Exec::PrintBytecodeRecursive(name.trim().to_string()))
        }
        // Anything else is treated as the script to run (untrimmed, as given).
        _ => Ok(Exec::DoFile(first.to_string())),
    }
}

/// Compiles `name` and pretty-prints its bytecode; `recursive` controls
/// whether nested functions are printed too.
fn print_bytecode(name: String, recursive: bool) -> Result<(), String> {
    let closure = compile_file(name)?;
    vm::pretty_print_closure(closure, recursive);
    Ok(())
}

/// Entry point behind `main`; all errors are surfaced as strings.
fn run() -> Result<(), String> {
    let args = env::args().collect::<Vec<String>>();
    match parse_args(args)? {
        Exec::DoFile(name) => do_file(name),
        Exec::PrintBytecode(name) => print_bytecode(name, false),
        Exec::PrintBytecodeRecursive(name) => print_bytecode(name, true),
        Exec::Exit => Ok(()),
    }
}

fn main() {
    if let Err(message) = run() {
        eprintln!("{}", message);
        exit(1)
    }
}
extern crate geo; extern crate line_intersection; use line_intersection::{LineInterval, LineRelation}; use geo::{Coordinate, Line, Point}; use crate::GCode; // TODO: Figure out a better spot for this - Austin Haskell #[derive(Debug, Copy, Clone)] pub struct Rectangle { pub quad_x: i32, pub quad_y: i32, pub x: f32, pub y: f32, pub height: f32, pub width: f32 } // Explanation found here: https://en.wikipedia.org/wiki/Cohen%E2%80%93Sutherland_algorithm // Note that the code below does not pub fn find_intersection_points_for_rectangle(line: ((f32, f32),(f32, f32)), rectangle: Rectangle) -> Vec<(f32, f32)> { let mut expanded_rectangle: Vec<((f32, f32), (f32, f32))> = Vec::new(); expanded_rectangle.push(((rectangle.x, rectangle.y), (rectangle.x, rectangle.y + rectangle.height))); expanded_rectangle.push(((rectangle.x, rectangle.y), (rectangle.x + rectangle.width, rectangle.y))); expanded_rectangle.push(((rectangle.x + rectangle.width, rectangle.y), (rectangle.x + rectangle.width, rectangle.y + rectangle.height))); expanded_rectangle.push(((rectangle.x, rectangle.y + rectangle.height), (rectangle.x + rectangle.width, rectangle.y + rectangle.height))); let mut intersection_points: Vec<(f32, f32)> = Vec::new(); for p in expanded_rectangle { let intersection = find_intersection_point_of_lines(line, (p.0, p.1)); if intersection.is_none() { continue; } intersection_points.push(intersection.unwrap()); } intersection_points } pub fn find_intersection_point_of_lines(line1: ((f32, f32), (f32, f32)), line2: ((f32, f32), (f32, f32))) -> Option<(f32, f32)> { let line_segment_1 = LineInterval::line_segment( Line { start: ((line1.0).0, (line1.0).1).into(), end: ((line1.1).0, (line1.1).1).into() }); let line_segment_2 = LineInterval::line_segment( Line { start: ((line2.0).0, (line2.0).1).into(), end: ((line2.1).0, (line2.1).1).into() }); let line_relationships = line_segment_1.relate(&line_segment_2); if line_relationships == LineRelation::DivergentDisjoint || line_relationships == 
LineRelation::Parallel || line_relationships == LineRelation::Collinear{ return None; } let intersection_point = line_relationships.unique_intersection().unwrap(); Some((intersection_point.x(), intersection_point.y())) } pub fn is_quadrant_adjacent(quadrant_base: (i32, i32), quadrant_to_check: (i32, i32)) -> bool { let diff = (quadrant_base.0 - quadrant_to_check.0, quadrant_base.1 - quadrant_to_check.1); if diff.0 > 1 || diff.0 < -1{ return false; } if diff.1 > 1 || diff.1 < -1 { return false; } true } pub fn build_two_parameter_command(word: GCode::Word, x: f32, y: f32) -> GCode::GCode { GCode::GCode { command: word, x: x, y: y, z: 0.0 } } pub fn point_to_move_cmd(point: (f32, f32)) -> GCode::GCode { build_two_parameter_command(GCode::Word { letter: 'G', value: 1 }, point.0, point.1) } pub fn point_to_move_quadrant_cmd(quadrant: (i32, i32)) -> GCode::GCode { build_two_parameter_command(GCode::Word { letter: 'Q', value: 1 }, quadrant.0 as f32, quadrant.1 as f32) } #[test] pub fn does_line_cross_rectangle_crosses() { let rect = Rectangle { height: 16.0, width: 16.0, x: 0.0, y: 0.0, quad_x: 0, quad_y: 0 }; let point1: (f32, f32) = (5.0, 5.0); let point2: (f32, f32) = (18.0, 5.0); let actual = find_intersection_points_for_rectangle((point1, point2), rect); assert!(actual.len() != 0); } #[test] pub fn does_line_cross_rectangle_doesnt_cross() { let rect = Rectangle { height: 16.0, width: 16.0, x: 0.0, y: 0.0, quad_x: 0, quad_y: 0 }; let point1: (f32, f32) = (16.5, 16.5); let point2: (f32, f32) = (18.0, 18.0); let actual = find_intersection_points_for_rectangle((point1, point2), rect); assert!(actual.len() == 0); } #[test] pub fn find_intersection_point_of_lines_does_not_intersect() { let line1: ((f32, f32), (f32, f32)) = ((0.0, 0.0), (10.0, 10.0)); let line2: ((f32, f32), (f32, f32)) = ((12.0, 12.0), (20.0, 20.0)); assert!(find_intersection_point_of_lines(line1, line2).is_none()); } #[test] pub fn find_intersection_point_of_lines_does_intersect() { let line1: ((f32, 
f32), (f32, f32)) = ((0.0, 0.0), (10.0, 10.0)); let line2: ((f32, f32), (f32, f32)) = ((0.0, 6.0), (2.0, 0.0)); assert!(find_intersection_point_of_lines(line1, line2).is_some()); } #[test] fn IsQuadrantAdjacent_IsAdjacent() { let q1: (i32, i32) = (0, 0); let q2: (i32, i32) = (-1, 0); let q3: (i32, i32) = (0, -1); let q4: (i32, i32) = (0, 1); let q5: (i32, i32) = (1, 0); assert!(is_quadrant_adjacent(q1, q2)); assert!(is_quadrant_adjacent(q1, q3)); assert!(is_quadrant_adjacent(q1, q4)); assert!(is_quadrant_adjacent(q1, q5)); } #[test] fn IsQuadrantAdjacent_IsNotAdjacent() { let q1: (i32, i32) = (0, 0); let q2: (i32, i32) = (-2, 0); let q3: (i32, i32) = (0, -5); let q4: (i32, i32) = (0, 10); let q5: (i32, i32) = (10, 0); assert!(!is_quadrant_adjacent(q1, q2)); assert!(!is_quadrant_adjacent(q1, q3)); assert!(!is_quadrant_adjacent(q1, q4)); assert!(!is_quadrant_adjacent(q1, q5)); }
// https://adventofcode.com/2017/day/18
// Duet: emulate an assembly-like language. Star 1 treats snd/rcv as
// sound played/recovered; star 2 runs two program instances that pass
// values to each other over queues until both are blocked.

use std::io::{BufRead, BufReader};
use std::fs::File;
use std::collections::VecDeque;

fn main() {
    let f = BufReader::new(File::open("input.txt").expect("Opening input.txt failed"));

    // Parse instructions
    let mut program = Vec::new();
    for line in f.lines() {
        // Get instruction and target register
        let raw_line = line.expect("Reading line failed");
        let split: Vec<&str> = raw_line.split(' ').collect();
        let instruction = split[0];
        let target = split[1].chars().nth(0).expect("No target register");

        // Push instruction
        match instruction {
            "snd" => program.push(Instr::Snd(get_reg(target))),
            "rcv" => program.push(Instr::Rcv(get_reg(target))),
            "set" => program.push(Instr::Set(get_reg(target), parse_oper(split[2]))),
            "add" => program.push(Instr::Add(get_reg(target), parse_oper(split[2]))),
            "mul" => program.push(Instr::Mul(get_reg(target), parse_oper(split[2]))),
            "mod" => program.push(Instr::Mod(get_reg(target), parse_oper(split[2]))),
            "jgz" => program.push(Instr::Jgz(parse_oper(split[1]), parse_oper(split[2]))),
            _ => panic!("Invalid instruction"),
        }
    }

    // Execute first star: run a single instance. `snd` pushes into `snd`,
    // which we immediately drain into `rcv`, so `rcv.back()` always holds
    // the last sound played.
    let mut reg = [0i64; 5];
    let mut pc = 0;
    let mut snd = VecDeque::new();
    let mut rcv = VecDeque::new();
    while pc < program.len() {
        // Track last emitted sound (before the step possibly consumes it)
        let prev_len = rcv.len();
        let prev_sound = match rcv.back() {
            Some(&val) => Some(val),
            None => None,
        };

        // Step program
        let (new_pc, _) = step(pc, &mut reg, &mut snd, &mut rcv, &program);

        // Move sent messages to other buffer or check for receive
        if !snd.is_empty() {
            rcv.push_back(snd.pop_front().unwrap());
        } else if rcv.len() < prev_len {
            // The step executed a `rcv` (queue shrank): the first recovered
            // sound is the previously played one — print it and stop.
            let sound = prev_sound.unwrap();
            // Assert to facilitate further tweaks
            assert_eq!(1187, sound);
            println!("First sound received was {}", sound);
            break;
        }

        // Set new program counter
        pc = new_pc;
    }

    // Execute second star: two instances with program ids 0 and 1
    // (register 'p' — index 4 — starts at the id). Each sends into the
    // other's receive queue.
    let mut pc0 = 0;
    let mut reg0 = [0, 0, 0, 0, 0];
    let mut queue0 = VecDeque::new();
    let mut pc1 = 0;
    let mut reg1 = [0, 0, 0, 0, 1];
    let mut queue1 = VecDeque::new();
    let mut sends = 0;
    while pc0 < program.len() && pc1 < program.len() {
        // Step program 0 (sends into queue1, receives from queue0)
        let (new_pc0, waiting1) = step(pc0, &mut reg0, &mut queue1, &mut queue0, &program);

        // Step program 1, track sends (queue0 grows when program 1 sends)
        let prev_len = queue0.len();
        let (new_pc1, waiting2) = step(pc1, &mut reg1, &mut queue0, &mut queue1, &program);
        if prev_len < queue0.len() {
            sends += 1;
        }

        // Stop execution on deadlock (both instances blocked on receive)
        if waiting1 && waiting2 {
            break;
        }

        // Set program counters
        pc0 = new_pc0;
        pc1 = new_pc1;
    }

    // Assert to facilitate further tweaks
    assert_eq!(5969, sends);
    println!("Program 1 sent a value {} times", sends);
}

// Executes the instruction at program[pc] and returns (next pc, waiting).
// `waiting` is true when the instruction was a `rcv` with an empty queue;
// in that case pc is left unchanged so the instruction retries next step.
fn step(
    pc: usize,
    registers: &mut [i64; 5],
    snd: &mut VecDeque<i64>,
    rcv: &mut VecDeque<i64>,
    program: &Vec<Instr>,
) -> (usize, bool) {
    let mut pc = pc;
    let mut waiting = false;
    match program[pc] {
        // Send value in target register
        Instr::Snd(target) => snd.push_back(registers[target]),
        // Receive value to target register
        Instr::Rcv(target) => if rcv.is_empty() {
            // Wait if queue is empty (the pc += 1 below re-adds this 1)
            pc -= 1;
            waiting = true;
        } else {
            registers[target] = rcv.pop_front().unwrap();
        },
        // Sets target register to given value
        Instr::Set(target, ref operand) => match *operand {
            Oper::Reg(source) => registers[target] = registers[source],
            Oper::Val(val) => registers[target] = val,
        },
        // Adds given value to target register
        Instr::Add(target, ref operand) => match *operand {
            Oper::Reg(source) => registers[target] += registers[source],
            Oper::Val(val) => registers[target] += val,
        },
        // Multiplies target register with given value
        Instr::Mul(target, ref operand) => match *operand {
            Oper::Reg(source) => registers[target] *= registers[source],
            Oper::Val(val) => registers[target] *= val,
        },
        // Stores remainder of register divided by given value to the register
        Instr::Mod(target, ref operand) => match *operand {
            Oper::Reg(source) => registers[target] %= registers[source],
            Oper::Val(val) => registers[target] %= val,
        },
        // Jumps with given offset if condition value is greater than zero
        Instr::Jgz(ref cond, ref oper) => {
            let lhs = match *cond {
                Oper::Reg(target) => registers[target],
                Oper::Val(val) => val,
            };
            if lhs > 0 {
                let offset = match *oper {
                    Oper::Reg(source) => registers[source],
                    Oper::Val(val) => val,
                };
                pc = (pc as i64 + offset - 1) as usize; // Subtract 1 to cancel the unconditional pc increment below
            }
        }
    }
    pc += 1;
    (pc, waiting)
}

// Maps the register names used by this puzzle input to array indices.
fn get_reg(name: char) -> usize {
    match name {
        'a' => 0,
        'b' => 1,
        'f' => 2,
        'i' => 3,
        'p' => 4,
        _ => panic!("Invalid register {}", name),
    }
}

// Parses an operand: a literal i64 value, otherwise a register name.
fn parse_oper(operand: &str) -> Oper {
    // Try to parse a value from the given operand
    match operand.parse::<i64>() {
        Ok(val) => Oper::Val(val), // Success
        Err(_) => {
            // Not a value, try getting register name
            let reg_name = operand.chars().nth(0).expect("No operand to parse");
            Oper::Reg(get_reg(reg_name))
        }
    }
}

// One instruction of the Duet language; register targets are pre-resolved
// to array indices by get_reg.
#[derive(Debug)]
enum Instr {
    Snd(usize),
    Rcv(usize),
    Set(usize, Oper),
    Add(usize, Oper),
    Mul(usize, Oper),
    Mod(usize, Oper),
    Jgz(Oper, Oper),
}

// An operand: immediate value or register index.
#[derive(Debug)]
enum Oper {
    Val(i64),
    Reg(usize),
}
#![cfg_attr(not(feature = "std"), no_std)] use ink_lang as ink; #[ink::contract(version = "0.1.0")] mod idata { #[cfg(not(feature = "ink-as-dependency"))] use ink_core::env::call::*; use ink_core::env::EnvError; use ink_core::storage; use ink_prelude::vec::Vec; //iflow const GET_INTERPRETER: [u8; 4] = [0x54, 0xBC, 0xAE, 0x12]; //interpreter const EXECUTE_ELEMENTS: [u8; 4] = [0xB8, 0x66, 0x1E, 0xE4]; #[cfg_attr(feature = "ink-generate-abi", derive(type_metadata::Metadata))] #[derive(scale::Encode, scale::Decode)] pub enum Errors { EnviromentError, Other, CheckInError, CheckOutError, } impl From<EnvError> for Errors { fn from(_: EnvError) -> Self { Errors::EnviromentError } } #[ink(storage)] struct Idata { tokens_on_edges: storage::Value<u128>, started_activities: storage::Value<u128>, idata_parent: storage::Value<AccountId>, iflow_node: storage::Value<AccountId>, index_in_parent: storage::Value<u128>, children: storage::HashMap<u128, Vec<AccountId>>, instance_count: storage::HashMap<u128, u128>, } impl Idata { /// Initializes the value to the initial value. #[ink(constructor)] fn new(&mut self) { self.tokens_on_edges.set(0); self.started_activities.set(0); self.idata_parent.set(AccountId::default()); self.iflow_node.set(AccountId::default()); self.index_in_parent.set(0); } #[ink(message)] fn set_activity_marking(&mut self, n_marking: u128) { self.started_activities.set(n_marking); } #[ink(message)] fn set_marking(&mut self, n_marking: u128) { self.tokens_on_edges.set(n_marking); } #[ink(message)] fn set_parent(&mut self, parent: AccountId, child_flow: AccountId, element_index: u128) { self.index_in_parent.set(element_index); self.idata_parent.set(parent); self.iflow_node.set(child_flow); } #[ink(message)] fn add_child(&mut self, element_index: u128, child: AccountId) { self.children .mutate_with(&element_index, |children| children.push(child)); self.instance_count .mutate_with(&element_index, |count| *count += 1); } /// Returns the current state. 
#[ink(message)] fn get_marking(&self) -> u128 { *self.tokens_on_edges } #[ink(message)] fn get_started_activities(&self) -> u128 { *self.started_activities } #[ink(message)] fn get_instance_count(&self, element_index: u128) -> u128 { *self.instance_count.get(&element_index).unwrap_or(&0) } #[ink(message)] fn decrease_instance_count(&mut self, element_index: u128) -> u128 { *self.instance_count .mutate_with(&element_index, |count| *count -= 1) .unwrap_or(&0) } #[ink(message)] fn set_instance_count(&mut self, element_index: u128, instance_count: u128) { self.instance_count.insert(element_index, instance_count); } #[ink(message)] fn get_index_in_parent(&self) -> u128 { *self.index_in_parent } #[ink(message)] fn get_child_process_instance(&self, element_index: u128) -> Vec<AccountId> { self.children .get(&element_index) .unwrap_or(&Vec::default()) .clone() } #[ink(message)] fn get_child_flow_instance(&self) -> AccountId { *self.iflow_node } #[ink(message)] fn get_parent(&self) -> AccountId { *self.idata_parent } #[ink(message)] fn continue_execution(&self, element_index: u128) -> Result<(), Errors> { let get_interpreter_selector = Selector::from(GET_INTERPRETER); let execute_elements_selector = Selector::from(EXECUTE_ELEMENTS); let interpreter = CallParams::<EnvTypes, AccountId>::eval( self.get_child_flow_instance(), get_interpreter_selector, ) .fire()?; CallParams::<EnvTypes, Result<(), Errors>>::eval( self.get_child_flow_instance(), execute_elements_selector, ) .push_arg::<AccountId>(&self.env().caller()) .push_arg::<u128>(&element_index) .fire()? } } } pub use crate::idata::{Errors, Idata};
extern crate iui;
extern crate ui_sys;
use iui::controls::{Area, AreaDrawParams, AreaHandler, HorizontalBox, LayoutStrategy};
use iui::draw::{Brush, FillMode, Path, SolidBrush};
use iui::prelude::*;
use std::f64::consts::PI;

/// Area handler that paints a solid background and then one sine period
/// rendered as 100 vertical bars.
struct HandleCanvas {}
impl AreaHandler for HandleCanvas {
    fn draw(&mut self, _area: &Area, draw_params: &AreaDrawParams) {
        let ctx = &draw_params.context;
        let width = draw_params.area_width;
        let height = draw_params.area_height;

        // Background: a single rectangle covering the whole drawing area.
        let background = Path::new(ctx, FillMode::Winding);
        background.add_rectangle(ctx, 0., 0., width, height);
        background.end(ctx);
        let background_brush = Brush::Solid(SolidBrush {
            r: 0.2,
            g: 0.6,
            b: 0.8,
            a: 1.,
        });
        ctx.fill(&background, &background_brush);

        // Foreground: 100 bars whose heights trace (sin(2*pi*x) + 1) / 2.
        let wave = Path::new(ctx, FillMode::Winding);
        for step in 0..100 {
            let t = step as f64 / 100.;
            let amplitude = ((t * PI * 2.).sin() + 1.) / 2.;
            wave.add_rectangle(
                ctx,
                t * width,
                0.,
                width / 100.,
                amplitude * height,
            );
        }
        wave.end(ctx);
        let wave_brush = Brush::Solid(SolidBrush {
            r: 0.2,
            g: 0.,
            b: 0.3,
            a: 1.,
        });
        ctx.fill(&wave, &wave_brush);
    }
}

fn main() {
    // Initialize the UI toolkit and a small window without a menu bar.
    let ui = UI::init().expect("Couldn't initialize UI library");
    let mut window = Window::new(&ui, "Area Canvas Example", 200, 200, WindowType::NoMenubar);

    // A single stretchy canvas fills the window.
    let mut layout = HorizontalBox::new(&ui);
    let canvas = Area::new(&ui, Box::new(HandleCanvas {}));
    layout.append(&ui, canvas, LayoutStrategy::Stretchy);
    window.set_child(&ui, layout);

    window.show(&ui);
    ui.main();
}
use std::mem::MaybeUninit; use std::convert::TryInto; use std::sync::atomic::{AtomicBool, Ordering}; pub struct JitCodeDataPagePair { locked: AtomicBool, pub contents: *mut libc::c_void, } pub const PAGE_SIZE: usize = 4096; impl<'a> JitCodeDataPagePair { pub fn new() -> Self { unsafe { let mut contents : MaybeUninit<*mut libc::c_void> = MaybeUninit::uninit(); // avoid uninitalized warning libc::posix_memalign(contents.as_mut_ptr(), PAGE_SIZE, PAGE_SIZE * 2); let contents = contents.assume_init(); libc::mprotect(contents, PAGE_SIZE, libc::PROT_READ | libc::PROT_WRITE); libc::mprotect( (contents as *mut u8).offset(PAGE_SIZE as isize) as *mut libc::c_void, PAGE_SIZE, libc::PROT_READ | libc::PROT_WRITE ); libc::memset(contents, 0xc3, PAGE_SIZE * 2); // for now, prepopulate with 'RET' Self { contents, locked: AtomicBool::new(false) } } } pub unsafe fn lock(&mut self) -> i32 { self.locked.store(true, Ordering::SeqCst); libc::mprotect(self.contents, PAGE_SIZE, libc::PROT_EXEC | libc::PROT_READ) } pub unsafe fn unlock(&mut self) -> i32 { self.locked.store(false, Ordering::SeqCst); libc::mprotect(self.contents, PAGE_SIZE, libc::PROT_WRITE | libc::PROT_READ) } pub fn get_func_ptr<T>(&self, offset: usize) -> unsafe extern "C" fn() -> T { if !self.locked.load(Ordering::SeqCst) { panic!("Cannot run unlocked JitCodeDataPagePair"); } unsafe { std::mem::transmute((self.contents as *const u8).offset(offset.try_into().unwrap())) } } pub unsafe fn code_as_slice(&mut self) -> &'a mut [u8] { if self.locked.load(Ordering::SeqCst) { panic!("Cannot edit locked JitCodeDataPagePair"); } std::slice::from_raw_parts_mut(self.contents as _, PAGE_SIZE) } pub unsafe fn data_as_mut_slice<T: Sized>(&mut self) -> &'a mut [T] { std::slice::from_raw_parts_mut( (self.contents as *mut u8).offset(PAGE_SIZE as isize) as *mut T, PAGE_SIZE / std::mem::size_of::<T>() ) } pub unsafe fn data_as_slice<T: Sized>(&self) -> &'a [T] { std::slice::from_raw_parts( (self.contents as *const u8).offset(PAGE_SIZE as 
isize) as *const T, PAGE_SIZE / std::mem::size_of::<T>() ) } }
use rand::{thread_rng, Rng};

/// Picks a random Diffie-Hellman private key `a` in `[2, p)`.
pub fn private_key(p: u64) -> u64 {
    thread_rng().gen_range(2, p)
}

/// Computes `g^a mod p` by binary exponentiation (square-and-multiply).
///
/// Intermediate products are widened to `u128`: with plain `u64` arithmetic
/// `(result * g)` overflows (and panics in debug / wraps in release) as soon
/// as `p` exceeds `2^32`, silently corrupting the key exchange.
pub fn public_key(p: u64, g: u64, a: u64) -> u64 {
    let modulus = p as u128;
    let mut result: u128 = 1;
    let mut g = g as u128 % modulus;
    let mut a = a;
    while a > 0 {
        if a % 2 == 1 {
            result = (result * g) % modulus;
        }
        a >>= 1;
        g = (g * g) % modulus;
    }
    result as u64
}

/// Shared secret: `b_pub^a mod p` (same modular exponentiation).
pub fn secret(p: u64, b_pub: u64, a: u64) -> u64 {
    public_key(p, b_pub, a)
}
mod snapping_mech; // contains public snapping mechanism

extern crate rand;
extern crate rug;

// NOTE(review): none of these imports are referenced in this file; they look
// left over from an earlier version (snapping_mech has its own imports) —
// confirm before removing.
use rand::{random, Open01};
use rand::distributions::{IndependentSample, Range};
use std::f64;
use std::io;
use rug::ops::Pow;

// Implementation of laplacian noise generator which uses snapping mechanism in its place
// Demo driver: runs the snapping mechanism with a sample query result
// f(D) = 12.0 and noise scale lambda = 2.0.
fn main() {
    // Snapping Mechanism call: f(D), Lambda, B (clamping bound = 10000.0)
    let dp_noisy_output = snapping_mech::snapping_mechanism(12.0, 2.0, 10000.0);
    println!("Result of adding noise to query result f(D): ");
    println!("{}", dp_noisy_output);

    // Snapping Mechanism call: f(D), Lambda. B IS FIXED.
    let dp_noisy_output_2 = snapping_mech::snapping_mechanism_2(12.0, 2.0);
    println!("Result of adding noise to query result f(D): ");
    println!("{}", dp_noisy_output_2);

    // Run snapping mechanism 'x' number of times (here 1,000,000) to store
    // results in file for plotting
    snapping_mech::store_results(12.0, 2.0, 10000.0, 1000000);
}
use crate::{error::Error, Node}; use std::collections::{HashMap, HashSet}; use std::fmt; use std::hash::Hash; use std::sync::{Arc, RwLock}; pub type InnerDependencyMap<I> = HashMap<I, HashSet<I>>; pub type DependencyMap<I> = Arc<RwLock<InnerDependencyMap<I>>>; /// Dependency graph pub struct DepGraph<I> where I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static, { pub ready_nodes: Vec<I>, pub deps: DependencyMap<I>, pub rdeps: DependencyMap<I>, } impl<I> DepGraph<I> where I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static, { /// Create a new DepGraph based on a vector of edges. pub fn new(nodes: &[Node<I>]) -> Self { let (deps, rdeps, ready_nodes) = DepGraph::parse_nodes(nodes); DepGraph { ready_nodes, deps, rdeps, } } fn parse_nodes(nodes: &[Node<I>]) -> (DependencyMap<I>, DependencyMap<I>, Vec<I>) { let mut deps = InnerDependencyMap::<I>::default(); let mut rdeps = InnerDependencyMap::<I>::default(); let mut ready_nodes = Vec::<I>::default(); for node in nodes { deps.insert(node.id().clone(), node.deps().clone()); if node.deps().is_empty() { ready_nodes.push(node.id().clone()); } for node_dep in node.deps() { if !rdeps.contains_key(node_dep) { let mut dep_rdeps = HashSet::new(); dep_rdeps.insert(node.id().clone()); rdeps.insert(node_dep.clone(), dep_rdeps.clone()); } else { let dep_rdeps = rdeps.get_mut(node_dep).unwrap(); dep_rdeps.insert(node.id().clone()); } } } ( Arc::new(RwLock::new(deps)), Arc::new(RwLock::new(rdeps)), ready_nodes, ) } } impl<I> IntoIterator for DepGraph<I> where I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static, { type Item = I; type IntoIter = DepGraphIter<I>; fn into_iter(self) -> Self::IntoIter { DepGraphIter::<I>::new(self.ready_nodes.clone(), self.deps.clone(), self.rdeps) } } #[derive(Clone)] pub struct DepGraphIter<I> where I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static, { ready_nodes: Vec<I>, deps: DependencyMap<I>, rdeps: DependencyMap<I>, } impl<I> 
DepGraphIter<I> where I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static, { pub fn new(ready_nodes: Vec<I>, deps: DependencyMap<I>, rdeps: DependencyMap<I>) -> Self { Self { ready_nodes, deps, rdeps, } } } impl<I> Iterator for DepGraphIter<I> where I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static, { type Item = I; fn next(&mut self) -> Option<Self::Item> { if let Some(id) = self.ready_nodes.pop() { // Remove dependencies and retrieve next available nodes, if any. let next_nodes = remove_node_id::<I>(id.clone(), &self.deps, &self.rdeps).unwrap(); // Push ready nodes self.ready_nodes.extend_from_slice(&next_nodes); // Return the node ID Some(id) } else { // No available node None } } } /// Remove all references to the node ID in the dependencies. /// pub fn remove_node_id<I>( id: I, deps: &DependencyMap<I>, rdeps: &DependencyMap<I>, ) -> Result<Vec<I>, Error> where I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static, { let rdep_ids = { match rdeps.read().unwrap().get(&id) { Some(node) => node.clone(), // If no node depends on a node, it will not appear // in rdeps. None => Default::default(), } }; let mut deps = deps.write().unwrap(); let next_nodes = rdep_ids .iter() .filter_map(|rdep_id| { let rdep = match deps.get_mut(&rdep_id) { Some(rdep) => rdep, None => return None, }; rdep.remove(&id); if rdep.is_empty() { Some(rdep_id.clone()) } else { None } }) .collect(); // Remove the current node from the list of dependencies. deps.remove(&id); Ok(next_nodes) }
// Serde models for the Azure AD B2C management REST API.
// Generated file — regeneration will discard manual edits.
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

// A REST API operation exposed by the resource provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}
pub mod operation {
    use super::*;
    // Human-readable metadata of an operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
}
// Status of a long-running (async) operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AsyncOperationStatus {
    #[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<async_operation_status::Status>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<B2cTenantResourceProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<async_operation_status::Error>,
}
pub mod async_operation_status {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Succeeded,
        Pending,
        Failed,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Error {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub code: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub message: Option<String>,
    }
}
// Properties of an Azure AD B2C tenant resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct B2cTenantResourceProperties {
    #[serde(rename = "billingConfig", default, skip_serializing_if = "Option::is_none")]
    pub billing_config: Option<b2c_tenant_resource_properties::BillingConfig>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
}
pub mod b2c_tenant_resource_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct BillingConfig {
        #[serde(rename = "billingType", default, skip_serializing_if = "Option::is_none")]
        pub billing_type: Option<billing_config::BillingType>,
        #[serde(rename = "effectiveStartDateUtc", default, skip_serializing_if = "Option::is_none")]
        pub effective_start_date_utc: Option<String>,
    }
    pub mod billing_config {
        use super::*;
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum BillingType {
            #[serde(rename = "MAU")]
            Mau,
            Auths,
        }
    }
}
// SKU (pricing tier) of a B2C resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct B2cResourceSku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<b2c_resource_sku::Name>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<b2c_resource_sku::Tier>,
}
pub mod b2c_resource_sku {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        Standard,
        PremiumP1,
        PremiumP2,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Tier {
        A0,
    }
}
// Response of the check-name-availability endpoint.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NameAvailabilityResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(rename = "nameAvailable", default, skip_serializing_if = "Option::is_none")]
    pub name_available: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reason: Option<NameAvailabilityReason>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum NameAvailabilityReason {
    AlreadyExists,
    Invalid,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CheckNameAvailabilityRequestBody {
    pub name: String,
    #[serde(rename = "countryCode")]
    pub country_code: CountryCode,
}
// PATCH body for updating a tenant resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct B2cTenantUpdateRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<B2cResourceSku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<B2cTenantResourceProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
// A B2C tenant ARM resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct B2cTenantResource {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<b2c_tenant_resource::Type>,
    pub sku: B2cResourceSku,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<B2cTenantResourceProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
pub mod b2c_tenant_resource {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        #[serde(rename = "Microsoft.AzureActiveDirectory/b2cDirectories")]
        MicrosoftAzureActiveDirectoryB2cDirectories,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct B2cTenantResourceList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<B2cTenantResource>,
}
// PUT body for creating a tenant.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CreateTenantRequestBody {
    pub location: String,
    pub properties: create_tenant_request_body::Properties,
    pub sku: B2cResourceSku,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
pub mod create_tenant_request_body {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "createTenantProperties", default, skip_serializing_if = "Option::is_none")]
        pub create_tenant_properties: Option<CreateTenantProperties>,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CreateTenantProperties {
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "countryCode", default, skip_serializing_if = "Option::is_none")]
    pub country_code: Option<CountryCode>,
}
pub type CountryCode = String;
// Standard ARM error envelope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorResponse>,
    #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_info: Vec<ErrorAdditionalInfo>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub info: Option<serde_json::Value>,
}
// Public submodules of this module; their contents live in
// window.rs / dialog.rs (or the matching directories).
pub mod window;
pub mod dialog;
use super::nfa::{Nfa, NfaTable};
use std::{
    collections::{
        hash_map::Entry as HashEntry,
        BTreeSet,
        HashMap,
        HashSet,
        VecDeque,
    },
    fmt::{self, Debug, Write},
    hash::Hash,
};

/// Transition table of a DFA: state -> (input symbol -> successor state).
pub type DfaTable<T, S> = HashMap<S, HashMap<T, S>>;

/// Deterministic finite automaton over symbols `T` with states `S`.
pub struct Dfa<T, S> {
    // Every state with its outgoing transitions.
    states: DfaTable<T, S>,
    // Initial state.
    start: S,
    // Accepting states.
    accept: HashSet<S>,
}

impl<T: Hash + Eq + Debug, S: Hash + Eq + Debug> Dfa<T, S> {
    /// Renders the automaton in Graphviz DOT format.
    ///
    /// Accepting states are drawn with a double border (`peripheries=2`).
    /// Node/edge labels go through `{:?}` twice: the inner `format!` produces
    /// the Debug text, the outer `{:?}` quotes and escapes it so it is a
    /// valid DOT string literal.
    pub fn dot(&self) -> Result<String, fmt::Error> {
        let mut s = String::new();
        s.write_str("digraph{edge[arrowhead=normal,arrowtail=dot];")?;
        // Emit all node declarations first.
        for state in self.states.keys() {
            write!(s, "{:?}", format!("{:?}", state))?;
            if self.accept.contains(state) {
                s.write_str("[peripheries=2];")?;
            } else {
                s.write_str(";")?;
            }
        }
        // Then one edge per (state, symbol, successor) triple.
        for (state, outs) in &self.states {
            let state_str = format!("{:?}", state);
            for (by, to) in outs {
                let by_str = format!("{:?}", by);
                write!(
                    s,
                    "{:?}->{:?}[label={:?}];",
                    state_str,
                    format!("{:?}", to),
                    by_str
                )?;
            }
        }
        s.write_str("}")?;
        Ok(s)
    }
}

/// Subset (powerset) construction: builds a DFA whose states are sets of NFA
/// states, equivalent to the given NFA.
impl<T: Hash + Clone + Eq, S: Hash + Clone + Ord> From<Nfa<T, S>> for Dfa<T, BTreeSet<S>> {
    fn from(nfa: Nfa<T, S>) -> Self {
        // `head` is the NFA start state, `tail` its (single) accept state.
        let (nstates, head, tail) = nfa.into_parts();

        // Adds `state` plus its epsilon closure to `set`, breadth-first.
        // Epsilon transitions are the entries keyed by `None`.
        // NOTE(review): indexes `states[curr]` directly — assumes every
        // reachable state has a row in the table; confirm the NFA builder
        // guarantees that.
        fn collect_states<'a, T: Hash + Eq, S: Hash + Clone + Ord>(
            states: &NfaTable<T, S>,
            state: &S,
            set: &mut BTreeSet<S>,
        ) {
            let mut q = VecDeque::new();
            q.push_back(state);
            loop {
                let curr = match q.pop_front() {
                    Some(c) => c,
                    None => break,
                };
                // Only expand states we have not seen yet.
                if set.insert(curr.clone()) {
                    if let Some(outs) = states[curr].get(&None) {
                        for out in outs {
                            q.push_back(out);
                        }
                    }
                }
            }
        }

        let mut states = DfaTable::new();
        let mut accept = HashSet::new();
        let mut q = VecDeque::new();

        // DFA start state = epsilon closure of the NFA start state.
        let start = {
            let mut set = BTreeSet::new();
            collect_states(&nstates, &head, &mut set);
            set
        };
        q.push_back(start.clone());

        // Work-list loop: process each DFA state (a set of NFA states) once.
        loop {
            let curr = match q.pop_front() {
                Some(c) => c,
                None => break,
            };
            let outs = match states.entry(curr.clone()) {
                HashEntry::Vacant(v) => v.insert(HashMap::new()),
                // Already expanded — a state may be queued more than once.
                HashEntry::Occupied(_) => continue,
            };
            // Union, per input symbol, of the closures of all NFA successors.
            for state in &curr {
                for (by, tos) in nstates[state]
                    .iter()
                    .filter_map(|(b, t)| b.as_ref().map(|b| (b, t)))
                {
                    let new_tos = outs.entry(by.clone()).or_insert(BTreeSet::new());
                    for to in tos {
                        collect_states(&nstates, to, new_tos);
                    }
                }
            }
            let outs = &states[&curr]; // Gotta downgrade the reference to borrow states
            // Queue any successor DFA state we have not expanded yet.
            for to in outs.values() {
                if !states.contains_key(&to) {
                    q.push_back(to.clone());
                }
            }
            // A DFA state accepts iff it contains the NFA accept state.
            if curr.contains(&tail) {
                accept.insert(curr);
            }
        }

        Self {
            states,
            start,
            accept,
        }
    }
}
// Generated register-access code (svd2rust style) for CHAN_WORK_UPDATED.
#[doc = "Reader of register CHAN_WORK_UPDATED"]
pub type R = crate::R<u32, super::CHAN_WORK_UPDATED>;
#[doc = "Reader of field `CHAN_WORK_UPDATED`"]
pub type CHAN_WORK_UPDATED_R = crate::R<u16, u16>;
impl R {
    #[doc = "Bits 0:15 - If set the corresponding WORK register was updated, i.e. was already sampled during the current scan and, in case of Interleaved averaging, reached the averaging count. If this bit is low then either the channel is not enabled or the averaging count is not yet reached for Interleaved averaging."]
    #[inline(always)]
    pub fn chan_work_updated(&self) -> CHAN_WORK_UPDATED_R {
        // One flag bit per channel in the low 16 bits of the 32-bit register.
        CHAN_WORK_UPDATED_R::new((self.bits & 0xffff) as u16)
    }
}
extern crate dhc;

/// Entry point: initializes the `dhc` library, then polls its singleton
/// context forever, updating once every three seconds.
fn main() {
    dhc::init();
    let context = dhc::Context::instance();
    loop {
        context.update();
        let interval = std::time::Duration::from_millis(3000);
        std::thread::sleep(interval);
    }
}
extern crate clap;

use clap::{value_t, App, Arg};
use failure::Error;
use std::fs::File;
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::process;
use std::time::Instant;

// Binary metadata taken from Cargo at compile time.
static NAME: &str = env!("CARGO_PKG_NAME");
static VERSION: &str = env!("CARGO_PKG_VERSION");
static AUTHOR: &str = env!("CARGO_PKG_AUTHORS");
static DESCRIPTION: &str = env!("CARGO_PKG_DESCRIPTION");

/// CLI entry point: runs (or re-reads) a nix test file, formats the results
/// with the chosen reporter, and exits 0 on success / 1 on failure.
fn main() {
    // Start timing right away so the elapsed time covers the whole run.
    let now = Instant::now();
    let matches = App::new(NAME)
        .version(VERSION)
        .author(AUTHOR)
        .about(DESCRIPTION)
        .args(&[
            Arg::from_usage(
                "<TEST> +required 'A nix expression containing testcases.'",
            ),
            Arg::from_usage("-r, --reporter 'Reporter to display the test results.'")
                .default_value("Human")
                .possible_values(&nix_test_runner::Reporter::variants())
                .case_insensitive(true),
            Arg::from_usage("-o, --output=[FILE] 'Specify output file for test results.'"),
            Arg::from_usage(
                "--skip-run 'The given input file is already the test output as JSON.'",
            ),
        ])
        .get_matches();
    // `unwrap` is safe: "reporter" has a default value and validated variants.
    let reporter = value_t!(matches, "reporter", nix_test_runner::Reporter).unwrap();
    // `unwrap` is safe: <TEST> is a required positional argument.
    let test_file_path = PathBuf::from(matches.value_of("TEST").unwrap());
    let output = matches.value_of("output").map(|o| Path::new(o));
    let skip_run = matches.is_present("skip-run");
    assert!(
        test_file_path.exists(),
        "You need to provide an existing file."
    );
    // Either parse pre-computed JSON results, or actually run the tests.
    let test_result: Result<nix_test_runner::TestResult, _> = if skip_run {
        serde_json::from_reader(File::open(test_file_path).expect("could not open file"))
            .map_err(|e| e.into())
    } else {
        nix_test_runner::run(test_file_path)
    };
    match test_result {
        Ok(result) => {
            // Elapsed time is meaningless when we only re-read prior output.
            let now_for_elapsed_reporting = if skip_run { None } else { Some(now) };
            formatting(&result, reporter, output, now_for_elapsed_reporting).unwrap();
            process::exit(if result.successful() { 0 } else { 1 })
        }
        Err(err) => {
            io::stderr().write_all(err.to_string().as_bytes()).unwrap();
            process::exit(1)
        }
    }
}

/// Formats `result` with `reporter` and writes it either to stdout (no
/// `output`) or to the given file. `now`, when present, is the start instant
/// used to report elapsed time.
fn formatting(
    result: &nix_test_runner::TestResult,
    reporter: nix_test_runner::Reporter,
    output: Option<&Path>,
    now: Option<Instant>,
) -> Result<(), Error> {
    let formatted = result.format(now.map(|instant| instant.elapsed()), reporter)?;
    match output {
        None => io::stdout().write_all(formatted.as_bytes())?,
        Some(output_path) => {
            let display = output_path.display();
            let mut file = File::create(&output_path)?;
            file.write_all(formatted.as_bytes())?;
            println!("Successfully wrote to {}", display);
        }
    };
    Ok(())
}
use serde::Serialize; use common::error::Error; use common::result::Result; use crate::application::dtos::UserDto; use crate::domain::user::{UserId, UserRepository}; #[derive(Serialize)] pub struct GetAllResponse { pub users: Vec<UserDto>, } pub struct GetAll<'a> { user_repo: &'a dyn UserRepository, } impl<'a> GetAll<'a> { pub fn new(user_repo: &'a dyn UserRepository) -> Self { GetAll { user_repo } } pub async fn exec(&self, auth_id: String) -> Result<GetAllResponse> { let auth_user = self.user_repo.find_by_id(&UserId::new(auth_id)?).await?; if !auth_user.role().is("admin") { return Err(Error::unauthorized()); } let users = self.user_repo.find_all().await?; Ok(GetAllResponse { users: users.iter().map(|user| UserDto::from(user)).collect(), }) } }
use std::cell::Cell;

use futures::unsync::oneshot::{channel, Receiver};
use futures::sync::oneshot::{channel as sync_channel, Receiver as SyncReceiver};

use actor::{Actor, AsyncContext};
use handler::{Handler, ResponseType};
use context::AsyncContextApi;
use contextcells::ContextProtocol;
use envelope::{Envelope, ToEnvelope};
use message::Request;
use queue::{sync, unsync};

/// Trait giving access to an actor's address.
pub trait ActorAddress<A, T> where A: Actor {
    /// Returns the actor's address for the specific context.
    fn get(ctx: &mut A::Context) -> T;
}

// Resolve to the thread-local (non-Send) address.
impl<A> ActorAddress<A, Address<A>> for A
    where A: Actor,
          A::Context: AsyncContext<A> + AsyncContextApi<A>
{
    fn get(ctx: &mut A::Context) -> Address<A> {
        ctx.unsync_address()
    }
}

// Resolve to the cross-thread (Send) address.
impl<A> ActorAddress<A, SyncAddress<A>> for A
    where A: Actor,
          A::Context: AsyncContext<A> + AsyncContextApi<A>
{
    fn get(ctx: &mut A::Context) -> SyncAddress<A> {
        ctx.sync_address()
    }
}

// Resolve to both address flavors at once.
impl<A> ActorAddress<A, (Address<A>, SyncAddress<A>)> for A
    where A: Actor,
          A::Context: AsyncContext<A> + AsyncContextApi<A>
{
    fn get(ctx: &mut A::Context) -> (Address<A>, SyncAddress<A>) {
        (ctx.unsync_address(), ctx.sync_address())
    }
}

// Used when the caller does not need any address.
impl<A> ActorAddress<A, ()> for A where A: Actor {
    fn get(_: &mut A::Context) -> () {
        ()
    }
}

/// A clonable, type-erased handle that can deliver messages of type `M`.
pub trait Subscriber<M: 'static> {
    /// Send buffered message
    fn send(&self, msg: M) -> Result<(), M>;

    /// Create boxed clone of the current subscriber
    fn boxed(&self) -> Box<Subscriber<M>>;
}

/// Convenience impl to allow boxed Subscriber objects to be cloned using `Clone.clone()`.
impl<M: 'static> Clone for Box<Subscriber<M>> {
    fn clone(&self) -> Box<Subscriber<M>> {
        self.boxed()
    }
}

/// Address of the actor
///
/// The actor has to run in the same thread as the owner of the address.
pub struct Address<A> where A: Actor, A::Context: AsyncContext<A> {
    // Unbounded, thread-local channel into the actor's context.
    tx: unsync::UnboundedSender<ContextProtocol<A>>
}

impl<A> Clone for Address<A> where A: Actor, A::Context: AsyncContext<A> {
    fn clone(&self) -> Self {
        // Cloning an address only clones the underlying channel sender.
        Address{tx: self.tx.clone() }
    }
}

impl<A> Address<A> where A: Actor, A::Context: AsyncContext<A> {

    pub(crate) fn new(sender: unsync::UnboundedSender<ContextProtocol<A>>) -> Address<A> {
        Address{tx: sender}
    }

    /// Indicates if address is still connected to the actor.
    pub fn connected(&self) -> bool {
        self.tx.connected()
    }

    /// Send message `M` to actor `A`. Communication channel to the actor is unbounded.
    pub fn send<M>(&self, msg: M) where A: Handler<M>, M: ResponseType + 'static {
        // Fire-and-forget: no response channel; a send error is ignored.
        let _ = self.tx.unbounded_send(
            ContextProtocol::Envelope(Envelope::local(msg, None, false)));
    }

    /// Send message to actor `A` and asynchronously wait for a response.
    ///
    /// Communication channel to the actor is unbounded.
    ///
    /// If the returned `Request` object gets dropped, the message is cancelled.
    pub fn call<B, M>(&self, _: &B, msg: M) -> Request<B, M>
        where A: Handler<M>, B: Actor, M: ResponseType + 'static
    {
        let (tx, rx) = channel();
        let _ = self.tx.unbounded_send(
            ContextProtocol::Envelope(Envelope::local(msg, Some(tx), true)));

        Request::local(rx)
    }

    /// Send message to the actor `A` and asynchronously wait for a response.
    ///
    /// Communication channel to the actor is unbounded.
    ///
    /// If the returned `Receiver` object gets dropped, the message is cancelled.
    pub fn call_fut<M>(&self, msg: M) -> Receiver<Result<M::Item, M::Error>>
        where A: Handler<M>, M: ResponseType + 'static
    {
        let (tx, rx) = channel();
        let _ = self.tx.unbounded_send(
            ContextProtocol::Envelope(Envelope::local(msg, Some(tx), true)));

        rx
    }

    /// Upgrade address to SyncAddress.
    pub fn upgrade(&self) -> Receiver<SyncAddress<A>> {
        let (tx, rx) = channel();
        let _ = self.tx.unbounded_send(
            ContextProtocol::Upgrade(tx));

        rx
    }

    /// Get `Subscriber` for a specific message type
    pub fn subscriber<M>(&self) -> Box<Subscriber<M>>
        where A: Handler<M>, M: ResponseType + 'static
    {
        Box::new(Clone::clone(self))
    }
}

impl<A, M> Subscriber<M> for Address<A>
    where A: Actor + Handler<M>,
          A::Context: AsyncContext<A>,
          M: ResponseType + 'static
{
    fn send(&self, msg: M) -> Result<(), M> {
        if self.connected() {
            // Resolves to the inherent `Address::send`, not this trait method.
            self.send(msg);
            Ok(())
        } else {
            Err(msg)
        }
    }

    fn boxed(&self) -> Box<Subscriber<M>> {
        Box::new(self.clone())
    }
}

/// `Send` address of the actor. The actor can run in a different thread.
pub struct SyncAddress<A> where A: Actor {
    tx: sync::UnboundedSender<Envelope<A>>,
    // Sticky flag set once a send fails; `connected()` then reports false.
    closed: Cell<bool>,
}

// NOTE(review): manual Send/Sync impls — soundness depends on the sender and
// on `Cell` only being touched from one thread at a time; confirm against the
// queue implementation.
unsafe impl<A> Send for SyncAddress<A> where A: Actor {}
unsafe impl<A> Sync for SyncAddress<A> where A: Actor {}

impl<A> Clone for SyncAddress<A> where A: Actor {
    fn clone(&self) -> Self {
        SyncAddress{tx: self.tx.clone(), closed: self.closed.clone()}
    }
}

impl<A> SyncAddress<A> where A: Actor {

    pub(crate) fn new(sender: sync::UnboundedSender<Envelope<A>>) -> SyncAddress<A> {
        SyncAddress{tx: sender, closed: Cell::new(false)}
    }

    /// Indicates if address is still connected to the actor.
    pub fn connected(&self) -> bool {
        !self.closed.get()
    }

    /// Send message `M` to actor `A`. The message can be sent to an actor
    /// running in a different thread.
    pub fn send<M>(&self, msg: M)
        where A: Handler<M>, <A as Actor>::Context: ToEnvelope<A>,
              M: ResponseType + Send + 'static,
              M::Item: Send, M::Error: Send,
    {
        if self.tx.unbounded_send(
            <<A as Actor>::Context as ToEnvelope<A>>::pack(msg, None, false)).is_err()
        {
            self.closed.set(true)
        }
    }

    /// Send message to actor `A` and asynchronously wait for a response.
    ///
    /// If the returned `Request` object gets dropped, the message is cancelled.
    pub fn call<B: Actor, M>(&self, _: &B, msg: M) -> Request<B, M>
        where A: Handler<M>,
              M: ResponseType + Send + 'static,
              M::Item: Send, M::Error: Send,
              <A as Actor>::Context: ToEnvelope<A>,
    {
        let (tx, rx) = sync_channel();
        if self.tx.unbounded_send(
            <A::Context as ToEnvelope<A>>::pack(msg, Some(tx), true)).is_err()
        {
            self.closed.set(true)
        }

        Request::remote(rx)
    }

    /// Send message to actor `A` and asynchronously wait for a response.
    ///
    /// If the returned `Receiver` object gets dropped, the message is cancelled.
    pub fn call_fut<M>(&self, msg: M) -> SyncReceiver<Result<M::Item, M::Error>>
        where A: Handler<M>,
              M: ResponseType + Send + 'static,
              M::Item: Send, M::Error: Send,
              <A as Actor>::Context: ToEnvelope<A>,
    {
        let (tx, rx) = sync_channel();
        if self.tx.unbounded_send(
            <A::Context as ToEnvelope<A>>::pack(msg, Some(tx), true)).is_err()
        {
            self.closed.set(true)
        }

        rx
    }

    /// Get `Subscriber` for a specific message type
    pub fn subscriber<M: 'static + Send>(&self) -> Box<Subscriber<M> + Send>
        where A: Handler<M>,
              M: ResponseType + Send + 'static,
              M::Item: Send, M::Error: Send,
              <A as Actor>::Context: ToEnvelope<A>,
    {
        Box::new(self.clone())
    }
}

impl<A, M> Subscriber<M> for SyncAddress<A>
    where A: Actor + Handler<M>,
          <A as Actor>::Context: ToEnvelope<A>,
          M: ResponseType + Send + 'static,
          M::Item: Send, M::Error: Send,
{
    fn send(&self, msg: M) -> Result<(), M> {
        if self.connected() {
            // Resolves to the inherent `SyncAddress::send`.
            self.send(msg);
            Ok(())
        } else {
            Err(msg)
        }
    }

    fn boxed(&self) -> Box<Subscriber<M>> {
        Box::new(self.clone())
    }
}
use crate::read_pattern::ReadPattern;
use std::ops::{RangeBounds, Bound};

/// Repetition pattern: matches `self.0` a number of times constrained by the
/// count range `self.1`.
#[derive(Copy, Clone, Debug)]
pub struct RangePattern<T, R>(pub T, pub R);

impl<T, R> ReadPattern for RangePattern<T, R>
where
    T: ReadPattern,
    R: RangeBounds<u32>
{
    /// Reads the repeated pattern from the start of `text`, returning the
    /// number of bytes consumed on success.
    ///
    /// # Panics
    /// Panics if the inner pattern matches the empty string while the range
    /// has no upper bound — that combination would loop forever.
    fn read_pattern(&self, text: &str) -> Option<usize> {
        if self.0.read_pattern("").is_some() && self.1.end_bound() == Bound::Unbounded {
            panic!("Infinity loop")
        }
        let mut len = 0;   // bytes consumed so far
        let mut count = 0; // successful repetitions so far
        loop {
            match self.0.read_pattern(&text[len..]) {
                Some(l) => {
                    len += l;
                    count += 1;
                    // Stop as soon as the upper bound is reached.
                    // NOTE(review): these early returns do not re-check the
                    // start bound, so a degenerate range (end < start) would
                    // still report a match — confirm intended.
                    match self.1.end_bound() {
                        Bound::Included(b) if *b == count => return Some(len),
                        Bound::Excluded(b) if *b == count + 1 => return Some(len),
                        _ => {},
                    }
                },
                // Inner pattern failed: succeed iff the count lies in range.
                None if self.1.contains(&count) => return Some(len),
                None => return None,
            }
        }
    }
}
// Copyright 2020 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0

use crate::host::*;

/// Anything that can be turned into a 32-bit key id for host map access.
pub trait MapKey {
    fn get_id(&self) -> Key32;
}

impl MapKey for str {
    fn get_id(&self) -> Key32 {
        // String keys are interned by the host, which hands back the id.
        get_key_id_from_string(self)
    }
}

impl MapKey for String {
    fn get_id(&self) -> Key32 {
        get_key_id_from_string(self)
    }
}

/// Host-assigned key id; the negative values below are the predefined core keys.
#[derive(Clone, Copy)]
pub struct Key32(pub i32);

impl MapKey for Key32 {
    fn get_id(&self) -> Key32 {
        // Already a key id; return as-is.
        *self
    }
}

// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// Predefined core key ids shared with the host (keep values in sync).
// @formatter:off
pub const KEY_ADDRESS          : Key32 = Key32(-1);
pub const KEY_AGGREGATE_BLS    : Key32 = Key32(-2);
pub const KEY_BALANCES         : Key32 = Key32(-3);
pub const KEY_BASE58_BYTES     : Key32 = Key32(-4);
pub const KEY_BASE58_STRING    : Key32 = Key32(-5);
pub const KEY_CALL             : Key32 = Key32(-6);
pub const KEY_CALLER           : Key32 = Key32(-7);
pub const KEY_CHAIN_OWNER_ID   : Key32 = Key32(-8);
pub const KEY_COLOR            : Key32 = Key32(-9);
pub const KEY_CONTRACT_CREATOR : Key32 = Key32(-10);
pub const KEY_CONTRACT_ID      : Key32 = Key32(-11);
pub const KEY_DEPLOY           : Key32 = Key32(-12);
pub const KEY_EVENT            : Key32 = Key32(-13);
pub const KEY_EXPORTS          : Key32 = Key32(-14);
pub const KEY_HASH_BLAKE2B     : Key32 = Key32(-15);
pub const KEY_HASH_SHA3        : Key32 = Key32(-16);
pub const KEY_HNAME            : Key32 = Key32(-17);
pub const KEY_INCOMING         : Key32 = Key32(-18);
pub const KEY_LENGTH           : Key32 = Key32(-19);
pub const KEY_LOG              : Key32 = Key32(-20);
pub const KEY_MAPS             : Key32 = Key32(-21);
pub const KEY_NAME             : Key32 = Key32(-22);
pub const KEY_PANIC            : Key32 = Key32(-23);
pub const KEY_PARAMS           : Key32 = Key32(-24);
pub const KEY_POST             : Key32 = Key32(-25);
pub const KEY_RANDOM           : Key32 = Key32(-26);
pub const KEY_RESULTS          : Key32 = Key32(-27);
pub const KEY_RETURN           : Key32 = Key32(-28);
pub const KEY_STATE            : Key32 = Key32(-29);
pub const KEY_TIMESTAMP        : Key32 = Key32(-30);
pub const KEY_TRACE            : Key32 = Key32(-31);
pub const KEY_TRANSFERS        : Key32 = Key32(-32);
pub const KEY_UTILITY          : Key32 = Key32(-33);
pub const KEY_VALID            : Key32 = Key32(-34);
pub const KEY_VALID_BLS        : Key32 = Key32(-35);
pub const KEY_VALID_ED25519    : Key32 = Key32(-36);
pub const KEY_ZZZZZZZ          : Key32 = Key32(-37);
// @formatter:on
pub mod logout;
pub mod challenge;
pub mod app_setup;
pub mod token;
pub mod index;

use rocket::request::{self, Request, FromRequest};
use rocket::{Outcome, State};
use rocket::http::Status;
use std::net::SocketAddr;
use r2d2::{Pool, PooledConnection};
use r2d2_postgres::{PostgresConnectionManager};
use postgres::Connection;
use std::ops::Deref;

/// Request guard exposing the client's remote socket address, when known.
pub struct RemoteIP(Option<SocketAddr>);

impl<'a, 'r> FromRequest<'a, 'r> for RemoteIP {
    type Error = ();

    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
        // Always succeeds; the address is simply `None` when unavailable.
        Outcome::Success(RemoteIP(request.remote()))
    }
}

/// Request guard that checks out a PostgreSQL connection from the managed pool.
pub struct DB(pub PooledConnection<PostgresConnectionManager>);

impl<'a, 'r> FromRequest<'a, 'r> for DB {
    type Error = ();

    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
        let pool = request.guard::<State<Pool<PostgresConnectionManager>>>()?;
        // A checkout failure (pool exhausted / DB down) maps to 503.
        if let Ok(conn) = pool.get() {
            Outcome::Success(DB(conn))
        } else {
            Outcome::Failure((Status::ServiceUnavailable, ()))
        }
    }
}

/// Lets handlers use a `DB` guard directly as a `postgres::Connection`.
impl Deref for DB {
    type Target = Connection;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// A node in a singly linked list, owning its value and the rest of the list.
#[derive(Debug)]
pub struct Node<T> {
    /// Value stored in this node.
    pub val: T,
    /// Remainder of the list; `None` marks the tail.
    pub next: Link<T>
}

/// A link between nodes: `None` is the empty list / end of the list.
pub type Link<T> = Option<Box<Node<T>>>;
use crate::{Client, ClientFactory, MssqlFactory, MssqlProvider};
use storm::{provider::ProviderFactory, BoxFuture, Error, Result};

/// This can wrap a ClientFactory and creates a transaction for each Client that is returned.
/// It is useful for integration tests, making sure that all items are rolled back once the
/// test is done.
pub struct TransactionScoped<F>(pub(crate) F);

impl From<MssqlFactory> for TransactionScoped<MssqlFactory> {
    fn from(f: MssqlFactory) -> Self {
        TransactionScoped(f)
    }
}

impl ProviderFactory for TransactionScoped<MssqlFactory> {
    type Provider = MssqlProvider;

    fn create_provider(&self) -> BoxFuture<'_, Result<Self::Provider>> {
        // Re-wrap the factory's inner value so every client the new provider
        // creates goes through `TransactionScoped::create_client` below.
        Box::pin(async move { Ok(MssqlProvider::new(TransactionScoped(self.0 .0.clone()))) })
    }
}

impl<F> ClientFactory for TransactionScoped<F>
where
    F: ClientFactory + Send + Sync,
{
    fn create_client(&self) -> BoxFuture<'_, Result<Client>> {
        Box::pin(async {
            let mut client = self.0.create_client().await?;

            // Open a transaction immediately; it is never committed, so all
            // work done through this client can be rolled back.
            client
                .simple_query("BEGIN TRAN")
                .await
                .map_err(Error::Mssql)?;

            Ok(client)
        })
    }

    fn under_transaction(&self) -> bool {
        true
    }
}
use shorthand::ShortHand;

// Fixture: `ShortHand` derives an accessor for each field.
#[derive(ShortHand, Default)]
pub struct Example {
    optional: Option<String>,
}

// Compile-time check that the derived getter for an `Option<String>` field
// returns `Option<&String>` (as-ref semantics) instead of cloning the value.
#[test]
fn test_option_as_ref() {
    let _: Option<&String> = Example::default().optional();
}

fn main() {}
//#[macro_use] //extern crate log; //extern crate android_logger; mod excel_to_json; use excel_to_json::*; use jni::objects::{JClass, JObject, JString}; use jni::sys::{jint, jobject}; use jni::{JNIEnv, JavaVM}; use serde_json::to_string; //JNI加载完成 #[no_mangle] pub extern "C" fn JNI_OnLoad(_jvm: JavaVM, _reserved: *mut std::ffi::c_void) -> jint { //android_logger::init_once(android_logger::Config::default().with_min_level(log::Level::Info)); //info!("JNI_OnLoad."); jni::sys::JNI_VERSION_1_6 } #[no_mangle] pub extern "C" fn Java_io_github_planet0104_excel2json_ExcelToJson_convert( env: JNIEnv, _class: JClass, file: JString, ) -> jobject { let mje = |err| format!("{:?}", err); let result = (|| -> Result<JString, String> { let file: String = env.get_string(file).map_err(mje)?.into(); match convert(&file) { Ok(v) => { let s = to_string(&v).unwrap_or(String::from("json转换失败")); let s = env.new_string(&s).map_err(mje)?; Ok(s) } Err(err) => Err(format!("{:?}", err)), } })(); match result { Ok(s) => s.into_inner(), Err(err) => { let _ = env.throw_new("java/lang/Exception", format!("excel转换失败: {:?}", err)); JObject::null().into_inner() } } }
use crate::runtime::Runtime; use crate::subscription::subscribe_loop_supervisor::SubscribeLoopSupervisor; use crate::transport::{Service, Transport}; use futures_util::lock::Mutex; use std::sync::Arc; mod presence; mod publish; mod subscribe; #[cfg(test)] mod tests; /// # PubNub Client /// /// The PubNub lib implements socket pools to relay data requests as a client /// connection to the PubNub Network. #[derive(Clone, Debug)] pub struct PubNub<TTransport, TRuntime> where TTransport: Transport, TRuntime: Runtime, { /// Transport to use for communication. pub(crate) transport: TTransport, /// Runtime to use for managing resources. pub(crate) runtime: TRuntime, /// Subscribe loop lifecycle management. pub(crate) subscribe_loop_supervisor: Arc<Mutex<SubscribeLoopSupervisor>>, } impl<TTransport, TRuntime> PubNub<TTransport, TRuntime> where TTransport: Transport + 'static, TRuntime: Runtime + 'static, { /// Get a reference to a transport being used. pub fn transport(&self) -> &TTransport { &self.transport } /// Get a reference to a runtime being used. pub fn runtime(&self) -> &TRuntime { &self.runtime } } impl<TTransport, TRuntime> PubNub<TTransport, TRuntime> where TTransport: Transport + 'static, TRuntime: Runtime + 'static, { /// Perform a transport call. /// /// # Errors /// /// Returns transport-specific errors. pub async fn call<TRequest>( &self, req: TRequest, ) -> Result<<TTransport as Service<TRequest>>::Response, <TTransport as Service<TRequest>>::Error> where TTransport: Service<TRequest>, { self.transport.call(req).await } }
// svd2rust-generated accessors for the RCC APB2 peripheral reset register.
#[doc = "Register `APB2RSTR` reader"]
pub type R = crate::R<APB2RSTR_SPEC>;
#[doc = "Register `APB2RSTR` writer"]
pub type W = crate::W<APB2RSTR_SPEC>;
#[doc = "Field `AFIORST` reader - Alternate function I/O reset"]
pub type AFIORST_R = crate::BitReader<AFIORST_A>;
#[doc = "Alternate function I/O reset\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AFIORST_A {
    #[doc = "1: Reset the selected module"]
    Reset = 1,
}
impl From<AFIORST_A> for bool {
    #[inline(always)]
    fn from(variant: AFIORST_A) -> Self {
        variant as u8 != 0
    }
}
impl AFIORST_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<AFIORST_A> {
        match self.bits {
            true => Some(AFIORST_A::Reset),
            _ => None,
        }
    }
    #[doc = "Reset the selected module"]
    #[inline(always)]
    pub fn is_reset(&self) -> bool {
        *self == AFIORST_A::Reset
    }
}
#[doc = "Field `AFIORST` writer - Alternate function I/O reset"]
pub type AFIORST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, AFIORST_A>;
impl<'a, REG, const O: u8> AFIORST_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Reset the selected module"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut crate::W<REG> {
        self.variant(AFIORST_A::Reset)
    }
}
// Every other field in this register has the same single-bit shape, so the
// generated code re-exports the AFIORST reader/writer under per-field names.
#[doc = "Field `IOPARST` reader - IO port A reset"]
pub use AFIORST_R as IOPARST_R;
#[doc = "Field `IOPBRST` reader - IO port B reset"]
pub use AFIORST_R as IOPBRST_R;
#[doc = "Field `IOPCRST` reader - IO port C reset"]
pub use AFIORST_R as IOPCRST_R;
#[doc = "Field `IOPDRST` reader - IO port D reset"]
pub use AFIORST_R as IOPDRST_R;
#[doc = "Field `IOPERST` reader - IO port E reset"]
pub use AFIORST_R as IOPERST_R;
#[doc = "Field `IOPFRST` reader - IO port F reset"]
pub use AFIORST_R as IOPFRST_R;
#[doc = "Field `IOPGRST` reader - IO port G reset"]
pub use AFIORST_R as IOPGRST_R;
#[doc = "Field `ADC1RST` reader - ADC 1 interface reset"]
pub use AFIORST_R as ADC1RST_R;
#[doc = "Field `SPI1RST` reader - SPI 1 reset"]
pub use AFIORST_R as SPI1RST_R;
#[doc = "Field `USART1RST` reader - USART1 reset"]
pub use AFIORST_R as USART1RST_R;
#[doc = "Field `TIM9RST` reader - TIM9 timer reset"]
pub use AFIORST_R as TIM9RST_R;
#[doc = "Field `TIM10RST` reader - TIM10 timer reset"]
pub use AFIORST_R as TIM10RST_R;
#[doc = "Field `TIM11RST` reader - TIM11 timer reset"]
pub use AFIORST_R as TIM11RST_R;
#[doc = "Field `IOPARST` writer - IO port A reset"]
pub use AFIORST_W as IOPARST_W;
#[doc = "Field `IOPBRST` writer - IO port B reset"]
pub use AFIORST_W as IOPBRST_W;
#[doc = "Field `IOPCRST` writer - IO port C reset"]
pub use AFIORST_W as IOPCRST_W;
#[doc = "Field `IOPDRST` writer - IO port D reset"]
pub use AFIORST_W as IOPDRST_W;
#[doc = "Field `IOPERST` writer - IO port E reset"]
pub use AFIORST_W as IOPERST_W;
#[doc = "Field `IOPFRST` writer - IO port F reset"]
pub use AFIORST_W as IOPFRST_W;
#[doc = "Field `IOPGRST` writer - IO port G reset"]
pub use AFIORST_W as IOPGRST_W;
#[doc = "Field `ADC1RST` writer - ADC 1 interface reset"]
pub use AFIORST_W as ADC1RST_W;
#[doc = "Field `SPI1RST` writer - SPI 1 reset"]
pub use AFIORST_W as SPI1RST_W;
#[doc = "Field `USART1RST` writer - USART1 reset"]
pub use AFIORST_W as USART1RST_W;
#[doc = "Field `TIM9RST` writer - TIM9 timer reset"]
pub use AFIORST_W as TIM9RST_W;
#[doc = "Field `TIM10RST` writer - TIM10 timer reset"]
pub use AFIORST_W as TIM10RST_W;
#[doc = "Field `TIM11RST` writer - TIM11 timer reset"]
pub use AFIORST_W as TIM11RST_W;
// Read accessors: one per field, extracting the field's bit.
impl R {
    #[doc = "Bit 0 - Alternate function I/O reset"]
    #[inline(always)]
    pub fn afiorst(&self) -> AFIORST_R {
        AFIORST_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 2 - IO port A reset"]
    #[inline(always)]
    pub fn ioparst(&self) -> IOPARST_R {
        IOPARST_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - IO port B reset"]
    #[inline(always)]
    pub fn iopbrst(&self) -> IOPBRST_R {
        IOPBRST_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - IO port C reset"]
    #[inline(always)]
    pub fn iopcrst(&self) -> IOPCRST_R {
        IOPCRST_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - IO port D reset"]
    #[inline(always)]
    pub fn iopdrst(&self) -> IOPDRST_R {
        IOPDRST_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - IO port E reset"]
    #[inline(always)]
    pub fn ioperst(&self) -> IOPERST_R {
        IOPERST_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - IO port F reset"]
    #[inline(always)]
    pub fn iopfrst(&self) -> IOPFRST_R {
        IOPFRST_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - IO port G reset"]
    #[inline(always)]
    pub fn iopgrst(&self) -> IOPGRST_R {
        IOPGRST_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - ADC 1 interface reset"]
    #[inline(always)]
    pub fn adc1rst(&self) -> ADC1RST_R {
        ADC1RST_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 12 - SPI 1 reset"]
    #[inline(always)]
    pub fn spi1rst(&self) -> SPI1RST_R {
        SPI1RST_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 14 - USART1 reset"]
    #[inline(always)]
    pub fn usart1rst(&self) -> USART1RST_R {
        USART1RST_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 19 - TIM9 timer reset"]
    #[inline(always)]
    pub fn tim9rst(&self) -> TIM9RST_R {
        TIM9RST_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - TIM10 timer reset"]
    #[inline(always)]
    pub fn tim10rst(&self) -> TIM10RST_R {
        TIM10RST_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - TIM11 timer reset"]
    #[inline(always)]
    pub fn tim11rst(&self) -> TIM11RST_R {
        TIM11RST_R::new(((self.bits >> 21) & 1) != 0)
    }
}
// Write accessors: the bit offset is the second const generic parameter.
impl W {
    #[doc = "Bit 0 - Alternate function I/O reset"]
    #[inline(always)]
    #[must_use]
    pub fn afiorst(&mut self) -> AFIORST_W<APB2RSTR_SPEC, 0> {
        AFIORST_W::new(self)
    }
    #[doc = "Bit 2 - IO port A reset"]
    #[inline(always)]
    #[must_use]
    pub fn ioparst(&mut self) -> IOPARST_W<APB2RSTR_SPEC, 2> {
        IOPARST_W::new(self)
    }
    #[doc = "Bit 3 - IO port B reset"]
    #[inline(always)]
    #[must_use]
    pub fn iopbrst(&mut self) -> IOPBRST_W<APB2RSTR_SPEC, 3> {
        IOPBRST_W::new(self)
    }
    #[doc = "Bit 4 - IO port C reset"]
    #[inline(always)]
    #[must_use]
    pub fn iopcrst(&mut self) -> IOPCRST_W<APB2RSTR_SPEC, 4> {
        IOPCRST_W::new(self)
    }
    #[doc = "Bit 5 - IO port D reset"]
    #[inline(always)]
    #[must_use]
    pub fn iopdrst(&mut self) -> IOPDRST_W<APB2RSTR_SPEC, 5> {
        IOPDRST_W::new(self)
    }
    #[doc = "Bit 6 - IO port E reset"]
    #[inline(always)]
    #[must_use]
    pub fn ioperst(&mut self) -> IOPERST_W<APB2RSTR_SPEC, 6> {
        IOPERST_W::new(self)
    }
    #[doc = "Bit 7 - IO port F reset"]
    #[inline(always)]
    #[must_use]
    pub fn iopfrst(&mut self) -> IOPFRST_W<APB2RSTR_SPEC, 7> {
        IOPFRST_W::new(self)
    }
    #[doc = "Bit 8 - IO port G reset"]
    #[inline(always)]
    #[must_use]
    pub fn iopgrst(&mut self) -> IOPGRST_W<APB2RSTR_SPEC, 8> {
        IOPGRST_W::new(self)
    }
    #[doc = "Bit 9 - ADC 1 interface reset"]
    #[inline(always)]
    #[must_use]
    pub fn adc1rst(&mut self) -> ADC1RST_W<APB2RSTR_SPEC, 9> {
        ADC1RST_W::new(self)
    }
    #[doc = "Bit 12 - SPI 1 reset"]
    #[inline(always)]
    #[must_use]
    pub fn spi1rst(&mut self) -> SPI1RST_W<APB2RSTR_SPEC, 12> {
        SPI1RST_W::new(self)
    }
    #[doc = "Bit 14 - USART1 reset"]
    #[inline(always)]
    #[must_use]
    pub fn usart1rst(&mut self) -> USART1RST_W<APB2RSTR_SPEC, 14> {
        USART1RST_W::new(self)
    }
    #[doc = "Bit 19 - TIM9 timer reset"]
    #[inline(always)]
    #[must_use]
    pub fn tim9rst(&mut self) -> TIM9RST_W<APB2RSTR_SPEC, 19> {
        TIM9RST_W::new(self)
    }
    #[doc = "Bit 20 - TIM10 timer reset"]
    #[inline(always)]
    #[must_use]
    pub fn tim10rst(&mut self) -> TIM10RST_W<APB2RSTR_SPEC, 20> {
        TIM10RST_W::new(self)
    }
    #[doc = "Bit 21 - TIM11 timer reset"]
    #[inline(always)]
    #[must_use]
    pub fn tim11rst(&mut self) -> TIM11RST_W<APB2RSTR_SPEC, 21> {
        TIM11RST_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "APB2 peripheral reset register (RCC_APB2RSTR)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb2rstr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb2rstr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct APB2RSTR_SPEC;
impl crate::RegisterSpec for APB2RSTR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`apb2rstr::R`](R) reader structure"]
impl crate::Readable for APB2RSTR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`apb2rstr::W`](W) writer structure"]
impl crate::Writable for APB2RSTR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets APB2RSTR to value 0"]
impl crate::Resettable for APB2RSTR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use anyhow::Result; use xilinx_dma::AxiDma; use xilinx_dma::DmaBuffer; fn main() -> Result<()> { let dma_buffer_h2d = DmaBuffer::new("udmabuf0")?; let dma_buffer_d2h = DmaBuffer::new("udmabuf1")?; println!("{:?}", dma_buffer_h2d); println!("{:?}", dma_buffer_d2h); // do not use the whole buffer let max_items = 128; let items = std::cmp::min(max_items, dma_buffer_h2d.size() / 4); let items = std::cmp::min(items, dma_buffer_d2h.size() / 4); let slice_h2d = &mut dma_buffer_h2d.slice::<u32>()[0..items]; let slice_d2h = &mut dma_buffer_d2h.slice::<u32>()[0..items]; for i in slice_d2h.iter_mut() { *i = 0; } for i in slice_h2d.iter_mut() { *i = fastrand::u32(0..1024); } let mut dma_h2d = AxiDma::new("uio4")?; let mut dma_d2h = AxiDma::new("uio5")?; println!("{:?}", dma_h2d); println!("{:?}", dma_d2h); dma_h2d.start_h2d(&dma_buffer_h2d, items * 4)?; dma_d2h.start_d2h(&dma_buffer_d2h, items * 4)?; println!("transfers started"); dma_h2d.wait_h2d()?; println!("h2d done"); dma_d2h.wait_d2h()?; println!("d2h done"); dma_h2d.status_h2d(); dma_d2h.status_d2h(); for i in 0..items { assert_eq!(slice_d2h[i], slice_h2d[i] + 123); } Ok(()) }
#![macro_use]

use crate::gpio::{AnyPin, Pin};
use crate::pac::gpio::vals::{Afr, Moder};
use crate::pac::gpio::Gpio;
use crate::pac::spi;
use crate::spi::{ByteOrder, Config, Error, Instance, MisoPin, MosiPin, SckPin, WordSize};
use crate::time::Hertz;
use core::marker::PhantomData;
use core::ptr;
use embassy::util::Unborrow;
use embassy_extras::unborrow;
pub use embedded_hal::spi::{Mode, Phase, Polarity, MODE_0, MODE_1, MODE_2, MODE_3};

impl WordSize {
    // DS (data size) register encoding for this word size.
    fn ds(&self) -> spi::vals::Ds {
        match self {
            WordSize::EightBit => spi::vals::Ds::EIGHTBIT,
            WordSize::SixteenBit => spi::vals::Ds::SIXTEENBIT,
        }
    }

    // RX FIFO threshold matching the word size: RXNE fires at 8 bits for u8
    // transfers, 16 bits for u16 transfers.
    fn frxth(&self) -> spi::vals::Frxth {
        match self {
            WordSize::EightBit => spi::vals::Frxth::QUARTER,
            WordSize::SixteenBit => spi::vals::Frxth::HALF,
        }
    }
}

/// Blocking SPI master driver. Configures the three pins on construction and
/// returns them to analog (reset) mode on `Drop`.
pub struct Spi<'d, T: Instance> {
    sck: AnyPin,
    mosi: AnyPin,
    miso: AnyPin,
    phantom: PhantomData<&'d mut T>,
}

impl<'d, T: Instance> Spi<'d, T> {
    /// Creates the driver: routes the pins to the SPI alternate function,
    /// enables/resets the peripheral and configures it as master at (at most)
    /// `freq`, with the mode, polarity and bit order from `config`.
    pub fn new<F>(
        _peri: impl Unborrow<Target = T> + 'd,
        sck: impl Unborrow<Target = impl SckPin<T>>,
        mosi: impl Unborrow<Target = impl MosiPin<T>>,
        miso: impl Unborrow<Target = impl MisoPin<T>>,
        freq: F,
        config: Config,
    ) -> Self
    where
        F: Into<Hertz>,
    {
        unborrow!(sck, mosi, miso);

        unsafe {
            Self::configure_pin(sck.block(), sck.pin() as _, sck.af_num());
            Self::configure_pin(mosi.block(), mosi.pin() as _, mosi.af_num());
            Self::configure_pin(miso.block(), miso.pin() as _, miso.af_num());
        }

        // Keep the type-erased pins so Drop can unconfigure them.
        let sck = sck.degrade();
        let mosi = mosi.degrade();
        let miso = miso.degrade();

        let pclk = T::frequency();
        let br = Self::compute_baud_rate(pclk, freq.into());

        unsafe {
            T::enable();
            T::reset();
            T::regs().cr2().modify(|w| {
                w.set_ssoe(false);
            });
            T::regs().cr1().modify(|w| {
                w.set_cpha(
                    match config.mode.phase == Phase::CaptureOnSecondTransition {
                        true => spi::vals::Cpha::SECONDEDGE,
                        false => spi::vals::Cpha::FIRSTEDGE,
                    },
                );
                w.set_cpol(match config.mode.polarity == Polarity::IdleHigh {
                    true => spi::vals::Cpol::IDLEHIGH,
                    false => spi::vals::Cpol::IDLELOW,
                });
                w.set_mstr(spi::vals::Mstr::MASTER);
                w.set_br(spi::vals::Br(br));
                w.set_lsbfirst(match config.byte_order {
                    ByteOrder::LsbFirst => spi::vals::Lsbfirst::LSBFIRST,
                    ByteOrder::MsbFirst => spi::vals::Lsbfirst::MSBFIRST,
                });
                // Software slave management, slave-select internally high.
                w.set_ssi(true);
                w.set_ssm(true);
                w.set_crcen(false);
                w.set_bidimode(spi::vals::Bidimode::UNIDIRECTIONAL);
                w.set_spe(true);
            });
        }

        Self {
            sck,
            mosi,
            miso,
            phantom: PhantomData,
        }
    }

    // Routes a GPIO pin to the given alternate function.
    unsafe fn configure_pin(block: Gpio, pin: usize, af_num: u8) {
        // AFR register 0 covers pins 0-7, register 1 covers pins 8-15.
        let (afr, n_af) = if pin < 8 { (0, pin) } else { (1, pin - 8) };
        block.moder().modify(|w| w.set_moder(pin, Moder::ALTERNATE));
        block.afr(afr).modify(|w| w.set_afr(n_af, Afr(af_num)));
    }

    // Returns a pin to analog mode (its reset state).
    unsafe fn unconfigure_pin(block: Gpio, pin: usize) {
        block.moder().modify(|w| w.set_moder(pin, Moder::ANALOG));
    }

    // Picks the smallest BR prescaler (pclk / 2^(br+1)) that does not exceed
    // the requested frequency.
    fn compute_baud_rate(clocks: Hertz, freq: Hertz) -> u8 {
        match clocks.0 / freq.0 {
            0 => unreachable!(),
            1..=2 => 0b000,
            3..=5 => 0b001,
            6..=11 => 0b010,
            12..=23 => 0b011,
            24..=39 => 0b100,
            40..=95 => 0b101,
            96..=191 => 0b110,
            _ => 0b111,
        }
    }

    // Reconfigures the data size / RX threshold; SPE must be cleared while
    // changing them, so the peripheral is briefly disabled.
    fn set_word_size(word_size: WordSize) {
        unsafe {
            T::regs().cr1().modify(|w| {
                w.set_spe(false);
            });
            T::regs().cr2().modify(|w| {
                w.set_frxth(word_size.frxth());
                w.set_ds(word_size.ds());
            });
            T::regs().cr1().modify(|w| {
                w.set_spe(true);
            });
        }
    }
}

impl<'d, T: Instance> Drop for Spi<'d, T> {
    fn drop(&mut self) {
        // Release the pins back to their reset (analog) state.
        unsafe {
            Self::unconfigure_pin(self.sck.block(), self.sck.pin() as _);
            Self::unconfigure_pin(self.mosi.block(), self.mosi.pin() as _);
            Self::unconfigure_pin(self.miso.block(), self.miso.pin() as _);
        }
    }
}

// Marker trait restricting blocking transfers to the supported word widths.
trait Word {}
impl Word for u8 {}
impl Word for u16 {}

/// Write a single word blocking. Assumes word size has already been set.
fn write_word<W: Word>(regs: &'static crate::pac::spi::Spi, word: W) -> Result<(), Error> { loop { let sr = unsafe { regs.sr().read() }; if sr.ovr() { return Err(Error::Overrun); } else if sr.fre() { return Err(Error::Framing); } else if sr.modf() { return Err(Error::ModeFault); } else if sr.crcerr() { return Err(Error::Crc); } else if sr.txe() { unsafe { let dr = regs.dr().ptr() as *mut W; ptr::write_volatile(dr, word); } return Ok(()); } } } /// Read a single word blocking. Assumes word size have already been set. fn read_word<W: Word>(regs: &'static crate::pac::spi::Spi) -> Result<W, Error> { loop { let sr = unsafe { regs.sr().read() }; if sr.ovr() { return Err(Error::Overrun); } else if sr.modf() { return Err(Error::ModeFault); } else if sr.fre() { return Err(Error::Framing); } else if sr.crcerr() { return Err(Error::Crc); } else if sr.rxne() { unsafe { let dr = regs.dr().ptr() as *const W; return Ok(ptr::read_volatile(dr)); } } } } impl<'d, T: Instance> embedded_hal::blocking::spi::Write<u8> for Spi<'d, T> { type Error = Error; fn write(&mut self, words: &[u8]) -> Result<(), Self::Error> { Self::set_word_size(WordSize::EightBit); let regs = T::regs(); for word in words.iter() { write_word(regs, *word)?; let _: u8 = read_word(regs)?; } Ok(()) } } impl<'d, T: Instance> embedded_hal::blocking::spi::Transfer<u8> for Spi<'d, T> { type Error = Error; fn transfer<'w>(&mut self, words: &'w mut [u8]) -> Result<&'w [u8], Self::Error> { Self::set_word_size(WordSize::EightBit); let regs = T::regs(); for word in words.iter_mut() { write_word(regs, *word)?; *word = read_word(regs)?; } Ok(words) } } impl<'d, T: Instance> embedded_hal::blocking::spi::Write<u16> for Spi<'d, T> { type Error = Error; fn write(&mut self, words: &[u16]) -> Result<(), Self::Error> { Self::set_word_size(WordSize::SixteenBit); let regs = T::regs(); for word in words.iter() { write_word(regs, *word)?; let _: u16 = read_word(regs)?; } Ok(()) } } impl<'d, T: Instance> 
embedded_hal::blocking::spi::Transfer<u16> for Spi<'d, T> { type Error = Error; fn transfer<'w>(&mut self, words: &'w mut [u16]) -> Result<&'w [u16], Self::Error> { Self::set_word_size(WordSize::SixteenBit); let regs = T::regs(); for word in words.iter_mut() { write_word(regs, *word)?; *word = read_word(regs)?; } Ok(words) } }
//! Deduplication of repeated (user, item) ratings into single rating records.

use std::collections::HashMap;
use std::marker::PhantomData;
use std::mem::take;
use std::path::Path;

use anyhow::{anyhow, Result};
use arrow2::array::TryExtend;
use log::*;

use super::{Dedup, Interaction, Key};
use crate::arrow::*;
use crate::io::{file_size, ObjectWriter};
use crate::util::logging::item_progress;
use crate::util::Timer;

/// Record for a single output rating.
#[derive(ArrowField, Debug)]
pub struct TimestampRatingRecord {
    pub user: i32,
    pub item: i32,
    // Median rating over all of this pair's ratings.
    pub rating: f32,
    // Most recent rating (by timestamp).
    pub last_rating: f32,
    // Timestamp associated with the median rating.
    pub timestamp: i64,
    // Timestamp of the most recent rating.
    pub last_time: i64,
    // Number of raw ratings collapsed into this record.
    pub nratings: i32,
}

/// Record for a single output rating without time.
#[derive(ArrowField, Debug)]
pub struct TimelessRatingRecord {
    pub user: i32,
    pub item: i32,
    pub rating: f32,
    pub nratings: i32,
}

/// Collapse a sequence of ratings into a rating record.
pub trait FromRatingSet {
    fn create(user: i32, item: i32, ratings: Vec<(f32, i64)>) -> Self;
}

impl FromRatingSet for TimestampRatingRecord {
    /// Collapse to the median rating; for an even count the two middle
    /// ratings (and their timestamps) are averaged.
    fn create(user: i32, item: i32, ratings: Vec<(f32, i64)>) -> Self {
        let mut vec = ratings;
        if vec.len() == 1 {
            // fast path
            let (rating, timestamp) = vec[0];
            TimestampRatingRecord {
                user,
                item,
                rating,
                timestamp,
                last_rating: rating,
                last_time: timestamp,
                nratings: 1,
            }
        } else {
            // Sort by rating to find the median. The key scales by 10 before
            // truncating — assumes ratings have at most one decimal place
            // (e.g. half-star scales); TODO confirm.
            vec.sort_unstable_by_key(|(r, _ts)| (r * 10.0) as i32);
            let (rating, timestamp) = if vec.len() % 2 == 0 {
                let mp_up = vec.len() / 2;
                // we need this and the previous
                let (r1, ts1) = vec[mp_up - 1];
                let (r2, ts2) = vec[mp_up];
                // and average
                ((r1 + r2) * 0.5, (ts1 + ts2) / 2)
            } else {
                vec[vec.len() / 2]
            };
            // Re-sort by time to pick out the most recent rating.
            vec.sort_unstable_by_key(|(_r, ts)| *ts);
            let (last_rating, last_time) = vec[vec.len() - 1];
            TimestampRatingRecord {
                user,
                item,
                rating,
                timestamp,
                last_rating,
                last_time,
                nratings: vec.len() as i32,
            }
        }
    }
}

impl FromRatingSet for TimelessRatingRecord {
    /// Same median logic as the timestamped variant, discarding all times.
    /// NOTE(review): the median computation is duplicated between the two
    /// impls; a shared helper would keep them from drifting apart.
    fn create(user: i32, item: i32, ratings: Vec<(f32, i64)>) -> Self {
        let mut vec = ratings;
        if vec.len() == 1 {
            // fast path
            let (rating, _ts) = vec[0];
            TimelessRatingRecord {
                user,
                item,
                rating,
                nratings: 1,
            }
        } else {
            vec.sort_unstable_by_key(|(r, _ts)| (r * 10.0) as i32);
            let (rating, _ts) = if vec.len() % 2 == 0 {
                let mp_up = vec.len() / 2;
                // we need this and the previous
                let (r1, ts1) = vec[mp_up - 1];
                let (r2, ts2) = vec[mp_up];
                // and average
                ((r1 + r2) * 0.5, (ts1 + ts2) / 2)
            } else {
                vec[vec.len() / 2]
            };
            TimelessRatingRecord {
                user,
                item,
                rating,
                nratings: vec.len() as i32,
            }
        }
    }
}

/// Rating deduplicator. Accumulates every (rating, timestamp) observation
/// per (user, item) key, then collapses each key into one record `R` when
/// saved.
pub struct RatingDedup<R>
where
    R: FromRatingSet,
{
    _phantom: PhantomData<R>,
    table: HashMap<Key, Vec<(f32, i64)>>,
}

impl<I: Interaction, R> Dedup<I> for RatingDedup<R>
where
    R: FromRatingSet + ArrowSerialize + Send + Sync + 'static,
    R::MutableArrayType: TryExtend<Option<R>>,
{
    /// Record one interaction; errors if the interaction carries no rating.
    fn add_interaction(&mut self, act: I) -> Result<()> {
        let rating = act
            .get_rating()
            .ok_or_else(|| anyhow!("rating deduplicator requires ratings"))?;
        self.record(act.get_user(), act.get_item(), rating, act.get_timestamp());
        Ok(())
    }

    fn save(&mut self, path: &Path) -> Result<usize> {
        self.write_ratings(path)
    }
}

impl<R> Default for RatingDedup<R>
where
    R: FromRatingSet + ArrowSerialize + Send + Sync + 'static,
    R::MutableArrayType: TryExtend<Option<R>>,
{
    fn default() -> RatingDedup<R> {
        RatingDedup {
            _phantom: PhantomData,
            table: HashMap::new(),
        }
    }
}

impl<R> RatingDedup<R>
where
    R: FromRatingSet + ArrowSerialize + Send + Sync + 'static,
    R::MutableArrayType: TryExtend<Option<R>>,
{
    /// Add a rating to the deduplicator.
    pub fn record(&mut self, user: i32, item: i32, rating: f32, timestamp: i64) {
        let k = Key::new(user, item);
        // get the vector for this user/item pair; most pairs get a single
        // rating, hence the capacity of 1
        let vec = self.table.entry(k).or_insert_with(|| Vec::with_capacity(1));
        // and insert our records!
        vec.push((rating, timestamp));
    }

    /// Save the rating table to disk. Consumes the in-memory table (the
    /// deduplicator is empty afterwards) and returns the row count from the
    /// table writer.
    pub fn write_ratings<P: AsRef<Path>>(&mut self, path: P) -> Result<usize> {
        let path = path.as_ref();
        info!(
            "writing {} deduplicated ratings to {}",
            friendly::scalar(self.table.len()),
            path.display()
        );
        let mut writer = TableWriter::open(path)?;

        let n = self.table.len() as u64;
        let timer = Timer::new();
        let pb = item_progress(n, "writing ratings");

        // we're going to consume the hashtable.
        let table = take(&mut self.table);
        for (k, vec) in pb.wrap_iter(table.into_iter()) {
            let record = R::create(k.user, k.item, vec);
            writer.write_object(record)?;
        }

        let rv = writer.finish()?;
        pb.finish_and_clear();
        info!(
            "wrote {} ratings in {}, file is {}",
            friendly::scalar(n),
            timer.human_elapsed(),
            friendly::bytes(file_size(path)?)
        );

        Ok(rv)
    }
}
/// 使用者确保Slice持有的数据不会被释放。 #[derive(Clone)] pub struct Slice { ptr: usize, len: usize, } impl Slice { pub fn new(ptr: usize, len: usize) -> Self { Self { ptr, len } } pub fn from(data: &[u8]) -> Self { Self { ptr: data.as_ptr() as usize, len: data.len(), } } #[inline(always)] pub fn data(&self) -> &[u8] { unsafe { std::slice::from_raw_parts(self.ptr as *const u8, self.len) } } #[inline(always)] pub fn len(&self) -> usize { self.len } #[inline(always)] pub fn as_ptr(&self) -> *const u8 { self.ptr as *const u8 } #[inline(always)] pub fn as_mut_ptr(&mut self) -> *mut u8 { self.ptr as *mut u8 } #[inline(always)] pub fn backwards(&mut self, n: usize) { debug_assert!(self.len >= n); self.len -= n; } } impl AsRef<[u8]> for Slice { #[inline(always)] fn as_ref(&self) -> &[u8] { self.data() } } impl Default for Slice { fn default() -> Self { Slice { ptr: 0, len: 0 } } } use std::ops::Deref; impl Deref for Slice { type Target = [u8]; fn deref(&self) -> &Self::Target { self.data() } }
use bevy::prelude::{Vec2, Vec3}; use std::ops::Add; use std::cmp::Ordering; #[derive(Clone, Copy, Debug, PartialEq)] pub struct Position { pub x: f32, pub y: f32, pub z: f32, pub absolute: bool } impl Position { pub fn new(x: f32, y: f32, z: f32) -> Self { Position { x, y, z, absolute: false } } pub fn new_2d(x: f32, y: f32) -> Self { Position::new(x, y, 0f32) } // 相对位置到绝对位置 pub fn convert(mut self, ww: f32, wh: f32) -> Self { /*if !self.absolute { self.x = self.x / ARENA_WIDTH as f32 * ww - (ww / 2.0) + (ww / ARENA_WIDTH as f32 / 2.0); self.y = self.y / ARENA_HEIGHT as f32 * wh - (wh / 2.0) + (wh / ARENA_HEIGHT as f32 / 2.0); } else { self.x = self.x - ww / 2f32; self.y = self.y - wh / 2f32; }*/ self.x = self.x - ww / 2f32; self.y = self.y - wh / 2f32; self } } impl Add for Position { type Output = Self; fn add(self, rhs: Self) -> Self::Output { Position::new(rhs.x + self.x, rhs.y + self.y, rhs.z + self.z) } } impl PartialOrd for Position { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.x.partial_cmp(&other.x) .and(self.y.partial_cmp(&other.y)) } } impl From<Position> for Vec2 { fn from(pos: Position) -> Self { Vec2::new(pos.x, pos.y) } } impl From<Position> for Vec3 { fn from(pos: Position) -> Self { Vec3::new(pos.x, pos.y, pos.z) } } impl From<&Position> for Vec3 { fn from(pos: &Position) -> Self { Vec3::new(pos.x, pos.y, pos.z) } }
use xml::reader; use std::fmt::{self, Display, Debug}; use std::error::Error as StdError; use std::num; use serde::de::Error as SerdeError; pub enum Error { ParseIntError(num::ParseIntError), Syntax(reader::Error), Custom(String), } pub type VResult<V> = Result<V, Error>; macro_rules! expect { ($actual: expr, $($expected: pat)|+ => $if_ok: expr) => { match $actual { $($expected)|+ => $if_ok, actual => Err($crate::Error::Custom(format!( "Expected token {}, found {:?}", stringify!($($expected)|+), actual ))) } } } #[cfg(debug_assertions)] macro_rules! debug_expect { ($actual: expr, $($expected: pat)|+ => $if_ok: expr) => { match $actual { $($expected)|+ => $if_ok, actual => panic!( "Internal error: Expected token {}, found {:?}", stringify!($($expected)|+), actual ) } } } #[cfg(not(debug_assertions))] macro_rules! debug_expect { ($actual: expr, $($expected: pat)|+ => $if_ok: expr) => { match $actual { $($expected)|+ => $if_ok, _ => unreachable!() } } } impl Display for Error { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { Error::ParseIntError(ref error) => Display::fmt(error, fmt), Error::Syntax(ref error) => Display::fmt(error, fmt), Error::Custom(ref display) => Display::fmt(display, fmt), } } } impl Debug for Error { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { Error::ParseIntError(ref error) => Display::fmt(error, fmt), Error::Syntax(ref error) => Debug::fmt(error, fmt), Error::Custom(ref display) => Display::fmt(display, fmt), } } } impl StdError for Error { fn description(&self) -> &str { match *self { Error::ParseIntError(ref error) => error.description(), Error::Syntax(ref error) => error.description(), Error::Custom(_) => "other error", } } fn cause(&self) -> Option<&StdError> { match *self { Error::ParseIntError(ref error) => Some(error), Error::Syntax(ref error) => Some(error), _ => None, } } } impl SerdeError for Error { fn custom<T: Display>(msg: T) -> Self { Error::Custom(msg.to_string()) } }
//! Queue-management commands for the music bot: `add` (ytdl/search), `raw`
//! (direct ffmpeg URI) and `icecast` (stream with metadata), plus the shared
//! `enqueue` helper that joins the voice channel and enqueues the track.

use super::{utils::*, TrackOwner};
use serde_json::Value;
use serenity::{
    client::Context,
    framework::standard::{macros::command, Args, CommandResult},
    model::channel::Message,
};
use songbird::{
    input::{cached::Compressed, Metadata},
    Bitrate,
};
use std::time::Duration;
use tracing::{info, warn};

#[cfg(feature = "cache")]
use crate::cache::{self, TrackCache, TrackEndEvent, BITRATE};
#[cfg(feature = "cache")]
use songbird::{Event, TrackEvent};
#[cfg(feature = "cache")]
use tokio::{fs::File, io::AsyncReadExt};

/// Add a song to the queue: direct URL if the query starts with "http",
/// otherwise a YouTube search.
#[command]
#[aliases("a")]
#[only_in(guilds)]
#[min_args(1)]
#[description = "Add song to queue"]
// TODO: Handle playlists
pub async fn add(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {
    // Re-join all arguments into the original free-text query.
    let query = args
        .iter()
        .map(|a| a.unwrap_or("".to_owned()))
        .collect::<Vec<String>>()
        .join(" ");
    // The status message and input creation are paired so the status can be
    // deleted once the track is enqueued. NOTE(review): `m.unwrap()` panics
    // if sending the status message failed while input creation succeeded.
    let (input, query_msg) = match if query.starts_with("http") {
        (
            msg.channel_id
                .say(&ctx.http, format!("Adding {} to the queue", query))
                .await,
            songbird::ytdl(&query).await,
        )
    } else {
        (
            msg.channel_id
                .say(&ctx.http, format!("Searching on Youtube {}", query))
                .await,
            songbird::input::ytdl_search(&query).await,
        )
    } {
        (m, Ok(i)) => (i, m.unwrap()),
        (_, Err(e)) => {
            info!("Error creating input: {:?}", e);
            handle_message(
                msg.channel_id
                    .say(&ctx.http, format!("Error: {:?}", e))
                    .await,
            );
            return Ok(());
        }
    };
    enqueue(ctx, msg, input).await;
    handle_message(query_msg.delete(&ctx.http).await);
    Ok(())
}

/// Add a raw ffmpeg URI (http/rtmp/ftp/hls/tcp/udp) to the queue.
#[command]
#[aliases("r", "addraw", "add-raw", "ar")]
#[only_in(guilds)]
#[min_args(1)]
#[description = "Add ffmpeg URI to the queue"]
pub async fn raw(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {
    let query: String = args.single().unwrap();
    // Whitelist of URI schemes ffmpeg is allowed to open.
    let (input, query_msg) = match if {
        query.starts_with("http")
            || query.starts_with("rtmp")
            || query.starts_with("ftp")
            || query.starts_with("hls")
            || query.starts_with("tcp")
            || query.starts_with("udp")
    } {
        (
            msg.channel_id
                .say(&ctx.http, format!("Adding {} to the queue", query))
                .await,
            songbird::ffmpeg(&query).await,
        )
    } else {
        handle_message(
            msg.channel_id
                .say(&ctx.http, format!("Invalid protocol"))
                .await,
        );
        return Ok(());
    } {
        (m, Ok(i)) => (i, m.unwrap()),
        (_, Err(e)) => {
            info!("Error creating input: {:?}", e);
            handle_message(
                msg.channel_id
                    .say(&ctx.http, format!("Error: {:?}", e))
                    .await,
            );
            return Ok(());
        }
    };
    enqueue(ctx, msg, input).await;
    handle_message(query_msg.delete(&ctx.http).await);
    Ok(())
}

/// Add an icecast stream; fetches the server's status-json.xsl to fill in
/// stream metadata before enqueueing.
#[command]
#[aliases("i", "ice", "ai", "add-icecast")]
#[only_in(guilds)]
#[min_args(1)]
#[description = "Add icecast stream to the queue"]
// TODO: Parse start time as SystemTime
pub async fn icecast(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {
    use crate::icecast::FromIceJson;
    let query: String = args.single().unwrap();
    let (input, query_msg) = match if query.starts_with("http") {
        (
            msg.channel_id
                .say(&ctx.http, format!("Adding {} to the queue", query))
                .await,
            {
                // Icecast convention: stats endpoint lives at the server
                // root. NOTE(review): the `unwrap`s on parse/scheme/authority
                // panic on malformed user input; `?` on reqwest is fine.
                let uri: http::uri::Uri = query.parse().unwrap();
                let stats = format!(
                    "{}://{}/status-json.xsl",
                    uri.scheme_str().unwrap(),
                    uri.authority().unwrap(),
                );
                let json: Value = reqwest::get(&stats).await?.json().await?;
                songbird::ffmpeg(&query).await.and_then(|mut i| {
                    i.metadata = Box::new(Metadata::from_ice_json(json, &query));
                    Ok(i)
                })
            },
        )
    } else {
        handle_message(
            msg.channel_id
                .say(&ctx.http, format!("Invalid protocol"))
                .await,
        );
        return Ok(());
    } {
        (m, Ok(i)) => (i, m.unwrap()),
        (_, Err(e)) => {
            info!("Error creating input: {:?}", e);
            handle_message(
                msg.channel_id
                    .say(&ctx.http, format!("Error: {:?}", e))
                    .await,
            );
            return Ok(());
        }
    };
    enqueue(ctx, msg, input).await;
    handle_message(query_msg.delete(&ctx.http).await);
    Ok(())
}

/// Join the author's voice channel (if needed), optionally cache/compress
/// the input, and push it onto the guild's queue.
async fn enqueue(ctx: &Context, msg: &Message, input: songbird::input::Input) {
    let guild = msg.guild(&ctx.cache).await.unwrap();
    let guild_id = guild.id;
    // The author must already be in a voice channel; we join theirs.
    let channel_id = match guild
        .voice_states
        .get(&msg.author.id)
        .and_then(|vs| vs.channel_id)
    {
        Some(id) => id,
        None => {
            handle_message(msg.reply(&ctx, "not in a voice channel").await);
            return;
        }
    };
    let manager = songbird::get(ctx).await.unwrap().clone();
    if manager.get(guild_id).is_none() {
        let (_, join_result) = manager.join(guild_id, channel_id).await;
        if let Err(e) = join_result {
            info!("Couldn't join voice channel: {:?}", e);
            // NOTE(review): `{:?}` here is inside a plain string literal, so
            // the user-visible message literally contains "{:?}" — this
            // should be `format!("Couldn't join voice channel: {:?}", e)`.
            handle_message(
                msg.channel_id
                    .say(&ctx, "Couldn't join voice channel: {:?}")
                    .await,
            );
            return;
        }
    }
    let meta = input.metadata.clone();
    // Handle on the compressed store, kept so the end-of-track event can
    // persist it into the cache.
    #[cfg(feature = "cache")]
    let mut comp = None;
    #[cfg(feature = "cache")]
    let cache = {
        let read = ctx.data.read().await;
        read.get::<TrackCache>().unwrap().clone()
    };
    // NOTE(review): everything below — including the actual enqueue — is
    // nested inside this `if let`, so an input with no `source_url` is
    // silently dropped and never queued. Confirm that is intended.
    if let Some(_url) = meta.source_url {
        // With the cache feature: try the on-disk DCA cache first, fall back
        // to an in-memory compressed store for short tracks.
        #[cfg(feature = "cache")]
        let input = if let Some(p) = cache.get(&_url).await.ok().flatten() {
            use songbird::input::dca;
            info!("Cache hit for {}", _url);
            let file = format!("audio_cache/{}", p);
            let mut input = dca(&file).await.unwrap();
            // Metadata that doesn't fit in the standard dca1 stuff is in the extra
            // field of the json metadata
            // TODO: remove from cache and fetch again if fail
            let extra_meta = {
                // Parse the DCA1 header by hand: 4 magic bytes, then an
                // i32-le length-prefixed JSON metadata blob.
                let mut reader = handle_io(File::open(&file).await);
                let mut header = [0u8; 4];
                handle_io(reader.read_exact(&mut header).await);
                if header != b"DCA1"[..] {
                    tracing::error!("Invalid magic bytes");
                    return;
                }
                let size = handle_io(reader.read_i32_le().await);
                if size < 2 {
                    tracing::error!("Invalid metadata size");
                    return;
                };
                let mut json = Vec::with_capacity(size as usize);
                let mut json_reader = reader.take(size as u64);
                handle_io(json_reader.read_to_end(&mut json).await);
                let value = serde_json::from_slice(&json).unwrap_or_default();
                cache::extra_meta(&value)
            };
            {
                // Merge the extra fields back into the track's metadata.
                input.metadata = Box::new(Metadata {
                    date: extra_meta.date,
                    duration: extra_meta.duration,
                    thumbnail: extra_meta.thumbnail,
                    ..*input.metadata
                })
            }
            input
        } else if let Some(d) = meta.duration {
            // TODO: Add config entry to limit length
            if d <= Duration::from_secs(1200) {
                match Compressed::new(input, Bitrate::BitsPerSecond(BITRATE as i32)) {
                    Ok(compressed) => {
                        comp = Some(compressed.new_handle());
                        // Load the whole thing into RAM.
                        // Audio artifacts appear when not doing this and loading the whole thing
                        // in ram is usually cheaper than keeping ytdl and ffmpeg open
                        let _ = compressed.raw.spawn_loader();
                        compressed.into()
                    }
                    Err(e) => {
                        warn!("Error creating compressed memory audio store: {:?}", e);
                        handle_message(
                            msg.channel_id
                                .say(&ctx.http, format!("Error: {:?}", e))
                                .await,
                        );
                        return;
                    }
                }
            } else {
                input
            }
        } else {
            input
        };
        // Without the cache feature: only the in-memory compression path.
        // TODO: Add config entry to limit length
        #[cfg(not(feature = "cache"))]
        let input = if meta.duration <= Some(Duration::from_secs(1200)) {
            match Compressed::new(input, Bitrate::BitsPerSecond(128_000)) {
                Ok(compressed) => {
                    // Load the whole thing into RAM.
                    // Audio artifacts appear when not doing this and loading the whole thing
                    // in ram is usually cheaper than keeping ytdl and ffmpeg open
                    let _ = compressed.raw.spawn_loader();
                    compressed.into()
                }
                Err(e) => {
                    warn!("Error creating compressed memory audio store: {:?}", e);
                    handle_message(
                        msg.channel_id
                            .say(&ctx.http, format!("Error: {:?}", e))
                            .await,
                    );
                    return;
                }
            }
        } else {
            input
        };
        // NOTE(review): this join block duplicates the one at the top of the
        // function (with a correct, non-`{:?}` message this time).
        let manager = songbird::get(ctx).await.unwrap().clone();
        if manager.get(guild_id).is_none() {
            let (_, join_result) = manager.join(guild_id, channel_id).await;
            if let Err(_) = join_result {
                handle_message(
                    msg.channel_id
                        .say(&ctx, "Couldn't join voice channel")
                        .await,
                );
            }
        }
        let locked = manager.get(guild_id).unwrap();
        let mut call = locked.lock().await;
        let (track, track_handle) = songbird::tracks::create_player(input);
        // Remember who requested the track so other commands can check it.
        let mut typemap = track_handle.typemap().write().await;
        typemap.insert::<TrackOwner>(msg.author.id);
        // When the track ends, persist the compressed audio into the cache.
        #[cfg(feature = "cache")]
        if let Some(c) = comp {
            let _ = track_handle.add_event(
                Event::Track(TrackEvent::End),
                TrackEndEvent {
                    cache: cache.clone(),
                    compressed: c,
                },
            );
        };
        call.enqueue(track);
    }
}
use super::types::DataType; pub struct Regs<RegType: DataType> { regs: [RegType; 32] }
use bincode::rustc_serialize::DecodingError; use std::net::TcpStream; use std::io::{Read, Error}; use commands::ServerCommand; use ser_de::de; pub struct Reader { de_buf: Vec<u8>, buf: [u8; 4098], } impl Reader { pub fn new() -> Reader { Reader { de_buf: Vec::new(), buf: [0; 4098], } } pub fn read(&mut self, stream: &mut TcpStream) -> Result<(), Error> { let read = try!( stream.read(&mut self.buf[..]) ); self.de_buf.push_all(&self.buf[0..read]); Ok(()) } pub fn commands(&mut self) -> Result<Vec<ServerCommand>, DecodingError> { let mut res = Vec::new(); loop { match try!( de(&mut self.de_buf) ) { Some(comm) => { trace!("Incomig: {:?}", &comm); res.push(comm) }, None => { break; }, } } Ok(res) } }
use std::borrow::Cow; pub trait Named<'a> { fn name(&self) -> Cow<'a, str>; } pub trait FromRaw<'a, T> { type Raw; fn from_raw(raw: &'a Self::Raw) -> T; }
// svd2rust-generated accessors for the CONF2R calibration register.
// Field layout (from the readers below): OFFSET2 bits 0..=11, GAIN2 bits
// 20..=22, SE2 bits 26..=27, COMMON2 bits 30..=31; the remaining bits are
// not exposed. Do not hand-edit beyond comments — regenerate from the SVD.
#[doc = "Register `CONF2R` reader"]
pub type R = crate::R<CONF2R_SPEC>;
#[doc = "Register `CONF2R` writer"]
pub type W = crate::W<CONF2R_SPEC>;
#[doc = "Field `OFFSET2` reader - Twelve-bit calibration offset for configuration 2"]
pub type OFFSET2_R = crate::FieldReader<u16>;
#[doc = "Field `OFFSET2` writer - Twelve-bit calibration offset for configuration 2"]
pub type OFFSET2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 12, O, u16>;
#[doc = "Field `GAIN2` reader - Gain setting for configuration 2"]
pub type GAIN2_R = crate::FieldReader;
#[doc = "Field `GAIN2` writer - Gain setting for configuration 2"]
pub type GAIN2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `SE2` reader - Single-ended mode for configuration 2"]
pub type SE2_R = crate::FieldReader;
#[doc = "Field `SE2` writer - Single-ended mode for configuration 2"]
pub type SE2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `COMMON2` reader - Common mode for configuration 2"]
pub type COMMON2_R = crate::FieldReader;
#[doc = "Field `COMMON2` writer - Common mode for configuration 2"]
pub type COMMON2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
impl R {
    #[doc = "Bits 0:11 - Twelve-bit calibration offset for configuration 2"]
    #[inline(always)]
    pub fn offset2(&self) -> OFFSET2_R {
        OFFSET2_R::new((self.bits & 0x0fff) as u16)
    }
    #[doc = "Bits 20:22 - Gain setting for configuration 2"]
    #[inline(always)]
    pub fn gain2(&self) -> GAIN2_R {
        GAIN2_R::new(((self.bits >> 20) & 7) as u8)
    }
    #[doc = "Bits 26:27 - Single-ended mode for configuration 2"]
    #[inline(always)]
    pub fn se2(&self) -> SE2_R {
        SE2_R::new(((self.bits >> 26) & 3) as u8)
    }
    #[doc = "Bits 30:31 - Common mode for configuration 2"]
    #[inline(always)]
    pub fn common2(&self) -> COMMON2_R {
        COMMON2_R::new(((self.bits >> 30) & 3) as u8)
    }
}
impl W {
    #[doc = "Bits 0:11 - Twelve-bit calibration offset for configuration 2"]
    #[inline(always)]
    #[must_use]
    pub fn offset2(&mut self) -> OFFSET2_W<CONF2R_SPEC, 0> {
        OFFSET2_W::new(self)
    }
    #[doc = "Bits 20:22 - Gain setting for configuration 2"]
    #[inline(always)]
    #[must_use]
    pub fn gain2(&mut self) -> GAIN2_W<CONF2R_SPEC, 20> {
        GAIN2_W::new(self)
    }
    #[doc = "Bits 26:27 - Single-ended mode for configuration 2"]
    #[inline(always)]
    #[must_use]
    pub fn se2(&mut self) -> SE2_W<CONF2R_SPEC, 26> {
        SE2_W::new(self)
    }
    #[doc = "Bits 30:31 - Common mode for configuration 2"]
    #[inline(always)]
    #[must_use]
    pub fn common2(&mut self) -> COMMON2_W<CONF2R_SPEC, 30> {
        COMMON2_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "configuration 2 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`conf2r::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`conf2r::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CONF2R_SPEC;
impl crate::RegisterSpec for CONF2R_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`conf2r::R`](R) reader structure"]
impl crate::Readable for CONF2R_SPEC {}
#[doc = "`write(|w| ..)` method takes [`conf2r::W`](W) writer structure"]
impl crate::Writable for CONF2R_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CONF2R to value 0"]
impl crate::Resettable for CONF2R_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
extern crate random_choice;

#[cfg(test)]
mod tests {
    use std::collections::BTreeMap;
    use random_choice::random_choice;

    /// Draw 10k weighted samples (weights 0..500, linear) and check that
    /// adjacent weights are picked with near-adjacent frequencies.
    /// NOTE(review): the `<= 2` neighbor check is a statistical property of
    /// stochastic universal sampling with linear weights, not a hard
    /// guarantee — could flake if the sampler's RNG changes; confirm.
    #[test]
    fn test_random_choice_f64() {
        let capacity: usize = 500;
        let mut samples: Vec<usize> = Vec::with_capacity(capacity);
        let mut weights: Vec<f64> = Vec::with_capacity(capacity);
        for i in 0..capacity {
            samples.push(i);
            weights.push(i as f64);
        }
        let number_choices = 10000;
        let choices = random_choice().random_choice_f64(&samples, &weights, number_choices);
        assert!(choices.len() == number_choices);
        // Count how often each sample was chosen.
        let mut weight_counter = BTreeMap::new();
        for choice in choices {
            let counter = weight_counter.entry(choice).or_insert(0);
            *counter += 1;
        }
        // BTreeMap iterates samples in weight order; neighbouring samples
        // should have counts within 2 of each other (first compares to 0).
        let mut last_value: usize = 0;
        for (_, value) in &weight_counter {
            assert!((last_value as i32 - (*value) as i32).abs() <= 2);
            last_value = *value;
            // println!("({}, {})", key, value);
        }
    }

    /// Same distribution check as above, for the f32 entry point.
    #[test]
    fn test_random_choice_f32() {
        let capacity: usize = 500;
        let mut samples: Vec<usize> = Vec::with_capacity(capacity);
        let mut weights: Vec<f32> = Vec::with_capacity(capacity);
        for i in 0..capacity {
            samples.push(i);
            weights.push(i as f32);
        }
        let number_choices = 10000;
        let choices = random_choice().random_choice_f32(&samples, &weights, number_choices);
        assert!(choices.len() == number_choices);
        let mut weight_counter = BTreeMap::new();
        for choice in choices {
            let counter = weight_counter.entry(choice).or_insert(0);
            *counter += 1;
        }
        let mut last_value: usize = 0;
        for (_, value) in &weight_counter {
            assert!((last_value as i32 - (*value) as i32).abs() <= 2);
            last_value = *value;
            // println!("({}, {})", key, value);
        }
    }

    /// Requesting zero choices must return an empty result.
    #[test]
    fn test_random_choice_zero_elements_f64() {
        let capacity: usize = 1000;
        let mut samples: Vec<usize> = Vec::with_capacity(capacity);
        let mut weights: Vec<f64> = Vec::with_capacity(capacity);
        for i in 0..capacity {
            samples.push(i + 1);
            weights.push((i + 1usize) as f64);
        }
        let choices = random_choice().random_choice_f64(&samples, &weights, 0 as usize);
        assert!(choices.len() == 0);
    }

    /// Smoke test for in-place selection; asserts nothing beyond "does not
    /// panic". NOTE(review): consider asserting on the resulting length.
    #[test]
    fn test_random_choice_in_place_f64() {
        let mut samples = vec!["hi", "this", "is", "a", "test!"];
        let weights: Vec<f64> = vec![1.0, 1.0, 1.0, 1.0, 1.0];
        random_choice().random_choice_in_place_f64(&mut samples, &weights);
        for sample in samples {
            print!("{}, ", sample);
        }
    }

    /// f32 variant of the in-place smoke test.
    #[test]
    fn test_random_choice_in_place_f32() {
        let mut samples = vec!["hi", "this", "is", "a", "test!"];
        let weights: Vec<f32> = vec![1.0, 1.0, 1.0, 1.0, 1.0];
        random_choice().random_choice_in_place_f32(&mut samples, &weights);
        for sample in samples {
            print!("{}, ", sample);
        }
    }
}
#[derive(Copy, Clone, Debug)] pub struct Vec2<T> { pub x: T, pub y: T } impl<T: std::ops::Neg<Output = T>> std::ops::Neg for Vec2<T> { type Output = Self; fn neg(self) -> Self { Self::new(-self.x, -self.y) } } impl<T: std::ops::Add<Output = T>> std::ops::Add for Vec2<T> { type Output = Self; fn add(self, other: Self) -> Self { Vec2::new(self.x + other.x, self.y + other.y) } } impl<T: std::ops::Sub<Output = T>> std::ops::Sub for Vec2<T> { type Output = Self; fn sub(self, other: Self) -> Self { Vec2::new(self.x - other.x, self.y - other.y) } } impl<T: std::ops::Div<Output = T> + Copy> std::ops::Div<T> for Vec2<T> { type Output = Self; fn div(self, v: T) -> Self { Vec2::new(self.x / v, self.y / v) } } impl<T: std::ops::Mul<Output = T> + Copy> std::ops::Mul<T> for Vec2<T> { type Output = Self; fn mul(self, v: T) -> Self { Vec2::new(self.x * v, self.y * v) } } impl<T> Vec2<T> { pub const fn new(x: T, y: T) -> Vec2<T> { Self {x, y} } } impl Vec2<f64> { pub fn norm(self) -> Vec2<f64> { self/(self.x * self.x + self.y * self.y).sqrt() } } #[derive(Copy, Clone, Debug)] pub struct Rect<T> { pub pos: Vec2<T>, pub size: Vec2<T> } impl<T: PartialOrd + std::ops::Add<Output = T> + Copy> std::ops::BitAnd<Rect<T>> for self::Rect<T> { type Output = bool; fn bitand(self, other: Self) -> bool { self.pos.x + self.size.x > other.pos.x && self.pos.x < other.pos.x + other.size.x && self.pos.y + self.size.y > other.pos.y && self.pos.y < other.pos.y + other.size.y } } impl<T> Rect<T> { pub const fn new(x: T, y: T, width: T, height: T) -> Rect<T> { Self {pos: Vec2::new(x, y), size: Vec2::new(width, height)} } } impl<T: std::ops::Add<Output = T>> Rect<T> { pub fn at(self, vec: Vec2<T>) -> Self { Rect { pos: vec + self.pos, size: self.size } } } pub type Rectf = Rect<f32>; pub type Vec2f = Vec2<f32>; pub type Recti = Rect<i32>; pub type Vec2i = Vec2<i32>; pub type Id = usize;
// This file is part of Substrate.

// Copyright (C) 2019-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Generators are a set of traits on which storage traits are implemented.
//!
//! (i.e. implementing the generator for StorageValue on a type will automatically derive the
//! implementation of StorageValue for this type).
//!
//! They are used by `decl_storage`.
//!
//! This is internal api and is subject to change.

// Per-storage-kind generator implementations.
mod double_map;
mod map;
mod value;

pub use double_map::StorageDoubleMap;
pub use map::StorageMap;
pub use value::StorageValue;

#[cfg(test)]
#[allow(dead_code)]
mod tests {
    use crate::storage::{generator::StorageValue, unhashed, IterableStorageMap};
    use crate::{assert_noop, assert_ok};
    use codec::Encode;
    use sp_io::TestExternalities;

    // Minimal runtime stub satisfying the bounds required by the
    // `decl_module!` / `decl_storage!` macro invocations below.
    struct Runtime {}

    pub trait Trait {
        type Origin;
        type BlockNumber;
    }

    impl Trait for Runtime {
        type Origin = u32;
        type BlockNumber = u32;
    }

    decl_module! {
        pub struct Module<T: Trait> for enum Call where origin: T::Origin {}
    }

    crate::decl_storage! {
        trait Store for Module<T: Trait> as Runtime {
            // One storage item of each kind, exercised by the tests below.
            Value get(fn value) config(): (u64, u64);
            NumberMap: map hasher(identity) u32 => u64;
            DoubleMap: double_map hasher(identity) u32, hasher(identity) u32 => u64;
        }
    }

    // `translate` must decode the raw stored value under the old type and
    // re-encode it under the new one.
    #[test]
    fn value_translate_works() {
        let t = GenesisConfig::default().build_storage().unwrap();
        TestExternalities::new(t).execute_with(|| {
            // put the old value `1111u32` in the storage.
            let key = Value::storage_value_final_key();
            unhashed::put_raw(&key, &1111u32.encode());

            // translate
            let translate_fn = |old: Option<u32>| -> Option<(u64, u64)> {
                old.map(|o| (o.into(), (o * 2).into()))
            };
            let _ = Value::translate(translate_fn);

            // new storage should be `(1111, 1111 * 2)`
            assert_eq!(Value::get(), (1111, 2222));
        })
    }

    // Map translation keeps entries for which the closure returns `Some`
    // and removes those for which it returns `None`.
    #[test]
    fn map_translate_works() {
        let t = GenesisConfig::default().build_storage().unwrap();
        TestExternalities::new(t).execute_with(|| {
            // start with a map of u32 -> u32.
            for i in 0u32..100u32 {
                unhashed::put(&NumberMap::hashed_key_for(&i), &(i as u64));
            }

            assert_eq!(
                NumberMap::iter().collect::<Vec<_>>(),
                (0..100).map(|x| (x as u32, x as u64)).collect::<Vec<_>>(),
            );

            // do translation: keep even keys (packing key and value into the
            // new value), drop odd keys.
            NumberMap::translate(
                |k: u32, v: u64| if k % 2 == 0 { Some((k as u64) << 32 | v) } else { None },
            );

            assert_eq!(
                NumberMap::iter().collect::<Vec<_>>(),
                (0..50u32)
                    .map(|x| x * 2)
                    .map(|x| (x, (x as u64) << 32 | x as u64))
                    .collect::<Vec<_>>(),
            );
        })
    }

    // `try_mutate` must commit the mutation on `Ok` and roll it back on `Err`,
    // for all three storage kinds.
    #[test]
    fn try_mutate_works() {
        let t = GenesisConfig::default().build_storage().unwrap();
        TestExternalities::new(t).execute_with(|| {
            // Everything starts at its default value.
            assert_eq!(Value::get(), (0, 0));
            assert_eq!(NumberMap::get(0), 0);
            assert_eq!(DoubleMap::get(0, 0), 0);

            // `assert_noop` ensures that the state does not change
            assert_noop!(
                Value::try_mutate(|value| -> Result<(), &'static str> {
                    *value = (2, 2);
                    Err("don't change value")
                }),
                "don't change value"
            );
            assert_noop!(
                NumberMap::try_mutate(0, |value| -> Result<(), &'static str> {
                    *value = 4;
                    Err("don't change value")
                }),
                "don't change value"
            );
            assert_noop!(
                DoubleMap::try_mutate(0, 0, |value| -> Result<(), &'static str> {
                    *value = 6;
                    Err("don't change value")
                }),
                "don't change value"
            );

            // Showing this explicitly for clarity
            assert_eq!(Value::get(), (0, 0));
            assert_eq!(NumberMap::get(0), 0);
            assert_eq!(DoubleMap::get(0, 0), 0);

            // An `Ok` result commits the mutation.
            assert_ok!(Value::try_mutate(|value| -> Result<(), &'static str> {
                *value = (2, 2);
                Ok(())
            }));
            assert_ok!(NumberMap::try_mutate(0, |value| -> Result<(), &'static str> {
                *value = 4;
                Ok(())
            }));
            assert_ok!(DoubleMap::try_mutate(0, 0, |value| -> Result<(), &'static str> {
                *value = 6;
                Ok(())
            }));

            assert_eq!(Value::get(), (2, 2));
            assert_eq!(NumberMap::get(0), 4);
            assert_eq!(DoubleMap::get(0, 0), 6);
        });
    }
}
// Connection examples for the MongoDB Rust driver. The code between each
// pair of "STARTS HERE" / "ENDS HERE" marker comments is extracted into
// documentation, so those markers must be preserved verbatim.
#![allow(dead_code)]
#![allow(unused_imports)]
#![allow(unused_variables)]

extern crate mongodb;

// Async client authenticating with SCRAM (username/password in the URI).
#[cfg(all(feature = "tokio-runtime", not(feature = "tokio-sync")))]
mod async_scram {
    // ASYNC SCRAM CONNECTION EXAMPLE STARTS HERE
    use mongodb::{options::ClientOptions, Client};

    #[tokio::main]
    async fn main() -> mongodb::error::Result<()> {
        // Parse the SRV connection string; the angle-bracket placeholders are
        // filled in by the reader.
        let client_options = ClientOptions::parse(
            "mongodb+srv://<username>:<password>@<cluster-url>/<dbname>?w=majority",
        )
        .await?;

        let client = Client::with_options(client_options)?;
        let database = client.database("test");

        // do something with database

        Ok(())
    }
    // CONNECTION EXAMPLE ENDS HERE
}

// Async client authenticating with an X.509 client certificate over TLS.
#[cfg(all(feature = "tokio-runtime", not(feature = "tokio-sync")))]
mod async_x509 {
    // ASYNC X509 CONNECTION EXAMPLE STARTS HERE
    use mongodb::{
        options::{AuthMechanism, ClientOptions, Credential, Tls, TlsOptions},
        Client,
    };
    use std::path::PathBuf;

    #[tokio::main]
    async fn main() -> mongodb::error::Result<()> {
        let mut client_options =
            ClientOptions::parse("mongodb+srv://<cluster-url>/<dbname>?w=majority").await?;

        // Select X.509 authentication; identity comes from the TLS cert below.
        client_options.credential = Some(
            Credential::builder()
                .mechanism(AuthMechanism::MongoDbX509)
                .build(),
        );

        // CA certificate plus combined client certificate/private key file.
        let tls_options = TlsOptions::builder()
            .ca_file_path(PathBuf::from("/path/to/ca-cert"))
            .cert_key_file_path(PathBuf::from("/path/to/cert"))
            .build();
        client_options.tls = Some(Tls::Enabled(tls_options));

        let client = Client::with_options(client_options)?;
        let database = client.database("test");

        // do something with database

        Ok(())
    }
    // CONNECTION EXAMPLE ENDS HERE
}

// Blocking (sync API) client with SCRAM authentication.
#[cfg(any(feature = "sync", feature = "tokio-sync"))]
mod sync_scram {
    // SYNC SCRAM CONNECTION EXAMPLE STARTS HERE
    use mongodb::{options::ClientOptions, sync::Client};

    fn main() -> mongodb::error::Result<()> {
        let client_options = ClientOptions::parse(
            "mongodb+srv://<username>:<password>@<cluster-url>/<dbname>?w=majority",
        )?;

        let client = Client::with_options(client_options)?;
        let database = client.database("test");

        // do something with database

        Ok(())
    }
    // CONNECTION EXAMPLE ENDS HERE
}

// Blocking (sync API) client with X.509 certificate authentication.
#[cfg(any(feature = "sync", feature = "tokio-sync"))]
mod sync_x509 {
    // SYNC X509 CONNECTION EXAMPLE STARTS HERE
    use mongodb::{
        options::{AuthMechanism, ClientOptions, Credential, Tls, TlsOptions},
        sync::Client,
    };
    use std::path::PathBuf;

    fn main() -> mongodb::error::Result<()> {
        let mut client_options =
            ClientOptions::parse("mongodb+srv://<cluster-url>/<dbname>?w=majority")?;

        // Select X.509 authentication; identity comes from the TLS cert below.
        client_options.credential = Some(
            Credential::builder()
                .mechanism(AuthMechanism::MongoDbX509)
                .build(),
        );

        // CA certificate plus combined client certificate/private key file.
        let tls_options = TlsOptions::builder()
            .ca_file_path(PathBuf::from("/path/to/ca-cert"))
            .cert_key_file_path(PathBuf::from("/path/to/cert"))
            .build();
        client_options.tls = Some(Tls::Enabled(tls_options));

        let client = Client::with_options(client_options)?;
        let database = client.database("test");

        // do something with database

        Ok(())
    }
    // CONNECTION EXAMPLE ENDS HERE
}
pub mod result; pub use result::Res; pub struct Engine { text: Vec<char>, position: usize, // if an error was made, then error store the position were you fucked up error: Option<usize>, result: crate::result::Res, } #[derive(Debug, PartialEq, Copy, Clone)] pub enum Keys { // if a valid char was given Valid(char), // if a bad char was given move in an internal bad state Invalid(char), // if a good char was given whilst being in a bad state Good(char), // if a bad char was given whilst being in a bad state Bad(char), // no char remaining Finished, } #[derive(Debug, PartialEq, Clone)] pub enum Delete { // nothing to do Running, // how much chars where deleted Del(usize, String), } pub use Delete::*; pub use Keys::*; impl Engine { pub fn new(text: text::Text) -> Self { Engine { text: text.text.chars().collect(), position: 0, error: None, result: crate::result::Res::new(text), } } fn is_error(&self) -> bool { self.error.is_some() } pub fn handle_keys(&mut self, k: char) -> Keys { let next_key = self.text.iter().nth(self.position); if next_key.is_none() && !self.is_error() { self.result.finished(); return Finished; } else if next_key.is_none() && self.is_error() { return Bad('\x00'); } let next_key = *next_key.unwrap(); let result = if next_key == k && self.error.is_none() { Valid(k) } else if next_key == k && self.error.is_some() { Good(next_key) } else if self.error.is_some() { Bad(next_key) } else { self.error = Some(self.position); Invalid(next_key) }; self.result.keys(result); self.position += 1; result } pub fn handle_backspace(&mut self) -> Delete { // first come back before the character we’re gonna delete if self.position == 0 { return Running; } self.position -= 1; // reprint this character in white if self.error.is_some() && self.error.unwrap() == self.position { self.error = None; } self.result.delete(); Del(1, self.text[self.position].to_string()) } pub fn result(self) -> crate::result::Res { self.result } }
// https://github.com/frankmcsherry/blog/blob/master/posts/2018-05-19.md

use std::rc::Rc;
use std::cell::RefCell;

/// A sorted list of distinct tuples.
#[derive(Debug)]
pub struct Relation<Tuple: Ord> {
    // Sorted, deduplicated storage.
    elements: Vec<Tuple>
}

impl<Tuple: Ord, I: IntoIterator<Item=Tuple>> From<I> for Relation<Tuple> {
    // Builds a relation by collecting, sorting, and deduplicating an iterator.
    fn from(iterator: I) -> Self {
        let mut elements: Vec<Tuple> = iterator.into_iter().collect();
        elements.sort_unstable();
        elements.dedup();
        Relation { elements }
    }
}

impl<Tuple: Ord> Relation<Tuple> {
    // Builds a relation from an owned vector, sorting and deduping in place.
    fn from_vec(mut elements: Vec<Tuple>) -> Self {
        elements.sort_unstable();
        elements.dedup();
        Relation { elements }
    }

    /// Merges two relations into their union.
    pub fn merge(self, other: Self) -> Self {
        let mut elements = Vec::with_capacity(self.elements.len() + other.elements.len());
        elements.extend(self.elements.into_iter());
        elements.extend(other.elements.into_iter());
        // `into()` re-sorts and deduplicates the concatenation.
        elements.into()
    }
}

// Manual Clone: the `Rc` handles are cloned, so every clone shares the same
// underlying stable/recent/to_add storage.
impl<Tuple: Ord> Clone for Variable<Tuple> {
    fn clone(&self) -> Self {
        Variable {
            distinct: self.distinct,
            name: self.name.clone(),
            stable: self.stable.clone(),
            recent: self.recent.clone(),
            to_add: self.to_add.clone(),
        }
    }
}

pub struct Variable<Tuple: Ord> {
    /// Should the variable be maintained distinctly.
    distinct: bool,
    /// A useful name for the variable.
    name: String,
    /// A list of already processed tuples.
    stable: Rc<RefCell<Vec<Relation<Tuple>>>>,
    /// A list of recently added but unprocessed tuples.
    recent: Rc<RefCell<Relation<Tuple>>>,
    /// A list of tuples yet to be introduced.
    to_add: Rc<RefCell<Vec<Relation<Tuple>>>>,
}

// A relation dereferences to its sorted slice of tuples.
impl<Tuple: Ord> std::ops::Deref for Relation<Tuple> {
    type Target = [Tuple];
    fn deref(&self) -> &Self::Target {
        &self.elements[..]
    }
}

impl<Tuple: Ord> Variable<Tuple> {
    // Finalizes the variable into a single relation. Asserts that iteration
    // has fully settled (nothing recent, nothing pending).
    fn complete(self) -> Relation<Tuple> {
        assert!(self.recent.borrow().is_empty());
        assert!(self.to_add.borrow().is_empty());
        let mut result: Relation<Tuple> = Vec::new().into();
        while let Some(batch) = self.stable.borrow_mut().pop() {
            result = result.merge(batch);
        }
        result
    }

    // Adds to `self` all tuples produced by joining `input1` and `input2`
    // on their first field.
    fn from_join<K: Ord, V1: Ord, V2: Ord>(
        &self,
        input1: &Variable<(K,V1)>,
        input2: &Variable<(K,V2)>,
        logic: impl FnMut(&K,&V1,&V2)->Tuple) {
        join_into(input1, input2, self, logic)
    }

    // Adds to `self` the image of `input` under `logic`.
    fn from_map<T2: Ord>(&self, input: &Variable<T2>, logic: impl FnMut(&T2)->Tuple) {
        map_into(input, self, logic)
    }

    // Creates an empty, distinct variable with the given diagnostic name.
    fn new(name: &str) -> Self {
        Variable {
            distinct: true,
            name: name.to_string(),
            stable: Rc::new(RefCell::new(Vec::new().into())),
            recent: Rc::new(RefCell::new(Vec::new().into())),
            to_add: Rc::new(RefCell::new(Vec::new().into())),
        }
    }

    /// Inserts a relation into the variable.
    ///
    /// This is most commonly used to load initial values into a variable.
    /// it is not obvious that it should be commonly used otherwise, but
    /// it should not be harmful.
    pub fn insert(&self, relation: Relation<Tuple>) {
        if !relation.is_empty() {
            self.to_add.borrow_mut().push(relation);
        }
    }
}

// Calls `result` once per (key, val1, val2) triple in the natural join of
// two key-sorted relations.
fn join_helper<Key: Ord, Val1: Ord, Val2: Ord>(
    input1: &Relation<(Key,Val1)>,
    input2: &Relation<(Key,Val2)>,
    mut result: impl FnMut(&Key, &Val1, &Val2)) {

    let mut slice1 = &input1.elements[..];
    let mut slice2 = &input2.elements[..];

    while !slice1.is_empty() && !slice2.is_empty() {
        use std::cmp::Ordering;

        // If the keys match call `result`, else advance the smaller key until they might.
        match slice1[0].0.cmp(&slice2[0].0) {
            Ordering::Less => {
                slice1 = gallop(slice1, |x| x.0 < slice2[0].0);
            },
            Ordering::Equal => {
                // Determine the number of matching keys in each slice.
                let count1 = slice1.iter().take_while(|x| x.0 == slice1[0].0).count();
                let count2 = slice2.iter().take_while(|x| x.0 == slice2[0].0).count();

                // Produce results from the cross-product of matches.
                for index1 in 0 .. count1 {
                    for index2 in 0 .. count2 {
                        result(&slice1[0].0, &slice1[index1].1, &slice2[index2].1);
                    }
                }

                // Advance slices past this key.
                slice1 = &slice1[count1..];
                slice2 = &slice2[count2..];
            }
            Ordering::Greater => {
                slice2 = gallop(slice2, |x| x.0 < slice1[0].0);
            }
        }
    }
}

// Galloping (exponential) search: advances `slice` past its prefix of
// elements satisfying `cmp`, doubling then halving the step size.
pub fn gallop<T>(mut slice: &[T], mut cmp: impl FnMut(&T)->bool) -> &[T] {
    // if empty slice, or already >= element, return
    if slice.len() > 0 && cmp(&slice[0]) {
        let mut step = 1;
        while step < slice.len() && cmp(&slice[step]) {
            slice = &slice[step..];
            step = step << 1;
        }

        step = step >> 1;
        while step > 0 {
            if step < slice.len() && cmp(&slice[step]) {
                slice = &slice[step..];
            }
            step = step >> 1;
        }

        slice = &slice[1..]; // advance one, as we always stayed < value
    }

    return slice;
}

// Semi-naive join: only combinations involving at least one `recent` batch
// are processed; stable x stable was already produced in earlier rounds.
pub fn join_into<Key: Ord, Val1: Ord, Val2: Ord, Result: Ord>(
    input1: &Variable<(Key, Val1)>,
    input2: &Variable<(Key, Val2)>,
    output: &Variable<Result>,
    mut logic: impl FnMut(&Key, &Val1, &Val2)->Result) {

    let mut results = Vec::new();

    // input1.recent and input2.stable.
    for batch2 in input2.stable.borrow().iter() {
        join_helper(&input1.recent.borrow(), &batch2, |k,v1,v2| results.push(logic(k,v1,v2)));
    }

    // input1.stable and input2.recent.
    for batch1 in input1.stable.borrow().iter() {
        join_helper(&batch1, &input2.recent.borrow(), |k,v1,v2| results.push(logic(k,v1,v2)));
    }

    // input1.recent and input2.recent.
    join_helper(&input1.recent.borrow(), &input2.recent.borrow(), |k,v1,v2| results.push(logic(k,v1,v2)));

    output.insert(results.into());
}

// Maps `input`'s recent tuples through `logic` into `output`.
pub fn map_into<T1: Ord, T2: Ord>(
    input: &Variable<T1>,
    output: &Variable<T2>,
    mut logic: impl FnMut(&T1)->T2) {

    let mut results = Vec::new();

    let recent = input.recent.borrow();
    for tuple in recent.iter() {
        results.push(logic(tuple));
    }

    output.insert(Relation::from_vec(results));
}

impl<Tuple: Ord> VariableTrait for Variable<Tuple> {
    fn changed(&mut self) -> bool {

        // 1. Merge self.recent into self.stable.
        if !self.recent.borrow().is_empty() {
            let mut recent = ::std::mem::replace(&mut (*self.recent.borrow_mut()), Vec::new().into());
            // Keep stable batches geometrically sized: fold comparable or
            // smaller batches into `recent` before pushing it.
            while self.stable.borrow().last().map(|x| x.len() <= 2 * recent.len()) == Some(true) {
                let last = self.stable.borrow_mut().pop().unwrap();
                recent = recent.merge(last);
            }
            self.stable.borrow_mut().push(recent);
        }

        // 2. Move self.to_add into self.recent.
        let to_add = self.to_add.borrow_mut().pop();
        if let Some(mut to_add) = to_add {
            while let Some(to_add_more) = self.to_add.borrow_mut().pop() {
                to_add = to_add.merge(to_add_more);
            }
            // 2b. Restrict `to_add` to tuples not in `self.stable`.
            if self.distinct {
                for batch in self.stable.borrow().iter() {
                    let mut slice = &batch[..];
                    // Only gallop if the slice is relatively large.
                    if slice.len() > 4 * to_add.elements.len() {
                        to_add.elements.retain(|x| {
                            slice = gallop(slice, |y| y < x);
                            slice.len() == 0 || &slice[0] != x
                        });
                    }
                    else {
                        to_add.elements.retain(|x| {
                            while slice.len() > 0 && &slice[0] < x {
                                slice = &slice[1..];
                            }
                            slice.len() == 0 || &slice[0] != x
                        });
                    }
                }
            }
            *self.recent.borrow_mut() = to_add;
        }

        // True while new tuples remain to be processed.
        !self.recent.borrow().is_empty()
    }
}

pub struct Iteration {
    // All variables participating in this iterative computation.
    variables: Vec<Box<VariableTrait>>,
}

impl Iteration {
    /// Create a new iterative context.
    pub fn new() -> Self {
        Iteration { variables: Vec::new() }
    }

    /// Reports whether any of the monitored variables have changed since
    /// the most recent call.
    pub fn changed(&mut self) -> bool {
        let mut result = false;
        for variable in self.variables.iter_mut() {
            // NOTE(review): this breaks after the first changed variable, so
            // later variables are not advanced in this round — they are only
            // revisited on subsequent calls. Confirm this early-out is
            // intentional (upstream datafrog advances every variable).
            if variable.changed() {
                result = true;
                break;
            }
        }
        result
    }

    /// Creates a new named variable associated with the iterative context.
    pub fn variable<Tuple: Ord+'static>(&mut self, name: &str) -> Variable<Tuple> {
        let variable = Variable::new(name);
        self.variables.push(Box::new(variable.clone()));
        variable
    }

    /// Creates a new named variable associated with the iterative context.
    ///
    /// This variable will not be maintained distinctly, and may advertise tuples as
    /// recent multiple times (perhaps unboundedly many times).
    pub fn variable_indistinct<Tuple: Ord+'static>(&mut self, name: &str) -> Variable<Tuple> {
        let mut variable = Variable::new(name);
        variable.distinct = false;
        self.variables.push(Box::new(variable.clone()));
        variable
    }
}

trait VariableTrait {
    /// Reports whether the variable has changed since it was last asked.
    fn changed(&mut self) -> bool;
}

// Demo: derives child_of and ancestor_of from a small parent_of relation,
// iterating to fixed point.
fn main() {
    let mut iteration1 = Iteration::new();

    let parent_of = iteration1.variable::<(&str, &str)>("parent_of");
    let child_of = iteration1.variable::<(&str, &str)>("child_of");
    let ancestor_of = iteration1.variable::<(&str, &str)>("ancestor_of");

    parent_of.insert((vec![("Archie", "Janice"), ("Janice", "Lemon"), ("Janice", "Mango"), ("Lemon", "Warms")]).into());

    while iteration1.changed() {
        child_of.from_map(&parent_of, |&(parent, child)| (child, parent));
        ancestor_of.from_join(&parent_of, &parent_of, |&a, &b, _c| (a,b));
        ancestor_of.from_join(&ancestor_of, &child_of, |_b, &a, &c| (c,a));
    }

    let ancestors = ancestor_of.complete();
    println!("Variable\t{:?}", ancestors);
}
use std::ops::{
    Add, AddAssign, Div, DivAssign, Index, IndexMut, Mul, MulAssign, Neg, Sub, SubAssign,
};

/// A three-component `f32` vector stored as a fixed-size array, usable as a
/// spatial vector (`x`/`y`/`z`) or as a colour (`r`/`g`/`b`).
#[derive(Copy, Clone)]
pub struct Vec3 {
    e: [f32; 3],
}

impl Vec3 {
    /// Builds a vector from its three components.
    #[inline(always)]
    pub fn new(e0: f32, e1: f32, e2: f32) -> Vec3 {
        Vec3 { e: [e0, e1, e2] }
    }

    /// First spatial component.
    #[inline(always)]
    pub fn x(&self) -> f32 {
        self.e[0]
    }

    /// Second spatial component.
    #[inline(always)]
    pub fn y(&self) -> f32 {
        self.e[1]
    }

    /// Third spatial component.
    #[inline(always)]
    pub fn z(&self) -> f32 {
        self.e[2]
    }

    /// Red channel (same slot as `x`).
    #[inline(always)]
    pub fn r(&self) -> f32 {
        self.e[0]
    }

    /// Green channel (same slot as `y`).
    #[inline(always)]
    pub fn g(&self) -> f32 {
        self.e[1]
    }

    /// Blue channel (same slot as `z`).
    #[inline(always)]
    pub fn b(&self) -> f32 {
        self.e[2]
    }

    /// Euclidean length.
    #[inline(always)]
    pub fn length(&self) -> f32 {
        self.squared_length().sqrt()
    }

    /// Squared Euclidean length (avoids the square root).
    #[inline(always)]
    pub fn squared_length(&self) -> f32 {
        let [a, b, c] = self.e;
        a * a + b * b + c * c
    }

    /// Normalizes this vector in place.
    #[inline(always)]
    pub fn make_unit_vector(&mut self) {
        let len = self.length();
        *self /= len;
    }

    /// Returns a unit-length copy of this vector.
    #[inline(always)]
    pub fn unit_vector(&self) -> Vec3 {
        self / self.length()
    }
}

impl<'a> Neg for &'a Vec3 {
    type Output = Vec3;

    /// Component-wise negation.
    #[inline(always)]
    fn neg(self) -> Self::Output {
        Vec3::new(-self.e[0], -self.e[1], -self.e[2])
    }
}

impl Index<usize> for Vec3 {
    type Output = f32;

    /// Read access by component index (0 = x, 1 = y, 2 = z).
    #[inline(always)]
    fn index(&self, component: usize) -> &f32 {
        &self.e[component]
    }
}

impl IndexMut<usize> for Vec3 {
    /// Write access by component index (0 = x, 1 = y, 2 = z).
    #[inline(always)]
    fn index_mut(&mut self, component: usize) -> &mut f32 {
        &mut self.e[component]
    }
}

impl MulAssign<f32> for Vec3 {
    /// Scales every component by `scalar`.
    #[inline(always)]
    fn mul_assign(&mut self, scalar: f32) {
        for component in self.e.iter_mut() {
            *component *= scalar;
        }
    }
}

impl DivAssign<f32> for Vec3 {
    /// Divides every component by `scalar`.
    #[inline(always)]
    fn div_assign(&mut self, scalar: f32) {
        for component in self.e.iter_mut() {
            *component /= scalar;
        }
    }
}

impl<'a> Mul<f32> for &'a Vec3 {
    type Output = Vec3;

    /// Returns this vector scaled by `scalar`.
    #[inline(always)]
    fn mul(self, scalar: f32) -> Vec3 {
        Vec3::new(self.e[0] * scalar, self.e[1] * scalar, self.e[2] * scalar)
    }
}

impl<'a> Div<f32> for &'a Vec3 {
    type Output = Vec3;

    /// Returns this vector divided by `scalar`.
    #[inline(always)]
    fn div(self, scalar: f32) -> Vec3 {
        Vec3::new(self.e[0] / scalar, self.e[1] / scalar, self.e[2] / scalar)
    }
}
/// Implements one compound-assignment operator (`+=`, `-=`, `*=`, `/=`) for
/// `Vec3`, applied component-wise, with both by-reference and by-value
/// right-hand sides.
///
/// `$trait`/`$func` name the `std::ops` trait and its method; `$op` is the
/// compound-assignment token applied per component.
macro_rules! impl_op_assign_vec {
    ($trait:ident, $func:ident, $op:tt) => {
        // Right-hand side taken by reference.
        impl<'a> $trait<&'a Vec3> for Vec3 {
            #[inline(always)]
            fn $func(&mut self, rhs: &'a Vec3) {
                self.e[0] $op rhs.e[0];
                self.e[1] $op rhs.e[1];
                self.e[2] $op rhs.e[2];
            }
        }

        // Right-hand side taken by value.
        impl $trait for Vec3 {
            #[inline(always)]
            fn $func(&mut self, rhs: Vec3) {
                self.e[0] $op rhs.e[0];
                self.e[1] $op rhs.e[1];
                self.e[2] $op rhs.e[2];
            }
        }
    };
}

/// Implements one binary operator (`+`, `-`, `*`, `/`) for `Vec3`, applied
/// component-wise, covering all four reference/value combinations of the
/// two operands.
macro_rules! impl_op_vec {
    ($trait:ident, $func:ident, $op:tt) => {
        // &Vec3 <op> &Vec3
        impl<'a> $trait for &'a Vec3 {
            type Output = Vec3;
            #[inline(always)]
            fn $func(self, rhs: &'a Vec3) -> Vec3 {
                Vec3 {
                    e: [
                        self.e[0] $op rhs.e[0],
                        self.e[1] $op rhs.e[1],
                        self.e[2] $op rhs.e[2],
                    ],
                }
            }
        }

        // &Vec3 <op> Vec3
        impl<'a> $trait<Vec3> for &'a Vec3 {
            type Output = Vec3;
            #[inline(always)]
            fn $func(self, rhs: Vec3) -> Vec3 {
                Vec3 {
                    e: [
                        self.e[0] $op rhs.e[0],
                        self.e[1] $op rhs.e[1],
                        self.e[2] $op rhs.e[2],
                    ],
                }
            }
        }

        // Vec3 <op> Vec3
        impl $trait for Vec3 {
            type Output = Vec3;
            #[inline(always)]
            fn $func(self, rhs: Vec3) -> Vec3 {
                Vec3 {
                    e: [
                        self.e[0] $op rhs.e[0],
                        self.e[1] $op rhs.e[1],
                        self.e[2] $op rhs.e[2],
                    ],
                }
            }
        }

        // Vec3 <op> &Vec3
        impl<'a> $trait<&'a Vec3> for Vec3 {
            type Output = Vec3;
            #[inline(always)]
            fn $func(self, rhs: &'a Vec3) -> Vec3 {
                Vec3 {
                    e: [
                        self.e[0] $op rhs.e[0],
                        self.e[1] $op rhs.e[1],
                        self.e[2] $op rhs.e[2],
                    ],
                }
            }
        }
    };
}

// Instantiate the component-wise arithmetic operators for Vec3.
impl_op_assign_vec!(AddAssign, add_assign, +=);
impl_op_assign_vec!(SubAssign, sub_assign, -=);
impl_op_assign_vec!(MulAssign, mul_assign, *=);
impl_op_assign_vec!(DivAssign, div_assign, /=);
impl_op_vec!(Add, add, +);
impl_op_vec!(Sub, sub, -);
impl_op_vec!(Mul, mul, *);
impl_op_vec!(Div, div, /);

/// Dot (inner) product of two vectors.
pub fn dot(v1: &Vec3, v2: &Vec3) -> f32 {
    v1[0] * v2[0] + v1[1] * v2[1] + v1[2] * v2[2]
}
#![cfg(all(test, feature = "test_e2e"))]
#[macro_use]
extern crate log;

use azure_core::prelude::*;
use azure_storage::blob::container::PublicAccess;
use azure_storage::blob::prelude::*;
use azure_storage::core::prelude::*;
use bytes::Bytes;

/// End-to-end test: creates an append blob with metadata and verifies the
/// metadata round-trips.
///
/// Requires the `STORAGE_ACCOUNT` / `STORAGE_MASTER_KEY` environment
/// variables and network access to the storage account.
#[tokio::test]
async fn put_append_blob() {
    let account =
        std::env::var("STORAGE_ACCOUNT").expect("Set env variable STORAGE_ACCOUNT first!");
    let master_key =
        std::env::var("STORAGE_MASTER_KEY").expect("Set env variable STORAGE_MASTER_KEY first!");

    let blob_name: &'static str = "append_blob.txt";
    let container_name: &'static str = "rust-upload-test";
    let _data = b"abcdef";

    let http_client = new_http_client();

    let storage = StorageAccountClient::new_access_key(http_client.clone(), &account, &master_key)
        .as_storage_client();
    let container = storage.as_container_client(container_name);
    let blob = container.as_blob_client(blob_name);

    // Create the container only when it does not already exist.
    // (`!any(..)` replaces the original `find(..).is_none()`, per clippy's
    // `search_is_some` lint.)
    if !storage
        .list_containers()
        .execute()
        .await
        .unwrap()
        .incomplete_vector
        .iter()
        .any(|x| x.name == container_name)
    {
        container
            .create()
            .public_access(PublicAccess::None)
            .execute()
            .await
            .unwrap();
    }

    let mut metadata = Metadata::new();
    metadata.insert("attrib", "value");
    metadata.insert("second", "something");

    // Create an empty append blob carrying the metadata.
    blob.put_append_blob()
        .content_type("text/plain")
        .metadata(&metadata)
        .execute()
        .await
        .unwrap();

    trace!("created {:?}", blob_name);

    // Read the metadata back and check it round-tripped.
    let resp = blob.get_metadata().execute().await.unwrap();
    assert_eq!(resp.metadata.len(), 2);
    assert_eq!(resp.metadata.get("attrib"), Some(Bytes::from("value")));
    assert_eq!(resp.metadata.get("second"), Some(Bytes::from("something")));
    assert_eq!(resp.metadata.get("not_found"), None);
}
use std::collections::BTreeSet;
use std::error::Error;
use std::fs::{read_to_string, File};
use std::time::Duration;

use actix::{Actor, System};
use actix_web::{App, HttpServer};
use daemonize::Daemonize;
use nix::errno::Errno;
use nix::sys::signal;
use nix::unistd::Pid;

use crate::caltrain_status::{Direction, TrainType};
use crate::cfg::{CALTRAIND_PATH, PID_PATH, SOCKET_PATH, STDERR_PATH, STDOUT_PATH};
use crate::daemon::cstatus_fetcher::CStatusFetcher;
use crate::daemon::notifier::Notifier;
use crate::station::Station;
use chrono::NaiveTime;

mod cstatus_fetcher;
mod notifier;

/// Terminates any previously-running daemon instance.
///
/// Reads the PID from the pid file and sends it SIGTERM. When the signal is
/// delivered, or the process no longer exists (`ESRCH`), the stale pid file
/// is removed; any other error leaves the pid file in place. Does nothing if
/// the pid file cannot be read.
///
/// # Panics
/// Panics if the pid file does not contain a valid integer, or if the pid
/// file cannot be deleted.
pub fn close_existing() {
    let pid = match read_to_string(PID_PATH.as_path()) {
        Ok(s) => s.parse::<i32>().expect("pid file did not contain pid"),
        Err(_) => return,
    };
    match signal::kill(Pid::from_raw(pid), signal::SIGTERM) {
        Ok(_) | Err(nix::Error::Sys(Errno::ESRCH)) => {
            std::fs::remove_file(PID_PATH.as_path()).expect("error deleting pid file")
        }
        _ => (),
    }
}

/// Daemonizes the current process: kills any previous instance, writes a pid
/// file, sets the working directory, and redirects stdout/stderr to log
/// files.
fn daemonize() -> Result<(), Box<dyn Error>> {
    close_existing();
    Daemonize::new()
        .pid_file(PID_PATH.as_path())
        .chown_pid_file(true)
        .working_directory(CALTRAIND_PATH.as_path())
        .stdout(File::create(STDOUT_PATH.as_path())?)
        .stderr(File::create(STDERR_PATH.as_path())?)
        .start()?;
    Ok(())
}

/// Daemonizes and runs the caltraind actor system: one status fetcher for
/// `station` polling at `refresh_rate`, one notifier per `notify_at` entry,
/// and an HTTP control server on a unix domain socket. Blocks until the
/// actor system shuts down.
pub fn start(
    n_threads: usize,
    train_types: BTreeSet<TrainType>,
    station: Station,
    direction: Direction,
    refresh_rate: Duration,
    notify_at: Vec<u16>,
    notify_after: Option<NaiveTime>,
) -> Result<(), Box<dyn Error>> {
    daemonize()?;
    let sys = System::new("caltraind");
    // Periodically fetches the Caltrain status for the configured station.
    CStatusFetcher::new(station, refresh_rate).start();
    // One notifier actor per requested value.
    // NOTE(review): `notify_at` entries look like minute offsets before
    // departure — confirm against `Notifier::new`.
    for n in notify_at {
        Notifier::new(train_types.clone(), n, direction, notify_after).start();
    }
    // Serve the control API over a unix domain socket.
    HttpServer::new(App::new)
        .workers(n_threads)
        .bind_uds(SOCKET_PATH.as_path())?
        .start();
    sys.run()?;
    Ok(())
}
use java_properties; use quick_xml::events::{BytesEnd, BytesStart, BytesText, Event}; use quick_xml::{Error, Reader, Writer}; use std::fs::{self, File}; use std::io::{BufReader, BufWriter, Cursor}; use std::str; use yaml_rust; static INVALID_END_PATH_VEC: &[char] = &['/', '\\']; //// validate project path,eg. application-${env}.properties /// and set application.profiles to ${env}. /// /// # Example: /// ```rust /// let project_path = "/data/some-server"; /// validate_project(project_path,"test"); /// ``` /// /// # with submodule starter /// ```rust /// let project_path = "/data/parent-module/sub-module"; /// validate_project(project_path,"test"); /// ``` pub fn validate_project<'a>(project_path: &'a str, env: &'a str) { //find deploy application.project,application-${env}.properties println!("validate project..."); let project_path = project_path.trim_end_matches(INVALID_END_PATH_VEC); let mut project_name: Vec<char> = Vec::new(); for c in project_path.chars().rev() { if c == '/' || c == '\\' { break; } project_name.push(c); } project_name.reverse(); let project_name: String = project_name.into_iter().collect(); let propertiesFile = File::open(format!( "{}/src/main/resources/application-{}.properties", project_path, env )) .unwrap_or_else(|_| { File::open(format!( "{}/src/main/resources/application-{}.yml", project_path, env )) .expect("the properties file: application.properties or application.yml is not exists") }); fix_package_name(&format!("{}/pom.xml", project_path), &project_name); } fn fix_package_name<'a>(pom_file: &'a str, package_name: &'a str) { let content = fix_package_name_from_str(&fs::read_to_string(pom_file).unwrap(), package_name); fs::write(pom_file, content).unwrap(); } fn fix_package_name_from_str<'a>(content: &'a str, package_name: &'a str) -> String { let mut reader = Reader::from_str(content); reader.trim_text(true); let mut writer = Writer::new(Cursor::new(Vec::new())); let mut need_write = false; let mut finded_final_name = false; let mut 
buf = Vec::new(); loop { match reader.read_event(&mut buf) { Ok(Event::Eof) => break, Ok(ref e) => { if let Event::Start(ref x) = e { if x.name() == b"finalName" { finded_final_name = true; let final_name = &reader.read_text(x.name(), &mut Vec::new()).expect( "cannot decode project final name in tag: <finalName>xxx</finalName>", ); if final_name != package_name { println!( "this current finalName is {},and fix to {}", final_name, package_name ); need_write = true; writer.write_event(e).unwrap(); writer .write_event(Event::Text(BytesText::from_plain_str(package_name))) .unwrap(); writer .write_event(Event::End(BytesEnd::owned(x.name().to_vec()))) .unwrap(); continue; } else { println!("finalName is correct and does not need to be fixed"); } } } else if let Event::End(ref x) = e { if x.name() == b"build" && finded_final_name == false { writer .write_event(Event::Start(BytesStart::owned( b"finalName".to_vec(), "finalName".len(), ))) .unwrap(); writer .write_event(Event::Text(BytesText::from_plain_str(package_name))) .unwrap(); writer .write_event(Event::End(BytesEnd::owned(b"finalName".to_vec()))) .unwrap(); } } writer.write_event(e).unwrap(); } Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e), } buf.clear(); } if need_write { String::from_utf8(writer.into_inner().into_inner()).unwrap() } else { content.to_owned() } } #[test] fn when_final_name_not_match_then_fix_it() { let content = fs::read_to_string("./tests/pom.xml").unwrap(); assert_eq!(557, content.find("demo-test").unwrap()); let content = fix_package_name_from_str(&content, "test_1"); assert_eq!(493, content.find("test_1").unwrap()); } #[test] fn when_final_name_not_exists_then_fix_it() { let content = fs::read_to_string("./tests/pom-with-no-final-name.xml").unwrap(); let content = fix_package_name_from_str(&content, "test_1"); assert_eq!(493, content.find("test_1").unwrap()); } #[test] fn when_final_name_matches_then_dont_fix_it() { let content = 
fs::read_to_string("./tests/pom.xml").unwrap(); assert_eq!(557, content.find("demo-test").unwrap()); let content = fix_package_name_from_str(&content, "demo-test"); assert_eq!(557, content.find("demo-test").unwrap()); }
use std::fs::File;
use std::path::PathBuf;
use std::process;
use structopt::StructOpt;

mod feed;
mod upload;

// CLI: either generate a podcast feed from audio files (optionally uploading
// everything to S3), or upload files directly. Plain `//` comments are used
// here because structopt turns `///` doc comments into CLI help text.
#[derive(Debug, StructOpt)]
#[structopt(about = "audiobook to podcast tool")]
enum Opt {
    // Generate a feed XML file; optionally upload it with the media.
    Feed {
        #[structopt(long)]
        title: String,
        #[structopt(long)]
        image: Option<PathBuf>,
        #[structopt(long)]
        region: String,
        #[structopt(long)]
        bucket: String,
        #[structopt(short, long)]
        out: PathBuf,
        #[structopt(long)]
        upload: bool,
        #[structopt(parse(from_os_str))]
        files: Vec<PathBuf>,
    },
    // Upload files to the S3 bucket without generating a feed.
    Upload {
        #[structopt(long)]
        region: String,
        #[structopt(long)]
        bucket: String,
        #[structopt(parse(from_os_str))]
        files: Vec<PathBuf>,
    },
}

fn main() {
    let opt = Opt::from_args();
    match opt {
        Opt::Feed {
            title,
            image,
            region,
            bucket,
            out,
            upload,
            files,
        } => {
            // The uploader is built even when not uploading: the feed embeds
            // the bucket's public base URL.
            let uploader = upload::S3Uploader::new(&region, &bucket).unwrap();
            let feed = feed::FeedGenerator {
                title,
                base_url: uploader.base_url(),
                image: image.clone().map(|path| feed::Image { path }),
            };
            let media_files = files.iter().map(|path| feed::MediaFile { path }).collect();
            if let Err(e) = feed.generate_for_files(media_files, File::create(&out).unwrap()) {
                eprintln!("Failed to create feed: {}", e);
                process::exit(1);
            }
            if upload {
                // Upload the feed file itself, the cover image (if any), and
                // all media files.
                let feed_url = uploader.url_for_file(&out);
                let mut upload_files = vec![out];
                if let Some(image) = &image {
                    upload_files.push(image.clone());
                }
                upload_files.extend(files);
                match uploader.upload(upload_files) {
                    Ok(_) => {
                        eprintln!("Upload complete");
                        eprintln!("Podcast available at {}", feed_url);
                        process::exit(0);
                    }
                    Err(e) => {
                        eprintln!("Upload error: {}", e.message);
                        process::exit(1);
                    }
                }
            }
        }
        Opt::Upload {
            region,
            bucket,
            files,
        } => {
            let uploader = upload::S3Uploader::new(&region, &bucket).unwrap();
            uploader.upload(files).unwrap();
        }
    };
}
#[cfg(feature = "std")]
use std::vec::Vec;

#[cfg(not(feature = "std"))]
extern crate alloc;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;

/// Rather than store `Option` elements, a SlotList uses a custom maybe type
/// that associates a number with each empty slot. These numbers allow the
/// collection to create a linked list of empty slots, reducing an insertion
/// complexity that would otherwise be `O(n)`.
#[derive(Copy, Clone, Debug)]
pub enum Slot<T: Sized> {
    Occupied(T),
    Empty(Option<usize>),
}

impl<T> Slot<T> {
    /// Stores `value` in the slot, returning its previous contents.
    pub fn replace(&mut self, value: T) -> Slot<T> {
        core::mem::replace(self, Slot::Occupied(value))
    }

    /// Empties the slot (to `Empty(None)`), returning its previous contents.
    pub fn take(&mut self) -> Slot<T> {
        core::mem::take(self)
    }

    /// Points this empty slot at the next empty slot in the chain.
    ///
    /// # Panics
    /// Panics when called on an occupied slot.
    pub fn set_next_empty(&mut self, index: usize) {
        match self {
            Slot::Occupied(_) => panic!("Can't modify empty chain for an occupied slot"),
            Slot::Empty(next) => *next = Some(index),
        }
    }

    /// Borrows the contained value, if any.
    pub fn as_option_of_ref(&self) -> Option<&T> {
        match self {
            Slot::Occupied(ref value) => Some(value),
            Slot::Empty(_) => None,
        }
    }

    /// Mutably borrows the contained value, if any.
    pub fn as_mut(&mut self) -> Option<&mut T> {
        match *self {
            Slot::Occupied(ref mut value) => Some(value),
            Slot::Empty(_) => None,
        }
    }

    /// Whether the slot currently holds a value.
    pub fn is_occupied(&self) -> bool {
        match self {
            Slot::Occupied(_) => true,
            Slot::Empty(_) => false,
        }
    }

    /// Consumes the slot, yielding its value, if any.
    pub fn occupied(self) -> Option<T> {
        match self {
            Slot::Occupied(value) => Some(value),
            Slot::Empty(_) => None,
        }
    }
}

impl<T> Default for Slot<T> {
    // The default slot is empty and not linked into any chain.
    fn default() -> Slot<T> {
        Slot::Empty(None)
    }
}

/// SlotList is a vector-like data structure where every entry, or "slot," is an
/// `Option` that may contain a value. The added value of a SlotList over a
/// `Vec<Option<T>>` is that inserting a new value tries to re-use any empty
/// slots before allocating new space. This creates a data structure with two
/// properties: the index of a given element will always remain static, and
/// elements can be removed without wasting space.
pub struct SlotList<T: Sized> {
    // Head of the linked chain of empty slots (`None` when the chain is empty).
    first_empty_slot: Option<usize>,
    // Tail of the empty-slot chain; new empties are appended here.
    last_empty_slot: Option<usize>,
    // Backing storage.
    slots: Vec<Slot<T>>,
}

impl<T: Sized> SlotList<T> {
    /// Construct a new SlotList with no elements. A new, empty list will not
    /// allocate any memory, and can be a `const` value.
    pub const fn new() -> SlotList<T> {
        SlotList {
            first_empty_slot: None,
            last_empty_slot: None,
            slots: Vec::new(),
        }
    }

    /// Preallocate a SlotList with enough memory to store the requested number of
    /// elements.
    pub fn with_capacity(capacity: usize) -> SlotList<T> {
        SlotList {
            first_empty_slot: None,
            last_empty_slot: None,
            slots: Vec::with_capacity(capacity),
        }
    }

    /// Number of slots the backing vector can hold without reallocating.
    pub fn capacity(&self) -> usize {
        self.slots.capacity()
    }

    /// Locate the first empty slot that can be used to store a value, returning
    /// its numeric index. If none is found, the list will push an empty slot onto
    /// the end and return the index of that slot.
    fn find_empty_slot(&mut self) -> usize {
        let mut index = self.slots.len();
        if let Some(first_index) = self.first_empty_slot {
            // An empty slot exists, so re-use it and advance the chain head.
            index = first_index;
            let empty = self.slots.get(first_index).unwrap();
            let next_first = match empty {
                Slot::Occupied(_) => panic!("Empty slot chain was broken"),
                Slot::Empty(next) => *next,
            };
            self.first_empty_slot = next_first;
        }
        if self.first_empty_slot.is_none() {
            // The chain is exhausted: append fresh empty storage and rebuild
            // the chain head/tail pointers.
            let mut last_entry = self.slots.len();
            self.slots.push(Slot::Empty(None));
            if index == last_entry {
                // The slot just pushed is the one being handed out (no slot
                // was re-used above), so push a second entry to serve as the
                // new chain head. BUG FIX: the previous `last_entry == 0`
                // check only covered a brand-new list; once `replace` could
                // drain the chain, the head would end up pointing at the
                // slot about to be occupied, corrupting the chain.
                self.slots.push(Slot::Empty(None));
                last_entry += 1;
            }
            self.first_empty_slot = Some(last_entry);
            self.last_empty_slot = Some(last_entry);
        }
        index
    }

    /// Insert a new value into the list. This will attempt to use an empty slot,
    /// before allocating a new one at the end
    pub fn insert(&mut self, item: T) -> usize {
        let index = self.find_empty_slot();
        self.slots[index] = Slot::Occupied(item);
        index
    }

    /// Retrieve a reference to the value at the specified index
    pub fn get(&self, index: usize) -> Option<&T> {
        let slot = self.slots.get(index)?;
        match slot {
            Slot::Occupied(item) => Some(item),
            Slot::Empty(_) => None,
        }
    }

    /// Retrieve a mutable reference to the value at the specified index
    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        let slot = self.slots.get_mut(index)?;
        slot.as_mut()
    }

    /// Remove the value at the specified index, returning the value that was
    /// stored there
    pub fn remove(&mut self, index: usize) -> Option<T> {
        let slot = self.slots.get_mut(index)?;
        if !slot.is_occupied() {
            // BUG FIX: the old code called `take()` unconditionally, which
            // rewrote an already-empty slot to `Empty(None)` and destroyed
            // its link in the empty-slot chain. An empty slot is now left
            // untouched.
            return None;
        }
        let prev = slot.take();
        // `index` now becomes the latest entry in the chain of empty slots.
        match self.last_empty_slot {
            Some(last_slot_index) => {
                self.slots
                    .get_mut(last_slot_index)
                    .unwrap()
                    .set_next_empty(index);
            }
            // The chain was empty: this slot is now its head as well as its
            // tail.
            None => self.first_empty_slot = Some(index),
        }
        self.last_empty_slot = Some(index);
        prev.occupied()
    }

    /// Set a specific slot to the provided value, returning the value that was
    /// previously stored there.
    /// This may require fixing up the empty slot chain, and in a worst-case
    /// scenario the complexity of this method becomes O(n).
    ///
    /// # Panics
    /// Panics when `index` is out of bounds.
    pub fn replace(&mut self, index: usize, item: T) -> Option<T> {
        if index >= self.slots.len() {
            panic!("Index out of bounds");
        }
        let slot = self.slots.get_mut(index).unwrap();
        let prev = slot.replace(item);
        if let Slot::Empty(next) = prev {
            // `index` represented an element in the empty chain and must be
            // unlinked from it.
            if self.first_empty_slot == Some(index) {
                // BUG FIX: the old code always walked the chain looking for a
                // predecessor and panicked when `index` was the chain head
                // (the walk immediately landed on the now-occupied slot).
                // The head has no predecessor; just advance the head pointer.
                self.first_empty_slot = next;
                if self.last_empty_slot == Some(index) {
                    // Head and tail coincide: the chain is now empty.
                    self.last_empty_slot = None;
                }
            } else {
                // Walk the chain to find the predecessor of `index` and
                // splice the removed slot out.
                let mut current = self.first_empty_slot;
                while let Some(current_index) = current {
                    let current_slot = self.slots.get_mut(current_index).unwrap();
                    current = match current_slot {
                        Slot::Occupied(_) => panic!("Empty slot chain was broken"),
                        Slot::Empty(next_slot) => *next_slot,
                    };
                    if current == Some(index) {
                        *current_slot = Slot::Empty(next);
                        // If the removed empty slot was the last in the chain,
                        // update the pointer to the new last item.
                        if self.last_empty_slot == Some(index) {
                            self.last_empty_slot = Some(current_index);
                        }
                        current = None;
                    }
                }
            }
        }
        prev.occupied()
    }

    /// Construct an iterator that will visit all of the occupied slots in
    /// increasing index order
    pub fn iter(&self) -> impl Iterator<Item = &T> {
        self.slots.iter().filter_map(|i| i.as_option_of_ref())
    }

    /// Helper for testing chain consistency, only available in test mode
    #[cfg(test)]
    pub fn get_first_empty_slot(&self) -> Option<usize> {
        self.first_empty_slot
    }

    /// Helper for testing chain consistency, only available in test mode
    #[cfg(test)]
    pub fn get_last_empty_slot(&self) -> Option<usize> {
        self.last_empty_slot
    }

    /// Helper for testing chain consistency, only available in test mode
    #[cfg(test)]
    pub fn get_raw_slot(&self, index: usize) -> Option<&Slot<T>> {
        self.slots.get(index)
    }
}

impl<T: Clone> Clone for SlotList<T> {
    fn clone(&self) -> Self {
        Self {
            first_empty_slot: self.first_empty_slot,
            last_empty_slot: self.last_empty_slot,
            slots: self.slots.clone(),
        }
    }
}

impl<T: core::fmt::Debug> core::fmt::Debug for SlotList<T> {
    // Debug output shows every raw slot, including empty chain entries.
    fn fmt(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result {
        formatter.debug_list()
            .entries(self.slots.iter())
            .finish()
    }
}
#[cfg(test)]
mod tests {
    use super::{Slot, SlotList};

    // A fresh list hands out index 0 for the first insertion.
    #[test]
    fn initialization() {
        let mut list: SlotList<u32> = SlotList::new();
        assert_eq!(list.insert(5), 0);
    }

    // Insertions into a preallocated list hand out consecutive indices, and
    // `get` returns a reference only for occupied slots.
    #[test]
    fn inserting_items() {
        let mut list: SlotList<u32> = SlotList::with_capacity(3);
        assert_eq!(list.get(1), None);
        assert_eq!(list.insert(20), 0);
        assert_eq!(list.insert(30), 1);
        assert_eq!(list.insert(40), 2);
        assert_eq!(list.get(0), Some(&20));
        assert_eq!(list.get(1), Some(&30));
        assert_eq!(list.get(2), Some(&40));
        assert_eq!(list.get(3), None);
    }

    // Same expectations starting from an unallocated list, forcing growth.
    #[test]
    fn grow_to_fit() {
        let mut list: SlotList<u32> = SlotList::new();
        assert_eq!(list.get(1), None);
        assert_eq!(list.insert(20), 0);
        assert_eq!(list.insert(30), 1);
        assert_eq!(list.insert(40), 2);
        assert_eq!(list.get(0), Some(&20));
        assert_eq!(list.get(1), Some(&30));
        assert_eq!(list.get(2), Some(&40));
        assert_eq!(list.get(3), None);
    }

    // Removal returns the stored value and leaves the slot reading as empty.
    #[test]
    fn removing_items() {
        let mut list: SlotList<u32> = SlotList::new();
        list.insert(55);
        list.insert(40);
        list.insert(60);
        assert_eq!(list.remove(1), Some(40));
        assert_eq!(list.get(1), None);
    }

    // Freed slots are reused in empty-chain order before the list grows.
    #[test]
    fn replacing_emptied_items() {
        let mut list: SlotList<u32> = SlotList::new();
        list.insert(11);
        list.insert(22);
        list.insert(33);
        list.remove(0);
        list.remove(1);
        // First it will fill the empty slot at the end of the list
        assert_eq!(list.insert(44), 3);
        // Another empty slot has been added to index 4, but that is at the end of
        // the empty chain.
        // Next it will fill the previously freed slots at 0 and 1
        assert_eq!(list.insert(55), 0);
        assert_eq!(list.insert(66), 1);
        // Once those have been filled, the chain returns to point to slot 4
        assert_eq!(list.insert(77), 4);
    }

    // `replace` on an empty slot must splice that slot out of the empty
    // chain and repair the head/tail pointers.
    #[test]
    fn replacing_empty_slot() {
        let mut list: SlotList<u32> = SlotList::new();
        list.insert(0);
        assert_eq!(list.get_first_empty_slot(), Some(1));
        assert_eq!(list.get_last_empty_slot(), Some(1));
        list.remove(0);
        assert_eq!(list.get_first_empty_slot(), Some(1));
        assert_eq!(list.get_last_empty_slot(), Some(0));
        // Replacing the last element in the "empty chain" should fix up the chain
        // and its pointers.
        assert_eq!(list.replace(0, 5), None);
        assert_eq!(list.get_first_empty_slot(), Some(1));
        assert_eq!(list.get_last_empty_slot(), Some(1));
        if let Slot::Empty(next) = list.get_raw_slot(1).unwrap() {
            assert!(next.is_none());
        } else {
            panic!("First slot was not empty");
        }
    }

    // `replace` returns the previous value for occupied slots and `None`
    // for empty ones.
    #[test]
    fn replacing_existing_entries() {
        let mut list: SlotList<u32> = SlotList::new();
        list.insert(1);
        list.insert(3);
        list.insert(5);
        list.remove(1);
        assert_eq!(list.replace(0, 10), Some(1));
        assert_eq!(list.replace(1, 12), None);
    }

    // Iteration skips empty slots and only visits live values.
    #[test]
    fn iterator() {
        let mut list: SlotList<u32> = SlotList::new();
        list.insert(1);
        list.insert(2);
        list.insert(1);
        list.insert(3);
        list.insert(1);
        list.remove(1);
        list.remove(3);
        let mut count = 0;
        for x in list.iter() {
            count += 1;
            assert_eq!(*x, 1);
        }
        assert_eq!(count, 3);
    }

    // Insert/remove cycles reuse slots instead of growing the allocation.
    #[test]
    fn maintain_size() {
        let mut list: SlotList<u32> = SlotList::with_capacity(4);
        for _ in 0..100 {
            let index = list.insert(10);
            list.remove(index);
        }
        assert_eq!(list.capacity(), 4);
    }
}
//! A module to verify individual number /// Size of individual number. const INDIVIDUAL_NUMBER_DIGITS: usize = 12; /// Verifies the individual number. pub fn verify(number: &str) -> Result<(), ::VerifyError> { let mut digits = number.chars() .filter_map(|x| { if x.is_numeric() { x.to_digit(10) } else { None } }) .collect::<Vec<u32>>(); if digits.len() != INDIVIDUAL_NUMBER_DIGITS { return Err(::VerifyError::InvalidNumberLength); } let check_digit = digits.pop().unwrap(); let mut pq = 0; for (i, p) in digits.iter().rev().enumerate() { let n = i + 1; let q = if n <= 6 { n + 1 } else { n - 5 } as u32; pq += p * q; } let remainder = pq % 11; let calc_digit = match remainder { 0 | 1 => 0, n => 11 - n, }; if calc_digit == check_digit { Ok(()) } else { Err(::VerifyError::InvalidCheckDigit) } }
mod mapmerge;

use std::collections::HashMap;

use mapmerge::MergeMap;

/// Exercise `MergeMap` by feeding it ten large maps, one per round.
fn main() {
    let mut merger = MergeMap::new();
    for round in 0..10 {
        // Build a map of one million (key, 2 * key) pairs for this round.
        let batch: HashMap<_, _> = (0..1_000_000).map(|key| (key, key * 2)).collect();
        merger.merge(batch, round);
    }
    println!("Hello, world!");
}
use async_std::net::{TcpListener, TcpStream};
use futures_util::future::poll_fn;
use futures_util::io::AsyncReadExt;
use futures_util::io::AsyncWriteExt;
use hreq_h1::buf_reader::BufIo;
use hreq_h1::http11::poll_for_crlfcrlf;
use hreq_h1::Error;

mod common;

/// End-to-end smoke test: the hreq_h1 client sends `GET /` over a real TCP
/// socket to a minimal in-process server, which replies `200 OK` with body
/// "OK"; the test then checks the body round-trips.
#[async_std::test]
async fn client_get_200_ok() -> Result<(), Error> {
    common::setup_logger();

    // Bind to an OS-assigned local port and start connecting to it; the
    // connect future is only awaited after the server task is spawned.
    let l = TcpListener::bind("127.0.0.1:0").await?;
    let c = TcpStream::connect(l.local_addr()?);

    // Hand-rolled server: accept one connection, read the request head,
    // assert the request line verbatim, then write a fixed 200 response.
    let server = async move {
        let (s, _) = l.accept().await.unwrap();
        let mut s_buf = BufIo::with_capacity(16_384, s);

        // Poll until poll_for_crlfcrlf yields; judging by its name it
        // resolves once a "\r\n\r\n"-terminated head has been buffered, and
        // the closure converts that buffer to an owned String.
        let req_s = poll_fn(|cx| {
            poll_for_crlfcrlf(cx, &mut s_buf, |buf, _| {
                String::from_utf8_lossy(buf).to_string()
            })
        })
        .await
        .unwrap();

        assert_eq!(req_s, "GET / HTTP/1.1\r\n\r\n");

        s_buf.write_all(b"HTTP/1.1 200 OK\r\n\r\nOK").await.unwrap();
    };

    async_std::task::spawn(server);

    let c = c.await?;
    let (mut send_req, c_conn) = hreq_h1::client::handshake(c);

    // drive client
    async_std::task::spawn(async move { c_conn.await.ok() });

    let req = http::Request::get("/").body(())?;
    let (res_fut, _) = send_req.send_request(req, true)?;

    let res = res_fut.await?;

    let (_, mut recv_body) = res.into_parts();

    // Read the entire response body and verify the payload.
    let mut body = String::new();
    recv_body.read_to_string(&mut body).await?;

    assert_eq!(body, "OK");

    Ok(())
}
use crate::event::EventOnce;
use log::debug;
use std::cell::RefCell;
use std::pin::Pin;
use std::{
    borrow::Cow,
    ffi::CStr,
    rc::{Rc, Weak},
};
use wlroots_sys::*;

/// The concrete kind of an input device, carrying the raw wlroots pointer
/// for the specialized device struct.
///
/// NOTE(review): the pointers come out of the device's bindgen union, so
/// they are presumably only meaningful while the underlying
/// `wlr_input_device` is alive — confirm against wlroots' lifetime rules.
#[derive(Debug, PartialEq)]
pub enum DeviceType {
    Keyboard(*mut wlr_keyboard),
    Pointer(*mut wlr_pointer),
    // Device types other than keyboard/pointer are not mapped yet.
    Unknown,
}

/// Wrapper around a raw `*mut wlr_input_device` that exposes its metadata
/// and surfaces the device's `destroy` signal as a Rust event.
pub struct Device {
    // Raw wlroots device pointer; never freed by this wrapper.
    device: *mut wlr_input_device,
    /// Fired when wlroots signals that the underlying device is destroyed.
    pub on_destroy: EventOnce<()>,
    // Listener glue; boxed and pinned, presumably because the listener must
    // not move once registered with wlroots — TODO confirm.
    event_manager: RefCell<Option<Pin<Box<DeviceEventManager>>>>,
}

impl Device {
    /// Wrap a raw input-device pointer and hook its `destroy` signal.
    ///
    /// NOTE(review): assumes `device` is non-null and remains valid until
    /// the destroy signal fires — confirm against callers.
    pub(crate) fn init(device: *mut wlr_input_device) -> Rc<Device> {
        let device = Rc::new(Device {
            device,
            on_destroy: EventOnce::default(),
            event_manager: RefCell::new(None),
        });

        // The manager holds only a Weak ref back to the Device, avoiding an
        // Rc cycle between the Device and its listener.
        let mut event_manager = DeviceEventManager::new(Rc::downgrade(&device));
        unsafe {
            // Register the destroy listener on the raw device's signal.
            event_manager.destroy(&mut (*device.raw_ptr()).events.destroy);
        }
        *device.event_manager.borrow_mut() = Some(event_manager);

        device
    }

    /// The raw wlroots pointer this wrapper manages.
    pub fn raw_ptr(&self) -> *mut wlr_input_device {
        self.device
    }

    /// Classify the device by its wlroots type tag, returning the matching
    /// specialized pointer from the device's union field.
    pub fn device_type(&self) -> DeviceType {
        unsafe {
            let device = &*self.device;
            match device.type_ {
                type_ if type_ == wlr_input_device_type_WLR_INPUT_DEVICE_KEYBOARD => {
                    DeviceType::Keyboard(device.__bindgen_anon_1.keyboard)
                }
                type_ if type_ == wlr_input_device_type_WLR_INPUT_DEVICE_POINTER => {
                    DeviceType::Pointer(device.__bindgen_anon_1.pointer)
                }
                _ => DeviceType::Unknown,
            }
        }
    }

    /// Device name as reported by wlroots, lossily converted to UTF-8.
    pub fn name(&self) -> Cow<str> {
        unsafe { CStr::from_ptr((*self.device).name).to_string_lossy() }
    }

    /// Name of the output associated with this device, if wlroots set one.
    pub fn output_name(&self) -> Option<Cow<str>> {
        unsafe {
            let output_name = (*self.device).output_name;
            if output_name.is_null() {
                None
            } else {
                Some(CStr::from_ptr(output_name).to_string_lossy())
            }
        }
    }
}

impl PartialEq for Device {
    // Two wrappers are equal when they manage the same raw pointer.
    fn eq(&self, other: &Device) -> bool {
        self.device == other.device
    }
}

// Listener manager generated by `wayland_listener!`: holds a Weak reference
// back to the `Device` and forwards the wlroots `destroy` signal to
// `on_destroy` (skipped silently if the Device is already gone).
wayland_listener!(
    DeviceEventManager,
    Weak<Device>,
    [
        destroy => destroy_func: |this: &mut DeviceEventManager, _data: *mut libc::c_void,| unsafe {
            if let Some(handler) = this.data.upgrade() {
                debug!("Device::destroy");
                handler.on_destroy.fire(())
            }
        };
    ]
);
#[doc = "Reader of register DDRPHYC_DSGCR"] pub type R = crate::R<u32, super::DDRPHYC_DSGCR>; #[doc = "Writer for register DDRPHYC_DSGCR"] pub type W = crate::W<u32, super::DDRPHYC_DSGCR>; #[doc = "Register DDRPHYC_DSGCR `reset()`'s with value 0xfa00_001f"] impl crate::ResetValue for super::DDRPHYC_DSGCR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0xfa00_001f } } #[doc = "Reader of field `PUREN`"] pub type PUREN_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PUREN`"] pub struct PUREN_W<'a> { w: &'a mut W, } impl<'a> PUREN_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `BDISEN`"] pub type BDISEN_R = crate::R<bool, bool>; #[doc = "Write proxy for field `BDISEN`"] pub struct BDISEN_W<'a> { w: &'a mut W, } impl<'a> BDISEN_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `ZUEN`"] pub type ZUEN_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ZUEN`"] pub struct ZUEN_W<'a> { w: &'a mut W, } impl<'a> ZUEN_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn 
bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Reader of field `LPIOPD`"] pub type LPIOPD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `LPIOPD`"] pub struct LPIOPD_W<'a> { w: &'a mut W, } impl<'a> LPIOPD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Reader of field `LPDLLPD`"] pub type LPDLLPD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `LPDLLPD`"] pub struct LPDLLPD_W<'a> { w: &'a mut W, } impl<'a> LPDLLPD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Reader of field `DQSGX`"] pub type DQSGX_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DQSGX`"] pub struct DQSGX_W<'a> { w: &'a mut W, } impl<'a> DQSGX_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 5)) | (((value as u32) & 0x07) << 5); self.w } } #[doc = "Reader of field `DQSGE`"] pub type DQSGE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DQSGE`"] pub struct DQSGE_W<'a> { w: &'a mut W, } impl<'a> DQSGE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & 
!(0x07 << 8)) | (((value as u32) & 0x07) << 8); self.w } } #[doc = "Reader of field `NOBUB`"] pub type NOBUB_R = crate::R<bool, bool>; #[doc = "Write proxy for field `NOBUB`"] pub struct NOBUB_W<'a> { w: &'a mut W, } impl<'a> NOBUB_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } #[doc = "Reader of field `FXDLAT`"] pub type FXDLAT_R = crate::R<bool, bool>; #[doc = "Write proxy for field `FXDLAT`"] pub struct FXDLAT_W<'a> { w: &'a mut W, } impl<'a> FXDLAT_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12); self.w } } #[doc = "Reader of field `CKEPDD`"] pub type CKEPDD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CKEPDD`"] pub struct CKEPDD_W<'a> { w: &'a mut W, } impl<'a> CKEPDD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "Reader of field `ODTPDD`"] pub type ODTPDD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ODTPDD`"] pub struct ODTPDD_W<'a> { w: &'a mut W, } impl<'a> 
ODTPDD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20); self.w } } #[doc = "Reader of field `NL2PD`"] pub type NL2PD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `NL2PD`"] pub struct NL2PD_W<'a> { w: &'a mut W, } impl<'a> NL2PD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24); self.w } } #[doc = "Reader of field `NL2OE`"] pub type NL2OE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `NL2OE`"] pub struct NL2OE_W<'a> { w: &'a mut W, } impl<'a> NL2OE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25); self.w } } #[doc = "Reader of field `TPDPD`"] pub type TPDPD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TPDPD`"] pub struct TPDPD_W<'a> { w: &'a mut W, } impl<'a> TPDPD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw 
bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26); self.w } } #[doc = "Reader of field `TPDOE`"] pub type TPDOE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TPDOE`"] pub struct TPDOE_W<'a> { w: &'a mut W, } impl<'a> TPDOE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 27)) | (((value as u32) & 0x01) << 27); self.w } } #[doc = "Reader of field `CKOE`"] pub type CKOE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CKOE`"] pub struct CKOE_W<'a> { w: &'a mut W, } impl<'a> CKOE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28); self.w } } #[doc = "Reader of field `ODTOE`"] pub type ODTOE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ODTOE`"] pub struct ODTOE_W<'a> { w: &'a mut W, } impl<'a> ODTOE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "Reader of field `RSTOE`"] pub type RSTOE_R = crate::R<bool, bool>; 
#[doc = "Write proxy for field `RSTOE`"] pub struct RSTOE_W<'a> { w: &'a mut W, } impl<'a> RSTOE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "Reader of field `CKEOE`"] pub type CKEOE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CKEOE`"] pub struct CKEOE_W<'a> { w: &'a mut W, } impl<'a> CKEOE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bit 0 - PUREN"] #[inline(always)] pub fn puren(&self) -> PUREN_R { PUREN_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - BDISEN"] #[inline(always)] pub fn bdisen(&self) -> BDISEN_R { BDISEN_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - ZUEN"] #[inline(always)] pub fn zuen(&self) -> ZUEN_R { ZUEN_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - LPIOPD"] #[inline(always)] pub fn lpiopd(&self) -> LPIOPD_R { LPIOPD_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - LPDLLPD"] #[inline(always)] pub fn lpdllpd(&self) -> LPDLLPD_R { LPDLLPD_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bits 5:7 - DQSGX"] #[inline(always)] pub fn dqsgx(&self) -> DQSGX_R { DQSGX_R::new(((self.bits >> 5) & 0x07) as u8) } #[doc = "Bits 8:10 - DQSGE"] #[inline(always)] pub fn dqsge(&self) -> DQSGE_R { DQSGE_R::new(((self.bits >> 8) & 0x07) as u8) } #[doc = "Bit 11 - 
NOBUB"] #[inline(always)] pub fn nobub(&self) -> NOBUB_R { NOBUB_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bit 12 - FXDLAT"] #[inline(always)] pub fn fxdlat(&self) -> FXDLAT_R { FXDLAT_R::new(((self.bits >> 12) & 0x01) != 0) } #[doc = "Bit 16 - CKEPDD"] #[inline(always)] pub fn ckepdd(&self) -> CKEPDD_R { CKEPDD_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 20 - ODTPDD"] #[inline(always)] pub fn odtpdd(&self) -> ODTPDD_R { ODTPDD_R::new(((self.bits >> 20) & 0x01) != 0) } #[doc = "Bit 24 - NL2PD"] #[inline(always)] pub fn nl2pd(&self) -> NL2PD_R { NL2PD_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bit 25 - NL2OE"] #[inline(always)] pub fn nl2oe(&self) -> NL2OE_R { NL2OE_R::new(((self.bits >> 25) & 0x01) != 0) } #[doc = "Bit 26 - TPDPD"] #[inline(always)] pub fn tpdpd(&self) -> TPDPD_R { TPDPD_R::new(((self.bits >> 26) & 0x01) != 0) } #[doc = "Bit 27 - TPDOE"] #[inline(always)] pub fn tpdoe(&self) -> TPDOE_R { TPDOE_R::new(((self.bits >> 27) & 0x01) != 0) } #[doc = "Bit 28 - CKOE"] #[inline(always)] pub fn ckoe(&self) -> CKOE_R { CKOE_R::new(((self.bits >> 28) & 0x01) != 0) } #[doc = "Bit 29 - ODTOE"] #[inline(always)] pub fn odtoe(&self) -> ODTOE_R { ODTOE_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - RSTOE"] #[inline(always)] pub fn rstoe(&self) -> RSTOE_R { RSTOE_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - CKEOE"] #[inline(always)] pub fn ckeoe(&self) -> CKEOE_R { CKEOE_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - PUREN"] #[inline(always)] pub fn puren(&mut self) -> PUREN_W { PUREN_W { w: self } } #[doc = "Bit 1 - BDISEN"] #[inline(always)] pub fn bdisen(&mut self) -> BDISEN_W { BDISEN_W { w: self } } #[doc = "Bit 2 - ZUEN"] #[inline(always)] pub fn zuen(&mut self) -> ZUEN_W { ZUEN_W { w: self } } #[doc = "Bit 3 - LPIOPD"] #[inline(always)] pub fn lpiopd(&mut self) -> LPIOPD_W { LPIOPD_W { w: self } } #[doc = "Bit 4 - LPDLLPD"] #[inline(always)] pub fn lpdllpd(&mut self) -> LPDLLPD_W { 
LPDLLPD_W { w: self } } #[doc = "Bits 5:7 - DQSGX"] #[inline(always)] pub fn dqsgx(&mut self) -> DQSGX_W { DQSGX_W { w: self } } #[doc = "Bits 8:10 - DQSGE"] #[inline(always)] pub fn dqsge(&mut self) -> DQSGE_W { DQSGE_W { w: self } } #[doc = "Bit 11 - NOBUB"] #[inline(always)] pub fn nobub(&mut self) -> NOBUB_W { NOBUB_W { w: self } } #[doc = "Bit 12 - FXDLAT"] #[inline(always)] pub fn fxdlat(&mut self) -> FXDLAT_W { FXDLAT_W { w: self } } #[doc = "Bit 16 - CKEPDD"] #[inline(always)] pub fn ckepdd(&mut self) -> CKEPDD_W { CKEPDD_W { w: self } } #[doc = "Bit 20 - ODTPDD"] #[inline(always)] pub fn odtpdd(&mut self) -> ODTPDD_W { ODTPDD_W { w: self } } #[doc = "Bit 24 - NL2PD"] #[inline(always)] pub fn nl2pd(&mut self) -> NL2PD_W { NL2PD_W { w: self } } #[doc = "Bit 25 - NL2OE"] #[inline(always)] pub fn nl2oe(&mut self) -> NL2OE_W { NL2OE_W { w: self } } #[doc = "Bit 26 - TPDPD"] #[inline(always)] pub fn tpdpd(&mut self) -> TPDPD_W { TPDPD_W { w: self } } #[doc = "Bit 27 - TPDOE"] #[inline(always)] pub fn tpdoe(&mut self) -> TPDOE_W { TPDOE_W { w: self } } #[doc = "Bit 28 - CKOE"] #[inline(always)] pub fn ckoe(&mut self) -> CKOE_W { CKOE_W { w: self } } #[doc = "Bit 29 - ODTOE"] #[inline(always)] pub fn odtoe(&mut self) -> ODTOE_W { ODTOE_W { w: self } } #[doc = "Bit 30 - RSTOE"] #[inline(always)] pub fn rstoe(&mut self) -> RSTOE_W { RSTOE_W { w: self } } #[doc = "Bit 31 - CKEOE"] #[inline(always)] pub fn ckeoe(&mut self) -> CKEOE_W { CKEOE_W { w: self } } }
// https://www.codewars.com/kata/57c1ab3949324c321600013f
use std::collections::HashMap;

/// Translate `s` into "leet speak".
///
/// Every character with a leet equivalent (uppercase `A`-`Z` and space) is
/// replaced by its mapped string. Any other character (lowercase letters,
/// digits, punctuation) is passed through unchanged — previously the
/// `unwrap()` on the map lookup made such input panic.
fn to_leet_speak(s: &str) -> String {
    let leet_map: HashMap<char, &str> = vec![
        ('A', "@"), ('B', "8"), ('C', "("), ('D', "D"), ('E', "3"), ('F', "F"),
        ('G', "6"), ('H', "#"), ('I', "!"), ('J', "J"), ('K', "K"), ('L', "1"),
        ('M', "M"), ('N', "N"), ('O', "0"), ('P', "P"), ('Q', "Q"), ('R', "R"),
        ('S', "$"), ('T', "7"), ('U', "U"), ('V', "V"), ('W', "W"), ('X', "X"),
        ('Y', "Y"), ('Z', "2"), (' ', " ")
    ]
    .into_iter()
    .collect();

    // Build the output manually so unmapped characters can fall through
    // instead of panicking.
    let mut out = String::with_capacity(s.len());
    for c in s.chars() {
        match leet_map.get(&c) {
            Some(replacement) => out.push_str(replacement),
            None => out.push(c),
        }
    }
    out
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_to_leet_speak() {
        assert_eq!(to_leet_speak("LEET"), "1337");
        assert_eq!(to_leet_speak("CODEWARS"), "(0D3W@R$");
        assert_eq!(to_leet_speak("LOREM IPSUM DOLOR SIT AMET"), "10R3M !P$UM D010R $!7 @M37");
    }
}
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![cfg_attr(feature="nightly",feature(fn_traits))] #![cfg_attr(feature="nightly",feature(unboxed_closures))] #![doc=" This crate supports nameable closures, without requiring any language changes. # Why do we need this The Rust language supports closures, however its concret types are anonymous and developers cannot expect same size, and so not able to put similar closures in containers that requires `Sized` constraint. However, there is an exception: if a closure does not refer to its captured variables, it can be coerce to a `fn` type, which is a pointer type and it is `Sized`. This library extends this idea by requesting an additional `State` field, which is just the tuple of the captured variables. So if two closure have the same signiture for function calls and have the same state type, they are considered the same type. # How to use There are 5 structures being defined, and they are correspond to different use cases. ## Closure This struct works for closures that only intended to refer to the state field, not modifying them nor owns them. Furthermore, it does not own its state, so dropping this struct will not drop its state. ## ClosureRef This structure works like the above, but it owns its state. So dropping it will drop its state. ## ClosureMut This struct works for closures that will mutate its state, but would not drop its state when called. So it can be called multiple times, but will have different effects on each call. 
Because it does not own its state, the state will not be dropped when the struct was dropped. Amount all 5 variants, this is the only struct that does not support `Copy` and `Clone` at all, because it is not possible to copy or clone a mutable reference. ## ClosureRefMut This struct works like the above, but it owns its state. Because it owns its state, so it can be `Copy` or `Clone` if its state is `Copy` or `Clone`, without problems. ## ClosureOnce This struct owns its state, and will drop its state when called. # The `closure!` macro To create closures, use the `closure!` macro. The format of the macro is: ```text closure!([ref]? [mut]? state_variable=expression => [closure definition]) ``` The closure definition is like the usual (including the `move` keyword), but type annotations are not supported. However, we are providing a type that is namable, so you can always specify the result type to constraint the variables involved, so this should not be a big due. If the macro sees a `ref` keyword in front of the closure, it will expect a `move` keyword before the closure body. If `ref` is specified, the state variable is a reference to its value. If `move` is not specified, the state expression must be typed as a reference and match the mutation specification of the state variable. The closure body can only access the state variable and the variables in the closure definition header. 
<table> <tr> <th>Macro Grammar</th> <th>Struct</th> </tr> <tr> <td><code>closure!(state=exp => |x,y| body(x,y,state))</code></td> <td><code>Closure</code></td> </tr> <tr> <td><code>closure!(state=exp => move |x,y| body(x,y,state))</code></td> <td><code>ClosureOnce</code></td> </tr> <tr> <td><code>closure!(mut state=exp => |x,y| body(x,y,state))</code></td> <td><code>ClosureMut</code></td> </tr> <tr> <td><code>closure!(mut state=exp => move |x,y| body(x,y,state))</code></td> <td><code>ClosureOnce</code> (with mutable <code>state</code>)</td> </tr> <tr> <td><code>closure!(ref state=exp => move |x,y| body(x,y,state))</code></td> <td><code>ClosureRef</code></td> </tr> <tr> <td><code>closure!(ref mut state=exp => move |x,y| body(x,y,state))</code></td> <td><code>ClosureRefMut</code></td> </tr> </table> Examples: ```rust # #[macro_use] extern crate namable_closures; # use namable_closures::ClosureRef; # use namable_closures::StableFn; # struct Point{x:i32,y:i32} # impl Point{ fn new(x:i32,y:i32) -> Point {Point{x:x,y:y}} } // state refered as reference in body, but moved to the closure let add_ten:ClosureRef<i32,(i32,),i32> = closure!(ref state=10 => move |i| i+*state); assert_eq!(add_ten.stable_call((1,)),11); let offset:ClosureRef<Point,(i32,i32),Point> = closure!(ref state=Point::new(10,20) => move |a,b| Point::new(state.x+a,state.y+b)); let p = offset.stable_call((1,2)); assert_eq!(p.x,11); assert_eq!(p.y,22); # use namable_closures::Closure; // state refered as reference in body, and not moving let state = 10; let add_ten:Closure<i32,(i32,),i32> = closure!(state=&state => |i| i+10); assert_eq!(add_ten.stable_call((1,)),11); let state = Point::new(10,20); let offset:Closure<Point,(i32,i32),Point> = closure!(state=&state => |a,b| Point::new(state.x+a,state.y+b)); let p = offset.stable_call((1i32,2i32)); assert_eq!(p.x,11); assert_eq!(p.y,22); # use namable_closures::ClosureRefMut; # use namable_closures::StableFnMut; // state refered as mutable reference in body, but 
moved to closure let mut accumulate:ClosureRefMut<i32,(i32,),i32> = closure!(ref mut state=0 => move |c| {*state+=c;*state}); assert_eq!(accumulate.stable_call_mut((1,)),1); assert_eq!(accumulate.stable_call_mut((2,)),3); # use namable_closures::ClosureMut; // state refered as mutable reference in body, but not moving let mut state = 0; { let mut match_cnt:ClosureMut<i32,(i32,i32),()> = closure!(mut state=&mut state => |a,b| if a==b { *state+=1 }); for i in 0..10 { match_cnt.stable_call_mut((i,i*3%10)); } } assert_eq!(state,2); # use namable_closures::ClosureOnce; # use std::io; # use namable_closures::StableFnOnce; # struct MyStream(); # impl MyStream{ # fn new() -> MyStream { MyStream() } # fn write_all(&mut self, _:&[u8]) -> Result<usize,io::Error> { Ok(0) } # fn read_exact_ex(&mut self, _:&mut [u8], _:usize) -> Result<(),io::Error> { Ok(()) } # } # struct RoleSet(); # impl RoleSet { fn from_config() -> RoleSet { RoleSet() }} # struct Passwd(); # impl Passwd { fn get_from_cache() -> Passwd { Passwd() }} # fn authenticate(_:String,_:Passwd) -> Result<(),io::Error> { Ok(()) } # fn check_user(_:RoleSet,_:String,_:Passwd) -> Result<(),io::Error> { Ok(()) } // state moved to body and so to the closure let sign_on:ClosureOnce<Passwd,(String,),Result<(),io::Error>> = closure!(passwd=Passwd::get_from_cache() => move |user| authenticate(user,passwd)); sign_on.stable_call_once((\"123\".to_string(),)); let auth:ClosureOnce<RoleSet,(String,Passwd),Result<(),io::Error>> = closure!(role_set=RoleSet::from_config() => move |user,passwd| check_user(role_set,user,passwd)); let send_data:ClosureOnce<MyStream,(&[u8],),Result<usize,io::Error>> = closure!(mut stream=MyStream::new() => move |data| stream.write_all(data)); let read_data:ClosureOnce<MyStream,(&mut [u8],usize),Result<(),io::Error>> = closure!(mut stream=MyStream::new() => move |buf,len| stream.read_exact_ex(buf, len)); ``` "] #![cfg_attr(feature="nightly", doc=" The same examples that uses the unstable features: ```rust 
# #[macro_use] extern crate namable_closures; # use namable_closures::ClosureRef; # struct Point{x:i32,y:i32} # impl Point{ fn new(x:i32,y:i32) -> Point {Point{x:x,y:y}} } // state refered as reference in body, but moved to the closure let add_ten:ClosureRef<i32,(i32,),i32> = closure!(ref state=10 => move |i| i+*state); assert_eq!(add_ten(1),11); let offset:ClosureRef<Point,(i32,i32),Point> = closure!(ref state=Point::new(10,20) => move |a,b| Point::new(state.x+a,state.y+b)); let p = offset(1,2); assert_eq!(p.x,11); assert_eq!(p.y,22); # use namable_closures::Closure; // state refered as reference in body, and not moving let state = 10; let add_ten:Closure<i32,(i32,),i32> = closure!(state=&state => |i| i+10); assert_eq!(add_ten(1),11); let state = Point::new(10,20); let offset:Closure<Point,(i32,i32),Point> = closure!(state=&state => |a,b| Point::new(state.x+a,state.y+b)); let p = offset(1i32,2i32); assert_eq!(p.x,11); assert_eq!(p.y,22); # use namable_closures::ClosureRefMut; // state refered as mutable reference in body, but moved to closure let mut accumulate:ClosureRefMut<i32,(i32,),i32> = closure!(ref mut state=0 => move |c| {*state+=c;*state}); assert_eq!(accumulate(1),1); assert_eq!(accumulate(2),3); # use namable_closures::ClosureMut; // state refered as mutable reference in body, but not moving let mut state = 0; { let mut match_cnt:ClosureMut<i32,(i32,i32),()> = closure!(mut state=&mut state => |a,b| if a==b { *state+=1 }); for i in 0..10 { match_cnt(i,i*3%10); } } assert_eq!(state,2); # use namable_closures::ClosureOnce; # use std::io; # struct MyStream(); # impl MyStream{ # fn new() -> MyStream { MyStream() } # fn write_all(&mut self, _:&[u8]) -> Result<usize,io::Error> { Ok(0) } # fn read_exact_ex(&mut self, _:&mut [u8], _:usize) -> Result<(),io::Error> { Ok(()) } # } # struct RoleSet(); # impl RoleSet { fn from_config() -> RoleSet { RoleSet() }} # struct Passwd(); # impl Passwd { fn get_from_cache() -> Passwd { Passwd() }} # fn 
authenticate(_:String,_:Passwd) -> Result<(),io::Error> { Ok(()) } # fn check_user(_:RoleSet,_:String,_:Passwd) -> Result<(),io::Error> { Ok(()) } // state moved to body and so to the closure let sign_on:ClosureOnce<Passwd,(String,),Result<(),io::Error>> = closure!(passwd=Passwd::get_from_cache() => move |user| authenticate(user,passwd)); let auth:ClosureOnce<RoleSet,(String,Passwd),Result<(),io::Error>> = closure!(role_set=RoleSet::from_config() => move |user,passwd| check_user(role_set,user,passwd)); let send_data:ClosureOnce<MyStream,(&[u8],),Result<usize,io::Error>> = closure!(mut stream=MyStream::new() => move |data| stream.write_all(data)); let read_data:ClosureOnce<MyStream,(&mut [u8],usize),Result<(),io::Error>> = closure!(mut stream=MyStream::new() => move |buf,len| stream.read_exact_ex(buf, len)); ``` ")] #[doc=" The macro to create closures. Examples: ```rust # #[macro_use] extern crate namable_closures; # use namable_closures::ClosureRef; # use namable_closures::StableFn; # struct Point{x:i32,y:i32} # impl Point{ fn new(x:i32,y:i32) -> Point {Point{x:x,y:y}} } // state refered as reference in body, but moved to the closure let add_ten:ClosureRef<i32,(i32,),i32> = closure!(ref state=10 => move |i| i+*state); assert_eq!(add_ten.stable_call((1,)),11); let offset:ClosureRef<Point,(i32,i32),Point> = closure!(ref state=Point::new(10,20) => move |a,b| Point::new(state.x+a,state.y+b)); let p = offset.stable_call_once((1,2)); assert_eq!(p.x,11); assert_eq!(p.y,22); # use namable_closures::Closure; // state refered as reference in body, and not moving let state = 10; let add_ten:Closure<i32,(i32,),i32> = closure!(state=&state => |i| i+10); assert_eq!(add_ten.stable_call((1,)),11); let state = Point::new(10,20); let offset:Closure<Point,(i32,i32),Point> = closure!(state=&state => |a,b| Point::new(state.x+a,state.y+b)); let p = offset.stable_call_once((1i32,2i32)); assert_eq!(p.x,11); assert_eq!(p.y,22); # use namable_closures::ClosureRefMut; # use 
namable_closures::StableFnOnce;; // state refered as mutable reference in body, but moved to closure let mut accumulate:ClosureRefMut<i32,(i32,),i32> = closure!(ref mut state=0 => move |c| {*state+=c;*state}); assert_eq!(accumulate.stable_call_mut((1,)),1); assert_eq!(accumulate.stable_call_once((2,)),3); # use namable_closures::ClosureMut; # use namable_closures::StableFnMut; // state refered as mutable reference in body, but not moving let mut state = 0; { let mut match_cnt:ClosureMut<i32,(i32,i32),()> = closure!(mut state=&mut state => |a,b| if a==b { *state+=1 }); for i in 0..10 { match_cnt.stable_call_mut((i,i*3%10)); } } assert_eq!(state,2); # use namable_closures::ClosureOnce; # use std::io; # struct MyStream(); # impl MyStream{ # fn new() -> MyStream { MyStream() } # fn write_all(&mut self, _:&[u8]) -> Result<usize,io::Error> { Ok(0) } # fn read_exact_ex(&mut self, _:&mut [u8], _:usize) -> Result<(),io::Error> { Ok(()) } # } # struct RoleSet(); # impl RoleSet { fn from_config() -> RoleSet { RoleSet() }} # struct Passwd(); # impl Passwd { fn get_from_cache() -> Passwd { Passwd() }} # fn authenticate(_:String,_:Passwd) -> Result<(),io::Error> { Ok(()) } # fn check_user(_:RoleSet,_:String,_:Passwd) -> Result<(),io::Error> { Ok(()) } // state moved to body and so to the closure let sign_on:ClosureOnce<Passwd,(String,),Result<(),io::Error>> = closure!(passwd=Passwd::get_from_cache() => move |user| authenticate(user,passwd)); let auth:ClosureOnce<RoleSet,(String,Passwd),Result<(),io::Error>> = closure!(role_set=RoleSet::from_config() => move |user,passwd| check_user(role_set,user,passwd)); let send_data:ClosureOnce<MyStream,(&[u8],),Result<usize,io::Error>> = closure!(mut stream=MyStream::new() => move |data| stream.write_all(data)); let read_data:ClosureOnce<MyStream,(&mut [u8],usize),Result<(),io::Error>> = closure!(mut stream=MyStream::new() => move |buf,len| stream.read_exact_ex(buf, len)); ``` "] #[cfg_attr(feature="nightly", doc=" The same examples that 
uses unstable features: ```rust # #[macro_use] extern crate namable_closures; # use namable_closures::ClosureRef; # struct Point{x:i32,y:i32} # impl Point{ fn new(x:i32,y:i32) -> Point {Point{x:x,y:y}} } // state refered as reference in body, but moved to the closure let add_ten:ClosureRef<i32,(i32,),i32> = closure!(ref state=10 => move |i| i+*state); assert_eq!(add_ten(1),11); let offset:ClosureRef<Point,(i32,i32),Point> = closure!(ref state=Point::new(10,20) => move |a,b| Point::new(state.x+a,state.y+b)); let p = offset(1,2); assert_eq!(p.x,11); assert_eq!(p.y,22); # use namable_closures::Closure; // state refered as reference in body, and not moving let state = 10; let add_ten:Closure<i32,(i32,),i32> = closure!(state=&state => |i| i+10); assert_eq!(add_ten(1),11); let state = Point::new(10,20); let offset:Closure<Point,(i32,i32),Point> = closure!(state=&state => |a,b| Point::new(state.x+a,state.y+b)); let p = offset(1i32,2i32); assert_eq!(p.x,11); assert_eq!(p.y,22); # use namable_closures::ClosureRefMut; // state refered as mutable reference in body, but moved to closure let mut accumulate:ClosureRefMut<i32,(i32,),i32> = closure!(ref mut state=0 => move |c| {*state+=c;*state}); assert_eq!(accumulate(1),1); assert_eq!(accumulate(2),3); # use namable_closures::ClosureMut; // state refered as mutable reference in body, but not moving let mut state = 0; { let mut match_cnt:ClosureMut<i32,(i32,i32),()> = closure!(mut state=&mut state => |a,b| if a==b { *state+=1 }); for i in 0..10 { match_cnt(i,i*3%10); } } assert_eq!(state,2); # use namable_closures::ClosureOnce; # use std::io; # struct MyStream(); # impl MyStream{ # fn new() -> MyStream { MyStream() } # fn write_all(&mut self, _:&[u8]) -> Result<usize,io::Error> { Ok(0) } # fn read_exact_ex(&mut self, _:&mut [u8], _:usize) -> Result<(),io::Error> { Ok(()) } # } # struct RoleSet(); # impl RoleSet { fn from_config() -> RoleSet { RoleSet() }} # struct Passwd(); # impl Passwd { fn get_from_cache() -> Passwd { Passwd() 
}} # fn authenticate(_:String,_:Passwd) -> Result<(),io::Error> { Ok(()) } # fn check_user(_:RoleSet,_:String,_:Passwd) -> Result<(),io::Error> { Ok(()) } // state moved to body and so to the closure let sign_on:ClosureOnce<Passwd,(String,),Result<(),io::Error>> = closure!(passwd=Passwd::get_from_cache() => move |user| authenticate(user,passwd)); sign_on(\"123\".to_string()); let auth:ClosureOnce<RoleSet,(String,Passwd),Result<(),io::Error>> = closure!(role_set=RoleSet::from_config() => move |user,passwd| check_user(role_set,user,passwd)); let send_data:ClosureOnce<MyStream,(&[u8],),Result<usize,io::Error>> = closure!(mut stream=MyStream::new() => move |data| stream.write_all(data)); send_data(&[1u8]); let read_data:ClosureOnce<MyStream,(&mut [u8],usize),Result<(),io::Error>> = closure!(mut stream=MyStream::new() => move |buf,len| stream.read_exact_ex(buf, len)); ``` ")] #[macro_export] macro_rules! closure { ($state:ident=$state_val:expr => move || $body:expr) => { ClosureOnce::new(|$state,()| $body, $state_val) }; ($state:ident=$state_val:expr => move |$arg:pat| $body:expr) => { ClosureOnce::new(|$state,($arg,)| $body, $state_val) }; ($state:ident=$state_val:expr => move |$arg1:pat,$($arg2:pat),+| $body:expr) => { ClosureOnce::new(|$state,($arg1,$($arg2),*)| $body, $state_val) }; (mut $state:ident=$state_val:expr => move || $body:expr) => { ClosureOnce::new(|mut $state,()| $body, $state_val) }; (mut $state:ident=$state_val:expr => move |$arg:pat| $body:expr) => { ClosureOnce::new(|mut $state,($arg,)| $body, $state_val) }; (mut $state:ident=$state_val:expr => move |$arg1:pat,$($arg2:pat),+| $body:expr) => { ClosureOnce::new(|mut $state,($arg1,$($arg2),*)| $body, $state_val) }; (mut $state:ident=$state_val:expr => || $body:expr) => { ClosureMut::new(|$state,()| $body, $state_val) }; (mut $state:ident=$state_val:expr => |$arg:pat| $body:expr) => { ClosureMut::new(|$state,($arg,)| $body, $state_val) }; (mut $state:ident=$state_val:expr => |$arg1:pat,$($arg2:pat),+| 
$body:expr) => { ClosureMut::new(|$state,($arg1,$($arg2),*)| $body, $state_val) }; (ref mut $state:ident=$state_val:expr => move || $body:expr) => { ClosureRefMut::new(|$state,()| $body, $state_val) }; (ref mut $state:ident=$state_val:expr => move |$arg:pat| $body:expr) => { ClosureRefMut::new(|$state,($arg,)| $body, $state_val) }; (ref mut $state:ident=$state_val:expr => move |$arg1:pat,$($arg2:pat),+| $body:expr) => { ClosureRefMut::new(|$state,($arg1,$($arg2),*)| $body, $state_val) }; ($state:ident=$state_val:expr => || $body:expr) => { Closure::new(|$state,()| $body, $state_val) }; ($state:ident=$state_val:expr => |$arg:pat| $body:expr) => { Closure::new(|$state,($arg,)| $body, $state_val) }; ($state:ident=$state_val:expr => |$arg1:pat,$($arg2:pat),+| $body:expr) => { Closure::new(|$state,($arg1,$($arg2),*)| $body, $state_val) }; (ref $state:ident=$state_val:expr => move || $body:expr) => { ClosureRef::new(|$state,()| $body, $state_val) }; (ref $state:ident=$state_val:expr => move |$arg:pat| $body:expr) => { ClosureRef::new(|$state,($arg,)| $body, $state_val) }; (ref $state:ident=$state_val:expr => move |$arg1:pat,$($arg2:pat),+| $body:expr) => { ClosureRef::new(|$state,($arg1,$($arg2),*)| $body, $state_val) }; (ref $state:ident=$state_val:expr => |$($arg:pat),*| $body:expr) => { compile_error!("Use of ref keyword require move keyword for the closure body") }; (ref mut $state:ident=$state_val:expr => |$($arg:pat),*| $body:expr) => { compile_error!("Use of ref keyword require move keyword for the closure body") }; } #[macro_export] macro_rules! 
closure_rec {
    // Recursive closures: the generated closure receives a handle to itself
    // (`$me`) as its first argument, presumably so the body can re-invoke
    // it — confirm against the ClosureOnceRec/ClosureRec definitions.
    // No marker after `=>`: call consumes the closure -> ClosureOnceRec.
    ($me:ident.state=$state_val:expr => || $body:expr) => {
        ClosureOnceRec::new(|$me,()| $body, $state_val)
    };
    ($me:ident.state=$state_val:expr => |$arg:pat| $body:expr) => {
        ClosureOnceRec::new(|$me,($arg,)| $body, $state_val)
    };
    ($me:ident.state=$state_val:expr => |$arg1:pat,$($arg2:pat),+| $body:expr) => {
        ClosureOnceRec::new(|$me,($arg1,$($arg2),*)| $body, $state_val)
    };
    // `mut` before the binding: same expansion, but the self-handle binding
    // inside the generated closure is mutable.
    (mut $me:ident.state=$state_val:expr => || $body:expr) => {
        ClosureOnceRec::new(|mut $me,()| $body, $state_val)
    };
    (mut $me:ident.state=$state_val:expr => |$arg:pat| $body:expr) => {
        ClosureOnceRec::new(|mut $me,($arg,)| $body, $state_val)
    };
    (mut $me:ident.state=$state_val:expr => |$arg1:pat,$($arg2:pat),+| $body:expr) => {
        ClosureOnceRec::new(|mut $me,($arg1,$($arg2),*)| $body, $state_val)
    };
    // `mut` binding plus a `mut` marker before the closure -> ClosureMutRec.
    (mut $me:ident.state=$state_val:expr => mut || $body:expr) => {
        ClosureMutRec::new(|$me,()| $body, $state_val)
    };
    (mut $me:ident.state=$state_val:expr => mut |$arg:pat| $body:expr) => {
        ClosureMutRec::new(|$me,($arg,)| $body, $state_val)
    };
    (mut $me:ident.state=$state_val:expr => mut |$arg1:pat,$($arg2:pat),+| $body:expr) => {
        ClosureMutRec::new(|$me,($arg1,$($arg2),*)| $body, $state_val)
    };
    // Plain binding plus a `mut` marker -> ClosureRecMut.
    ($me:ident.state=$state_val:expr => mut || $body:expr) => {
        ClosureRecMut::new(|$me,()| $body, $state_val)
    };
    ($me:ident.state=$state_val:expr => mut |$arg:pat| $body:expr) => {
        ClosureRecMut::new(|$me,($arg,)| $body, $state_val)
    };
    ($me:ident.state=$state_val:expr => mut |$arg1:pat,$($arg2:pat),+| $body:expr) => {
        ClosureRecMut::new(|$me,($arg1,$($arg2),*)| $body, $state_val)
    };
    // `ref` marker -> ClosureRec.
    ($me:ident.state=$state_val:expr => ref || $body:expr) => {
        ClosureRec::new(|$me,()| $body, $state_val)
    };
    ($me:ident.state=$state_val:expr => ref |$arg:pat| $body:expr) => {
        ClosureRec::new(|$me,($arg,)| $body, $state_val)
    };
    ($me:ident.state=$state_val:expr => ref |$arg1:pat,$($arg2:pat),+| $body:expr) => {
        ClosureRec::new(|$me,($arg1,$($arg2),*)| $body, $state_val)
    };
}
// The next macro (its body continues on the following source line) invokes a
// namable closure through the Stable* traits with tuple-packed arguments.
#[macro_export]
macro_rules!
call {
    // `ref` variants dispatch through `StableFn::stable_call` (shared borrow).
    // BUG FIX: the zero-argument arm expanded to `$c.ident.stable_call(())`,
    // which accesses a nonexistent `ident` field on the closure; the call
    // must be made on `$c` itself, matching the one- and N-argument arms.
    (ref $c:ident ()) => { $c.stable_call(()) };
    (ref $c:ident ($arg:expr)) => { $c.stable_call(($arg,)) };
    (ref $c:ident ($arg1:expr,$($arg2:expr),+)) => { $c.stable_call(($arg1,$($arg2),*)) };
    // `mut` variants dispatch through `StableFnMut::stable_call_mut`.
    (mut $c:ident ()) => { $c.stable_call_mut(()) };
    (mut $c:ident ($arg:expr)) => { $c.stable_call_mut(($arg,)) };
    (mut $c:ident ($arg1:expr,$($arg2:expr),+)) => { $c.stable_call_mut(($arg1,$($arg2),*)) };
    // Default (no marker): consume via `StableFnOnce::stable_call_once`.
    ($c:ident ()) => { $c.stable_call_once(()) };
    ($c:ident ($arg:expr)) => { $c.stable_call_once(($arg,)) };
    ($c:ident ($arg1:expr,$($arg2:expr),+)) => { $c.stable_call_once(($arg1,$($arg2),*)) };
}
/// Wraps a namable closure back into an ordinary anonymous closure, so it can
/// be handed to APIs expecting `Fn`/`FnMut`/`FnOnce` on stable Rust.
#[macro_export]
macro_rules! regulate {
    (|| ref $c:ident) => { || $c.stable_call(()) };
    (|$arg:ident| ref $c:ident) => { |$arg| $c.stable_call(($arg,)) };
    (|$arg1:ident,$($arg2:ident),+| ref $c:ident) => { |$arg1,$($arg2),*| $c.stable_call(($arg1,$($arg2),*)) };
    (|| mut $c:ident) => { || $c.stable_call_mut(()) };
    (|$arg:ident| mut $c:ident) => { |$arg| $c.stable_call_mut(($arg,)) };
    // BUG FIX: this arm expanded to `$c:stable_call_mut(...)` — a colon typo
    // producing invalid (type-ascription-like) syntax instead of the method
    // call `$c.stable_call_mut(...)` used by every sibling arm.
    (|$arg1:ident,$($arg2:ident),+| mut $c:ident) => { |$arg1,$($arg2),*| $c.stable_call_mut(($arg1,$($arg2),*)) };
    (|| $c:ident) => { || $c.stable_call_once(()) };
    (|$arg:ident| $c:ident) => { |$arg| $c.stable_call_once(($arg,)) };
    (|$arg1:ident,$($arg2:ident),+| $c:ident) => { |$arg1,$($arg2),*| $c.stable_call_once(($arg1,$($arg2),*)) };
}
pub mod closures;
pub mod closure_rec;
pub mod stable_fn;
pub use closures::{Closure,ClosureMut,ClosureOnce,ClosureRef,ClosureRefMut};
pub use stable_fn::{StableFn,StableFnMut,StableFnOnce};
pub use closure_rec::{ClosureOnceRec,ClosureRecMut,ClosureMutRec,ClosureRec};
use tokio::process::Command;
use anyhow::{Result, Context};
use async_trait::async_trait;
use crate::{
    services::model::{Nameable, Ensurable, is_binary_present},
    helpers::ExitStatusIntoUnit
};

// Binary name; used by `is_binary_present` (via `Nameable`) to probe PATH.
static NAME: &str = "kfp";

/// Ensures the Kubeflow Pipelines (`kfp`) CLI is installed on this machine.
#[derive(Default)]
pub struct Kfp {}

impl Nameable for Kfp {
    fn name(&self) -> &'static str {
        NAME
    }
}

#[async_trait]
impl Ensurable for Kfp {
    /// Present iff the `kfp` binary can be found (delegates to the shared
    /// `is_binary_present` helper, which is keyed off `self.name()`).
    async fn is_present(&self) -> Result<bool> {
        is_binary_present(self).await
    }

    /// Installs the CLI with pip, copies the user-local entry points into
    /// `/usr/local/bin`, then verifies both are resolvable with `which`.
    ///
    /// NOTE(review): `status_to_unit()` (from `ExitStatusIntoUnit`)
    /// presumably turns a non-zero exit status into an `Err` — confirm
    /// against the helper's definition.
    async fn make_present(&self) -> Result<()> {
        // NOTE(review): urllib3 is pinned — presumably to dodge a kfp
        // dependency conflict; confirm why this exact version is required.
        Command::new("pip3")
            .arg("install")
            .arg("urllib3==1.24.2")
            .arg("kfp")
            .arg("kfp-server-api")
            .arg("--upgrade")
            .arg("--user")
            .status().await
            .status_to_unit()
            .context("Unable to install kfp cli.")?;
        // pip --user installs land in $HOME/.local/bin, which may not be on
        // PATH; copy the entry points somewhere globally visible.
        Command::new("sh")
            .arg("-c")
            .arg("cp $HOME/.local/bin/kfp /usr/local/bin/kfp")
            .status().await
            .status_to_unit()
            .context("Unable to copy the kfp binary.")?;
        Command::new("sh")
            .arg("-c")
            .arg("cp $HOME/.local/bin/dsl-compile /usr/local/bin/dsl-compile")
            .status().await
            .status_to_unit()
            .context("Unable to copy the dsl-compile binary.")?;
        // Sanity checks: `which` exits non-zero when the binary is missing.
        Command::new("which")
            .arg("dsl-compile")
            .status().await
            .status_to_unit()
            .context("Unable to verify dsl-compile install.")?;
        Command::new("which")
            .arg("kfp")
            .status().await
            .status_to_unit()
            .context("Unable to verify kfp install.")?;
        Ok(())
    }
}
// Create the Error, ErrorKind, ResultExt, and Result types error_chain!{ foreign_links { IOError(::std::io::Error); PostgresError(::postgres::Error); Utf8Error(::std::string::FromUtf8Error); } }
//! Bare-metal STM32F4 "blinky": TIM2 update interrupts toggle an LED on GPIOD.
#![allow(improper_ctypes)]
#![no_std]
#![no_main]
#![feature(lang_items)]
#![feature(int_uint)] // update fail_bounds_check
#![feature(no_std)]
#![crate_name="blinky"]

//extern crate libc;
use zero::std_types::*;
use libarm::stm32f4xx::*;

mod zero {
    pub mod std_types;
    pub mod zero;
}
#[macro_use]
mod libarm {
    #[macro_use]
    pub mod stm32f4xx;
    #[macro_use]
    pub mod stm32f4xx_gpio;
    #[macro_use]
    pub mod stm32f4xx_rcc;
}

// GPIOD pin numbers for the four user LEDs.
const LED_GREEN :uint = 12;
const LED_ORANGE :uint = 13;
const LED_RED :uint = 14;
const LED_BLUE :uint = 15;
// The LED this firmware blinks.
static LED :uint = LED_RED;

/// TIM2 interrupt handler: toggles the LED on each timer update event,
/// then clears the timer's status register.
#[no_mangle]
pub extern "C" fn TIM2_IRQHandler() {
    let TIM2 = TIM2();
    let GPIOD = GPIOD();
    let toggle_led = 1 << LED;
    // flash on update event
    if TIM2.SR & TIM_SR_UIF!() > 0 {
        // XOR on the output data register flips the pin state.
        GPIOD.ODR ^= toggle_led;
    }
    TIM2.SR = 0x0; // reset the status register
}

/// Entry point: configures the LED pin as a push-pull output, sets up TIM2
/// to fire periodic update interrupts, then idles forever (all blinking
/// happens in the interrupt handler).
#[no_mangle]
pub extern "C" fn main() {
    let pin = LED;
    let RCC = RCC();
    let GPIOD = GPIOD();
    let TIM2 = TIM2();
    let NVIC = NVIC();
    // MODER/OSPEEDR/PUPDR hold 2 bits per pin; OTYPER holds 1 bit per pin.
    let mode = GPIO_Mode_OUT!() << (pin * 2);
    let speed = GPIO_Speed_100MHz!() << (pin * 2);
    let otype = GPIO_OType_PP!() << pin;
    let pullup = GPIO_PuPd_NOPULL!() << (pin * 2);
    let irq_en = 1 << (TIM2_IRQn!() as uint);
    RCC.AHB1ENR |= RCC_AHB1ENR_GPIODEN!(); // enable the clock to GPIOD
    RCC.APB1ENR |= RCC_APB1ENR_TIM2EN!(); // enable TIM2 clock
    //
    // Initialise the GPIO port.
    //
    GPIOD.MODER |= mode;
    GPIOD.OSPEEDR |= speed;
    GPIOD.OTYPER |= otype;
    GPIOD.PUPDR |= pullup;
    NVIC.ISER[0] |= irq_en; // enable the TIM2 IRQ
    TIM2.PSC = 0xFFFF; // max prescaler
    TIM2.DIER |= TIM_DIER_UIE!(); // enable update interrupt
    TIM2.ARR = 0xFF; // count to 255 (autoreload value 255)
    TIM2.CR1 |= TIM_CR1_ARPE!() | TIM_CR1_CEN!(); // autoreload on, counter enabled
    loop {}
}
//! The commands from the specification that are listed as administrative. use super::*; /// Which version of the protocol is spoken? /// /// # Comments /// > For this specification 2. pub fn protocol_version() -> Command { command!("protocol_version" => (none) -> (int)) } /// What is the name of the engine? /// /// # Comments /// > E.g. “GNU Go”, “GoLois”, “Many Faces of Go”. /// > The name does not include any version information, /// > which is provided by the version command. pub fn name() -> Command { command!("name" => (none) -> (string*)) } /// What is the version of the engine? /// /// # Comments /// > E.g. “3.1.33”, “10.5”. /// > Engines without a sense of version number should return the empty string. pub fn version() -> Command { command!("version" => (none) -> (string*)) } /// Does the engine know this command? /// /// # Comments /// > The protocol makes no distinction /// > between unknown commands and known but unimplemented ones. /// > Do not declare a command as known if it is known not to work. pub fn known_command() -> Command { command!("known_command" => (string) -> (boolean)) } /// Which commands does the engine know? /// /// # Comments /// > Include all known commands, /// > including required ones and private extensions. pub fn list_commands() -> Command { command!("list_commands" => (none) -> (string&)) } /// End the communication. /// /// # Effects /// > The session is terminated and the connection is closed.l /// /// # Comments /// > The full response of this command /// > must be sent before the engine closes the connection. /// > The controller must receive the response /// > before the connection is closed on its side. pub fn quit() -> Command { command!("quit" => (none) -> (none)) }
use std::collections::HashMap;
use std::io::prelude::*;
use std::fs::File;

extern crate csv;
extern crate rustc_serialize;

/// One row of the mail database CSV: "<mail address>,<user name>".
#[derive(RustcDecodable)]
struct Record {
    mail_addr: String,
    user_name: String,
}

struct Database {}

impl Database {
    /// Loads `<db_name>.txt` (a headerless CSV) and returns a map from mail
    /// address to user name. Panics if the file cannot be opened or a row
    /// cannot be decoded.
    fn get_properties(db_name: String) -> HashMap<String, String> {
        let mut prop = HashMap::new();
        let file_name = format!("{}.txt", db_name);
        let mut rdr = match csv::Reader::from_file(file_name) {
            Ok(r) => r.has_headers(false),
            // BUG FIX: `panic!(e)` panicked with a non-string payload
            // (deprecated, and a hard error since Rust 2021); format the
            // error into the panic message instead.
            Err(e) => panic!("{}", e),
        };
        for record in rdr.decode() {
            let record: Record = record.unwrap();
            prop.insert(record.mail_addr, record.user_name);
        }
        prop
    }
}

/// Small helper that writes an HTML document into a `File`.
struct HtmlWriter {
    writer: File,
}

impl HtmlWriter {
    fn new(writer: File) -> HtmlWriter {
        HtmlWriter {
            writer: writer,
        }
    }

    /// Writes the document prologue and the page heading.
    #[allow(unused_must_use)]
    fn title(&mut self, title: String) {
        write!(self.writer, "<html>");
        write!(self.writer, "<head>");
        write!(self.writer, "<title>{}</title>", title);
        write!(self.writer, "</head>");
        write!(self.writer, "<body>\n");
        write!(self.writer, "<h1>{}</h1>\n", title);
    }

    /// Writes one paragraph element.
    #[allow(unused_must_use)]
    fn paragraph(&mut self, msg: String) {
        write!(self.writer, "<p>{}</p>\n", msg);
    }

    /// Writes an anchor element as its own paragraph.
    fn link(&mut self, href: String, caption: String) {
        self.paragraph(format!("<a href=\"{}\">{}</a>", href, caption));
    }

    /// Writes a `mailto:` link labelled with the user's name.
    fn mail_to(&mut self, mail_addr: String, user_name: String) {
        self.link(format!("mailto:{}", mail_addr), user_name);
    }

    /// Writes the document epilogue.
    #[allow(unused_must_use)]
    fn close(&mut self) {
        write!(self.writer, "</body>");
        // BUG FIX: the closing tag was written as "<html>", which re-opens
        // the document instead of terminating it.
        write!(self.writer, "</html>\n");
    }
}

struct PageMaker {}

impl PageMaker {
    /// Generates the welcome page `file_name` for `mail_addr`, looking the
    /// user's display name up in the "maildata" database. Panics if the
    /// address is unknown or the output file cannot be created.
    fn make_welcome_page(mail_addr: String, file_name: String) {
        let mail_prop = Database::get_properties("maildata".to_string());
        let user_name = match mail_prop.get(&mail_addr) {
            Some(n) => n,
            None => panic!("{} is not found", mail_addr),
        };
        let file = File::create(file_name.clone()).expect("Unable to create file");
        let mut writer = HtmlWriter::new(file);
        writer.title(format!("Welcome to {}'s page!", user_name));
        writer.paragraph(format!("{}のページへようこそ。", user_name));
        writer.paragraph("メールまっていますね。".to_string());
        writer.mail_to(mail_addr.clone(), user_name.to_string());
        writer.close();
        println!("{} is created for {} ({})", file_name, mail_addr, user_name);
    }
}

fn main() {
    PageMaker::make_welcome_page("hyuki@hyuki.com".to_string(), "welcome.html".to_string());
}
// NOTE(review): svd2rust-generated read/write accessors for the GPIO_OE
// (output-enable) register; code is left untouched, comments added only.
#[doc = "Reader of register GPIO_OE"]
pub type R = crate::R<u32, super::GPIO_OE>;
#[doc = "Writer for register GPIO_OE"]
pub type W = crate::W<u32, super::GPIO_OE>;
#[doc = "Register GPIO_OE `reset()`'s with value 0"]
impl crate::ResetValue for super::GPIO_OE {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // All outputs disabled after reset.
        0
    }
}
#[doc = "Reader of field `GPIO_OE`"]
pub type GPIO_OE_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `GPIO_OE`"]
pub struct GPIO_OE_W<'a> {
    w: &'a mut W,
}
impl<'a> GPIO_OE_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        // Only bits 0..=29 exist; the value is masked into the low 30 bits,
        // leaving the two reserved top bits untouched.
        self.w.bits = (self.w.bits & !0x3fff_ffff) | ((value as u32) & 0x3fff_ffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:29 - Set output enable (1/0 -> output/input) for GPIO0...29.\\n Reading back gives the last value written.\\n If core 0 and core 1 both write to GPIO_OE simultaneously (or to a SET/CLR/XOR alias),\\n the result is as though the write from core 0 took place first,\\n and the write from core 1 was then applied to that intermediate result."]
    #[inline(always)]
    pub fn gpio_oe(&self) -> GPIO_OE_R {
        GPIO_OE_R::new((self.bits & 0x3fff_ffff) as u32)
    }
}
impl W {
    #[doc = "Bits 0:29 - Set output enable (1/0 -> output/input) for GPIO0...29.\\n Reading back gives the last value written.\\n If core 0 and core 1 both write to GPIO_OE simultaneously (or to a SET/CLR/XOR alias),\\n the result is as though the write from core 0 took place first,\\n and the write from core 1 was then applied to that intermediate result."]
    #[inline(always)]
    pub fn gpio_oe(&mut self) -> GPIO_OE_W {
        GPIO_OE_W { w: self }
    }
}
// see also 'tests/ok/07_vis2.rs' #[macro_use] extern crate parameterized_macro; pub mod a { #[parameterized(v = { Some(- 1), None })] pub(in crate::b) fn my_test(v: Option<i32>) {} } mod b { #[cfg(test)] fn call() { a::my_test::case_0(); // this is ok } } fn main() { if cfg!(test) { a::my_test::case_0(); // this isn't ok } }
use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use cosmwasm_std::{CanonicalAddr, Decimal, ReadonlyStorage, StdResult, Storage, Uint128}; use cosmwasm_storage::{ bucket, bucket_read, singleton, singleton_read, Bucket, ReadonlyBucket, Singleton, }; static KEY_CONFIG: &[u8] = b"config"; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct Config { pub owner: CanonicalAddr, pub terraswap_factory: CanonicalAddr, pub spectrum_token: CanonicalAddr, pub spectrum_gov: CanonicalAddr, pub mirror_token: CanonicalAddr, pub mirror_staking: CanonicalAddr, pub mirror_gov: CanonicalAddr, pub controller: CanonicalAddr, pub platform: CanonicalAddr, pub base_denom: String, pub community_fee: Decimal, pub platform_fee: Decimal, pub controller_fee: Decimal, pub deposit_fee: Decimal, pub lock_start: u64, pub lock_end: u64, } impl Config { pub fn calc_locked_reward(&self, total_amount: Uint128, height: u64) -> Uint128 { if self.lock_end <= height { Uint128::zero() } else if self.lock_start >= height { total_amount } else { total_amount.multiply_ratio(self.lock_end - height, self.lock_end - self.lock_start) } } } pub fn store_config<S: Storage>(storage: &mut S, config: &Config) -> StdResult<()> { singleton(storage, KEY_CONFIG).save(config) } pub fn read_config<S: Storage>(storage: &S) -> StdResult<Config> { singleton_read(storage, KEY_CONFIG).load() } static KEY_STATE: &[u8] = b"state"; #[derive(Serialize, Deserialize, Clone, PartialEq, JsonSchema)] pub struct State { // addr for contract, this is for query pub contract_addr: CanonicalAddr, // this is to reconcile with gov pub previous_spec_share: Uint128, // amount of SPEC reward per weight pub spec_share_index: Decimal, // current MIR rewards in share pub total_farm_share: Uint128, // total SPEC reward distribution weight pub total_weight: u32, // is for stat only, not use pub earning: Uint128, } impl State { pub fn calc_farm_share(&self, farm_amount: Uint128, total_farm_amount: Uint128) -> 
Uint128 { if self.total_farm_share.is_zero() || total_farm_amount.is_zero() { farm_amount } else { farm_amount.multiply_ratio(self.total_farm_share, total_farm_amount) } } } pub fn state_store<S: Storage>(storage: &mut S) -> Singleton<S, State> { singleton(storage, KEY_STATE) } pub fn read_state<S: Storage>(storage: &S) -> StdResult<State> { singleton_read(storage, KEY_STATE).load() } static PREFIX_POOL_INFO: &[u8] = b"pool_info"; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct PoolInfo { // LP token pub staking_token: CanonicalAddr, // total auto-compound share in the pool pub total_auto_bond_share: Uint128, // total auto-stake share in the pool pub total_stake_bond_share: Uint128, // LP amount for auto-stake pub total_stake_bond_amount: Uint128, // distribution weight pub weight: u32, // readonly, flag whether UI should show compound option pub auto_compound: bool, // current MIR reward share for the pool pub farm_share: Uint128, // index to reconcile with state.spec_share_index // (state.spec_share_index - pool_info.state_spec_share_index) * pool_info.weight = additional SPEC rewards for this pool pub state_spec_share_index: Decimal, // total MIR rewards in share per total_stake_bond_share pub farm_share_index: Decimal, // additional SPEC rewards allocated for auto-compound per total_auto_bond_share pub auto_spec_share_index: Decimal, // additional SPEC rewards allocated for auto-stake per total_stake_bond_share pub stake_spec_share_index: Decimal, // for MIR pool: number of MIR to reinvest // for non-MIR pool: number of UST to reinvest pub reinvest_allowance: Uint128, } impl PoolInfo { pub fn calc_auto_bond_share(&self, auto_bond_amount: Uint128, lp_balance: Uint128) -> Uint128 { let total_auto_bond_amount = (lp_balance - self.total_stake_bond_amount).unwrap(); if self.total_auto_bond_share.is_zero() || total_auto_bond_amount.is_zero() { auto_bond_amount } else { auto_bond_amount.multiply_ratio(self.total_auto_bond_share, 
total_auto_bond_amount) } } pub fn calc_stake_bond_share(&self, stake_bond_amount: Uint128) -> Uint128 { if self.total_stake_bond_share.is_zero() || self.total_stake_bond_amount.is_zero() { stake_bond_amount } else { stake_bond_amount .multiply_ratio(self.total_stake_bond_share, self.total_stake_bond_amount) } } pub fn calc_user_auto_balance(&self, lp_balance: Uint128, auto_bond_share: Uint128) -> Uint128 { if self.total_auto_bond_share.is_zero() { Uint128::zero() } else { (lp_balance - self.total_stake_bond_amount) .unwrap() .multiply_ratio(auto_bond_share, self.total_auto_bond_share) } } pub fn calc_user_stake_balance(&self, stake_bond_share: Uint128) -> Uint128 { if self.total_stake_bond_share.is_zero() { Uint128::zero() } else { self.total_stake_bond_amount .multiply_ratio(stake_bond_share, self.total_stake_bond_share) } } } pub fn pool_info_store<S: Storage>(storage: &mut S) -> Bucket<S, PoolInfo> { bucket(PREFIX_POOL_INFO, storage) } pub fn pool_info_read<S: Storage>(storage: &S) -> ReadonlyBucket<S, PoolInfo> { bucket_read(PREFIX_POOL_INFO, storage) } static PREFIX_REWARD: &[u8] = b"reward"; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct RewardInfo { // index to reconcile with pool_info.farm_share_index // (pool_info.farm_share_index - reward_info.farm_share_index) * reward_info.stake_bond_share // = new MIR rewards for auto-stake pub farm_share_index: Decimal, // index to reconcile with pool_info.auto_spec_share_index // (pool_info.auto_spec_share_index - reward_info.auto_spec_share_index) * reward_info.auto_bond_share // = new SPEC rewards for auto-compound pub auto_spec_share_index: Decimal, // index to reconcile with pool_info.stake_spec_share_index // (pool_info.stake_spec_share_index - reward_info.stake_spec_share_index) * reward_info.stake_bond_share // = new SPEC rewards for auto-stake pub stake_spec_share_index: Decimal, // share of auto-compound for a person pub auto_bond_share: Uint128, // share of auto-stake for 
a person pub stake_bond_share: Uint128, // current MIR rewards in share balance pub farm_share: Uint128, // current SPEC reward share balance pub spec_share: Uint128, // cumulative SPEC share balance pub accum_spec_share: Uint128, } /// returns a bucket with all rewards owned by this owner (query it by owner) pub fn rewards_store<'a, S: Storage>( storage: &'a mut S, owner: &CanonicalAddr, ) -> Bucket<'a, S, RewardInfo> { Bucket::multilevel(&[PREFIX_REWARD, owner.as_slice()], storage) } /// returns a bucket with all rewards owned by this owner (query it by owner) /// (read-only version for queries) pub fn rewards_read<'a, S: ReadonlyStorage>( storage: &'a S, owner: &CanonicalAddr, ) -> ReadonlyBucket<'a, S, RewardInfo> { ReadonlyBucket::multilevel(&[PREFIX_REWARD, owner.as_slice()], storage) }
// Handler registry: one submodule per message/timer/signal handler,
// each re-exported flat so consumers can import `SomethingHandler` directly.
mod chat;
mod command;
mod key;
mod login;
mod onclose;
mod onopen;
mod ping;
mod pong;
mod say;
mod scoreboard;
mod signal;

pub use self::chat::ChatHandler;
pub use self::command::CommandHandler;
pub use self::key::KeyHandler;
pub use self::login::LoginHandler;
pub use self::onclose::OnCloseHandler;
pub use self::onopen::OnOpenHandler;
pub use self::ping::PingTimerHandler;
pub use self::pong::PongHandler;
pub use self::say::SayHandler;
pub use self::scoreboard::ScoreBoardTimerHandler;
pub use self::signal::SignalHandler;
// Generated-style accessors for the USB PHY POWER_CTL register.
// Each field X gets a reader alias `X_R`, a write proxy `X_W`, and accessor
// methods on the whole-register reader `R` / writer `W`.
#[doc = "Reader of register POWER_CTL"]
pub type R = crate::R<u32, super::POWER_CTL>;
#[doc = "Writer for register POWER_CTL"]
pub type W = crate::W<u32, super::POWER_CTL>;
#[doc = "Register POWER_CTL `reset()`'s with value 0"]
impl crate::ResetValue for super::POWER_CTL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}

// --- SUSPEND: bit 2 ---
#[doc = "Reader of field `SUSPEND`"]
pub type SUSPEND_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SUSPEND`"]
pub struct SUSPEND_W<'a> {
    w: &'a mut W,
}
impl<'a> SUSPEND_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}

// --- DP_UP_EN: bit 16 ---
#[doc = "Reader of field `DP_UP_EN`"]
pub type DP_UP_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DP_UP_EN`"]
pub struct DP_UP_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> DP_UP_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}

// --- DP_BIG: bit 17 ---
#[doc = "Reader of field `DP_BIG`"]
pub type DP_BIG_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DP_BIG`"]
pub struct DP_BIG_W<'a> {
    w: &'a mut W,
}
impl<'a> DP_BIG_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
        self.w
    }
}

// --- DP_DOWN_EN: bit 18 ---
#[doc = "Reader of field `DP_DOWN_EN`"]
pub type DP_DOWN_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DP_DOWN_EN`"]
pub struct DP_DOWN_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> DP_DOWN_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
        self.w
    }
}

// --- DM_UP_EN: bit 19 ---
#[doc = "Reader of field `DM_UP_EN`"]
pub type DM_UP_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DM_UP_EN`"]
pub struct DM_UP_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> DM_UP_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);
        self.w
    }
}

// --- DM_BIG: bit 20 ---
#[doc = "Reader of field `DM_BIG`"]
pub type DM_BIG_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DM_BIG`"]
pub struct DM_BIG_W<'a> {
    w: &'a mut W,
}
impl<'a> DM_BIG_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
        self.w
    }
}

// --- DM_DOWN_EN: bit 21 ---
#[doc = "Reader of field `DM_DOWN_EN`"]
pub type DM_DOWN_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DM_DOWN_EN`"]
pub struct DM_DOWN_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> DM_DOWN_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);
        self.w
    }
}

// --- ENABLE_DPO: bit 28 ---
#[doc = "Reader of field `ENABLE_DPO`"]
pub type ENABLE_DPO_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ENABLE_DPO`"]
pub struct ENABLE_DPO_W<'a> {
    w: &'a mut W,
}
impl<'a> ENABLE_DPO_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);
        self.w
    }
}

// --- ENABLE_DMO: bit 29 ---
#[doc = "Reader of field `ENABLE_DMO`"]
pub type ENABLE_DMO_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ENABLE_DMO`"]
pub struct ENABLE_DMO_W<'a> {
    w: &'a mut W,
}
impl<'a> ENABLE_DMO_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
        self.w
    }
}

// Field readers on the whole-register reader. Doc-string typos from the
// vendor description have been repaired here ("resister" -> "resistor",
// mis-encoded ohm sign, "signle" -> "single").
impl R {
    #[doc = "Bit 2 - Put PHY into Suspend mode. If the PHY is enabled, this bit MUST be set before entering a low power mode (DeepSleep). Note: - This bit is invalid if the HOST bit of the Host Control 0 Register (HOST_CTL0) is '1'."]
    #[inline(always)]
    pub fn suspend(&self) -> SUSPEND_R {
        SUSPEND_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 16 - Enables the pull up on the DP. '0' : Disable. '1' : Enable."]
    #[inline(always)]
    pub fn dp_up_en(&self) -> DP_UP_EN_R {
        DP_UP_EN_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 17 - Select the resistor value if POWER_CTL.DP_EN='1'. This bit is valid in GPIO. '0' : The resistor value is from 900 to 1575 \u{3a9} pull up on the DP. '1' : The resistor value is from 1425 to 3090 \u{3a9} pull up on the DP"]
    #[inline(always)]
    pub fn dp_big(&self) -> DP_BIG_R {
        DP_BIG_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bit 18 - Enables the ~15k pull down on the DP."]
    #[inline(always)]
    pub fn dp_down_en(&self) -> DP_DOWN_EN_R {
        DP_DOWN_EN_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 19 - Enables the pull up on the DM. The bit is valid in GPIO. The pull up resistor is disabled when not in GPIO. '0' : Disable. '1' : Enable."]
    #[inline(always)]
    pub fn dm_up_en(&self) -> DM_UP_EN_R {
        DM_UP_EN_R::new(((self.bits >> 19) & 0x01) != 0)
    }
    #[doc = "Bit 20 - Select the resistor value if POWER_CTL.DM_EN='1'. This bit is valid in GPIO. '0' : The resistor value is from 900 to 1575 \u{3a9} pull up on the DM. '1' : The resistor value is from 1425 to 3090 \u{3a9} pull up on the DM"]
    #[inline(always)]
    pub fn dm_big(&self) -> DM_BIG_R {
        DM_BIG_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    // NOTE(review): the original description said "on the DP" — given the
    // field name DM_DOWN_EN this is presumably the DM line; confirm with the
    // device datasheet.
    #[doc = "Bit 21 - Enables the ~15k pull down on the DM."]
    #[inline(always)]
    pub fn dm_down_en(&self) -> DM_DOWN_EN_R {
        DM_DOWN_EN_R::new(((self.bits >> 21) & 0x01) != 0)
    }
    #[doc = "Bit 28 - Enables the single ended receiver on D+."]
    #[inline(always)]
    pub fn enable_dpo(&self) -> ENABLE_DPO_R {
        ENABLE_DPO_R::new(((self.bits >> 28) & 0x01) != 0)
    }
    #[doc = "Bit 29 - Enables the single ended receiver on D-."]
    #[inline(always)]
    pub fn enable_dmo(&self) -> ENABLE_DMO_R {
        ENABLE_DMO_R::new(((self.bits >> 29) & 0x01) != 0)
    }
}

// Field write proxies on the whole-register writer (same docs as the readers).
impl W {
    #[doc = "Bit 2 - Put PHY into Suspend mode. If the PHY is enabled, this bit MUST be set before entering a low power mode (DeepSleep). Note: - This bit is invalid if the HOST bit of the Host Control 0 Register (HOST_CTL0) is '1'."]
    #[inline(always)]
    pub fn suspend(&mut self) -> SUSPEND_W {
        SUSPEND_W { w: self }
    }
    #[doc = "Bit 16 - Enables the pull up on the DP. '0' : Disable. '1' : Enable."]
    #[inline(always)]
    pub fn dp_up_en(&mut self) -> DP_UP_EN_W {
        DP_UP_EN_W { w: self }
    }
    #[doc = "Bit 17 - Select the resistor value if POWER_CTL.DP_EN='1'. This bit is valid in GPIO. '0' : The resistor value is from 900 to 1575 \u{3a9} pull up on the DP. '1' : The resistor value is from 1425 to 3090 \u{3a9} pull up on the DP"]
    #[inline(always)]
    pub fn dp_big(&mut self) -> DP_BIG_W {
        DP_BIG_W { w: self }
    }
    #[doc = "Bit 18 - Enables the ~15k pull down on the DP."]
    #[inline(always)]
    pub fn dp_down_en(&mut self) -> DP_DOWN_EN_W {
        DP_DOWN_EN_W { w: self }
    }
    #[doc = "Bit 19 - Enables the pull up on the DM. The bit is valid in GPIO. The pull up resistor is disabled when not in GPIO. '0' : Disable. '1' : Enable."]
    #[inline(always)]
    pub fn dm_up_en(&mut self) -> DM_UP_EN_W {
        DM_UP_EN_W { w: self }
    }
    #[doc = "Bit 20 - Select the resistor value if POWER_CTL.DM_EN='1'. This bit is valid in GPIO. '0' : The resistor value is from 900 to 1575 \u{3a9} pull up on the DM. '1' : The resistor value is from 1425 to 3090 \u{3a9} pull up on the DM"]
    #[inline(always)]
    pub fn dm_big(&mut self) -> DM_BIG_W {
        DM_BIG_W { w: self }
    }
    // NOTE(review): original said "on the DP"; see matching note on the reader.
    #[doc = "Bit 21 - Enables the ~15k pull down on the DM."]
    #[inline(always)]
    pub fn dm_down_en(&mut self) -> DM_DOWN_EN_W {
        DM_DOWN_EN_W { w: self }
    }
    #[doc = "Bit 28 - Enables the single ended receiver on D+."]
    #[inline(always)]
    pub fn enable_dpo(&mut self) -> ENABLE_DPO_W {
        ENABLE_DPO_W { w: self }
    }
    #[doc = "Bit 29 - Enables the single ended receiver on D-."]
    #[inline(always)]
    pub fn enable_dmo(&mut self) -> ENABLE_DMO_W {
        ENABLE_DMO_W { w: self }
    }
}
// MIT License // // Copyright (c) 2019-2021 Tobias Pfeiffer // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. use { std::{str::FromStr, io, mem, fmt, marker::PhantomData}, serde::{*, de::{self, *}, forward_to_deserialize_any}, self::State::* }; const UNICODE_FILE_MAGIC_BYTES: [u8; 3] = [0xEF, 0xBB, 0xBF]; pub fn deserialize<'de, T: Deserialize<'de>, R: io::BufRead + fmt::Debug>(reader: R) -> Result<T, Error> { T::deserialize(&mut Deserializer::from_reader(reader)) } #[derive(Debug, Deserialize)] pub struct Body<T> { #[serde(rename = "$value")] pub value: T } #[derive(PartialEq, Eq, Debug, Copy, Clone)] pub enum State { /// The outer tag is yet to be parsed. Uninit, /// Reading a tag's name. TagKey, /// Indicates that the next value is a map. TagValue(bool), /// Searching for attributes in a tag. AttrKey, /// An attribute value is to be parsed AttrValue, /// Reading the body of an xml tag while searching for inner tags. 
BodyValue, BodyKey } fn invalid_state(state: State, file: &'static str, line: u32) -> Error { Error::Custom(format!("invalid state: {:?}, {}:{}", state, file, line)) } #[derive(Debug)] pub enum Error { Custom(String), InvalidType(String, String), InvalidValue(String, String), InvalidLength(usize, String), UnknownVariant(String, &'static [&'static str]), UnknownField(String, &'static [&'static str]), MissingField(&'static str), DuplicateField(&'static str), Io(io::Error), Utf8(std::str::Utf8Error), InvalidToken(String, String), Parse(String) } impl Error { pub(crate) fn invalid_token(unexp: &str, exp: &str) -> Self { Self::InvalidToken(unexp.to_string(), exp.to_string()) } pub(crate) fn unexpected_eof(msg: &str) -> Self { Self::Io(io::Error::new(io::ErrorKind::UnexpectedEof, msg)) } } struct DisplayImpl<'a>(&'a dyn Expected); impl fmt::Display for DisplayImpl<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } } impl std::error::Error for Error {} impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Custom(s) => f.write_str(s), Self::InvalidType(unexp, exp) => write!(f, "invalid type `{}`, expected `{}`", unexp, exp), Self::InvalidValue(unexp, exp) => write!(f, "invalid value `{}`, expected `{}`", unexp, exp), Self::InvalidLength(unexp, exp) => write!(f, "invalid length `{}`, expected `{}`", *unexp, exp), Self::UnknownVariant(unexp, exp) => write!(f, "unknown variant `{}`, expected variants `{:?}`", unexp, exp), Self::UnknownField(unexp, exp) => write!(f, "unknown field `{}`, expected fields `{:?}`", unexp, exp), Self::MissingField(exp) => write!(f, "missing field `{}`", exp), Self::DuplicateField(field) => write!(f, "duplicate field `{}`", field), Self::Io(err) => write!(f, "io error: {}", err), Self::Utf8(err) => write!(f, "utf8 error: {}", err), Self::InvalidToken(unexp, exp) => write!(f, "invalid token: {}, expected {}", unexp, exp), Self::Parse(err) => write!(f, "parse error: 
{}", err) } } } impl de::Error for Error { fn custom<U: fmt::Display>(msg: U) -> Self { Self::Custom(msg.to_string()) } fn invalid_type(unexp: Unexpected, exp: &dyn Expected) -> Self { Self::InvalidType(unexp.to_string(), DisplayImpl(exp).to_string()) } fn invalid_value(unexp: Unexpected, exp: &dyn Expected) -> Self { Self::InvalidValue(unexp.to_string(), DisplayImpl(exp).to_string()) } fn invalid_length(len: usize, exp: &dyn Expected) -> Self { Self::InvalidLength(len, DisplayImpl(exp).to_string()) } fn unknown_variant(variant: &str, expected: &'static [&'static str]) -> Self { Self::UnknownVariant(variant.to_string(), expected) } fn unknown_field(field: &str, expected: &'static [&'static str]) -> Self { Self::UnknownField(field.to_string(), expected) } fn missing_field(field: &'static str) -> Self { Self::MissingField(field) } fn duplicate_field(field: &'static str) -> Self { Self::DuplicateField(field) } } impl From<io::Error> for Error { fn from(e: io::Error) -> Self { Self::Io(e) } } impl From<std::str::Utf8Error> for Error { fn from(e: std::str::Utf8Error) -> Self { Self::Utf8(e) } } struct IgnoredIdSeed; impl<'de> de::DeserializeSeed<'de> for IgnoredIdSeed { type Value = IgnoredAny; fn deserialize<D: de::Deserializer<'de>>(self, deserializer: D) -> Result<Self::Value, D::Error> { deserializer.deserialize_identifier(de::IgnoredAny) } } struct IgnoredAnySeed; impl<'de> de::DeserializeSeed<'de> for IgnoredAnySeed { type Value = IgnoredAny; fn deserialize<D: de::Deserializer<'de>>(self, deserializer: D) -> Result<Self::Value, D::Error> { deserializer.deserialize_ignored_any(de::IgnoredAny) } } struct NoopSeed; impl<'de> de::DeserializeSeed<'de> for NoopSeed { type Value = (); fn deserialize<D: de::Deserializer<'de>>(self, _deserializer: D) -> Result<Self::Value, D::Error> { Ok(()) } } #[derive(Debug)] pub struct Deserializer<T: io::BufRead> { reader: T, state: State, stack: Vec<String>, buf: String } impl<T: io::BufRead> Deserializer<T> { pub fn from_reader(mut 
reader: T) -> Self { // consume magic bytes for unicode text files if reader.fill_buf().unwrap_or(&[]).starts_with(&UNICODE_FILE_MAGIC_BYTES) { reader.consume(3); } Self { reader, state: Uninit, stack: Vec::new(), buf: String::new(), } } fn next(&mut self) -> Result<char, Error> { let buf = self.reader.fill_buf()?; let len = if buf[0] & 0x80 == 0 { 1 } else if buf[0] & 0xE0 == 0xC0 { 2 } else if buf[0] & 0xF0 == 0xE0 { 3 } else { 4 }; let mut buf = [0u8; 4]; self.reader.read_exact(&mut buf[..len])?; Ok(std::str::from_utf8(&buf[..len])?.chars().next().unwrap()) } fn read_until(&mut self, mut f: impl FnMut(u8) -> bool, peek: bool) -> Result<u8, Error> { loop { let buf = self.reader.fill_buf()?; if buf.is_empty() { return Err(Error::unexpected_eof("")); } let (i, &ch) = buf.iter() .enumerate() .find(|(_, c)| f(**c)) .unwrap_or((buf.len(), &0)); self.buf.push_str(std::str::from_utf8(&buf[..i])?); if i != buf.len() { self.reader.consume(i + if peek { 0 } else { 1 }); return Ok(ch); } else { let __tmp__ = buf.len(); self.reader.consume(__tmp__); } } } fn next_value<U: FromStr>(&mut self) -> Result<U, Error> { self.state = match self.state { AttrValue => self.read_until(|ch| ch == b'"', false).map(|_| AttrKey)?, BodyValue => self.read_until(|ch| ch == b'<', true).map(|_| BodyKey)?, TagValue(true) => { Body::<String>::deserialize(&mut*self)?; self.state }, state => return Err(invalid_state(state, file!(), line!())) }; self.dbg_val(&self.buf); let v = convert_xml(self.buf.trim()).parse::<U>() .map_err(|_| Error::Parse(format!("failed to parse {}", std::any::type_name::<U>()))); self.buf.clear(); v } fn dbg_key(&self, key: &str) { if cfg!(feature = "xml_dbg") { print!("\n{}{:10}", "\t".repeat(self.stack.len()), key); } } fn dbg_val(&self, val: &str) { if cfg!(feature = "xml_dbg") { print!(" = \"{}\"", val); } } } impl<'a, 'de, T: io::BufRead> serde::Deserializer<'de> for &'a mut Deserializer<T> { type Error = Error; fn deserialize_identifier<V: Visitor<'de>>(self, visitor: 
V) -> Result<V::Value, Self::Error> { match self.state { // parse a tag's name TagKey => { let ch = self.read_until(|ch| ch.is_ascii_whitespace() || ch == b'>', false)?; self.state = TagValue(ch == b'>'); self.dbg_key(&self.buf); let r = visitor.visit_str(self.buf.as_str()); self.stack.push(mem::take(&mut self.buf)); r } // parse attribute name AttrKey => { self.read_until(|ch| ch.is_ascii_whitespace() || ch == b'=', false)?; self.state = AttrValue; self.dbg_key(&self.buf); let r = visitor.visit_str(self.buf.as_str()); self.buf.clear(); r } // return value identifier BodyKey => { self.state = BodyValue; self.dbg_key("$value"); visitor.visit_str("$value") }, state => Err(invalid_state(state, file!(), line!())) } } fn deserialize_map<V: Visitor<'de>>(mut self, visitor: V) -> Result<V::Value, Self::Error> { struct Seed<'de, T: Visitor<'de>>(T, PhantomData<&'de ()>); impl<'de, T: Visitor<'de>> de::DeserializeSeed<'de> for Seed<'de, T> { type Value = T::Value; fn deserialize<D: de::Deserializer<'de>>(self, deserializer: D) -> Result<Self::Value, D::Error> { deserializer.deserialize_map(self.0) } } match self.state { TagValue(new_state) => { self.state = if new_state { BodyKey } else { AttrKey }; visitor.visit_map(self) } Uninit => { self.state = BodyKey; self.next_entry_seed(IgnoredIdSeed, Seed(visitor, PhantomData)) .transpose() .expect("XML document was empty") .map(|(_, v)| v) } state => Err(invalid_state(state, file!(), line!())) } } fn deserialize_seq<V: Visitor<'de>>(mut self, visitor: V) -> Result<V::Value, Self::Error> { struct Seed<'de, T: Visitor<'de>>(T, PhantomData<&'de ()>); impl<'de, T: Visitor<'de>> de::DeserializeSeed<'de> for Seed<'de, T> { type Value = T::Value; fn deserialize<D: de::Deserializer<'de>>(self, deserializer: D) -> Result<Self::Value, D::Error> { deserializer.deserialize_seq(self.0) } } match self.state { TagValue(new_state) => { self.state = if new_state { BodyKey } else { AttrKey }; visitor.visit_seq(self) } Uninit => { self.state = 
BodyKey; self.next_entry_seed(IgnoredIdSeed, Seed(visitor, PhantomData)) .transpose() .expect("XML document was empty") .map(|(_, v)| v) } state => Err(invalid_state(state, file!(), line!())) } } fn deserialize_enum<V: Visitor<'de>>( self, _name: &'static str, _variants: &'static [&'static str], visitor: V ) -> Result<V::Value, Self::Error> { match self.state { AttrValue | BodyValue => visitor.visit_enum( self.next_value::<String>()?.into_deserializer()), Uninit | TagValue(_) => { self.deserialize_map(IgnoredAny)?; visitor.visit_enum(self) }, state => Err(invalid_state(state, file!(), line!())) } } fn deserialize_any<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { match self.state { TagKey | AttrKey | BodyKey => self.deserialize_identifier(visitor), Uninit | TagValue(_) => self.deserialize_map(visitor), AttrValue | BodyValue => visitor.visit_string(self.next_value()?) } } fn deserialize_bool<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_bool(self.next_value()?) } fn deserialize_i8<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_i8(self.next_value()?) } fn deserialize_i16<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_i16(self.next_value()?) } fn deserialize_i32<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_i32(self.next_value()?) } fn deserialize_i64<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_i64(self.next_value()?) } fn deserialize_u8<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_u8(self.next_value()?) } fn deserialize_u16<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_u16(self.next_value()?) } fn deserialize_u32<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_u32(self.next_value()?) 
} fn deserialize_u64<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_u64(self.next_value()?) } fn deserialize_f32<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_f32(self.next_value()?) } fn deserialize_f64<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_f64(self.next_value()?) } fn deserialize_char<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_char(self.next_value()?) } fn deserialize_str<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_string(self.next_value()?) } fn deserialize_string<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_string(self.next_value()?) } fn deserialize_bytes<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_byte_buf(self.next_value::<String>()?.into_bytes()) } fn deserialize_byte_buf<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_byte_buf(self.next_value::<String>()?.into_bytes()) } fn deserialize_option<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { visitor.visit_some(self) } fn deserialize_unit<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> { self.next_value::<String>()?; visitor.visit_unit() } fn deserialize_unit_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> { self.deserialize_unit(visitor) } fn deserialize_newtype_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> { visitor.visit_newtype_struct(self) } fn deserialize_tuple<V>(self, _len: usize, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> { self.deserialize_seq(visitor) } fn deserialize_tuple_struct<V>(self, _name: &'static str, _len: usize, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> { 
        // Tail of the `de::Deserializer` impl for `&'a mut Deserializer<R>` (the
        // impl header is earlier in the file): tuples deserialize as sequences,
        // structs as maps, and `ignored_any` falls through to `deserialize_any`.
        self.deserialize_seq(visitor)
    }
    fn deserialize_struct<V>(self, _name: &'static str, _fields: &'static [&'static str], visitor: V) -> Result<V::Value, Self::Error>
    where V: Visitor<'de> {
        self.deserialize_map(visitor)
    }
    fn deserialize_ignored_any<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_any(visitor)
    }
    fn is_human_readable(&self) -> bool {
        true
    }
}

// Streaming map access over one XML element: while inside a start tag the keys
// are attribute names; once the tag is closed (`>`), keys are child element names.
impl<'a, 'de, R: io::BufRead> de::MapAccess<'de> for &'a mut Deserializer<R> {
    type Error = Error;

    // Scans forward to the first character of the next attribute or child-element
    // name. Returns Ok(None) when the current element ends, either via `/>` or a
    // closing tag that matches the innermost entry on `self.stack`.
    fn next_key_seed<K: DeserializeSeed<'de>>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error> {
        // `ch` is the key's first character; it is pushed onto `buf` below so the
        // key deserializer sees the complete identifier.
        let ch = 'outer: loop {
            match self.state {
                AttrKey => loop {
                    match self.next()? {
                        ch if ch.is_whitespace() => (),
                        '>' => {
                            // Start tag finished: switch to scanning the element body.
                            self.state = BodyKey;
                            break;
                        }
                        '/' => return match self.next()? {
                            '>' => {
                                // Self-closing tag: the element is complete.
                                self.state = BodyKey;
                                self.stack.pop().unwrap();
                                Ok(None)
                            }
                            ch => Err(Error::invalid_token(&format!("'{}'", ch as char), "'>'"))
                        },
                        ch => break 'outer ch
                    }
                }
                BodyKey => {
                    loop {
                        match self.next()? {
                            ch if ch.is_whitespace() => (),
                            '<' => break,
                            ch => break 'outer ch
                        }
                    }
                    match self.next()? {
                        // closing tag: name is accumulated in `buf` and must match
                        // the innermost open element on the stack
                        '/' => loop {
                            match self.next()? {
                                ch if ch.is_whitespace() => (),
                                '>' => return match self.stack.pop() {
                                    Some(tag) if tag == self.buf => {
                                        self.buf.clear();
                                        Ok(None)
                                    },
                                    tag => Err(Error::Custom(format!(
                                        "invalid closing tag: `{}`, expected `{:?}`",
                                        mem::take(&mut self.buf),
                                        tag
                                    )))
                                },
                                ch => self.buf.push(ch)
                            }
                        }
                        // comment/doctype, skip (scans for the next `-->`)
                        '!' => loop {
                            while self.next()? != '-' {}
                            if self.next()? == '-' && self.next()? == '>' {
                                break;
                            }
                        }
                        // processing instruction, skip (scans for the next `?>`)
                        '?' => loop {
                            while self.next()? != '?' {}
                            if self.next()? == '>' {
                                break;
                            }
                        }
                        // opening tag: its name becomes the next map key
                        ch => {
                            self.state = TagKey;
                            break 'outer ch;
                        }
                    }
                }
                state => return Err(invalid_state(state, file!(), line!()))
            }
        };
        self.buf.push(ch);
        seed.deserialize(&mut**self).map(Some)
    }

    // Positions the reader at the start of the value belonging to the key just
    // read. Attribute values skip whitespace and `=` up to the opening quote;
    // tag/body values are already positioned.
    fn next_value_seed<V: DeserializeSeed<'de>>(&mut self, seed: V) -> Result<V::Value, Self::Error>{
        match self.state {
            TagValue(_) | BodyValue => (),
            AttrValue => loop {
                match self.next()? {
                    '"' => break,
                    ch if ch.is_whitespace() || ch == '=' => (),
                    ch => return Err(Error::invalid_token(&format!("'{}'", ch), "'\"', '=' or whitespace"))
                }
            }
            state => return Err(invalid_state(state, file!(), line!()))
        }
        seed.deserialize(&mut**self)
    }
}

// Enums are read as a single map entry whose key is the variant name; the value
// content is left for the `VariantAccess` impl below.
impl<'a, 'de, R: io::BufRead> de::EnumAccess<'de> for &'a mut Deserializer<R> {
    type Error = Error;
    type Variant = Self;

    fn variant_seed<V: DeserializeSeed<'de>>(mut self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error> {
        match self.next_entry_seed(seed, NoopSeed)? {
            Some((k, _)) => Ok((k, self)),
            // NOTE(review): panics rather than returning Err when the element has
            // no key; presumably unreachable for well-formed input — confirm.
            None => panic!("no key was found")
        }
    }
}

impl<'a, 'de, R: io::BufRead> de::VariantAccess<'de> for &'a mut Deserializer<R> {
    type Error = Error;

    // Unit variants: consume and discard the element's content.
    fn unit_variant(self) -> Result<(), Self::Error> {
        use de::Deserializer;
        self.deserialize_ignored_any(IgnoredAny).map(|_| ())
    }
    fn newtype_variant_seed<T: DeserializeSeed<'de>>(self, seed: T) -> Result<T::Value, Self::Error> {
        seed.deserialize(self)
    }
    fn tuple_variant<V: Visitor<'de>>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error> {
        use de::Deserializer;
        self.deserialize_tuple(len, visitor)
    }
    fn struct_variant<V: Visitor<'de>>(self, _fields: &'static [&'static str], visitor: V) -> Result<V::Value, Self::Error> {
        use de::Deserializer;
        self.deserialize_map(visitor)
    }
}

// Sequences are a run of sibling elements; each element's already-consumed tag
// name is replayed to the element deserializer through `SeqDeserializer`.
impl<'a, 'de, R: io::BufRead> de::SeqAccess<'de> for &'a mut Deserializer<R> {
    type Error = Error;

    fn next_element_seed<T: DeserializeSeed<'de>>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error> {
        // Local visitor/seed pair that captures the next key (tag name) as a String.
        struct Visitor;
        impl<'de> de::Visitor<'de> for Visitor {
            type Value = String;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                write!(formatter, "an identifier")
            }
            fn visit_str<E: de::Error>(self, v: &str) -> Result<Self::Value, E> {
                Ok(v.to_string())
            }
        }
        struct Seed;
        impl<'de> de::DeserializeSeed<'de> for Seed {
            type Value = String;
            fn deserialize<D: de::Deserializer<'de>>(self, deserializer: D) -> Result<Self::Value, D::Error> {
                deserializer.deserialize_identifier(Visitor)
            }
        }
        match self.next_key_seed(Seed)? {
            Some(id) => seed.deserialize(&mut SeqDeserializer { inner: Some(&mut**self), id: Some(id.as_str()) }).map(Some),
            None => Ok(None)
        }
    }
}

// Adapter that hands an already-read element name (`id`) back to the element's
// deserializer. `inner` is `take`n once the identifier has been consumed.
struct SeqDeserializer<'a, R: io::BufRead> {
    inner: Option<&'a mut Deserializer<R>>,
    id: Option<&'a str>,
}

impl<'a, 'de, R: io::BufRead> de::Deserializer<'de> for &'a mut SeqDeserializer<'a, R> {
    type Error = Error;

    fn deserialize_any<V>(self, visitor: V) -> Result<<V as Visitor<'de>>::Value, Self::Error>
    where V: Visitor<'de> {
        // Asserts the identifier is still pending before treating content as a map.
        self.id.unwrap();
        visitor.visit_map(self)
    }
    fn deserialize_enum<V: Visitor<'de>>(
        self,
        _name: &'static str,
        _variants: &'static [&'static str],
        visitor: V
    ) -> Result<V::Value, Self::Error> {
        visitor.visit_enum(self)
    }
    // Emits the stored tag name exactly once (`take` leaves `id` as None).
    fn deserialize_identifier<V>(self, visitor: V) -> Result<<V as Visitor<'de>>::Value, Self::Error>
    where V: Visitor<'de> {
        visitor.visit_str(self.id.take().unwrap())
    }
    forward_to_deserialize_any!(
        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct map struct ignored_any
    );
}

impl<'a, 'de, R: io::BufRead> de::MapAccess<'de> for SeqDeserializer<'a, R> {
    type Error = Error;

    fn next_key_seed<K: DeserializeSeed<'de>>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error> {
        match self.inner.as_ref() {
            // SAFETY(review): the transmute appears to only shorten the lifetime
            // parameters of `&mut Self` so it can be used as a `Deserializer`;
            // no layout change. TODO confirm soundness — a plain reborrow may
            // suffice without `unsafe`.
            Some(_) => seed.deserialize::<&mut Self>(unsafe { std::mem::transmute::<&mut Self, _>(self) }).map(Some),
            None => Ok(None)
        }
    }
    fn next_value_seed<V: DeserializeSeed<'de>>(&mut self, seed: V) -> Result<V::Value, Self::Error> {
        self.inner.take().unwrap().next_value_seed(seed)
    }
}

// Enum access for sequence elements: the captured tag name is the variant key;
// the remaining element content is handed back to the underlying deserializer.
impl<'a, 'de, R: io::BufRead> de::EnumAccess<'de> for &'a mut SeqDeserializer<'a, R> {
    type Error = Error;
    type Variant = &'a mut Deserializer<R>;

    fn variant_seed<V: DeserializeSeed<'de>>(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error> {
        // Detach the inner deserializer first; `seed.deserialize(self)` then only
        // consumes the stored identifier, and the value is skipped via NoopSeed.
        let mut __tmp__ = self.inner.take().unwrap();
        match seed.deserialize(self) {
            Ok(v) => {
                __tmp__.next_value_seed(NoopSeed)?;
                Ok((v, __tmp__))
            },
            Err(e) => Err(e)
        }
    }
}

/// converts xml expressions (e.g.: &quot; -> ")
///
/// NOTE(review): `find(';').unwrap()` panics on a bare `&` with no terminating
/// `;`, and an unrecognized entity name hits `panic!()`. The entity set here
/// ("exclamation", "percent", "add", "equal", ...) is non-standard — presumably
/// specific to the feed this parser consumes; confirm before extending.
fn convert_xml(mut s: &str) -> String {
    let mut buf = String::new();
    while let Some(off) = s.find('&') {
        let ext = s.find(';').unwrap();
        buf.push_str(&s[..off]);
        buf.push(match &s[off + 1..ext] {
            "exclamation" => '!',
            "quot" => '"',
            "percent" => '%',
            "amp" => '&',
            "apos" => '\'',
            "add" => '+',
            "lt" => '<',
            "equal" => '=',
            "gt" => '>',
            _ => panic!()
        });
        s = &s[ext + 1..];
    }
    buf.push_str(s);
    buf
}
// Submodule declarations and flat re-exports; the `pub use` lines below form
// this module's public API. (Caffeine/SysBar/Notification suggest a status-bar
// desktop integration layer — confirm against the submodules.)
mod caffeine;
mod menu_item;
mod notification;
mod sysbar;

pub use self::caffeine::Caffeine;
pub use self::menu_item::MenuItem;
pub use self::notification::Image as NotificationImage;
pub use self::notification::Notification;
pub use self::notification::Sound as NotificationSound;
pub use self::sysbar::SysBar;
use itertools::Itertools; use std::collections::{HashMap, HashSet}; use std::fs; type Position = [isize; 3]; type Vector = [isize; 3]; fn main() { let filename = "input/test.txt"; let relative_beacon_positions_to_scanners = parse_input_file(filename); let num_scanners = relative_beacon_positions_to_scanners.len(); // If we store the vectors (slope, magnitude) that describe the difference in space in a given scanner's set of Positions, // we should be able to intersect those vectors against some other scanner's positions in some orientation. // If we find at least 12 vectors in common, then we know that those vectors correspond to the _same_ position in space. // We should probably build up a set of "absolute" points as we go, i.e. all points relative to scanner 0. // "True" cardinality can be set by scanner 0's orientation, other scanner's beacon's vectors can be translated through different orientations until a match is found. // we only know the "correct" vector mappings of scanner 0 at the start, since that's our defined "absolute" orientation let scanner0_beacons = relative_beacon_positions_to_scanners.get(0).unwrap(); let mut scanner_vectors_in_absolute_orientation: Vec<HashMap<Vector, [Position; 2]>> = vec![HashMap::new(); num_scanners]; scanner_vectors_in_absolute_orientation[0] = vectors(scanner0_beacons); let mut all_beacon_positions: Vec<Vec<Vec<Position>>> = vec![]; for relative_beacons in relative_beacon_positions_to_scanners.iter() { let scanner_beacon_positions: Vec<Vec<Position>> = transpose( relative_beacons .iter() .map(|p| all_orientations(p).collect()) .collect(), ); all_beacon_positions.push(scanner_beacon_positions); } let mut known_beacons: HashSet<Position> = HashSet::from_iter(scanner0_beacons.iter().cloned()); let mut known_scanner_idxs: HashSet<usize> = HashSet::new(); known_scanner_idxs.insert(0); let mut scanner_positions: Vec<Option<Position>> = Vec::from_iter(relative_beacon_positions_to_scanners.iter().map(|_| None)); 
scanner_positions[0] = Some([0, 0, 0]); while scanner_positions.iter().any(|p| p.is_none()) { for unknown_scanner_idx in scanner_positions .clone() .iter() .enumerate() .filter_map(|(i, p)| match p { None => Some(i), Some(_) => None, }) { find_intersecting_scanner( unknown_scanner_idx, &all_beacon_positions, &mut scanner_vectors_in_absolute_orientation, &mut scanner_positions, &mut known_beacons, ); } } assert!(scanner_positions.iter().all(|p| p.is_some())); println!("scanner_positions: {:?}", scanner_positions); println!("known_beacons: {:?}", known_beacons); println!("num known_beacons: {:?}", known_beacons.len()); let known_scanner_positions: Vec<Position> = scanner_positions.into_iter().map(|p| p.unwrap()).collect(); let max_manhattan_dist: isize = known_scanner_positions .iter() .cartesian_product(known_scanner_positions.iter()) .map(|(a, b)| diff(a, b).iter().sum()) .max() .unwrap(); println!("max_manhattan_dist: {}", max_manhattan_dist); } fn find_intersecting_scanner( target_scanner_idx: usize, all_beacon_positions: &Vec<Vec<Vec<Position>>>, scanner_vectors_in_absolute_orientation: &mut Vec<HashMap<Vector, [Position; 2]>>, scanner_positions: &mut Vec<Option<Position>>, known_beacons: &mut HashSet<Position>, ) { for target_beacons in all_beacon_positions.get(target_scanner_idx).unwrap() { let target_scanner_vector_mapping = vectors(target_beacons); let target_scanner_vectors: HashSet<&Vector> = target_scanner_vector_mapping.keys().collect(); for (source_scanner_idx, source_scanner_pos) in scanner_positions .iter() .enumerate() .filter_map(|(i, pos)| pos.map(|p| (i, p))) { let source_scanner_vector_mapping = scanner_vectors_in_absolute_orientation .get(source_scanner_idx) .unwrap(); let source_scanner_vectors: HashSet<&Vector> = source_scanner_vector_mapping.keys().collect(); let vector_intersection: HashSet<&Vector> = source_scanner_vectors .intersection(&target_scanner_vectors) .cloned() .collect(); let matching_positions: HashSet<Position> = 
vector_intersection .clone() .into_iter() .flat_map(|v| source_scanner_vector_mapping[v]) .collect(); // overlaps if more than 12 matching positions if matching_positions.len() >= 12 { // one vector should yield positions in both scanners, diff between positions is the diff between scanners let v = vector_intersection.into_iter().next().unwrap(); let beacon_source = source_scanner_vector_mapping[v][0]; let beacon_target = target_scanner_vector_mapping[v][0]; let scanner_diff = diff(&beacon_source, &beacon_target); let target_scanner_pos = translate(&source_scanner_pos, &scanner_diff); // Set "absolute" scanner position scanner_positions[target_scanner_idx] = Some(target_scanner_pos); // Save vectors between beacons of scanner in "absolute" orientation scanner_vectors_in_absolute_orientation[target_scanner_idx] = target_scanner_vector_mapping; // TODO: translate from current pos using source scanner pos as diff vector let target_beacons_relative_to_scanner_0: Vec<Position> = target_beacons .iter() .map(|p| translate(p, &target_scanner_pos)) .collect(); // TODO: translate in terms of scanner 0 // add beacons in "absolute" orientation translated to scanner 0 reference to known set for tb in target_beacons_relative_to_scanner_0 { known_beacons.insert(tb); } return; } } } } // can improve performance by returning position references as values, cloning at last second fn vectors(beacons: &[Position]) -> HashMap<Vector, [Position; 2]> { beacons .iter() .cartesian_product(beacons.iter()) .filter(|(a, b)| a != b) .map(|(a, b)| (diff(a, b), [*a, *b])) .collect() } fn transpose<T>(v: Vec<Vec<T>>) -> Vec<Vec<T>> { assert!(!v.is_empty()); let len = v[0].len(); let mut iters: Vec<_> = v.into_iter().map(|n| n.into_iter()).collect(); (0..len) .map(|_| { iters .iter_mut() .map(|n| n.next().unwrap()) .collect::<Vec<T>>() }) .collect() } type PositionTransformation = ((isize, isize, isize), (usize, usize, usize)); const orientation_transformations: [PositionTransformation; 6] = [ ((1, 
1, 1), (0, 1, 2)), ((-1, -1, 1), (0, 1, 2)), ((1, -1, 1), (1, 0, 2)), ((-1, 1, 1), (1, 0, 2)), ((1, 1, -1), (2, 1, 0)), ((-1, 1, 1), (2, 1, 0)), ]; // yields transformations: (x, y, z), (x, z, -y), (x, -y, -z), (x, -z, y) const rotation_transformations: [PositionTransformation; 4] = [ ((1, 1, 1), (0, 1, 2)), ((1, 1, -1), (0, 2, 1)), ((1, -1, -1), (0, 1, 2)), ((1, -1, 1), (0, 2, 1)), ]; fn all_orientations(p: &Position) -> impl Iterator<Item = Position> { orientations(*p).flat_map(rotations) } // Face scanner observing beacon (1, 2, 3) in different cardinal directions // xpos: (1, 2, 3) // xneg: (-1, -2, 3) // ypos: (2, -1, 3) // yneg: (-2, 1, 3) // zpos: (3, 2, -1) // zneg: (-3, 2, 1) // yields transformations (x, y, z), (-x, -y, z), (y, -x, z), (-y, x, z), (z, y, -x), (-z, y, x) fn orientations(p: Position) -> impl Iterator<Item = Position> { orientation_transformations.into_iter().map( move |((x_sign, y_sign, z_sign), (x_idx, y_idx, z_idx))| { [x_sign * p[x_idx], y_sign * p[y_idx], z_sign * p[z_idx]] }, ) } // Rotate scanner observing beacon (1, 2, 3) around x axis // x value is fixed // (1, 2, 3) // (1, 3, -2) // (1, -2, -3) // (1, -3, 2) // yields transformations: (x, y, z), (x, z, -y), (x, -y, -z), (x, -z, y) fn rotations(p: Position) -> impl Iterator<Item = Position> { rotation_transformations.into_iter().map( move |((x_sign, y_sign, z_sign), (x_idx, y_idx, z_idx))| { [x_sign * p[x_idx], y_sign * p[y_idx], z_sign * p[z_idx]] }, ) } fn diff(a: &Position, b: &Position) -> Vector { [a[0] - b[0], a[1] - b[1], a[2] - b[2]] } fn translate(p: &Position, v: &Vector) -> Position { [p[0] + v[0], p[1] + v[1], p[2] + v[2]] } fn parse_input_file(filename: &str) -> Vec<Vec<Position>> { let file_contents = fs::read_to_string(filename).unwrap(); file_contents .split("\n\n") .map(parse_scanner_beacons) .collect() } fn parse_scanner_beacons(beacons_str: &str) -> Vec<Position> { beacons_str .split('\n') .skip(1) .map(|pos_str| { println!("{}", pos_str); let positions: 
Vec<isize> = pos_str.split(',').map(|n| n.parse().unwrap()).collect(); assert_eq!(positions.len(), 3); [positions[0], positions[1], positions[2]] }) .collect() }
// Auto-generated (svd2rust-style) read-only accessor for the UCPD `RX_ORDSET`
// register. Regenerate from the SVD rather than editing by hand.
#[doc = "Register `RX_ORDSET` reader"]
pub type R = crate::R<RX_ORDSET_SPEC>;
#[doc = "Field `RXORDSET` reader - RXORDSET"]
pub type RXORDSET_R = crate::FieldReader;
#[doc = "Field `RXSOP3OF4` reader - RXSOP3OF4"]
pub type RXSOP3OF4_R = crate::BitReader;
#[doc = "Field `RXSOPKINVALID` reader - RXSOPKINVALID"]
pub type RXSOPKINVALID_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:2 - RXORDSET"]
    #[inline(always)]
    pub fn rxordset(&self) -> RXORDSET_R {
        // low 3 bits
        RXORDSET_R::new((self.bits & 7) as u8)
    }
    #[doc = "Bit 3 - RXSOP3OF4"]
    #[inline(always)]
    pub fn rxsop3of4(&self) -> RXSOP3OF4_R {
        RXSOP3OF4_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bits 4:6 - RXSOPKINVALID"]
    #[inline(always)]
    pub fn rxsopkinvalid(&self) -> RXSOPKINVALID_R {
        RXSOPKINVALID_R::new(((self.bits >> 4) & 7) as u8)
    }
}
#[doc = "UCPD Rx Ordered Set Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rx_ordset::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RX_ORDSET_SPEC;
impl crate::RegisterSpec for RX_ORDSET_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`rx_ordset::R`](R) reader structure"]
impl crate::Readable for RX_ORDSET_SPEC {}
#[doc = "`reset()` method sets RX_ORDSET to value 0"]
impl crate::Resettable for RX_ORDSET_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
// < begin copyright >
// Copyright Ryan Marcus 2020
//
// See root directory of this project for license terms.
//
// < end copyright >
use crate::models::*;
use superslice::*;
use crate::models::utils::{plr, radix_index};

// Width of the radix prefix used to bound the binary search over PLR pivots.
const NUM_RADIX_BITS: u8 = 20;

// Fits a piecewise linear regression to `data`, doubling the error bound
// `delta` until the model fits in at most 524,288 segments.
// Returns (segment start keys, interleaved [slope, intercept] coefficients).
fn bottom_up_plr(data: &ModelDataWrapper) -> (Vec<u64>, Vec<f64>) {
    if data.len() == 0 {
        return (Vec::new(), Vec::new());
    }
    let mut delta = 1.0;
    // NOTE(review): the third argument to `plr` appears to toggle a mode for
    // small inputs (< 10k rows) — confirm against `plr`'s definition.
    let (mut points, mut coeffs) = plr(data, delta, data.len() < 10000);
    while points.len() > 524_288 {
        delta *= 2.0;
        let (p, c) = plr(data, delta, false);
        points = p;
        coeffs = c;
    }
    // The first segment must start at or before the smallest key in the data.
    assert!(points[0] <= data.iter_int_int().next().unwrap().0);
    return (points, coeffs);
}

// Bottom-up piecewise linear regression model with a radix table over the
// segment pivots; restricted to the bottom layer (see `restriction`).
pub struct BottomUpPLR {
    radix: Vec<u64>,   // radix_index over `points`, NUM_RADIX_BITS wide
    points: Vec<u64>,  // segment start keys (pivots), sorted ascending
    coeffs: Vec<f64>   // [slope, intercept] pairs, one pair per segment
}

impl BottomUpPLR {
    // Trains the PLR and builds the radix table over its pivots.
    pub fn new(data: &ModelDataWrapper) -> BottomUpPLR {
        let (points, coeffs) = bottom_up_plr(data);
        let radix = radix_index(&points, NUM_RADIX_BITS);
        return BottomUpPLR { radix, points, coeffs };
    }
}

impl Model for BottomUpPLR {
    // Locates the segment owning `inp` via binary search over the pivots, then
    // evaluates that segment's line. The radix-table asserts only *verify* the
    // bounds the generated C code will rely on; they are not used for lookup here.
    fn predict_to_float(&self, inp: ModelInput) -> f64 {
        let val = inp.as_int();//4098767424329; //inp.as_int();
        // TODO we could accelerate training time by using the radix index here
        // Greatest pivot <= val (upper_bound returns the first pivot > val).
        let mut line_index = self.points.upper_bound(&val) - 1;
        if line_index == self.points.len() {
            line_index -= 1;
        }
        assert!(self.points[line_index] <= val,
                "previous segment (idx {}) stops at {} and val is {}",
                line_index, self.points[line_index-1], val);
        assert!(line_index == self.points.len() - 1 || self.points[line_index + 1] > val);

        // verify that the radix table would have given valid bounds
        let radix_hint = val >> (64 - NUM_RADIX_BITS);
        let radix_ub = self.radix[radix_hint as usize] as usize;
        let radix_lb = if radix_hint == 0 {
            0
        } else {
            self.radix[radix_hint as usize - 1] as usize - 1
        };
        assert!(radix_lb <= line_index,
                "radix key: {} radix lb: {}, radix ub: {}, correct: {}, key: {}, value: {}",
                radix_hint, radix_lb, radix_ub, line_index, val, self.points[line_index]);
        assert!(radix_ub > line_index,
                "radix key: {} radix lb: {} radix ub: {}, correct: {}, key: {}, value: {}",
                radix_hint, radix_lb, radix_ub, line_index, val, self.points[line_index]);

        // Evaluate the segment's line: slope at 2i, intercept at 2i+1.
        let a = self.coeffs[2*line_index];
        let b = self.coeffs[2*line_index + 1];
        let pred = (val as f64) * a + b;
        //println!("{} from {}", pred, line_index);
        //panic!();
        return pred;
    }

    fn input_type(&self) -> ModelDataType {
        return ModelDataType::Int;
    }

    fn output_type(&self) -> ModelDataType {
        return ModelDataType::Float;
    }

    // Parameter layout consumed by the code generator: segment count, radix
    // table, pivots, coefficients — in that order.
    fn params(&self) -> Vec<ModelParam> {
        return vec![self.points.len().into(),
                    self.radix.clone().into(),
                    self.points.clone().into(),
                    self.coeffs.clone().into()];
    }

    // Emits the C implementation; mirrors `predict_to_float`
    // (radix bound -> binary search over pivots -> linear evaluation).
    fn code(&self) -> String {
        return format!("
inline uint64_t plr(const uint64_t size, const uint64_t radix[], const uint64_t pivots[], const double coeffs[], uint64_t key) {{
    uint64_t key_radix = key >> (64 - {});
    unsigned int radix_ub = radix[key_radix];
    unsigned int radix_lb = (key_radix == 0 ? 0 : radix[key_radix - 1] - 1);
    uint64_t li = bs_upper_bound(pivots + radix_lb, radix_ub - radix_lb, key) + radix_lb - 1;

    double alpha = coeffs[2*li];
    double beta = coeffs[2*li + 1];
    return alpha * (double)key + beta;
}}
", NUM_RADIX_BITS);
    }

    // The generated code depends on the shared binary-search helper.
    fn standard_functions(&self) -> HashSet<StdFunctions> {
        let mut to_r = HashSet::new();
        to_r.insert(StdFunctions::BinarySearch);
        return to_r;
    }

    fn function_name(&self) -> String {
        return String::from("plr");
    }

    fn restriction(&self) -> ModelRestriction {
        return ModelRestriction::MustBeBottom;
    }
}
pub mod temperature_conversion {
    /// Represents a valid temperature in Fahrenheit or Celsius
    pub enum Temperature {
        F(f64),
        C(f64),
    }

    // Convert Fahrenheit to Celsius: C = (F - 32) * 5/9.
    // (Renamed from the misspelled `farenheit_to_celsius`; private, so no
    // external callers are affected.)
    fn fahrenheit_to_celsius(f: f64) -> f64 {
        (f - 32.0) * (5.0 / 9.0)
    }

    // Convert Celsius to Fahrenheit: F = C * 9/5 + 32.
    fn celsius_to_fahrenheit(c: f64) -> f64 {
        c * (9.0 / 5.0) + 32.0
    }

    /// Convert temperatures. Take a degree Fahrenheit or Celsius
    /// and convert it to the opposite scale.
    pub fn convert(temperature: &Temperature) -> f64 {
        match *temperature {
            Temperature::F(degrees) => fahrenheit_to_celsius(degrees),
            Temperature::C(degrees) => celsius_to_fahrenheit(degrees),
        }
    }

    // Print degrees Fahrenheit as Celsius.
    // NOTE(review): the degree mark '॰' is U+0970 (Devanagari abbreviation
    // sign); '°' (U+00B0) was presumably intended — left unchanged here to
    // preserve program output exactly.
    fn print_fahrenheit_to_celsius(degrees: f64) {
        println!(
            "\n{:.2}॰F = {:.2}॰C\n",
            degrees,
            convert(&Temperature::F(degrees))
        );
    }

    // Print degrees Celsius as Fahrenheit.
    fn print_celsius_to_fahrenheit(degrees: f64) {
        println!(
            "\n{:.2}॰C = {:.2}॰F\n",
            degrees,
            convert(&Temperature::C(degrees))
        );
    }

    /// Print temperature conversions
    pub fn print_temperature_conversion(temperature: &Temperature) {
        match *temperature {
            Temperature::F(degrees) => print_fahrenheit_to_celsius(degrees),
            Temperature::C(degrees) => print_celsius_to_fahrenheit(degrees),
        }
    }

    /// Print a common table of conversions (-40 through 100 Celsius in steps of 10)
    pub fn print_common_table() {
        // Build the Celsius column in one pass (same values as the previous
        // seed-then-push construction: -40, -30, ..., 100).
        let celsius_table: Vec<f64> = (-40..=100).step_by(10).map(|n| n as f64).collect();
        let fahrenheit_table: Vec<f64> = celsius_table
            .iter()
            .map(|&x| convert(&Temperature::C(x)))
            .collect();
        for (x, y) in celsius_table.iter().zip(fahrenheit_table.iter()) {
            println!("{:7.2}॰C = {:7.2}॰F", x, y);
        }
        println!();
    }
}

// Run some unit tests
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_0_degrees_celsius_to_farenheit() {
        assert_eq!(
            temperature_conversion::convert(&temperature_conversion::Temperature::C(0.0)),
            32.0
        );
    }

    #[test]
    fn test_100_degrees_celsius_to_farenheit() {
        assert_eq!(
            temperature_conversion::convert(&temperature_conversion::Temperature::C(100.0)),
            212.0
        );
    }

    #[test]
    fn test_32_degrees_farenheit_to_celsius() {
        assert_eq!(
            temperature_conversion::convert(&temperature_conversion::Temperature::F(32.0)),
            0.0
        );
    }

    #[test]
    fn test_212_degrees_farenheit_to_celsius() {
        assert_eq!(
            temperature_conversion::convert(&temperature_conversion::Temperature::F(212.0)),
            100.0
        );
    }

    #[test]
    fn test_98_6f_to_c() {
        // 98.6 is not exactly representable in binary floating point, so the
        // conversion yields 36.999999999999996...; the previous exact
        // `assert_eq!(..., 37.0)` failed. Compare within a tolerance instead.
        let got = temperature_conversion::convert(&temperature_conversion::Temperature::F(98.6));
        assert!((got - 37.0).abs() < 1e-9, "got {}", got);
    }

    #[test]
    fn test_negative_degrees_40_equality() {
        // -40 is the point where both scales agree (exact in f64 arithmetic).
        assert_eq!(
            temperature_conversion::convert(&temperature_conversion::Temperature::C(-40.0)),
            -40.0
        );
    }
}
// The `Iterator` trait is used to implement iterators over collections
// such as arrays.
//
// The trait requires only a method to be defined for the `next` element,
// which may be manually defined in an `impl` block or automatically
// defined (as in arrays and ranges).
//
// As a point of convenience for common situations, the `for` construct
// turns some collections into iterators using the `.into_iter()` method

/// Infinite Fibonacci iterator; yields 1, 1, 2, 3, 5, 8, ...
struct Fibonacci {
    curr: u32,
    next: u32,
}

impl Iterator for Fibonacci {
    type Item = u32;

    /// Advances the (curr, next) pair one step and yields the new `curr`.
    /// Never returns `None`; note `u32` addition panics on overflow in debug
    /// builds once the sequence exceeds `u32::MAX`.
    fn next(&mut self) -> Option<u32> {
        let new_next = self.curr + self.next;
        self.curr = self.next;
        self.next = new_next;
        Some(self.curr)
    }
}

/// Returns a Fibonacci sequence generator seeded with (0, 1).
fn fibonacci() -> Fibonacci {
    Fibonacci { curr: 0, next: 1 }
}

fn main() {
    // `0..3` is an `Iterator` that generates: 0, 1, and 2
    let mut sequence = 0..3;
    println!("Four consecutive `next` calls on 0..3");
    println!("> {:?}", sequence.next());
    println!("> {:?}", sequence.next());
    println!("> {:?}", sequence.next());
    println!("> {:?}", sequence.next());

    // `for` works through an `Iterator` until it returns `None`
    // Each `Some` value is unwrapped and bound to a variable (here, `i`)
    println!("Iterate through 0..3 using `for`");
    for i in 0..3 {
        println!("> {}", i);
    }

    // The `take(n)` method reduces an `Iterator` to its first `n` terms
    println!("The first four terms of the Fibonacci sequence are: ");
    for i in fibonacci().take(4) {
        println!("> {}", i);
    }

    // `skip(n)` drops the first `n` terms.
    // (Fixed output typo: "next for terms" -> "next four terms".)
    println!("The next four terms of the Fibonacci sequence are: ");
    for i in fibonacci().skip(4).take(4) {
        println!("> {}", i);
    }

    let array = [1u32, 3, 3, 7];
    // `iter()` borrows each element of the array in turn
    println!("Iterate the following array {:?}", &array);
    for i in array.iter() {
        println!("> {}", i);
    }
}
// Domain model submodules exposed at this level.
pub mod user;
pub mod product;
use proc_macro::TokenStream;
use quote::quote;
use syn;

// Derives `BindUniform` for a named-field struct: binding forwards to each
// field under the composed GLSL name `"{name}.{field}"`.
#[proc_macro_derive(BindUniform)]
pub fn bind_uniform_derive(input: TokenStream) -> TokenStream {
    let ast: syn::DeriveInput = syn::parse(input).unwrap();
    let ident = &ast.ident;
    // Only structs with named fields are supported.
    let data = if let syn::Data::Struct(data) = &ast.data {
        data
    } else {
        unimplemented!()
    };
    let fields = if let syn::Fields::Named(fields) = &data.fields {
        fields
    } else {
        unimplemented!()
    };
    // One `bind_uniform` call per field, keyed by the field's own name.
    let calls = fields
        .named
        .iter()
        .map(|field| {
            let ident = field.ident.as_ref().unwrap();
            let ident_str = ident.to_string();
            quote! {
                self
                    .#ident
                    .bind_uniform(gl, shader, &format!("{}.{}", name, #ident_str));
            }
        })
        .collect::<Vec<_>>();
    let imp = quote! {
        impl crate::shader::BindUniform for #ident {
            unsafe fn bind_uniform(&self, gl: &glow::Context, shader: &mut crate::shader::ActiveShader, name: &str) {
                #(#calls)*
            }
        }
    };
    imp.into()
}

// Derives `ShaderTypeDef`: emits a GLSL `struct` declaration mirroring the
// Rust struct, with Rust field types mapped to GLSL primitive names.
#[proc_macro_derive(ShaderTypeDef)]
pub fn bind_shader_type_def(input: TokenStream) -> TokenStream {
    let ast: syn::DeriveInput = syn::parse(input).unwrap();
    let ident = &ast.ident;
    let data = if let syn::Data::Struct(data) = &ast.data {
        data
    } else {
        unimplemented!()
    };
    let fields = if let syn::Fields::Named(fields) = &data.fields {
        fields
    } else {
        unimplemented!()
    };
    let shader_fields = fields
        .named
        .iter()
        .map(|field| {
            let ident = field.ident.as_ref().unwrap();
            let ident_str = ident.to_string();
            // Takes the first path segment of a type path (e.g. `Vec<f32>` -> `Vec`).
            let extract_segment = |ty: &syn::Type| {
                if let syn::Type::Path(path) = ty {
                    path.path.segments[0].clone()
                } else {
                    unimplemented!()
                }
            };
            let ty = extract_segment(&field.ty);
            // Maps supported Rust scalar/texture types to GLSL type names.
            let primitive_type = |ident: &syn::Ident| match ident.to_string().as_str() {
                "f32" => "float",
                "Vec4" => "vec4",
                "Vec3" => "vec3",
                "Vec2" => "vec2",
                "Mat4" => "mat4",
                "T2d" | "Texture" => "sampler2D",
                "TCubemap" => "samplerCube",
                id => unimplemented!("primtive {}", id),
            };
            match &ty.arguments {
                // Generic wrappers: `Vec<T>` becomes a fixed-size GLSL array of 4
                // plus a `_len` counter; `Texture<T>` unwraps to its sampler type.
                syn::PathArguments::AngleBracketed(args) => {
                    let arg = if let syn::GenericArgument::Type(arg) = args.args.first().unwrap() {
                        extract_segment(arg)
                    } else {
                        unimplemented!()
                    };
                    match ty.ident.to_string().as_str() {
                        "Vec" => {
                            format!(
                                "{} {}[4]; int {}_len;",
                                primitive_type(&arg.ident),
                                ident_str,
                                ident_str
                            )
                        }
                        "Texture" => {
                            format!("{} {};", primitive_type(&arg.ident), ident_str)
                        }
                        _ => unimplemented!(),
                    }
                }
                // Plain types map directly.
                syn::PathArguments::None => {
                    format!("{} {};", primitive_type(&ty.ident), ident_str)
                }
                _ => unimplemented!(),
            }
        })
        .collect::<Vec<_>>();
    let type_def = format!(
        r#"struct {} {{
{}
}};"#,
        ident.to_string(),
        shader_fields.join("\n")
    );
    let imp = quote! {
        impl crate::shader::ShaderTypeDef for #ident {
            const TYPE_DEF: &'static str = #type_def;
        }
    };
    imp.into()
}

// Derives `ShaderBlockDef`: emits a std140 uniform block declaration whose
// member types are taken verbatim from the field types' second path segment.
#[proc_macro_derive(ShaderBlockDef)]
pub fn bind_shader_block_def(input: TokenStream) -> TokenStream {
    let ast: syn::DeriveInput = syn::parse(input).unwrap();
    let ident = &ast.ident;
    let data = if let syn::Data::Struct(data) = &ast.data {
        data
    } else {
        unimplemented!()
    };
    let fields = if let syn::Fields::Named(fields) = &data.fields {
        fields
    } else {
        unimplemented!()
    };
    let shader_fields = fields
        .named
        .iter()
        .map(|field| {
            let ident = field.ident.as_ref().unwrap();
            // NOTE(review): indexes path segment [1], so field types are assumed
            // to be written as two-segment paths (e.g. `module::vec3`) whose
            // second segment is already a GLSL type name — confirm at call sites.
            let ty = if let syn::Type::Path(path) = &field.ty {
                path.path.segments[1].ident.to_string()
            } else {
                unimplemented!()
            };
            format!("{} {};", ty, ident.to_string())
        })
        .collect::<Vec<_>>();
    let type_def = format!(
        r#"layout (std140) uniform {} {{
{}
}};"#,
        ident.to_string(),
        shader_fields.join("\n")
    );
    let imp = quote! {
        impl crate::shader::ShaderBlockDef for #ident {
            const BLOCK_DEF: &'static str = #type_def;
        }
    };
    imp.into()
}
//inner http client trait HttpClient { fn get(&self, path: &str) -> u16; } struct DefaultHttpClient {} impl Default for DefaultHttpClient { fn default() -> Self { DefaultHttpClient{} } } impl HttpClient for DefaultHttpClient { fn get(&self, url: &str) -> u16 { let resp = reqwest::get(url).unwrap(); resp.status().as_u16() } } struct HttpBin<'a> { host: &'a str, client: Box<dyn HttpClient> } impl<'a> Default for HttpBin<'a> { fn default() -> Self { HttpBin{ host: "https://httpbin.org", client: Box::new(DefaultHttpClient::default()) } } } impl<'a> HttpBin<'a> { pub fn is_path_ok(&self, path: &'a str) -> bool { let url = format!("{}{}", self.host, path); let status_code = self.client.get(url.as_str()); status_code == 200 } } #[cfg(test)] mod test { use super::*; //simple fake http client struct FakeHttpClient { status: u16 } impl FakeHttpClient { pub fn new(status: u16) -> Self { FakeHttpClient{status} } } impl HttpClient for FakeHttpClient { fn get(&self, _path: &str) -> u16 { self.status } } #[test] fn test_httpbin_is_path_ok() { let s = HttpBin{ host: "https://httpbin.org", client: Box::new(FakeHttpClient{status: 200}) }; let current = s.is_path_ok("/status/200"); let expected = true; assert_eq!(expected, current); } #[test] fn test_httpbin_is_path_ok_error() { let s = HttpBin{ host: "https://httpbin.org", client: Box::new(FakeHttpClient{status: 404}) }; let current = s.is_path_ok("/status/200"); let expected = false; assert_eq!(expected, current); } }
// Foreign declarations for the C/C++ half of this linkage/unwinding test
// harness; the actual definitions are linked in from another object.
extern {
    fn test_start(f: extern fn());
    fn test_end();
    fn CppTest();
}

fn main() {
    unsafe {
        CppTest();
        // Hand control to the foreign side, which calls back into `test_middle`.
        test_start(test_middle);
    }
}

// Guard type whose destructor panics, used to exercise unwinding across the
// FFI boundary.
struct A;

impl Drop for A {
    fn drop(&mut self) {
        panic!()
    }
}

// Callback invoked from foreign code; dropping the two `A` guards during the
// unwind started by `foo` triggers a panic-within-panic.
// NOTE(review): a second panic while already unwinding aborts the process —
// presumably the behavior under test here; confirm against the harness.
extern fn test_middle() {
    let _a = A;
    foo();
}

fn foo() {
    let _a = A;
    unsafe {
        test_end();
    }
}
use image;
use num_complex;
use colorscale;
use std::fs::File;
use std::env;

// CLI for rendering Julia sets. Three invocation forms:
//   animate <re> <im> <size> <frames>                  -> animation.gif (parameter rotated per frame)
//   frames  <re> <im> <size> <frames> <path> <format>  -> numbered still images
//   <re> <im> <size> <iterations> <file>               -> a single image
// NOTE(review): arguments are indexed directly, so missing or malformed
// arguments panic rather than printing usage.
fn main() {
    let mut param: Vec<String> = env::args().collect();
    if param[1] == "animate" {
        let file_out = File::create("animation.gif").unwrap();
        let mut encoder = image::gif::GifEncoder::new(file_out);
        let r : f32 = param[2].parse().unwrap();
        let i : f32 = param[3].parse().unwrap();
        let c : num_complex::Complex<f32> = num_complex::Complex::new(r, i);
        let size : u32 = param[4].parse().unwrap();
        let iterations : u32 = 300;
        // Fixed four-stop palette for the animation.
        let mut colormap = Vec::new();
        colormap.push([0, 0, 80]);
        colormap.push([180, 255, 100]);
        colormap.push([255, 50, 0]);
        colormap.push([0, 0, 0]);
        let save = Option::<String>::None;
        let nb_frames : u32 = param[5].parse().unwrap();
        for k in 0..nb_frames {
            // Rotate the Julia parameter around the origin by angle a = 2*pi*k/N.
            let a = (2.0*std::f32::consts::PI/(nb_frames as f32)) * k as f32;
            let c : num_complex::Complex<f32> = num_complex::Complex::new(0.0, a).exp() * c;
            let frame = image::Frame::new(julia(c, size, iterations, &colormap, &save).unwrap());
            encoder.encode_frame(frame).unwrap();
        }
    } else if param[1] == "frames" {
        let r : f32 = param[2].parse().unwrap();
        let i : f32 = param[3].parse().unwrap();
        let c : num_complex::Complex<f32> = num_complex::Complex::new(r, i);
        let size : u32 = param[4].parse().unwrap();
        let iterations : u32 = 300;
        let mut colormap = Vec::new();
        colormap.push([0, 0, 80]);
        colormap.push([180, 255, 100]);
        colormap.push([255, 50, 0]);
        colormap.push([0, 0, 0]);
        let nb_frames : u32 = param[5].parse().unwrap();
        let path = param.remove(6);
        let format = param.remove(6);
        for k in 0..nb_frames {
            // Output files are named "<path><k>.<format>".
            let mut name = String::from(&path);
            name.push_str(&(k.to_string()));
            name.push('.');
            name.push_str(&format);
            let save = Some(name);
            let a = (2.0*std::f32::consts::PI/(nb_frames as f32)) * k as f32;
            let c : num_complex::Complex<f32> = num_complex::Complex::new(0.0, a).exp() * c;
            julia(c, size, iterations, &colormap, &save);
        }
    } else {
        // Single-image mode: positional args start at index 1.
        let r : f32 = param[1].parse().unwrap();
        let i : f32 = param[2].parse().unwrap();
        let c : num_complex::Complex<f32> = num_complex::Complex::new(r, i);
        let size : u32 = param[3].parse().unwrap();
        let iterations : u32 = param[4].parse().unwrap();
        let mut colormap = Vec::new();
        colormap.push([0, 0, 0]);
        colormap.push([0, 255, 0]);
        colormap.push([0, 255, 0]);
        colormap.push([255, 255, 255]);
        let save = Some(param.remove(5));
        julia(c, size, iterations, &colormap, &save);
    }
}

// Renders the Julia set for parameter `c` on a size x size grid covering
// roughly [-1.5, 1.5]^2, coloring each pixel by its escape iteration through a
// gradient built from `colormap`.
// If `save` is Some(path), the image is written to disk and None is returned;
// otherwise the RGBA buffer is returned for in-memory use (GIF frames).
fn julia(c : num_complex::Complex<f32>, size : u32, iterations : u32, colormap : &[[u8;3]], save : &Option<String>) -> Option<image::RgbaImage> {
    let n : u32 = iterations;
    let imgx : u32 = size;
    let imgy : u32 = imgx;
    let centerx : f32 = 1.5;
    let centery : f32 = 1.5;
    let scalex : f32 = 3.0/ imgx as f32;
    let scaley : f32 = 3.0/ imgy as f32;
    let mut imgbuf = image::ImageBuffer::new(imgx, imgy);
    // One gradient entry per possible iteration count.
    let grad = colorscale::multi_lin_grad(colormap, n);
    for x in 0..imgx {
        for y in 0..imgy {
            let cx = x as f32 *scalex - centerx;
            let cy = y as f32 *scaley - centery;
            let mut z = num_complex::Complex::new(cx, cy);
            let mut i = 0;
            // Iterate z <- z^2 + c until escape (|z| > 2) or the cap is hit.
            while i < n && z.norm() <= 2.0 {
                z = z*z + c;
                i += 1;
            }
            let pixel = imgbuf.get_pixel_mut(x, y);
            *pixel = image::Rgb(grad[(i as usize)]);
        }
    }
    if let Some(name) = save {
        imgbuf.save(name).unwrap();
        None
    } else {
        Some(image::DynamicImage::ImageRgb8(imgbuf).to_rgba8())
    }
}
// Command-line handling for the intiface/buttplug server: argh-based argument
// struct plus helpers that translate the parsed flags into device comm
// managers and `ConnectorOptions`.
use super::{ConnectorOptions, IntifaceCLIErrorEnum, IntifaceError};
use super::frontend::{self, FrontendPBufChannel};
use argh::FromArgs;
#[cfg(target_os = "windows")]
use buttplug::server::comm_managers::xinput::XInputDeviceCommunicationManagerBuilder;
use buttplug::server::{
  comm_managers::{
    btleplug::BtlePlugCommunicationManagerBuilder,
    lovense_connect_service::LovenseConnectServiceCommunicationManagerBuilder,
    lovense_dongle::{
      LovenseHIDDongleCommunicationManagerBuilder, LovenseSerialDongleCommunicationManagerBuilder,
    },
    serialport::SerialPortCommunicationManagerBuilder,
    websocket_server::websocket_server_comm_manager::WebsocketServerDeviceCommunicationManagerBuilder,
    DeviceCommunicationManagerBuilder,
  },
  ButtplugRemoteServer,
};
use std::fs;
use tokio_util::sync::CancellationToken;
use tracing::Level;

const VERSION: &'static str = env!("CARGO_PKG_VERSION");

/// command line interface for intiface/buttplug.
///
/// Note: Commands are one word to keep compat with C#/JS executables currently.
// NOTE: the `///` doc comments on the fields below are consumed by the argh
// derive macro and become the program's `--help` text — do not edit them
// without intending to change the help output.
#[derive(FromArgs)]
struct IntifaceCLIArguments {
  // Options that do something then exit
  /// print version and exit.
  #[argh(switch)]
  version: bool,
  /// print version and exit.
  #[argh(switch)]
  serverversion: bool,

  // Options that set up the server networking
  /// if passed, websocket server listens on all interfaces. Otherwise, only
  /// listen on 127.0.0.1.
  #[argh(switch)]
  wsallinterfaces: bool,
  /// insecure port for websocket servers.
  #[argh(option)]
  wsinsecureport: Option<u16>,
  /// pipe name for ipc server
  #[argh(option)]
  ipcpipe: Option<String>,

  // Options that set up communications with intiface GUI
  /// if passed, output protobufs for parent process via stdio, instead of strings.
  #[argh(switch)]
  frontendpipe: bool,

  // Options that set up Buttplug server parameters
  /// name of server to pass to connecting clients.
  #[argh(option)]
  #[argh(default = "\"Buttplug Server\".to_owned()")]
  servername: String,
  /// path to the device configuration file
  #[argh(option)]
  deviceconfig: Option<String>,
  /// path to user device configuration file
  #[argh(option)]
  userdeviceconfig: Option<String>,
  /// ping timeout maximum for server (in milliseconds)
  #[argh(option)]
  #[argh(default = "0")]
  pingtime: u32,
  /// if passed, server will stay running after client disconnection
  #[argh(switch)]
  stayopen: bool,
  /// set log level for output
  #[allow(dead_code)]
  #[argh(option)]
  log: Option<Level>,
  /// allow raw messages (dangerous, only use for development)
  #[argh(switch)]
  allowraw: bool,
  /// turn off bluetooth le device support
  #[argh(switch)]
  without_bluetooth_le: bool,
  /// turn off serial device support
  #[argh(switch)]
  without_serial: bool,
  /// turn off hid device support
  #[allow(dead_code)]
  #[argh(switch)]
  without_hid: bool,
  /// turn off lovense dongle serial device support
  #[argh(switch)]
  without_lovense_dongle_serial: bool,
  /// turn off lovense dongle hid device support
  #[argh(switch)]
  without_lovense_dongle_hid: bool,
  /// turn off xinput gamepad device support (windows only)
  #[argh(switch)]
  without_xinput: bool,
  /// turn on lovense connect app device support (off by default)
  #[argh(switch)]
  with_lovense_connect: bool,
  /// turn on websocket server device comm manager
  #[argh(switch)]
  with_websocket_server_device: bool,
}

// Registers one comm manager with the server; failure is logged, not fatal,
// so one unsupported transport does not prevent the others from loading.
fn try_add_comm_manager<T>(server: &ButtplugRemoteServer, builder: T)
where
  T: DeviceCommunicationManagerBuilder,
{
  if let Err(e) = server.device_manager().add_comm_manager(builder) {
    info!("Can't add Comm Manager: {:?}", e);
  }
}

// Adds every device comm manager that was not disabled (or, for the opt-in
// ones, was enabled) via the command line. Note this re-parses the process
// arguments via argh::from_env().
pub fn setup_server_device_comm_managers(server: &ButtplugRemoteServer) {
  let args: IntifaceCLIArguments = argh::from_env();
  if !args.without_bluetooth_le {
    info!("Including Bluetooth LE (btleplug) Device Comm Manager Support");
    try_add_comm_manager(server, BtlePlugCommunicationManagerBuilder::default());
  }
  if !args.without_lovense_dongle_hid {
    info!("Including Lovense HID Dongle Support");
    try_add_comm_manager(
      server,
      LovenseHIDDongleCommunicationManagerBuilder::default(),
    );
  }
  if !args.without_lovense_dongle_serial {
    info!("Including Lovense Serial Dongle Support");
    try_add_comm_manager(
      server,
      LovenseSerialDongleCommunicationManagerBuilder::default(),
    );
  }
  if !args.without_serial {
    info!("Including Serial Port Support");
    try_add_comm_manager(server, SerialPortCommunicationManagerBuilder::default());
  }
  #[cfg(target_os = "windows")]
  if !args.without_xinput {
    info!("Including XInput Gamepad Support");
    try_add_comm_manager(server, XInputDeviceCommunicationManagerBuilder::default());
  }
  if args.with_lovense_connect {
    info!("Including Lovense Connect App Support");
    try_add_comm_manager(
      server,
      LovenseConnectServiceCommunicationManagerBuilder::default(),
    );
  }
  if args.with_websocket_server_device {
    info!("Including Websocket Server Device Support");
    try_add_comm_manager(
      server,
      WebsocketServerDeviceCommunicationManagerBuilder::default().listen_on_all_interfaces(true),
    );
  }
}

// Returns the --log level, if one was passed on the command line.
pub fn check_log_level() -> Option<Level> {
  let args: IntifaceCLIArguments = argh::from_env();
  args.log
}

// Starts the frontend protobuf channel if --frontendpipe was passed.
pub fn check_frontend_pipe(token: CancellationToken) -> Option<FrontendPBufChannel> {
  let args: IntifaceCLIArguments = argh::from_env();
  if args.frontendpipe {
    Some(frontend::run_frontend_task(token))
  } else {
    None
  }
}

// Turns the parsed command line into ConnectorOptions.
//
// Returns Ok(None) when a print-and-exit flag (--version/--serverversion) was
// handled, Ok(Some(options)) otherwise, and Err when no connection argument
// was supplied at all.
pub fn parse_options() -> Result<Option<ConnectorOptions>, IntifaceCLIErrorEnum> {
  let args: IntifaceCLIArguments = argh::from_env();
  // Options that will do a thing then exit:
  //
  // - serverversion
  // - generatecert
  if args.serverversion || args.version {
    debug!("Server version command sent, printing and exiting.");
    println!(
      "Intiface CLI (Rust Edition) Version {}, Commit {}, Built {}",
      VERSION,
      env!("VERGEN_GIT_SHA_SHORT"),
      env!("VERGEN_BUILD_TIMESTAMP")
    );
    return Ok(None);
  }
  // Options that set up the server networking
  let mut connector_info = ConnectorOptions::default();
  let mut connector_info_set = false;
  if args.wsallinterfaces {
    info!("Intiface CLI Options: Websocket Use All Interfaces option passed.");
    connector_info.ws_listen_on_all_interfaces = true;
    connector_info_set = true;
  }
  if let Some(wsinsecureport) = &args.wsinsecureport {
    info!(
      "Intiface CLI Options: Websocket Insecure Port {}",
      wsinsecureport
    );
    connector_info.ws_insecure_port = Some(*wsinsecureport);
    connector_info_set = true;
  }
  if let Some(ipcpipe) = &args.ipcpipe {
    // TODO We should actually implement pipes :(
    // NOTE(review): --ipcpipe is logged but does NOT set connector_info_set,
    // so passing only --ipcpipe still hits the error below.
    info!("Intiface CLI Options: IPC Pipe Name {}", ipcpipe);
  }
  // If we don't have a device configuration by this point, panic.
  if !connector_info_set {
    return Err(
      IntifaceError::new(
        "Must have a connection argument (wsinsecureport, wssecureport, ipcport) to run!",
      )
      .into(),
    );
  }
  connector_info
    .server_builder
    .name(&args.servername)
    .max_ping_time(args.pingtime)
    .allow_raw_messages(args.allowraw);
  if args.frontendpipe {
    info!("Intiface CLI Options: Using frontend pipe");
    connector_info.use_frontend_pipe = true;
  }
  if args.stayopen {
    info!("Intiface CLI Options: Leave server open after disconnect.");
    connector_info.stay_open = true;
  }
  // Options that set up Buttplug server parameters
  if let Some(deviceconfig) = &args.deviceconfig {
    info!(
      "Intiface CLI Options: External Device Config {}",
      deviceconfig
    );
    // Config files are read eagerly here; a bad path is fatal.
    match fs::read_to_string(deviceconfig) {
      Ok(cfg) => connector_info
        .server_builder
        .device_configuration_json(Some(cfg)),
      Err(err) => panic!("Error opening external device configuration: {:?}", err),
    };
  }
  if let Some(userdeviceconfig) = &args.userdeviceconfig {
    info!(
      "Intiface CLI Options: User Device Config {}",
      userdeviceconfig
    );
    match fs::read_to_string(userdeviceconfig) {
      Ok(cfg) => connector_info
        .server_builder
        .user_device_configuration_json(Some(cfg)),
      Err(err) => panic!("Error opening user device configuration: {:?}", err),
    };
  }
  Ok(Some(connector_info))
}
use amethyst::{ assets::Loader, ecs::Entity, prelude::*, ui::{Anchor, TtfFormat, UiText, UiTransform}, }; pub struct FpsText { pub fps: Entity, } pub fn init_fps_display(world: &mut World) { let font = world.read_resource::<Loader>().load( "resources/fonts/square.ttf", TtfFormat, Default::default(), (), &world.read_resource(), ); let fps_transform = UiTransform::new( "FPS".to_string(), Anchor::TopLeft, 50.0, -50.0, 1.0, 200.0, 50.0, 0, ); let fps = world .create_entity() .with(fps_transform) .with(UiText::new( font.clone(), "N".to_string(), [0.0, 0.0, 0.0, 1.0], 50.0, )) .build(); world.add_resource(FpsText { fps }); }
use charts::{Chart, ScaleLinear, MarkerType, PointLabelPosition, Color, ScatterView}; pub fn save_chart(data: &Vec<(f64, f64)>, labels: &(String, String)) { let width = 1000; let height = 700; let (top, right, bottom, left) = (50, 40, 50, 60); let mut parsed: Vec<(f32, f32)> = Vec::new(); for el in data { parsed.push((el.0 as f32, el.1 as f32)); } let mut max_x = f32::MIN; let mut max_y = f32::MIN; let mut min_x = f32::MAX; let mut min_y = f32::MAX; for el in &parsed { if el.0 > max_x { max_x = el.0; } if el.1 > max_y { max_y = el.1; } if el.0 < min_x { min_x = el.0; } if el.1 < min_y { min_y = el.1; } } let x = ScaleLinear::new() .set_domain(vec![min_x as f32, max_x]) .set_range(vec![0, width - left - right]); let y = ScaleLinear::new() .set_domain(vec![min_y as f32, max_y]) .set_range(vec![height - top - bottom, 0]); // Create Scatter series view that is going to represent the data. let scatter_view = ScatterView::new() .set_x_scale(&x) .set_y_scale(&y) .set_label_position(PointLabelPosition::E) .set_marker_type(MarkerType::Square) .set_colors(Color::from_vec_of_hex_strings(vec!["#409EFF"])) .set_label_visibility(false) .load_data(&parsed).unwrap(); Chart::new() .set_width(width) .set_height(height) .set_margins(top, right, bottom, left) .add_title(String::from("Datas")) .add_view(&scatter_view) .add_axis_bottom(&x) .add_axis_left(&y) .add_bottom_axis_label(&labels.0) .add_left_axis_label(&labels.1) .save("charts/chart.svg").unwrap(); }
extern crate clap; use clap::{Arg, App}; mod display; mod clock; fn main() { let matches = App::new("fiv") .version("0.1.0") .author("kevin970401 <kevin970401@gmail.com>") .about("fast image Viewer") .arg(Arg::with_name("INPUTS") .help("Path to images to display") .required(false) .index(1) .min_values(0)) .get_matches(); let inputs = matches.values_of("INPUTS"); match inputs { Some(x) => { let paths: Vec<&str> = x.collect(); display::display(&paths); println!("show images"); }, None => { println!("just show clock"); } } }
use std::env; fn main() { let args: Vec<String> = env::args().collect(); match args.len() { 1 => { println!("引数の個数が正しくありません"); } 2 => { let number: i32 = match args[1].parse() { Ok(x) => x, Err(_) => { eprintln!("not number!"); return; } }; println!(".intel_syntax noprefix"); println!(".globl main"); println!("main:"); println!(" mov rax, {0}", number); println!(" ret"); } _ => {} } }
use serde::{Deserialize, Serialize}; use std::time::{Duration, SystemTime}; use crate::protocol::Protocol; const HEADER: u8 = 0xc3; #[derive(Copy, Clone, Serialize, Deserialize, Debug)] pub enum FanStrength { Low = 0x3, Medium = 0x2, High = 0x1, Auto = 0x5, } #[derive(Copy, Clone, Serialize, Deserialize, Debug)] pub enum Mode { Hot = 0x4, Cold = 0x1, Fan = 0x6, Dehydrate = 0x2, Unknown = 0x0, } #[derive(Copy, Clone, Serialize, Deserialize, Debug)] pub struct Electra { pub power: bool, pub mode: Mode, pub fan: FanStrength, pub temp: u8, pub swing_h: bool, pub swing_v: bool, } impl Electra { pub fn new() -> Electra { Electra { power: false, mode: Mode::Cold, fan: FanStrength::Low, temp: 25, swing_h: false, swing_v: false, } } } fn checksum(bytes: [u8; 12]) -> u8 { let crc = bytes.to_vec().into_iter().fold(0u16, |a, b| a + (b as u16)); return (crc % (2u16 << 8)) as u8; } impl Protocol for Electra { fn duration_one(&self) -> Duration { Duration::from_micros(1690) } fn duration_zero(&self) -> Duration { Duration::from_micros(560) } fn duration_separator(&self) -> Duration { Duration::from_micros(560) } fn message_intro(&self) -> Vec<Duration> { vec![9000, 4500] .into_iter() .map(|us| Duration::from_micros(us)) .collect() } fn build_payload(&self) -> Vec<u8> { let power = if self.power { 0x20 } else { 0x00 }; let temperature = (self.temp - 8) & 0x1f; let swing_v = if self.swing_v { 0x0 } else { 0x7 }; let swing_h = if self.swing_h { 0x0 } else { 0x7 }; let fan = self.fan as u8; let mode = self.mode as u8; let now = time::OffsetDateTime::from(SystemTime::now()); let payload: [u8; 12] = [ HEADER, temperature << 3 | swing_v, swing_h << 5 | (now.hour() as u8), now.minute() as u8, fan << 5, 0x00, mode << 5, 0x00, 0x00, power, 0x00, 0x00, // TODO: Should be button pressed. Can't determine if relevant. ]; let checksum_byte = checksum(payload); let mut result = payload.to_vec(); result.push(checksum_byte); return result; } }
pub mod note_archive; pub use note_archive::*; pub mod user; pub mod client; pub mod goal; pub mod collateral; pub mod pronouns; pub mod note_day; pub mod note; pub mod utils; pub mod constants; pub mod blank_enums; pub const USR_FL: &str = "users.txt"; pub const CLT_FL: &str = "clients.txt"; pub const G_FL: &str = "goals.txt"; pub const COL_FL: &str = "collaterals.txt"; pub const GCOL_FL: &str = "general_collaterals.txt"; pub const PRN_FL: &str = "pronouns.txt"; pub const ND_FL: &str = "note_days.txt"; pub const NT_FL: &str = "note_templates.txt"; pub const N_FL: &str = "note.txt"; fn main() { let _enabled = ansi_term::enable_ansi_support(); let filepaths = [ (String::from("user_filepath"), String::from(USR_FL),), (String::from("client_filepath"), String::from(CLT_FL),), (String::from("goal_filepath"), String::from(G_FL),), (String::from("collateral_filepath"), String::from(COL_FL),), (String::from("general_collateral_filepath"), String::from(GCOL_FL),), (String::from("pronouns_filepath"), String::from(PRN_FL),), (String::from("note_day_filepath"), String::from(ND_FL),), (String::from("note_template_filepath"), String::from(NT_FL),), (String::from("note_filepath"), String::from(N_FL),), ].iter().cloned().collect(); let mut a = NoteArchive::new(filepaths); a.run(); }