lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
pallet/src/mock.rs
maciejnems/aleph-node
24fe79f21530e4436b4bc350ba022af825f3a15f
#![cfg(test)] use super::*; use crate as pallet_aleph; use frame_support::{ construct_runtime, parameter_types, sp_io, traits::{OnFinalize, OnInitialize}, weights::RuntimeDbWeight, }; use primitives::AuthorityId; use sp_core::H256; use sp_runtime::{ impl_opaque_keys, testing::{Header, TestXt, UintAuthorityId}, traits::{ConvertInto, IdentityLookup, OpaqueKeys}, Perbill, }; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>; type Block = frame_system::mocking::MockBlock<Test>; pub(crate) type AccountId = u64; construct_runtime!( pub enum Test where Block = Block, NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Pallet, Call, Config, Storage, Event<T>}, Balances: pallet_balances::{Pallet, Call, Storage, Config<T>, Event<T>}, Session: pallet_session::{Pallet, Call, Storage, Event, Config<T>}, Aleph: pallet_aleph::{Pallet, Call, Config<T>, Storage, Event<T>}, Timestamp: pallet_timestamp::{Pallet, Call, Storage, Inherent}, } ); impl_opaque_keys! { pub struct TestSessionKeys { pub aleph: super::Pallet<Test>, } } parameter_types! 
{ pub const BlockHashCount: u64 = 250; pub BlockWeights: frame_system::limits::BlockWeights = frame_system::limits::BlockWeights::simple_max(1024); pub const TestDbWeight: RuntimeDbWeight = RuntimeDbWeight { read: 25, write: 100 }; } impl frame_system::Config for Test { type BaseCallFilter = frame_support::traits::Everything; type BlockWeights = (); type BlockLength = (); type Origin = Origin; type Call = Call; type Index = u64; type BlockNumber = u64; type Hash = H256; type Hashing = sp_runtime::traits::BlakeTwo256; type AccountId = u64; type Lookup = IdentityLookup<Self::AccountId>; type Header = Header; type Event = Event; type BlockHashCount = BlockHashCount; type DbWeight = TestDbWeight; type Version = (); type PalletInfo = PalletInfo; type AccountData = pallet_balances::AccountData<u128>; type OnNewAccount = (); type OnKilledAccount = (); type SystemWeightInfo = (); type SS58Prefix = (); type OnSetCode = (); } parameter_types! { pub const Period: u64 = 1; pub const Offset: u64 = 0; pub const DisabledValidatorsThreshold: Perbill = Perbill::from_percent(17); } parameter_types! 
{ pub const ExistentialDeposit: u128 = 1; } impl pallet_balances::Config for Test { type Balance = u128; type MaxReserves = (); type ReserveIdentifier = [u8; 8]; type DustRemoval = (); type Event = Event; type ExistentialDeposit = ExistentialDeposit; type AccountStore = System; type WeightInfo = (); type MaxLocks = (); } impl pallet_session::Config for Test { type Event = Event; type ValidatorId = u64; type ValidatorIdOf = ConvertInto; type ShouldEndSession = pallet_session::PeriodicSessions<Period, Offset>; type NextSessionRotation = pallet_session::PeriodicSessions<Period, Offset>; type SessionManager = pallet_aleph::AlephSessionManager<Self>; type SessionHandler = <TestSessionKeys as OpaqueKeys>::KeyTypeIdProviders; type Keys = TestSessionKeys; type DisabledValidatorsThreshold = DisabledValidatorsThreshold; type WeightInfo = (); } impl<C> frame_system::offchain::SendTransactionTypes<C> for Test where Call: From<C>, { type Extrinsic = TestXt<Call, ()>; type OverarchingCall = Call; } parameter_types! { pub const MinimumPeriod: u64 = 3; } impl pallet_timestamp::Config for Test { type Moment = u64; type OnTimestampSet = (); type MinimumPeriod = MinimumPeriod; type WeightInfo = (); } parameter_types! 
{} impl Config for Test { type AuthorityId = AuthorityId; type Event = Event; } pub fn to_authorities(authorities: &[u64]) -> Vec<AuthorityId> { authorities .iter() .map(|id| UintAuthorityId(*id).to_public_key::<AuthorityId>()) .collect() } pub fn new_test_ext(authorities: &[(u64, u64)]) -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default() .build_storage::<Test>() .unwrap(); let balances: Vec<_> = (0..authorities.len()) .map(|i| (i as u64, 10_000_000)) .collect(); pallet_balances::GenesisConfig::<Test> { balances } .assimilate_storage(&mut t) .unwrap(); let session_keys: Vec<_> = authorities .iter() .map(|(id, weight)| (UintAuthorityId(*id).to_public_key::<AuthorityId>(), weight)) .enumerate() .map(|(i, (k, _))| (i as u64, i as u64, TestSessionKeys { aleph: k })) .collect(); pallet_session::GenesisConfig::<Test> { keys: session_keys } .assimilate_storage(&mut t) .unwrap(); t.into() } pub(crate) fn run_session(n: u64) { while System::block_number() < n { Session::on_finalize(System::block_number()); Aleph::on_finalize(System::block_number()); System::on_finalize(System::block_number()); System::initialize( &(System::block_number() + 1), &System::parent_hash(), &Default::default(), Default::default(), ); System::on_initialize(System::block_number()); Session::on_initialize(System::block_number()); Aleph::on_initialize(System::block_number()); } } pub(crate) fn initialize_session() { System::initialize( &1, &System::parent_hash(), &Default::default(), Default::default(), ); System::on_initialize(System::block_number()); Session::on_initialize(System::block_number()); Aleph::on_initialize(System::block_number()); }
#![cfg(test)] use super::*; use crate as pallet_aleph; use frame_support::{ construct_runtime, parameter_types, sp_io, traits::{OnFinalize, OnInitialize}, weights::RuntimeDbWeight, }; use primitives::AuthorityId; use sp_core::H256; use sp_runtime::{ impl_opaque_keys, testing::{Header, TestXt, UintAuthorityId}, traits::{ConvertInto, IdentityLookup, OpaqueKeys}, Perbill, }; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>; type Block = frame_system::mocking::MockBlock<Test>; pub(crate) type AccountId = u64; construct_runtime!( pub enum Test where Block = Block, NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Pallet, Call, Config, Storage, Event<T>}, Balances: pallet_balances::{Pallet, Call, Storage, Config<T>, Event<T>}, Session: pallet_session::{Pallet, Call, Storage, Event, Config<T>}, Aleph: pallet_aleph::{Pallet, Call, Config<T>, Storage, Event<T>}, Timestamp: pallet_timestamp::{Pallet, Call, Storage, Inherent}, } ); impl_opaque_keys! { pub struct TestSessionKeys { pub aleph: super::Pallet<Test>, } } parameter_types! 
{ pub const BlockHashCount: u64 = 250; pub BlockWeights: frame_system::limits::BlockWeights = frame_system::limits::BlockWeights::simple_max(1024); pub const TestDbWeight: RuntimeDbWeight = RuntimeDbWeight { read: 25, write: 100 }; } impl frame_system::Config for Test { type BaseCallFilter = frame_support::traits::Everything; type BlockWeights = (); type BlockLength = (); type Origin = Origin; type Call = Call; type Index = u64; type BlockNumber = u64; type Hash = H256; type Hashing = sp_runtime::traits::BlakeTwo256; type AccountId = u64; type Lookup = IdentityLookup<Self::AccountId>; type Header = Header; type Event = Event; type BlockHashCount = BlockHashCount; type DbWeight = TestDbWeight; type Version = (); type PalletInfo = PalletInfo; type AccountData = pallet_balances::AccountData<u128>; type OnNewAccount = (); type OnKilledAccount = (); type SystemWeightInfo = (); type SS58Prefix = (); type OnSetCode = (); } parameter_types! { pub const Period: u64 = 1; pub const Offset: u64 = 0; pub const DisabledValidatorsThreshold: Perbill = Perbill::from_percent(17); } parameter_types! { pub const ExistentialDeposit: u128 = 1; } impl pallet_balances::Config for Test { type Balance = u128; type MaxReserves = (); type ReserveIdentifier = [u8; 8]; type DustRemoval = (); type Event = Event; type ExistentialDeposit = ExistentialDeposit; type AccountStore = System; type WeightInfo = (); type MaxLocks = (); } impl pallet_session::Config for Test { type Event = Event; type ValidatorId = u64; type ValidatorIdOf = ConvertInto; type ShouldEndSession = pallet_session::PeriodicSessions<Period, Offset>; type NextSessionRotation = pallet_session::PeriodicSessions<Period, Offset>; type SessionManager = pallet_aleph::AlephSessionMa
lect() } pub fn new_test_ext(authorities: &[(u64, u64)]) -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default() .build_storage::<Test>() .unwrap(); let balances: Vec<_> = (0..authorities.len()) .map(|i| (i as u64, 10_000_000)) .collect(); pallet_balances::GenesisConfig::<Test> { balances } .assimilate_storage(&mut t) .unwrap(); let session_keys: Vec<_> = authorities .iter() .map(|(id, weight)| (UintAuthorityId(*id).to_public_key::<AuthorityId>(), weight)) .enumerate() .map(|(i, (k, _))| (i as u64, i as u64, TestSessionKeys { aleph: k })) .collect(); pallet_session::GenesisConfig::<Test> { keys: session_keys } .assimilate_storage(&mut t) .unwrap(); t.into() } pub(crate) fn run_session(n: u64) { while System::block_number() < n { Session::on_finalize(System::block_number()); Aleph::on_finalize(System::block_number()); System::on_finalize(System::block_number()); System::initialize( &(System::block_number() + 1), &System::parent_hash(), &Default::default(), Default::default(), ); System::on_initialize(System::block_number()); Session::on_initialize(System::block_number()); Aleph::on_initialize(System::block_number()); } } pub(crate) fn initialize_session() { System::initialize( &1, &System::parent_hash(), &Default::default(), Default::default(), ); System::on_initialize(System::block_number()); Session::on_initialize(System::block_number()); Aleph::on_initialize(System::block_number()); }
nager<Self>; type SessionHandler = <TestSessionKeys as OpaqueKeys>::KeyTypeIdProviders; type Keys = TestSessionKeys; type DisabledValidatorsThreshold = DisabledValidatorsThreshold; type WeightInfo = (); } impl<C> frame_system::offchain::SendTransactionTypes<C> for Test where Call: From<C>, { type Extrinsic = TestXt<Call, ()>; type OverarchingCall = Call; } parameter_types! { pub const MinimumPeriod: u64 = 3; } impl pallet_timestamp::Config for Test { type Moment = u64; type OnTimestampSet = (); type MinimumPeriod = MinimumPeriod; type WeightInfo = (); } parameter_types! {} impl Config for Test { type AuthorityId = AuthorityId; type Event = Event; } pub fn to_authorities(authorities: &[u64]) -> Vec<AuthorityId> { authorities .iter() .map(|id| UintAuthorityId(*id).to_public_key::<AuthorityId>()) .col
random
[ { "content": "pub fn session_id_from_block_num<B: Block>(num: NumberFor<B>, period: SessionPeriod) -> SessionId {\n\n SessionId(num.saturated_into::<u32>() / period.0)\n\n}\n\n\n\npub struct AlephConfig<B: Block, N, C, SC> {\n\n pub network: N,\n\n pub client: Arc<C>,\n\n pub select_chain: SC,\n\n ...
Rust
src/systems/collision.rs
Jazarro/space-menace
8be706d2e993d594557ec3f88d5ac2d331b955d7
use amethyst::{ core::{math::Vector2, Named, Transform}, ecs::{Entities, Join, LazyUpdate, ReadExpect, ReadStorage, System, WriteStorage}, }; use crate::{ components::{ Boundary, Bullet, Collidee, CollideeDetails, Collider, Direction, Directions, Flier, FlierAi, Marine, Motion, Pincer, PincerAi, }, entities::{show_bullet_impact, show_explosion}, resources::{AssetType, Context, PrefabList}, }; pub struct CollisionSystem; impl<'s> System<'s> for CollisionSystem { type SystemData = ( Entities<'s>, ReadStorage<'s, Collider>, WriteStorage<'s, Collidee>, ReadStorage<'s, Boundary>, ReadStorage<'s, Motion>, ReadStorage<'s, Named>, ); fn run(&mut self, data: Self::SystemData) { let (entities, colliders, mut collidees, boundaries, motions, names) = data; for (entity_a, collider_a, collidee, boundary, motion_a) in (&entities, &colliders, &mut collidees, &boundaries, &motions).join() { let velocity_a = motion_a.velocity; let bbox_a = &collider_a.bounding_box; let position_a_x = bbox_a.position.x; let half_size_a_x = bbox_a.half_size.x; let correction; if velocity_a.x != 0. || velocity_a.y != 0. && collider_a.is_collidable { for (entity_b, collider_b, motion_b, name_b) in (&entities, &colliders, &motions, &names).join() { let velocity_b = motion_b.velocity; let use_hit_box = (velocity_a.x * velocity_b.x != 0.) || (velocity_a.y * velocity_b.y != 0.); if entity_a != entity_b && collider_a.is_overlapping_with(collider_b, use_hit_box) { collidee.set_collidee_details( name_b.name.to_string(), collider_a, collider_b, velocity_a, velocity_b, use_hit_box, ); } } } correction = if (position_a_x - half_size_a_x) <= boundary.left { (position_a_x - half_size_a_x) - boundary.left } else if (position_a_x + half_size_a_x) >= boundary.right { (position_a_x + half_size_a_x) - boundary.right } else { 0. }; if correction != 0. 
{ collidee.horizontal = Some(CollideeDetails { name: String::from("Boundary"), position: Vector2::new(0., 0.), half_size: Vector2::new(0., 0.), correction, }); } } } } pub struct PincerCollisionSystem; impl<'s> System<'s> for PincerCollisionSystem { type SystemData = ( Entities<'s>, ReadStorage<'s, Marine>, WriteStorage<'s, Pincer>, ReadStorage<'s, Collidee>, WriteStorage<'s, Direction>, WriteStorage<'s, Motion>, ReadExpect<'s, PrefabList>, ReadStorage<'s, Transform>, ReadExpect<'s, LazyUpdate>, ReadExpect<'s, Context>, ); fn run(&mut self, data: Self::SystemData) { let ( entities, marines, mut pincers, collidees, mut dirs, mut motions, prefab_list, transforms, lazy_update, ctx, ) = data; let marine_opt = (&entities, &marines) .join() .map(|(entity, _)| entity) .next(); for (entity, pincer, collidee, dir, motion, transform) in ( &*entities, &mut pincers, &collidees, &mut dirs, &mut motions, &transforms, ) .join() { if let Some(collidee_horizontal) = &collidee.horizontal { match collidee_horizontal.name.as_ref() { "Boundary" => { pincer.ai = PincerAi::Patrolling; motion.velocity.x *= -1.; dir.set_x_velocity(motion.velocity.x); } "Bullet" => { if let Some(marine) = marine_opt { pincer.ai = PincerAi::Attacking { target: marine }; } pincer.hit_count += 1; if pincer.hit_count == 4 { let small_explosion_prefab_handle = { prefab_list.get(AssetType::SmallExplosion).unwrap().clone() }; let pincer_translation = transform.translation(); show_explosion( &entities, small_explosion_prefab_handle, pincer_translation.x, pincer_translation.y, &lazy_update, &ctx, ); let _ = entities.delete(entity); } } _ => {} } } } } } pub struct FlierCollisionSystem; impl<'s> System<'s> for FlierCollisionSystem { type SystemData = ( Entities<'s>, ReadStorage<'s, Marine>, WriteStorage<'s, Flier>, ReadStorage<'s, Collidee>, WriteStorage<'s, Direction>, WriteStorage<'s, Motion>, ReadExpect<'s, PrefabList>, ReadStorage<'s, Transform>, ReadExpect<'s, LazyUpdate>, ReadExpect<'s, Context>, ); fn run(&mut 
self, data: Self::SystemData) { let ( entities, marines, mut fliers, collidees, mut dirs, mut motions, prefab_list, transforms, lazy_update, ctx, ) = data; let marine_opt = (&entities, &marines) .join() .map(|(entity, _)| entity) .next(); for (entity, flier, collidee, dir, motion, transform) in ( &*entities, &mut fliers, &collidees, &mut dirs, &mut motions, &transforms, ) .join() { if let Some(collidee_horizontal) = &collidee.horizontal { match collidee_horizontal.name.as_ref() { "Boundary" => { flier.ai = FlierAi::Patrolling; motion.velocity.x *= -1.; dir.set_x_velocity(motion.velocity.x); } "Bullet" => { if let Some(marine) = marine_opt { flier.ai = FlierAi::Attacking { target: marine }; } flier.hit_count += 1; if flier.hit_count == 6 { let small_explosion_prefab_handle = { prefab_list.get(AssetType::SmallExplosion).unwrap().clone() }; let flier_translation = transform.translation(); show_explosion( &entities, small_explosion_prefab_handle, flier_translation.x, flier_translation.y, &lazy_update, &ctx, ); let _ = entities.delete(entity); } } _ => {} } } } } } pub struct BulletCollisionSystem; impl<'s> System<'s> for BulletCollisionSystem { type SystemData = ( Entities<'s>, ReadStorage<'s, Bullet>, ReadStorage<'s, Collider>, ReadStorage<'s, Collidee>, WriteStorage<'s, Direction>, WriteStorage<'s, Motion>, ReadExpect<'s, PrefabList>, ReadExpect<'s, LazyUpdate>, ReadExpect<'s, Context>, ); fn run(&mut self, data: Self::SystemData) { let ( entities, bullets, colliders, collidees, mut dirs, mut motions, prefab_list, lazy_update, ctx, ) = data; for (entity, _, collider, collidee, dir, motion) in ( &*entities, &bullets, &colliders, &collidees, &mut dirs, &mut motions, ) .join() { if let Some(collidee_horizontal) = &collidee.horizontal { match collidee_horizontal.name.as_ref() { "Boundary" => {} _ => { let bullet_impact_prefab_handle = { prefab_list.get(AssetType::BulletImpact).unwrap().clone() }; let impact_position_x = match dir.x { Directions::Right => { 
collidee_horizontal.position.x - collidee_horizontal.half_size.x } Directions::Left => { collidee_horizontal.position.x + collidee_horizontal.half_size.x } _ => 0., }; show_bullet_impact( &entities, bullet_impact_prefab_handle, impact_position_x, collider.bounding_box.position.y, motion.velocity.x, &lazy_update, &ctx, ); } } let _ = entities.delete(entity); } } } } pub struct MarineCollisionSystem; impl<'s> System<'s> for MarineCollisionSystem { type SystemData = ( ReadStorage<'s, Marine>, WriteStorage<'s, Collider>, ReadStorage<'s, Collidee>, ); fn run(&mut self, data: Self::SystemData) { let (marines, mut colliders, collidees) = data; for (_, collider, collidee) in (&marines, &mut colliders, &collidees).join() { if let Some(collidee_horizontal) = &collidee.horizontal { if let "Pincer" = collidee_horizontal.name.as_ref() { collider.is_collidable = false; } if let "Flier" = collidee_horizontal.name.as_ref() { collider.is_collidable = false; } } } } }
use amethyst::{ core::{math::Vector2, Named, Transform}, ecs::{Entities, Join, LazyUpdate, ReadExpect, ReadStorage, System, WriteStorage}, }; use crate::{ components::{ Boundary, Bullet, Collidee, CollideeDetails, Collider, Direction, Directions, Flier, FlierAi, Marine, Motion, Pincer, PincerAi, }, entities::{show_bullet_impact, show_explosion}, resources::{AssetType, Context, PrefabList}, }; pub struct CollisionSystem; impl<'s> System<'s> for CollisionSystem {
ider_a.is_collidable { for (entity_b, collider_b, motion_b, name_b) in (&entities, &colliders, &motions, &names).join() { let velocity_b = motion_b.velocity; let use_hit_box = (velocity_a.x * velocity_b.x != 0.) || (velocity_a.y * velocity_b.y != 0.); if entity_a != entity_b && collider_a.is_overlapping_with(collider_b, use_hit_box) { collidee.set_collidee_details( name_b.name.to_string(), collider_a, collider_b, velocity_a, velocity_b, use_hit_box, ); } } } correction = if (position_a_x - half_size_a_x) <= boundary.left { (position_a_x - half_size_a_x) - boundary.left } else if (position_a_x + half_size_a_x) >= boundary.right { (position_a_x + half_size_a_x) - boundary.right } else { 0. }; if correction != 0. { collidee.horizontal = Some(CollideeDetails { name: String::from("Boundary"), position: Vector2::new(0., 0.), half_size: Vector2::new(0., 0.), correction, }); } } } } pub struct PincerCollisionSystem; impl<'s> System<'s> for PincerCollisionSystem { type SystemData = ( Entities<'s>, ReadStorage<'s, Marine>, WriteStorage<'s, Pincer>, ReadStorage<'s, Collidee>, WriteStorage<'s, Direction>, WriteStorage<'s, Motion>, ReadExpect<'s, PrefabList>, ReadStorage<'s, Transform>, ReadExpect<'s, LazyUpdate>, ReadExpect<'s, Context>, ); fn run(&mut self, data: Self::SystemData) { let ( entities, marines, mut pincers, collidees, mut dirs, mut motions, prefab_list, transforms, lazy_update, ctx, ) = data; let marine_opt = (&entities, &marines) .join() .map(|(entity, _)| entity) .next(); for (entity, pincer, collidee, dir, motion, transform) in ( &*entities, &mut pincers, &collidees, &mut dirs, &mut motions, &transforms, ) .join() { if let Some(collidee_horizontal) = &collidee.horizontal { match collidee_horizontal.name.as_ref() { "Boundary" => { pincer.ai = PincerAi::Patrolling; motion.velocity.x *= -1.; dir.set_x_velocity(motion.velocity.x); } "Bullet" => { if let Some(marine) = marine_opt { pincer.ai = PincerAi::Attacking { target: marine }; } pincer.hit_count += 1; if 
pincer.hit_count == 4 { let small_explosion_prefab_handle = { prefab_list.get(AssetType::SmallExplosion).unwrap().clone() }; let pincer_translation = transform.translation(); show_explosion( &entities, small_explosion_prefab_handle, pincer_translation.x, pincer_translation.y, &lazy_update, &ctx, ); let _ = entities.delete(entity); } } _ => {} } } } } } pub struct FlierCollisionSystem; impl<'s> System<'s> for FlierCollisionSystem { type SystemData = ( Entities<'s>, ReadStorage<'s, Marine>, WriteStorage<'s, Flier>, ReadStorage<'s, Collidee>, WriteStorage<'s, Direction>, WriteStorage<'s, Motion>, ReadExpect<'s, PrefabList>, ReadStorage<'s, Transform>, ReadExpect<'s, LazyUpdate>, ReadExpect<'s, Context>, ); fn run(&mut self, data: Self::SystemData) { let ( entities, marines, mut fliers, collidees, mut dirs, mut motions, prefab_list, transforms, lazy_update, ctx, ) = data; let marine_opt = (&entities, &marines) .join() .map(|(entity, _)| entity) .next(); for (entity, flier, collidee, dir, motion, transform) in ( &*entities, &mut fliers, &collidees, &mut dirs, &mut motions, &transforms, ) .join() { if let Some(collidee_horizontal) = &collidee.horizontal { match collidee_horizontal.name.as_ref() { "Boundary" => { flier.ai = FlierAi::Patrolling; motion.velocity.x *= -1.; dir.set_x_velocity(motion.velocity.x); } "Bullet" => { if let Some(marine) = marine_opt { flier.ai = FlierAi::Attacking { target: marine }; } flier.hit_count += 1; if flier.hit_count == 6 { let small_explosion_prefab_handle = { prefab_list.get(AssetType::SmallExplosion).unwrap().clone() }; let flier_translation = transform.translation(); show_explosion( &entities, small_explosion_prefab_handle, flier_translation.x, flier_translation.y, &lazy_update, &ctx, ); let _ = entities.delete(entity); } } _ => {} } } } } } pub struct BulletCollisionSystem; impl<'s> System<'s> for BulletCollisionSystem { type SystemData = ( Entities<'s>, ReadStorage<'s, Bullet>, ReadStorage<'s, Collider>, ReadStorage<'s, Collidee>, 
WriteStorage<'s, Direction>, WriteStorage<'s, Motion>, ReadExpect<'s, PrefabList>, ReadExpect<'s, LazyUpdate>, ReadExpect<'s, Context>, ); fn run(&mut self, data: Self::SystemData) { let ( entities, bullets, colliders, collidees, mut dirs, mut motions, prefab_list, lazy_update, ctx, ) = data; for (entity, _, collider, collidee, dir, motion) in ( &*entities, &bullets, &colliders, &collidees, &mut dirs, &mut motions, ) .join() { if let Some(collidee_horizontal) = &collidee.horizontal { match collidee_horizontal.name.as_ref() { "Boundary" => {} _ => { let bullet_impact_prefab_handle = { prefab_list.get(AssetType::BulletImpact).unwrap().clone() }; let impact_position_x = match dir.x { Directions::Right => { collidee_horizontal.position.x - collidee_horizontal.half_size.x } Directions::Left => { collidee_horizontal.position.x + collidee_horizontal.half_size.x } _ => 0., }; show_bullet_impact( &entities, bullet_impact_prefab_handle, impact_position_x, collider.bounding_box.position.y, motion.velocity.x, &lazy_update, &ctx, ); } } let _ = entities.delete(entity); } } } } pub struct MarineCollisionSystem; impl<'s> System<'s> for MarineCollisionSystem { type SystemData = ( ReadStorage<'s, Marine>, WriteStorage<'s, Collider>, ReadStorage<'s, Collidee>, ); fn run(&mut self, data: Self::SystemData) { let (marines, mut colliders, collidees) = data; for (_, collider, collidee) in (&marines, &mut colliders, &collidees).join() { if let Some(collidee_horizontal) = &collidee.horizontal { if let "Pincer" = collidee_horizontal.name.as_ref() { collider.is_collidable = false; } if let "Flier" = collidee_horizontal.name.as_ref() { collider.is_collidable = false; } } } } }
type SystemData = ( Entities<'s>, ReadStorage<'s, Collider>, WriteStorage<'s, Collidee>, ReadStorage<'s, Boundary>, ReadStorage<'s, Motion>, ReadStorage<'s, Named>, ); fn run(&mut self, data: Self::SystemData) { let (entities, colliders, mut collidees, boundaries, motions, names) = data; for (entity_a, collider_a, collidee, boundary, motion_a) in (&entities, &colliders, &mut collidees, &boundaries, &motions).join() { let velocity_a = motion_a.velocity; let bbox_a = &collider_a.bounding_box; let position_a_x = bbox_a.position.x; let half_size_a_x = bbox_a.half_size.x; let correction; if velocity_a.x != 0. || velocity_a.y != 0. && coll
random
[ { "content": "pub fn load_marine(world: &mut World, prefab: Handle<Prefab<AnimationPrefabData>>, ctx: &Context) {\n\n let scale = ctx.scale;\n\n let mut transform = Transform::default();\n\n transform.set_scale(Vector3::new(scale, scale, scale));\n\n transform.set_translation_x(384.);\n\n transfo...
Rust
day_12/src/main.rs
kimpers/advent-of-code-2020
50750317b691717e211c03c67b99708edccddfc3
use shared; #[derive(Debug)] struct NavigatorV1<'a> { facing_direction: &'a str, north: usize, west: usize, east: usize, south: usize, } impl NavigatorV1<'_> { fn new(facing_direction: &str) -> NavigatorV1 { NavigatorV1 { facing_direction, north: 0, west: 0, east: 0, south: 0, } } fn rotate(&mut self, direction: &str, deg: usize) { let num_shifts = deg / 90; let all_directions = ["N", "E", "S", "W"]; let mut current_idx = all_directions .iter() .position(|&d| d == self.facing_direction) .unwrap(); for _i in 0..num_shifts { match direction { "R" => { current_idx = (current_idx + 1) % all_directions.len(); } "L" => { if current_idx as isize - 1 >= 0 { current_idx -= 1; } else { current_idx = all_directions.len() - 1; } } _ => panic!("Unknown direction {}", direction), } } self.facing_direction = all_directions[current_idx]; } pub fn nav(&mut self, action: &str) { let cmd = &action[0..1]; let distance = action[1..].parse::<usize>().unwrap(); match cmd { "N" => { let new_south = self.south as isize - distance as isize; if new_south > 0 { self.south = new_south as usize; } else { self.north += distance - self.south; self.south = 0; } } "S" => { let new_north = self.north as isize - distance as isize; if new_north > 0 { self.north = new_north as usize; } else { self.south += distance - self.north; self.north = 0; } } "E" => { let new_west = self.west as isize - distance as isize; if new_west > 0 { self.west = new_west as usize; } else { self.east += distance - self.west; self.west = 0; } } "W" => { let new_east = self.east as isize - distance as isize; if new_east > 0 { self.east = new_east as usize; } else { self.west += distance - self.east; self.east = 0; } } "L" | "R" => self.rotate(cmd, distance), "F" => { let new_action = format!("{}{}", self.facing_direction, distance); self.nav(&new_action); } _ => panic!("Unknown command {}", cmd), } } pub fn manhattan_distance(&self) -> usize { self.north + self.west + self.east + self.south } } #[derive(Debug)] struct Point { 
pub north: usize, pub west: usize, pub east: usize, pub south: usize, } impl Point { pub fn set_direction(&mut self, direction: &str, value: usize) { match direction { "N" => self.north = value, "W" => self.west = value, "E" => self.east = value, "S" => self.south = value, _ => panic!("Unknown direction {}", direction), } } pub fn get_direction(&self, direction: &str) -> usize { match direction { "N" => self.north, "W" => self.west, "E" => self.east, "S" => self.south, _ => panic!("Unknown direction {}", direction), } } } #[derive(Debug)] struct NavigatorV2<'a> { facing_direction: &'a str, ship: Point, waypoint: Point, } impl NavigatorV2<'_> { fn new(facing_direction: &str) -> NavigatorV2 { let ship = Point { north: 0, west: 0, east: 0, south: 0, }; let waypoint = Point { north: 1, west: 0, east: 10, south: 0, }; NavigatorV2 { facing_direction, ship, waypoint, } } fn rotate(&mut self, direction: &str, deg: usize) { let num_shifts = deg / 90; let all_directions = ["N", "E", "S", "W"]; let mut new_waypoint = Point { north: 0, west: 0, east: 0, south: 0, }; for curr_dir in all_directions.iter() { let mut current_idx = all_directions.iter().position(|&d| d == *curr_dir).unwrap(); for _i in 0..num_shifts { match direction { "R" => { current_idx = (current_idx + 1) % all_directions.len(); } "L" => { if current_idx as isize - 1 >= 0 { current_idx -= 1; } else { current_idx = all_directions.len() - 1; } } _ => panic!("Unknown rotation {}", direction), } } let curr_value = self.waypoint.get_direction(curr_dir); let new_direction = all_directions[current_idx]; new_waypoint.set_direction(new_direction, curr_value); } self.waypoint = new_waypoint; } pub fn nav(&mut self, action: &str) { let cmd = &action[0..1]; let amount = action[1..].parse::<usize>().unwrap(); match cmd { "N" => { let new_south = self.waypoint.south as isize - amount as isize; if new_south > 0 { self.waypoint.south = new_south as usize; } else { self.waypoint.north += amount - self.waypoint.south; 
self.waypoint.south = 0; } } "S" => { let new_north = self.waypoint.north as isize - amount as isize; if new_north > 0 { self.waypoint.north = new_north as usize; } else { self.waypoint.south += amount - self.waypoint.north; self.waypoint.north = 0; } } "E" => { let new_west = self.waypoint.west as isize - amount as isize; if new_west > 0 { self.waypoint.west = new_west as usize; } else { self.waypoint.east += amount - self.waypoint.west; self.waypoint.west = 0; } } "W" => { let new_east = self.waypoint.east as isize - amount as isize; if new_east > 0 { self.waypoint.east = new_east as usize; } else { self.waypoint.west += amount - self.waypoint.east; self.waypoint.east = 0; } } "L" | "R" => self.rotate(cmd, amount), "F" => { self.ship.north = self.ship.north + amount * self.waypoint.north; self.ship.south = self.ship.south + amount * self.waypoint.south; if self.ship.north >= self.ship.south { self.ship.north = self.ship.north - self.ship.south; self.ship.south = 0; } else { self.ship.south = self.ship.south - self.ship.north; self.ship.north = 0; } self.ship.east = self.ship.east + amount * self.waypoint.east; self.ship.west = self.ship.west + amount * self.waypoint.west; if self.ship.east >= self.ship.west { self.ship.east = self.ship.east - self.ship.west; self.ship.west = 0; } else { self.ship.west = self.ship.west - self.ship.east; self.ship.east = 0; } } _ => panic!("Unknown command {}", cmd), } } pub fn manhattan_distance(&self) -> usize { self.ship.north + self.ship.west + self.ship.east + self.ship.south } } fn main() { let actions = shared::read_file("input.txt"); let mut navigator = NavigatorV1::new("E"); for action in actions { navigator.nav(&action); } println!( "Manhattan distance between location and starting position using method 1 is {}", navigator.manhattan_distance() ); let actions = shared::read_file("input.txt"); let mut navigator2 = NavigatorV2::new("E"); for action in actions { navigator2.nav(&action); } println!( "Manhattan distance between 
location and starting position using method 2 is {}", navigator2.manhattan_distance() ); } #[cfg(test)] mod tests { use super::*; #[test] fn it_calculates_manhattan_distance() { let actions = shared::parse_input_to_string_vec( " F10 N3 F7 R90 F11", ); let mut navigator = NavigatorV1::new("E"); for action in actions { navigator.nav(&action); } assert_eq!(navigator.manhattan_distance(), 25); } #[test] fn it_calculates_v2_manhattan_distance() { let actions = shared::parse_input_to_string_vec( " F10 N3 F7 R90 F11", ); let mut navigator = NavigatorV2::new("E"); for action in actions { navigator.nav(&action); } assert_eq!(navigator.manhattan_distance(), 286); } }
use shared; #[derive(Debug)] struct NavigatorV1<'a> { facing_direction: &'a str, north: usize, west: usize, east: usize, south: usize, } impl NavigatorV1<'_> { fn new(facing_direction: &str) -> NavigatorV1 { NavigatorV1 { facing_direction, north: 0, west: 0, east: 0, south: 0, } } fn rotate(&mut self, direction: &str, deg: usize) { let num_shifts = deg / 90; let all_directions = ["N", "E", "S", "W"]; let mut current_idx = all_directions .iter() .position(|&d| d == self.facing_direction) .unwrap(); for _i in 0..num_shifts { match direction { "R" => { current_idx = (current_idx + 1) % all_directions.len(); } "L" => { if current_idx as isize - 1 >= 0 { current_idx -= 1; } else { current_idx = all_directions.len() - 1; } } _ => panic!("Unknown direction {}", direction), } } self.facing_direction = all_directions[current_idx]; } pub fn nav(&mut self, action: &str) { let cmd = &action[0..1]; let distance = action[1..].parse::<usize>().unwrap(); match cmd { "N" => { let new_south = self.south as isize - distance as isize; if new_south > 0 { self.south = new_south as usize;
println!( "Manhattan distance between location and starting position using method 2 is {}", navigator2.manhattan_distance() ); } #[cfg(test)] mod tests { use super::*; #[test] fn it_calculates_manhattan_distance() { let actions = shared::parse_input_to_string_vec( " F10 N3 F7 R90 F11", ); let mut navigator = NavigatorV1::new("E"); for action in actions { navigator.nav(&action); } assert_eq!(navigator.manhattan_distance(), 25); } #[test] fn it_calculates_v2_manhattan_distance() { let actions = shared::parse_input_to_string_vec( " F10 N3 F7 R90 F11", ); let mut navigator = NavigatorV2::new("E"); for action in actions { navigator.nav(&action); } assert_eq!(navigator.manhattan_distance(), 286); } }
} else { self.north += distance - self.south; self.south = 0; } } "S" => { let new_north = self.north as isize - distance as isize; if new_north > 0 { self.north = new_north as usize; } else { self.south += distance - self.north; self.north = 0; } } "E" => { let new_west = self.west as isize - distance as isize; if new_west > 0 { self.west = new_west as usize; } else { self.east += distance - self.west; self.west = 0; } } "W" => { let new_east = self.east as isize - distance as isize; if new_east > 0 { self.east = new_east as usize; } else { self.west += distance - self.east; self.east = 0; } } "L" | "R" => self.rotate(cmd, distance), "F" => { let new_action = format!("{}{}", self.facing_direction, distance); self.nav(&new_action); } _ => panic!("Unknown command {}", cmd), } } pub fn manhattan_distance(&self) -> usize { self.north + self.west + self.east + self.south } } #[derive(Debug)] struct Point { pub north: usize, pub west: usize, pub east: usize, pub south: usize, } impl Point { pub fn set_direction(&mut self, direction: &str, value: usize) { match direction { "N" => self.north = value, "W" => self.west = value, "E" => self.east = value, "S" => self.south = value, _ => panic!("Unknown direction {}", direction), } } pub fn get_direction(&self, direction: &str) -> usize { match direction { "N" => self.north, "W" => self.west, "E" => self.east, "S" => self.south, _ => panic!("Unknown direction {}", direction), } } } #[derive(Debug)] struct NavigatorV2<'a> { facing_direction: &'a str, ship: Point, waypoint: Point, } impl NavigatorV2<'_> { fn new(facing_direction: &str) -> NavigatorV2 { let ship = Point { north: 0, west: 0, east: 0, south: 0, }; let waypoint = Point { north: 1, west: 0, east: 10, south: 0, }; NavigatorV2 { facing_direction, ship, waypoint, } } fn rotate(&mut self, direction: &str, deg: usize) { let num_shifts = deg / 90; let all_directions = ["N", "E", "S", "W"]; let mut new_waypoint = Point { north: 0, west: 0, east: 0, south: 0, }; for curr_dir 
in all_directions.iter() { let mut current_idx = all_directions.iter().position(|&d| d == *curr_dir).unwrap(); for _i in 0..num_shifts { match direction { "R" => { current_idx = (current_idx + 1) % all_directions.len(); } "L" => { if current_idx as isize - 1 >= 0 { current_idx -= 1; } else { current_idx = all_directions.len() - 1; } } _ => panic!("Unknown rotation {}", direction), } } let curr_value = self.waypoint.get_direction(curr_dir); let new_direction = all_directions[current_idx]; new_waypoint.set_direction(new_direction, curr_value); } self.waypoint = new_waypoint; } pub fn nav(&mut self, action: &str) { let cmd = &action[0..1]; let amount = action[1..].parse::<usize>().unwrap(); match cmd { "N" => { let new_south = self.waypoint.south as isize - amount as isize; if new_south > 0 { self.waypoint.south = new_south as usize; } else { self.waypoint.north += amount - self.waypoint.south; self.waypoint.south = 0; } } "S" => { let new_north = self.waypoint.north as isize - amount as isize; if new_north > 0 { self.waypoint.north = new_north as usize; } else { self.waypoint.south += amount - self.waypoint.north; self.waypoint.north = 0; } } "E" => { let new_west = self.waypoint.west as isize - amount as isize; if new_west > 0 { self.waypoint.west = new_west as usize; } else { self.waypoint.east += amount - self.waypoint.west; self.waypoint.west = 0; } } "W" => { let new_east = self.waypoint.east as isize - amount as isize; if new_east > 0 { self.waypoint.east = new_east as usize; } else { self.waypoint.west += amount - self.waypoint.east; self.waypoint.east = 0; } } "L" | "R" => self.rotate(cmd, amount), "F" => { self.ship.north = self.ship.north + amount * self.waypoint.north; self.ship.south = self.ship.south + amount * self.waypoint.south; if self.ship.north >= self.ship.south { self.ship.north = self.ship.north - self.ship.south; self.ship.south = 0; } else { self.ship.south = self.ship.south - self.ship.north; self.ship.north = 0; } self.ship.east = 
self.ship.east + amount * self.waypoint.east; self.ship.west = self.ship.west + amount * self.waypoint.west; if self.ship.east >= self.ship.west { self.ship.east = self.ship.east - self.ship.west; self.ship.west = 0; } else { self.ship.west = self.ship.west - self.ship.east; self.ship.east = 0; } } _ => panic!("Unknown command {}", cmd), } } pub fn manhattan_distance(&self) -> usize { self.ship.north + self.ship.west + self.ship.east + self.ship.south } } fn main() { let actions = shared::read_file("input.txt"); let mut navigator = NavigatorV1::new("E"); for action in actions { navigator.nav(&action); } println!( "Manhattan distance between location and starting position using method 1 is {}", navigator.manhattan_distance() ); let actions = shared::read_file("input.txt"); let mut navigator2 = NavigatorV2::new("E"); for action in actions { navigator2.nav(&action); }
random
[ { "content": "pub fn read_file(filename: &str) -> Vec<String> {\n\n let mut file = File::open(filename).unwrap();\n\n let mut contents = String::new();\n\n\n\n file.read_to_string(&mut contents).unwrap();\n\n let lines = contents\n\n .lines()\n\n .map(|l| l.to_string())\n\n .col...
Rust
truck-meshalgo/src/analyzers/topology.rs
roudy16/truck
b92af7761302a5d16b5e52b15f1be7b6c4dfd460
use super::*; use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; use truck_topology::shell::ShellCondition; pub trait Topology { fn extract_boundaries(&self) -> Vec<Vec<usize>>; fn shell_condition(&self) -> ShellCondition; } #[derive(Clone, Debug)] struct Boundaries { checked: HashSet<[usize; 2]>, boundary: HashMap<[usize; 2], bool>, condition: ShellCondition, } impl Boundaries { #[inline(always)] fn new() -> Self { Boundaries { checked: Default::default(), boundary: Default::default(), condition: ShellCondition::Oriented, } } #[inline(always)] fn insert(&mut self, edge: [Vertex; 2]) { let ori = edge[0].pos < edge[1].pos; let edge = match ori { true => [edge[0].pos, edge[1].pos], false => [edge[1].pos, edge[0].pos], }; self.condition = self.condition & match (self.checked.insert(edge), self.boundary.insert(edge, ori)) { (true, None) => ShellCondition::Oriented, (false, None) => ShellCondition::Irregular, (true, Some(_)) => panic!("unexpected case!"), (false, Some(ori0)) => { self.boundary.remove(&edge); match ori == ori0 { true => ShellCondition::Regular, false => ShellCondition::Oriented, } } }; } #[inline(always)] fn condition(&self) -> ShellCondition { if self.condition == ShellCondition::Oriented && self.boundary.is_empty() { ShellCondition::Closed } else { self.condition } } } impl FromIterator<[Vertex; 2]> for Boundaries { fn from_iter<I: IntoIterator<Item = [Vertex; 2]>>(iter: I) -> Boundaries { let mut boundaries = Boundaries::new(); iter.into_iter().for_each(|edge| boundaries.insert(edge)); boundaries } } impl Topology for Faces { fn extract_boundaries(&self) -> Vec<Vec<usize>> { let mut vemap: HashMap<usize, usize> = self .face_iter() .flat_map(move |face| { let len = face.len(); (0..len).map(move |i| [face[i], face[(i + 1) % len]]) }) .collect::<Boundaries>() .boundary .into_iter() .map(|(edge, ori)| match ori { true => (edge[0], edge[1]), false => (edge[1], edge[0]), }) .collect(); let mut res = Vec::new(); while !vemap.is_empty() { let mut 
wire = Vec::new(); let front = vemap.iter().next().unwrap(); let front = (*front.0, *front.1); vemap.remove(&front.0); wire.push(front.0); let mut cursor = front.1; while cursor != front.0 { wire.push(cursor); cursor = vemap.remove(&cursor).unwrap_or(front.0); } res.push(wire); } res } fn shell_condition(&self) -> ShellCondition { let boundaries: Boundaries = self .face_iter() .flat_map(move |face| { let len = face.len(); (0..len).map(move |i| [face[i], face[(i + 1) % len]]) }) .collect(); boundaries.condition() } } impl Topology for PolygonMesh { fn extract_boundaries(&self) -> Vec<Vec<usize>> { self.faces().extract_boundaries() } fn shell_condition(&self) -> ShellCondition { self.faces().shell_condition() } }
use super::*; use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; use truck_topology::shell::ShellCondition; pub trait Topology { fn extract_boundaries(&self) -> Vec<Vec<usize>>; fn shell_condition(&self) -> ShellCondition; } #[derive(Clone, Debug)] struct Boundaries { checked: HashSet<[usize; 2]>, boundary: HashMap<[usize; 2], bool>, condition: ShellCondition, } impl Boundaries { #[inline(always)] fn new() -> Self { Boundaries { checked: Default::default(), boundary: Default::default(), condition: ShellCondition::Oriented, } } #[inline(always)] fn insert(&mut self, edge: [Vertex; 2]) { let ori = edge[0].pos < edge[1].pos; let edge = match ori { true => [edge[0].pos, edge[1].pos], false => [edge[1].pos, edge[0].pos], }; self.condition = self.condition &
; } #[inline(always)] fn condition(&self) -> ShellCondition { if self.condition == ShellCondition::Oriented && self.boundary.is_empty() { ShellCondition::Closed } else { self.condition } } } impl FromIterator<[Vertex; 2]> for Boundaries { fn from_iter<I: IntoIterator<Item = [Vertex; 2]>>(iter: I) -> Boundaries { let mut boundaries = Boundaries::new(); iter.into_iter().for_each(|edge| boundaries.insert(edge)); boundaries } } impl Topology for Faces { fn extract_boundaries(&self) -> Vec<Vec<usize>> { let mut vemap: HashMap<usize, usize> = self .face_iter() .flat_map(move |face| { let len = face.len(); (0..len).map(move |i| [face[i], face[(i + 1) % len]]) }) .collect::<Boundaries>() .boundary .into_iter() .map(|(edge, ori)| match ori { true => (edge[0], edge[1]), false => (edge[1], edge[0]), }) .collect(); let mut res = Vec::new(); while !vemap.is_empty() { let mut wire = Vec::new(); let front = vemap.iter().next().unwrap(); let front = (*front.0, *front.1); vemap.remove(&front.0); wire.push(front.0); let mut cursor = front.1; while cursor != front.0 { wire.push(cursor); cursor = vemap.remove(&cursor).unwrap_or(front.0); } res.push(wire); } res } fn shell_condition(&self) -> ShellCondition { let boundaries: Boundaries = self .face_iter() .flat_map(move |face| { let len = face.len(); (0..len).map(move |i| [face[i], face[(i + 1) % len]]) }) .collect(); boundaries.condition() } } impl Topology for PolygonMesh { fn extract_boundaries(&self) -> Vec<Vec<usize>> { self.faces().extract_boundaries() } fn shell_condition(&self) -> ShellCondition { self.faces().shell_condition() } }
match (self.checked.insert(edge), self.boundary.insert(edge, ori)) { (true, None) => ShellCondition::Oriented, (false, None) => ShellCondition::Irregular, (true, Some(_)) => panic!("unexpected case!"), (false, Some(ori0)) => { self.boundary.remove(&edge); match ori == ori0 { true => ShellCondition::Regular, false => ShellCondition::Oriented, } } }
if_condition
[ { "content": "#[inline(always)]\n\npub fn vertex(pt: Point3) -> Vertex { Vertex::new(pt) }\n\n\n\n/// Returns a line from `vertex0` to `vertex1`.\n\n/// # Examples\n\n/// ```\n\n/// use truck_modeling::*;\n\n///\n\n/// // draw a line\n\n/// let vertex0 = builder::vertex(Point3::new(1.0, 2.0, 3.0));\n\n/// let v...
Rust
qapro-rs/src/qaruntime/monitor.rs
B34nK0/QUANTAXIS
94162f0f863682e443ef8ae11f5b54da6f93421b
extern crate redis; use actix::prelude::*; use actix::{Actor, Addr, AsyncContext, Context, Handler, Recipient, Supervised}; use chrono::{Local, TimeZone}; use log::{error, info, warn}; use redis::Commands; use serde_json::Value; use std::fmt::Debug; use std::time::Duration; use uuid::Version::Mac; use crate::qaaccount::account::QA_Account; use crate::qaaccount::marketpreset::MarketPreset; use crate::qaaccount::order::QAOrder; use crate::qaconnector::mongo::mongoclient::QAMongoClient; use crate::qadata::resample::{resample_db, QARealtimeResampler}; use crate::qaenv::localenv::CONFIG; use crate::qaprotocol::mifi::qafastkline::QAKlineBase; use crate::qaprotocol::qifi::account::QIFI; use crate::qaruntime::base::{Ack, AddMonitor, Instruct, Order, QAKline, QAOrderRsp, QifiRsp}; use crate::qaruntime::qacontext::{QAContext, StrategyFunc}; use crate::qaruntime::qamanagers::monitor_manager::MonitorManager; use crate::qautil::tradedate::QATradeDate; enum StateCode {} pub struct Monitor<T> { pub qactx: QAContext, pub stg: T, pub mor_manger: Addr<MonitorManager>, pub qarere: QARealtimeResampler, ur: bool, td: QATradeDate, settle_ts: i64, qifi_ts: i64, } impl<T: 'static> Monitor<T> where T: StrategyFunc + Debug, { pub fn new(qactx: QAContext, stg: T, mor_manger: Addr<MonitorManager>) -> Self { let f = qactx.frequence.clone(); let freq = f[0..f.len() - 3].parse::<i64>().unwrap(); let qarere = QARealtimeResampler::new(freq); let u = Self { qactx, stg, mor_manger, qarere, ur: true, td: QATradeDate::new(), settle_ts: 0, qifi_ts: 0, }; u } pub fn backtest(&mut self, mongo_data: &Vec<QAKlineBase>, redis_data: &Vec<QAKlineBase>) { info!("[{}] backtest mongo...", self.qactx.account_cookie); for (realtimebar, is_last) in mongo_data .into_iter() .map(|data| (data.clone().to_bar(), data.is_last)) { println!("{:#?}", realtimebar); if !self .qactx .acc .get_tradingday() .eq(&self.td.get_trade_day(realtimebar.datetime.clone())) { self.qactx.acc.settle(); self.settle_ts = 
Local::now().timestamp(); } if is_last { self.ur = true; self.qactx.update(realtimebar.clone(), &mut self.stg); self.qactx.switch(realtimebar); } else { if self.ur { self.qactx.next(realtimebar, &mut self.stg); self.ur = false; } else { self.qactx.update(realtimebar, &mut self.stg); } } } self.qactx.acc.settle(); self.settle_ts = Local::now().timestamp(); info!("[{}] backtest mongo end", self.qactx.account_cookie); info!("[{}] backtest redis...", self.qactx.account_cookie); for (realtimebar, is_last) in redis_data .into_iter() .map(|data| (data.clone().to_bar(), data.is_last)) { if !self .qactx .acc .get_tradingday() .eq(&self.td.get_trade_day(realtimebar.datetime.clone())) { self.qactx.acc.settle(); self.settle_ts = Local::now().timestamp(); } if is_last { self.ur = true; self.qactx.update(realtimebar.clone(), &mut self.stg); self.qactx.switch(realtimebar); } else { if self.ur { self.qactx.next(realtimebar, &mut self.stg); self.ur = false; } else { self.qactx.update(realtimebar, &mut self.stg); } } } info!("[{}] backtest redis end", self.qactx.account_cookie); self.qactx .acc .to_csv(format!("{}.csv", self.qactx.account_cookie)); self.qactx.order_que.clear(); } pub fn inner_handle(&mut self, msg: QAKlineBase) { let bar = self.qarere.next(msg.to_bar()); let (is_last, data) = (bar.is_last, bar.to_bar()); if is_last { self.ur = true; self.qactx.update(data.clone(), &mut self.stg); self.qactx.switch(data); } else { if self.ur { self.qactx.next(data, &mut self.stg); self.ur = false; } else { self.qactx.update(data, &mut self.stg); } } match self.mor_manger.try_send(QAOrderRsp { data: self.qactx.order_que.clone(), }) { Err(e) => { let m = format!("pub orders fail {:?}", e.to_string()); } _ => {} } self.qactx.order_que.clear(); match self.mor_manger.try_send(QifiRsp { t: 0, data: self.qactx.acc.get_qifi_slice(), }) { Err(e) => { let m = format!("qifi save fail {:?}", e.to_string()); } _ => { self.qifi_ts = Local::now().timestamp(); } } } pub fn manual_settle(&mut self, 
instruct: Instruct) { let ts = Local::now().timestamp(); if instruct.body.eq("--force") || ts - self.settle_ts > 60 * 60 * 24 { match self.mor_manger.try_send(QifiRsp { t: 1, data: self.qactx.acc.get_qifi_slice(), }) { Ok(_) => { self.settle_ts = ts; self.qactx.acc.settle(); let m = "settle success".to_owned(); self.ack(instruct, 200, m); } Err(e) => { let m = format!("save qifi_his fail{:?}", e.to_string()); self.ack(instruct, 500, m); } } } else { let m = "last time settle < 24h, or use [--force]".to_owned(); self.ack(instruct, 400, m); } } pub fn manual_send_order(&mut self, instruct: Instruct) { match serde_json::from_str(&instruct.body) { Ok(o) => { let time = Local::now().format("%Y-%m-%d %H:%M:%S").to_string(); let code = self.qactx.code.clone(); let order: Order = o; if order.direction.eq("BUY") && order.offset.eq("OPEN") { self.qactx.buy_open(&code, order.volume, &time, order.price) } else if order.direction.eq("BUY") && order.offset.eq("CLOSE") { self.qactx .buy_close(&code, order.volume, &time, order.price) } else if order.direction.eq("SELL") && order.offset.eq("OPEN") { self.qactx .sell_open(&code, order.volume, &time, order.price) } else if order.direction.eq("SELL") && order.offset.eq("CLOSE") { self.qactx .sell_close(&code, order.volume, &time, order.price) } else { let m = "send_order fail".to_owned(); self.ack(instruct, 400, m); return; } let m = "send_order success".to_owned(); self.ack(instruct, 200, m); } Err(e) => { let m = format!("Instruct order parse fail {}", e.to_string()); self.ack(instruct, 400, m); } } } pub fn get_clock(&mut self, instruct: Instruct) { match instruct.body.as_str() { "stg_status" => println!("{:#?}", &self.stg), _ => {} } self.ack(instruct, 200, self.qactx.clock.clone()); } pub fn ack(&mut self, instruct: Instruct, status: i32, ack: String) { match self.mor_manger.try_send(Ack { id: instruct.id.clone(), status, ack, answerer: self.qactx.account_cookie.clone(), }) { Err(e) => { let s = format!("[{}] ack fail {}", 
self.qactx.account_cookie, e.to_string()); println!("{:#?}", &s); } _ => {} }; } } impl<T: 'static> Actor for Monitor<T> where T: StrategyFunc + Debug, { type Context = Context<Self>; fn started(&mut self, ctx: &mut Self::Context) { ctx.set_mailbox_capacity(10000); match self.mor_manger.try_send(AddMonitor { account_cookie: self.qactx.account_cookie.clone(), rec: ctx.address().recipient().clone(), }) { Err(e) => error!("monitor register fail {:?}", e.to_string()), _ => {} } ctx.run_interval(Duration::from_secs(30), |mor, ctx| { let t = Local::now().timestamp(); if t - mor.qifi_ts > 30 { match mor.mor_manger.try_send(QifiRsp { t: 0, data: mor.qactx.acc.get_qifi_slice(), }) { Err(e) => error!("heartbeat save qifi fail {:?}", e.to_string()), _ => {} } } }); } } impl<T: 'static> Handler<QAKline> for Monitor<T> where T: StrategyFunc + Debug, { type Result = (); fn handle(&mut self, msg: QAKline, ctx: &mut Context<Self>) -> Self::Result { self.inner_handle(msg.data); } } impl<T: 'static> Handler<Instruct> for Monitor<T> where T: StrategyFunc + Debug, { type Result = (); fn handle(&mut self, msg: Instruct, ctx: &mut Context<Self>) -> Self::Result { match msg.topic.as_str() { "settle" => { self.manual_settle(msg); } "send_order" => { self.manual_send_order(msg); } "clock" => { self.get_clock(msg); } _ => {} } } } impl<T: 'static> Unpin for Monitor<T> where T: StrategyFunc + Debug {} impl<T: 'static> Supervised for Monitor<T> where T: StrategyFunc + Debug, { fn restarting(&mut self, _: &mut actix::Context<Self>) { warn!("[{}] Restarting!!!", self.qactx.account_cookie); } }
extern crate redis; use actix::prelude::*; use actix::{Actor, Addr, AsyncContext, Context, Handler, Recipient, Supervised}; use chrono::{Local, TimeZone}; use log::{error, info, warn}; use redis::Commands; use serde_json::Value; use std::fmt::Debug; use std::time::Duration; use uuid::Version::Mac; use crate::qaaccount::account::QA_Account; use crate::qaaccount::marketpreset::MarketPreset; use crate::qaaccount::order::QAOrder; use crate::qaconnector::mongo::mongoclient::QAMongoClient; use crate::qadata::resample::{resample_db, QARealtimeResampler}; use crate::qaenv::localenv::CONFIG; use crate::qaprotocol::mifi::qafastkline::QAKlineBase; use crate::qaprotocol::qifi::account::QIFI; use crate::qaruntime::base::{Ack, AddMonitor, Instruct, Order, QAKline, QAOrderRsp, QifiRsp}; use crate::qaruntime::qacontext::{QAContext, StrategyFunc}; use crate::qaruntime::qamanagers::monitor_manager::MonitorManager; use crate::qautil::tradedate::QATradeDate; enum StateCode {} pub struct Monitor<T> { pub qactx: QAContext, pub stg: T, pub mor_manger: Addr<MonitorManager>, pub qarere: QARealtimeResampler, ur: bool, td: QATradeDate, settle_ts: i64, qifi_ts: i64, } impl<T: 'static> Monitor<T> where T: StrategyFunc + Debug, { pub fn new(qactx: QAContext, stg: T, mor_manger: Addr<MonitorManager>) -> Self { let f = qactx.frequence.clone(); let freq = f[0..f.len() - 3].parse::<i64>().unwrap(); let qarere = QARealtimeResampler::new(freq); let u = Self { qactx, stg, mor_manger, qarere, ur: true, td: QATradeDate::new(), settle_ts: 0, qifi_ts: 0, }; u } pub fn backtest(&mut self, mongo_data: &Vec<QAKlineBase>, redis_data: &Vec<QAKlineBase>) { info!("[{}] backtest mongo...", self.qactx.account_cookie); for (realtimebar, is_last) in mongo_data .into_iter() .map(|data| (data.clone().to_bar(), data.is_last)) { println!("{:#?}", realtimebar); if !self .qactx .acc .get_tradingday() .eq(&self.td.get_trade_day(realtimebar.datetime.clone())) { self.qactx.acc.settle(); self.settle_ts = 
Local::now().timestamp(); } if is_last { self.ur = true; self.qactx.update(realtimebar.clone(), &mut self.stg); self.qactx.switch(realtimebar); } else { if self.ur { self.qactx.next(realtimebar, &mut self.stg); self.ur = false; } else { self.qactx.update(realtimebar, &mut self.stg); } } } self.qactx.acc.settle(); self.settle_ts = Local::now().timestamp(); info!("[{}] backtest mongo end", self.qactx.account_cookie); info!("[{}] backtest redis...", self.qactx.account_cookie); for (realtimebar, is_last) in redis_data .into_iter() .map(|data| (data.clone().to_bar(), data.is_last)) { if !self .qactx .acc .get_tradingday() .eq(&self.td.get_trade_day(realtimebar.datetime.clone())) { self.qactx.acc.settle(); self.settle_ts = Local::now().timestamp(); } if is_last { self.ur = true; self.qactx.update(realtimebar.clone(), &mut self.stg); self.qactx.switch(realtimebar); } else { if self.ur { self.qactx.next(realtimebar, &mut self.stg); self.ur = false; } else { self.qactx.update(realtimebar, &mut self.stg); } } } info!("[{}] backtest redis end", self.qactx.account_cookie); self.qactx .acc .to_csv(format!("{}.csv", self.qactx.account_cookie)); self.qactx.order_que.clear(); } pub fn inner_handle(&mut self, msg: QAKlineBase) { let bar = self.qarere.next(msg.to_bar()); let (is_last, data) = (bar.is_last, bar.to_bar()); if is_last { self.ur = true; self.qactx.update(data.clone(), &mut self.stg); self.qactx.switch(data); } else { if self.ur { self.qactx.next(data, &mut self.stg); self.ur = false; } else { self.qactx.update(data, &mut self.stg); } } match self.mor_manger.try_send(QAOrderRsp { data: self.qactx.order_que.clone(), }) { Err(e) => { let m = format!("pub orders fail {:?}", e.to_string()); } _ => {} } self.qactx.order_que.clear(); match self.mor_manger.try_send(QifiRsp { t: 0, data: self.qactx.acc.get_qifi_slice(), }) { Err(e) => { let m = format!("qifi save fail {:?}", e.to_string()); } _ => { self.qifi_ts = Local::now().timestamp(); } } } pub fn manual_settle(&mut self, 
instruct: Instruct) { let ts = Local::now().timestamp(); if instruct.body.eq("--force") || ts - self.settle_ts > 60 * 60 * 24 { match self.mor_manger.try_send(QifiRsp { t: 1, data: self.qactx.acc.get_qifi_slice(), }) { Ok(_) => { self.settle_ts = ts; self.qactx.acc.settle(); let m = "settle success".to_owned(); self.ack(instruct, 200, m); } Err(e) => { let m = format!("save qifi_his fail{:?}", e.to_string()); self.ack(instruct, 500, m); } } } else { let m = "last time settle < 24h, or use [--force]".to_owned(); self.ack(instruct, 400, m); } } pub fn manual_send_order(&mut self, instruct: Instruct) { match serde_json::from_str(&instruct.body) { Ok(o) => { let time = Local::now().format("%Y-%m-%d %H:%M:%S").to_string(); let code = self.qactx.code.clone(); let order: Order = o; if order.direction.eq("BUY") && order.offset.eq("OPEN") { self.qactx.buy_open(&code, order.volume, &time, order.price) } else if order.direction.eq("BUY") && order.offset.eq("CLOSE") { self.qactx .buy_close(&code, order.volume, &time, order.price) } else if order.direction.eq("SELL") && order.offset.eq("OPEN") { self.qactx .sell_open(&code, order.volume, &time, order.price) } else
let m = "send_order success".to_owned(); self.ack(instruct, 200, m); } Err(e) => { let m = format!("Instruct order parse fail {}", e.to_string()); self.ack(instruct, 400, m); } } } pub fn get_clock(&mut self, instruct: Instruct) { match instruct.body.as_str() { "stg_status" => println!("{:#?}", &self.stg), _ => {} } self.ack(instruct, 200, self.qactx.clock.clone()); } pub fn ack(&mut self, instruct: Instruct, status: i32, ack: String) { match self.mor_manger.try_send(Ack { id: instruct.id.clone(), status, ack, answerer: self.qactx.account_cookie.clone(), }) { Err(e) => { let s = format!("[{}] ack fail {}", self.qactx.account_cookie, e.to_string()); println!("{:#?}", &s); } _ => {} }; } } impl<T: 'static> Actor for Monitor<T> where T: StrategyFunc + Debug, { type Context = Context<Self>; fn started(&mut self, ctx: &mut Self::Context) { ctx.set_mailbox_capacity(10000); match self.mor_manger.try_send(AddMonitor { account_cookie: self.qactx.account_cookie.clone(), rec: ctx.address().recipient().clone(), }) { Err(e) => error!("monitor register fail {:?}", e.to_string()), _ => {} } ctx.run_interval(Duration::from_secs(30), |mor, ctx| { let t = Local::now().timestamp(); if t - mor.qifi_ts > 30 { match mor.mor_manger.try_send(QifiRsp { t: 0, data: mor.qactx.acc.get_qifi_slice(), }) { Err(e) => error!("heartbeat save qifi fail {:?}", e.to_string()), _ => {} } } }); } } impl<T: 'static> Handler<QAKline> for Monitor<T> where T: StrategyFunc + Debug, { type Result = (); fn handle(&mut self, msg: QAKline, ctx: &mut Context<Self>) -> Self::Result { self.inner_handle(msg.data); } } impl<T: 'static> Handler<Instruct> for Monitor<T> where T: StrategyFunc + Debug, { type Result = (); fn handle(&mut self, msg: Instruct, ctx: &mut Context<Self>) -> Self::Result { match msg.topic.as_str() { "settle" => { self.manual_settle(msg); } "send_order" => { self.manual_send_order(msg); } "clock" => { self.get_clock(msg); } _ => {} } } } impl<T: 'static> Unpin for Monitor<T> where T: 
StrategyFunc + Debug {} impl<T: 'static> Supervised for Monitor<T> where T: StrategyFunc + Debug, { fn restarting(&mut self, _: &mut actix::Context<Self>) { warn!("[{}] Restarting!!!", self.qactx.account_cookie); } }
if order.direction.eq("SELL") && order.offset.eq("CLOSE") { self.qactx .sell_close(&code, order.volume, &time, order.price) } else { let m = "send_order fail".to_owned(); self.ack(instruct, 400, m); return; }
if_condition
[ { "content": "pub fn set_bar(redis: Addr<RedisActor>, key: &str, data: String) {\n\n info!(\"write data to redis\");\n\n let fut1 = redis.do_send(Command(resp_array![\"set\", key, data]));\n\n}\n\n\n\npub async fn set_bar_async(redis: Addr<RedisActor>, key: String, data: String) {\n\n let keys = key.as...
Rust
src/integer/conversions.rs
declanvk/rimath
e0dbb421d6da09a8b5e92fc9f0d00e5a454e3118
use crate::{error::Error, integer::Integer}; use core::convert::{TryFrom, TryInto}; impl From<i8> for Integer { fn from(src: i8) -> Self { Self::from_c_long(src) } } impl From<&i8> for Integer { fn from(src: &i8) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for i8 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for i8 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl From<u8> for Integer { fn from(src: u8) -> Self { Self::from_c_long(src) } } impl From<&u8> for Integer { fn from(src: &u8) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for u8 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for u8 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl From<i16> for Integer { fn from(src: i16) -> Self { Self::from_c_long(src) } } impl From<&i16> for Integer { fn from(src: &i16) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for i16 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for i16 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl From<u16> for Integer { fn from(src: u16) -> Self { Self::from_c_long(src) } } impl From<&u16> for Integer { fn from(src: &u16) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for u16 { type Error = Error; fn try_from(src: Integer) -> 
Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for u16 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl From<i32> for Integer { fn from(src: i32) -> Self { Self::from_c_long(src) } } impl From<&i32> for Integer { fn from(src: &i32) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for i32 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for i32 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } cfg_if::cfg_if! { if #[cfg(all(target_pointer_width = "64", not(windows)))] { impl From<u32> for Integer { fn from(src: u32) -> Self { Self::from_c_long(src) } } impl From<&u32> for Integer { fn from(src: &u32) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for u32 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for u32 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl From<i64> for Integer { fn from(src: i64) -> Self { Self::from_c_long(src) } } impl From<&i64> for Integer { fn from(src: &i64) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for i64 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for i64 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() 
.and_then(|value| value.try_into().map_err(From::from)) } } } else { impl From<u32> for Integer { fn from(src: u32) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl From<&u32> for Integer { fn from(src: &u32) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl TryFrom<Integer> for u32 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.to_string().parse().map_err(|_| Error::ConversionOutsideRange) } } impl TryFrom<&Integer> for u32 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.to_string().parse().map_err(|_| Error::ConversionOutsideRange) } } impl From<i64> for Integer { fn from(src: i64) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl From<&i64> for Integer { fn from(src: &i64) -> Self { Self::from_string_repr(*src, 10).expect("Conversion from string failed") } } impl TryFrom<Integer> for i64 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.to_string().parse().map_err(|_| Error::ConversionOutsideRange) } } impl TryFrom<&Integer> for i64 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.to_string().parse().map_err(|_| Error::ConversionOutsideRange) } } } } impl From<u64> for Integer { fn from(src: u64) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl From<&u64> for Integer { fn from(src: &u64) -> Self { Self::from_string_repr(*src, 10).expect("Conversion from string failed") } } impl TryFrom<Integer> for u64 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } impl TryFrom<&Integer> for u64 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } impl From<i128> for Integer { 
fn from(src: i128) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl From<&i128> for Integer { fn from(src: &i128) -> Self { Self::from_string_repr(*src, 10).expect("Conversion from string failed") } } impl TryFrom<Integer> for i128 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } impl TryFrom<&Integer> for i128 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } impl From<u128> for Integer { fn from(src: u128) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl From<&u128> for Integer { fn from(src: &u128) -> Self { Self::from_string_repr(*src, 10).expect("Conversion from string failed") } } impl TryFrom<Integer> for u128 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } impl TryFrom<&Integer> for u128 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } #[cfg(test)] mod test { use super::*; use core::{convert::TryFrom, str::FromStr}; #[test] fn conversion_to_primitive() { let valid_i8 = Integer::from_str("56").unwrap(); let invalid_i8 = Integer::from_str("129").unwrap(); assert_eq!(TryFrom::try_from(valid_i8), Ok(56i8)); assert_eq!( TryFrom::try_from(invalid_i8), Err(Error::ConversionOutsideRange) as Result<i8, _> ); let valid_i32 = Integer::from_str("-2147483648").unwrap(); let invalid_i32 = Integer::from_str("-2147483649").unwrap(); assert_eq!(TryFrom::try_from(valid_i32), Ok(-2_147_483_648i32)); assert_eq!( TryFrom::try_from(invalid_i32), Err(Error::ConversionOutsideRange) as Result<i32, _> ); let valid_i128 = Integer::from_str("170141183460469231731687303715884105727").unwrap(); let 
invalid_i128 = Integer::from_str("170141183460469231731687303715884105728").unwrap(); assert_eq!( TryFrom::try_from(valid_i128), Ok(170_141_183_460_469_231_731_687_303_715_884_105_727i128) ); assert_eq!( TryFrom::try_from(invalid_i128), Err(Error::ConversionOutsideRange) as Result<i128, _> ); } }
use crate::{error::Error, integer::Integer}; use core::convert::{TryFrom, TryInto}; impl From<i8> for Integer { fn from(src: i8) -> Self { Self::from_c_long(src) } } impl From<&i8> for Integer { fn from(src: &i8) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for i8 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for i8 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl From<u8> for Integer { fn from(src: u8) -> Self { Self::from_c_long(src) } } impl From<&u8> for Integer { fn from(src: &u8) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for u8 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for u8 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl From<i16> for Integer { fn from(src: i16) -> Self { Self::from_c_long(src) } } impl From<&i16> for Integer { fn from(src: &i16) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for i16 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from))
fn from(src: &i128) -> Self { Self::from_string_repr(*src, 10).expect("Conversion from string failed") } } impl TryFrom<Integer> for i128 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } impl TryFrom<&Integer> for i128 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } impl From<u128> for Integer { fn from(src: u128) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl From<&u128> for Integer { fn from(src: &u128) -> Self { Self::from_string_repr(*src, 10).expect("Conversion from string failed") } } impl TryFrom<Integer> for u128 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } impl TryFrom<&Integer> for u128 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } #[cfg(test)] mod test { use super::*; use core::{convert::TryFrom, str::FromStr}; #[test] fn conversion_to_primitive() { let valid_i8 = Integer::from_str("56").unwrap(); let invalid_i8 = Integer::from_str("129").unwrap(); assert_eq!(TryFrom::try_from(valid_i8), Ok(56i8)); assert_eq!( TryFrom::try_from(invalid_i8), Err(Error::ConversionOutsideRange) as Result<i8, _> ); let valid_i32 = Integer::from_str("-2147483648").unwrap(); let invalid_i32 = Integer::from_str("-2147483649").unwrap(); assert_eq!(TryFrom::try_from(valid_i32), Ok(-2_147_483_648i32)); assert_eq!( TryFrom::try_from(invalid_i32), Err(Error::ConversionOutsideRange) as Result<i32, _> ); let valid_i128 = Integer::from_str("170141183460469231731687303715884105727").unwrap(); let invalid_i128 = Integer::from_str("170141183460469231731687303715884105728").unwrap(); assert_eq!( TryFrom::try_from(valid_i128), 
Ok(170_141_183_460_469_231_731_687_303_715_884_105_727i128) ); assert_eq!( TryFrom::try_from(invalid_i128), Err(Error::ConversionOutsideRange) as Result<i128, _> ); } }
} } impl TryFrom<&Integer> for i16 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl From<u16> for Integer { fn from(src: u16) -> Self { Self::from_c_long(src) } } impl From<&u16> for Integer { fn from(src: &u16) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for u16 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for u16 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl From<i32> for Integer { fn from(src: i32) -> Self { Self::from_c_long(src) } } impl From<&i32> for Integer { fn from(src: &i32) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for i32 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for i32 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } cfg_if::cfg_if! 
{ if #[cfg(all(target_pointer_width = "64", not(windows)))] { impl From<u32> for Integer { fn from(src: u32) -> Self { Self::from_c_long(src) } } impl From<&u32> for Integer { fn from(src: &u32) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for u32 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for u32 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl From<i64> for Integer { fn from(src: i64) -> Self { Self::from_c_long(src) } } impl From<&i64> for Integer { fn from(src: &i64) -> Self { Self::from_c_long(*src) } } impl TryFrom<Integer> for i64 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } impl TryFrom<&Integer> for i64 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.try_into_c_long() .and_then(|value| value.try_into().map_err(From::from)) } } } else { impl From<u32> for Integer { fn from(src: u32) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl From<&u32> for Integer { fn from(src: &u32) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl TryFrom<Integer> for u32 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.to_string().parse().map_err(|_| Error::ConversionOutsideRange) } } impl TryFrom<&Integer> for u32 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.to_string().parse().map_err(|_| Error::ConversionOutsideRange) } } impl From<i64> for Integer { fn from(src: i64) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl From<&i64> for Integer { fn from(src: &i64) -> 
Self { Self::from_string_repr(*src, 10).expect("Conversion from string failed") } } impl TryFrom<Integer> for i64 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.to_string().parse().map_err(|_| Error::ConversionOutsideRange) } } impl TryFrom<&Integer> for i64 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.to_string().parse().map_err(|_| Error::ConversionOutsideRange) } } } } impl From<u64> for Integer { fn from(src: u64) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl From<&u64> for Integer { fn from(src: &u64) -> Self { Self::from_string_repr(*src, 10).expect("Conversion from string failed") } } impl TryFrom<Integer> for u64 { type Error = Error; fn try_from(src: Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } impl TryFrom<&Integer> for u64 { type Error = Error; fn try_from(src: &Integer) -> Result<Self, Self::Error> { src.to_string() .parse() .map_err(|_| Error::ConversionOutsideRange) } } impl From<i128> for Integer { fn from(src: i128) -> Self { Self::from_string_repr(src, 10).expect("Conversion from string failed") } } impl From<&i128> for Integer {
random
[ { "content": "fn fib_iter() -> impl Iterator<Item = Integer> {\n\n struct FibIter {\n\n x: Integer,\n\n y: Integer,\n\n }\n\n\n\n impl Iterator for FibIter {\n\n type Item = Integer;\n\n\n\n fn next(&mut self) -> Option<Integer> {\n\n let next_y = &self.x + &self....
Rust
examples/pong.rs
legendiguess/example-rs
c401fcc7b08556cd6f5bae71986b727ffced6578
use ggez::{ event::{self, EventHandler, KeyCode, KeyMods}, graphics::{self, DrawMode, DrawParam, FilterMode, Mesh, MeshBuilder, Rect, Text}, nalgebra as na, Context, ContextBuilder, GameResult, }; use mun_examples::marshal_vec2; use mun_runtime::{invoke_fn, RetryResultExt, Runtime, RuntimeBuilder, StructRef}; use rand::Rng; use std::{cell::RefCell, rc::Rc}; extern "C" fn rand_f32() -> f32 { let mut rng = rand::thread_rng(); rng.gen() } fn main() { let (mut ctx, mut event_loop) = ContextBuilder::new("Pong", "Mun Team") .build() .expect("Failed to initialize ggez"); let runtime = RuntimeBuilder::new("pong.munlib") .insert_fn("rand_f32", rand_f32 as extern "C" fn() -> f32) .spawn() .expect("Failed to load munlib"); let state: StructRef = invoke_fn!(runtime, "new_state").wait(); let mut pong = PongGame { runtime, state }; match event::run(&mut ctx, &mut event_loop, &mut pong) { Ok(_) => (), Err(e) => println!("Error occurred: {}", e), } } struct PongGame { runtime: Rc<RefCell<Runtime>>, state: StructRef, } impl EventHandler for PongGame { fn key_down_event( &mut self, ctx: &mut Context, keycode: KeyCode, _keymods: KeyMods, _repeat: bool, ) { match keycode { KeyCode::W => { let mut paddle = self.state.get::<StructRef>("paddle_left").unwrap(); paddle.set("move_up", true).unwrap(); } KeyCode::S => { let mut paddle = self.state.get::<StructRef>("paddle_left").unwrap(); paddle.set("move_down", true).unwrap(); } KeyCode::Up => { let mut paddle = self.state.get::<StructRef>("paddle_right").unwrap(); paddle.set("move_up", true).unwrap(); } KeyCode::Down => { let mut paddle = self.state.get::<StructRef>("paddle_right").unwrap(); paddle.set("move_down", true).unwrap(); } KeyCode::Escape => { event::quit(ctx); } _ => (), } } fn key_up_event(&mut self, _ctx: &mut Context, keycode: KeyCode, _keymods: KeyMods) { match keycode { KeyCode::W => { let mut paddle = self.state.get::<StructRef>("paddle_left").unwrap(); paddle.set("move_up", false).unwrap(); } KeyCode::S => { let mut paddle 
= self.state.get::<StructRef>("paddle_left").unwrap(); paddle.set("move_down", false).unwrap(); } KeyCode::Up => { let mut paddle = self.state.get::<StructRef>("paddle_right").unwrap(); paddle.set("move_up", false).unwrap(); } KeyCode::Down => { let mut paddle = self.state.get::<StructRef>("paddle_right").unwrap(); paddle.set("move_down", false).unwrap(); } _ => (), } } fn update(&mut self, _ctx: &mut ggez::Context) -> ggez::GameResult { let _: () = invoke_fn!(self.runtime, "update", self.state.clone()).wait(); self.runtime.borrow_mut().update(); Ok(()) } fn draw(&mut self, ctx: &mut ggez::Context) -> ggez::GameResult { graphics::clear(ctx, graphics::BLACK); let ball = self.state.get::<StructRef>("ball").unwrap(); let paddle_left = self.state.get::<StructRef>("paddle_left").unwrap(); let paddle_right = self.state.get::<StructRef>("paddle_right").unwrap(); let ball_mesh = MeshBuilder::new() .circle( DrawMode::fill(), na::Point2::origin(), invoke_fn!(self.runtime, "ball_radius").unwrap(), invoke_fn!(self.runtime, "ball_tolerance").unwrap(), graphics::WHITE, ) .build(ctx)?; draw_mesh(ctx, &ball_mesh, &ball)?; let paddle_mesh = MeshBuilder::new() .rectangle( DrawMode::fill(), bounds( invoke_fn!(self.runtime, "paddle_width").unwrap(), invoke_fn!(self.runtime, "paddle_height").unwrap(), ), graphics::WHITE, ) .build(ctx)?; draw_mesh(ctx, &paddle_mesh, &paddle_left)?; draw_mesh(ctx, &paddle_mesh, &paddle_right)?; queue_score_text( ctx, &paddle_left, marshal_vec2(&invoke_fn!(self.runtime, "left_score_pos").unwrap()), ); queue_score_text( ctx, &paddle_right, marshal_vec2(&invoke_fn!(self.runtime, "right_score_pos").unwrap()), ); graphics::draw_queued_text(ctx, DrawParam::default(), None, FilterMode::Linear)?; graphics::present(ctx)?; Ok(()) } } fn bounds(width: f32, height: f32) -> Rect { Rect::new(0.0, 0.0, width, height) } fn draw_mesh(ctx: &mut Context, mesh: &Mesh, object: &StructRef) -> GameResult { graphics::draw( ctx, mesh, ( marshal_vec2(&object.get("pos").unwrap()), 
0.0, graphics::WHITE, ), ) } fn queue_score_text(ctx: &mut Context, paddle: &StructRef, score_pos: na::Point2<f32>) { let score = paddle.get::<u32>("score").unwrap(); let score_text = Text::new(score.to_string()); graphics::queue_text(ctx, &score_text, score_pos, Some(graphics::WHITE)); }
use ggez::{ event::{self, EventHandler, KeyCode, KeyMods}, graphics::{self, DrawMode, DrawParam, FilterMode, Mesh, MeshBuilder, Rect, Text}, nalgebra as na, Context, ContextBuilder, GameResult, }; use mun_examples::marshal_vec2; use mun_runtime::{invoke_fn, RetryResultExt, Runtime, RuntimeBuilder, StructRef}; use rand::Rng; use std::{cell::RefCell, rc::Rc}; extern "C" fn rand_f32() -> f32 { let mut rng = rand::thread_rng(); rng.gen() } fn main() { let (mut ctx, mut event_loop) = ContextBuilder::new("Pong", "Mun Team") .build() .expect("Failed to initialize ggez"); let runtime = RuntimeBuilder::new("pong.munlib") .insert_fn("rand_f32", rand_f32 as extern "C" fn() -> f32) .spawn() .expect("Failed to load munlib"); let state: StructRef = invoke_fn!(runtime, "new_state").wait(); let mut pong = PongGame { runtime, state }; match event::run(&mut ctx, &mut event_loop, &mut pong) { Ok(_) => (), Err(e) => println!("Error occurred: {}", e), } } struct PongGame { runtime: Rc<RefCell<Runtime>>, state: StructRef, } impl EventHandler for PongGame { fn key_down_event( &mut self, ctx: &mut Context, keycode: KeyCode, _keymods: KeyMods, _repeat: bool, ) { match keycode { KeyCode::W => { let mut paddle = self.state.get::<StructRef>("paddle_left").unwrap(); paddle.set("move_up", true).unwrap(); } KeyCode::S => { let mut paddle = self.state.get::<StructRef>("paddle_left").unwrap(); paddle.set("move_down", true).unwrap(); } KeyCode::Up => { let mut paddle = self.state.get::<StructRef>("paddle_right").unwrap(); paddle.set("move_up", true).unwrap(); } KeyCode::Down => { let mut paddle = self.state.get::<StructRef>("paddle_right").unwrap(); paddle.set("move_down", true).unwrap(); } KeyCode::Escape => { event::quit(ctx); } _ => (), } } fn key_up_event(&mut self, _ctx: &mut Context, keycode: KeyCode, _keymods: KeyMods) { match keycode { KeyCode::W => { let mut paddle = self.state.get::<StructRef>("paddle_left").unwrap(); paddle.set("move_up", false).unwrap(); } KeyCode::S => { let mut paddle 
= self.state.get::<StructRef>("paddle_left").unwrap(); paddle.set("move_down", false).unwrap(); } KeyCode::Up => { let mut paddle = self.state.get::<StructRef>("paddle_right").unwrap(); paddle.set("move_up", false).unwrap(); } KeyCode::Down => { let mut paddle = self.state.get::<StructRef>("paddle_right").unwrap(); paddle.set("move_down", false).unwrap(); } _ => (), } } fn update(&mut self, _ctx: &mut ggez::Context) -> ggez::GameResult { let _: () = invoke_fn!(self.runtime, "update", self.state.clone()).wait(); self.runtime.borrow_mut().update(); Ok(()) } fn draw(&mut self, ctx: &mut ggez::Context) -> ggez::GameResult { graphics::clear(ctx, graphics::BLACK); let ball = self.state.get::<StructRef>("ball").unwrap(); let paddle_left = self.state.get::<StructRef>("paddle_left").unwrap(); let paddle_right = self.state.get::<StructRef>("paddle_right").unwrap(); let ball_mesh = MeshBuilder::ne
} fn bounds(width: f32, height: f32) -> Rect { Rect::new(0.0, 0.0, width, height) } fn draw_mesh(ctx: &mut Context, mesh: &Mesh, object: &StructRef) -> GameResult { graphics::draw( ctx, mesh, ( marshal_vec2(&object.get("pos").unwrap()), 0.0, graphics::WHITE, ), ) } fn queue_score_text(ctx: &mut Context, paddle: &StructRef, score_pos: na::Point2<f32>) { let score = paddle.get::<u32>("score").unwrap(); let score_text = Text::new(score.to_string()); graphics::queue_text(ctx, &score_text, score_pos, Some(graphics::WHITE)); }
w() .circle( DrawMode::fill(), na::Point2::origin(), invoke_fn!(self.runtime, "ball_radius").unwrap(), invoke_fn!(self.runtime, "ball_tolerance").unwrap(), graphics::WHITE, ) .build(ctx)?; draw_mesh(ctx, &ball_mesh, &ball)?; let paddle_mesh = MeshBuilder::new() .rectangle( DrawMode::fill(), bounds( invoke_fn!(self.runtime, "paddle_width").unwrap(), invoke_fn!(self.runtime, "paddle_height").unwrap(), ), graphics::WHITE, ) .build(ctx)?; draw_mesh(ctx, &paddle_mesh, &paddle_left)?; draw_mesh(ctx, &paddle_mesh, &paddle_right)?; queue_score_text( ctx, &paddle_left, marshal_vec2(&invoke_fn!(self.runtime, "left_score_pos").unwrap()), ); queue_score_text( ctx, &paddle_right, marshal_vec2(&invoke_fn!(self.runtime, "right_score_pos").unwrap()), ); graphics::draw_queued_text(ctx, DrawParam::default(), None, FilterMode::Linear)?; graphics::present(ctx)?; Ok(()) }
function_block-function_prefixed
[ { "content": "fn textures(ctx: &mut Context) -> [(Texture, Vec2<f32>); 5] {\n\n [\n\n (\n\n Texture::new(ctx, \"./assets/spaceship/sprites/spaceship.png\").unwrap(),\n\n Vec2::new(6., 7.),\n\n ),\n\n (\n\n Texture::new(ctx, \"./assets/spaceship/sprites/ro...
Rust
libzmq/src/socket/dish.rs
dmweis/libzmq-rs
8b8384c3f65960d9c842e9c8d54883239ad47d33
use crate::{ addr::Endpoint, auth::*, core::*, error::*, Ctx, CtxHandle, Group, GroupSlice, }; use libzmq_sys as sys; use sys::errno; use serde::{Deserialize, Serialize}; use std::{ ffi::c_void, str, sync::{Arc, Mutex}, }; fn join(socket_mut_ptr: *mut c_void, group: &GroupSlice) -> Result<(), Error> { let rc = unsafe { sys::zmq_join(socket_mut_ptr, group.as_c_str().as_ptr()) }; if rc == -1 { let errno = unsafe { sys::zmq_errno() }; let err = match errno { errno::EINVAL => { Error::new(ErrorKind::InvalidInput("cannot join group twice")) } errno::ETERM => Error::new(ErrorKind::InvalidCtx), errno::EINTR => Error::new(ErrorKind::Interrupted), errno::ENOTSOCK => panic!("invalid socket"), errno::EMTHREAD => panic!("no i/o thread available"), _ => panic!(msg_from_errno(errno)), }; Err(err) } else { Ok(()) } } fn leave(socket_mut_ptr: *mut c_void, group: &GroupSlice) -> Result<(), Error> { let rc = unsafe { sys::zmq_leave(socket_mut_ptr, group.as_c_str().as_ptr()) }; if rc == -1 { let errno = unsafe { sys::zmq_errno() }; let err = match errno { errno::EINVAL => Error::new(ErrorKind::InvalidInput( "cannot leave a group that wasn't joined", )), errno::ETERM => Error::new(ErrorKind::InvalidCtx), errno::EINTR => Error::new(ErrorKind::Interrupted), errno::ENOTSOCK => panic!("invalid socket"), errno::EMTHREAD => panic!("no i/o thread available"), _ => panic!(msg_from_errno(errno)), }; Err(err) } else { Ok(()) } } #[derive(Debug, Clone)] pub struct Dish { inner: Arc<RawSocket>, groups: Arc<Mutex<Vec<Group>>>, } impl Dish { pub fn new() -> Result<Self, Error> { let inner = Arc::new(RawSocket::new(RawSocketType::Dish)?); Ok(Self { inner, groups: Arc::default(), }) } pub fn with_ctx(handle: CtxHandle) -> Result<Self, Error> { let inner = Arc::new(RawSocket::with_ctx(RawSocketType::Dish, handle)?); Ok(Self { inner, groups: Arc::default(), }) } pub fn ctx(&self) -> CtxHandle { self.inner.ctx() } pub fn join<G>(&self, group: G) -> Result<(), Error> where G: Into<Group>, { let mut guard 
= self.groups.lock().unwrap(); let group = group.into(); join(self.raw_socket().as_mut_ptr(), &group)?; guard.push(group); Ok(()) } pub fn joined(&self) -> Vec<Group> { self.groups.lock().unwrap().to_owned() } pub fn leave<G>(&self, group: G) -> Result<(), Error> where G: AsRef<GroupSlice>, { let mut guard = self.groups.lock().unwrap(); let group = group.as_ref(); leave(self.raw_socket().as_mut_ptr(), group)?; let position = guard.iter().position(|g| g == group).unwrap(); guard.remove(position); Ok(()) } } impl PartialEq for Dish { fn eq(&self, other: &Dish) -> bool { self.inner == other.inner } } impl Eq for Dish {} impl GetRawSocket for Dish { fn raw_socket(&self) -> &RawSocket { &self.inner } } impl Socket for Dish {} impl RecvMsg for Dish {} unsafe impl Send for Dish {} unsafe impl Sync for Dish {} #[derive(Debug, Default, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[serde(from = "FlatDishConfig")] #[serde(into = "FlatDishConfig")] pub struct DishConfig { socket_config: SocketConfig, recv_config: RecvConfig, groups: Option<Vec<Group>>, } impl DishConfig { pub fn new() -> Self { Self::default() } pub fn build(&self) -> Result<Dish, Error> { self.with_ctx(Ctx::global()) } pub fn with_ctx(&self, handle: CtxHandle) -> Result<Dish, Error> { let dish = Dish::with_ctx(handle)?; self.apply(&dish)?; Ok(dish) } pub fn groups(&self) -> Option<&[Group]> { self.groups.as_deref() } pub fn set_groups<I>(&mut self, maybe_groups: Option<I>) where I: IntoIterator<Item = Group>, { let groups = maybe_groups.map(|g| g.into_iter().collect()); self.groups = groups; } pub fn apply(&self, dish: &Dish) -> Result<(), Error> { if let Some(ref groups) = self.groups { for group in groups { dish.join(group)?; } } self.recv_config.apply(dish)?; self.socket_config.apply(dish)?; Ok(()) } } #[derive(Clone, Serialize, Deserialize)] struct FlatDishConfig { connect: Option<Vec<Endpoint>>, bind: Option<Vec<Endpoint>>, recv_hwm: HighWaterMark, recv_timeout: Period, groups: 
Option<Vec<Group>>, mechanism: Option<Mechanism>, } impl From<DishConfig> for FlatDishConfig { fn from(config: DishConfig) -> Self { let socket_config = config.socket_config; let recv_config = config.recv_config; Self { connect: socket_config.connect, bind: socket_config.bind, mechanism: socket_config.mechanism, recv_hwm: recv_config.recv_hwm, recv_timeout: recv_config.recv_timeout, groups: config.groups, } } } impl From<FlatDishConfig> for DishConfig { fn from(flat: FlatDishConfig) -> Self { let socket_config = SocketConfig { connect: flat.connect, bind: flat.bind, mechanism: flat.mechanism, }; let recv_config = RecvConfig { recv_hwm: flat.recv_hwm, recv_timeout: flat.recv_timeout, }; Self { socket_config, recv_config, groups: flat.groups, } } } impl GetSocketConfig for DishConfig { fn socket_config(&self) -> &SocketConfig { &self.socket_config } fn socket_config_mut(&mut self) -> &mut SocketConfig { &mut self.socket_config } } impl ConfigureSocket for DishConfig {} impl GetRecvConfig for DishConfig { fn recv_config(&self) -> &RecvConfig { &self.recv_config } fn recv_config_mut(&mut self) -> &mut RecvConfig { &mut self.recv_config } } impl ConfigureRecv for DishConfig {} #[derive(Debug, Default, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct DishBuilder { inner: DishConfig, } impl DishBuilder { pub fn new() -> Self { Self::default() } pub fn build(&self) -> Result<Dish, Error> { self.inner.build() } pub fn with_ctx(&self, handle: CtxHandle) -> Result<Dish, Error> { self.inner.with_ctx(handle) } pub fn join<I, G>(&mut self, groups: I) -> &mut Self where I: IntoIterator<Item = G>, G: Into<Group>, { let groups: Vec<Group> = groups.into_iter().map(G::into).collect(); self.inner.set_groups(Some(groups)); self } } impl GetSocketConfig for DishBuilder { fn socket_config(&self) -> &SocketConfig { self.inner.socket_config() } fn socket_config_mut(&mut self) -> &mut SocketConfig { self.inner.socket_config_mut() } } impl BuildSocket for DishBuilder {} impl 
GetRecvConfig for DishBuilder { fn recv_config(&self) -> &RecvConfig { self.inner.recv_config() } fn recv_config_mut(&mut self) -> &mut RecvConfig { self.inner.recv_config_mut() } } impl BuildRecv for DishBuilder {} #[cfg(test)] mod test { use super::*; #[test] fn test_ser_de() { let config = DishConfig::new(); let ron = serde_yaml::to_string(&config).unwrap(); let de: DishConfig = serde_yaml::from_str(&ron).unwrap(); assert_eq!(config, de); } #[test] fn test_dish() { use crate::{prelude::*, TcpAddr, *}; use std::{thread, time::Duration}; let addr: TcpAddr = "127.0.0.1:*".try_into().unwrap(); let radio = RadioBuilder::new().bind(addr).build().unwrap(); let bound = radio.last_endpoint().unwrap(); let a: Group = "group a".try_into().unwrap(); let dish = DishBuilder::new().connect(bound).join(&a).build().unwrap(); thread::spawn(move || { let a: Group = "group a".try_into().unwrap(); let b: Group = "group b".try_into().unwrap(); let mut count = 0; loop { let mut msg = Msg::new(); let group = if count % 2 == 0 { &a } else { &b }; msg.set_group(group); radio.send(msg).unwrap(); std::thread::sleep(Duration::from_millis(1)); count += 1; } }); let msg = dish.recv_msg().unwrap(); assert_eq!(msg.group().unwrap(), &a); let msg = dish.recv_msg().unwrap(); assert_eq!(msg.group().unwrap(), &a); } }
use crate::{ addr::Endpoint, auth::*, core::*, error::*, Ctx, CtxHandle, Group, GroupSlice, }; use libzmq_sys as sys; use sys::errno; use serde::{Deserialize, Serialize}; use std::{ ffi::c_void, str, sync::{Arc, Mutex}, }; fn join(socket_mut_ptr: *mut c_void, group: &GroupSlice) -> Result<(), Error> { let rc = unsafe { sys::zmq_join(socket_mut_ptr, group.as_c_str().as_ptr()) }; if rc == -1 { let errno = unsafe { sys::zmq_errno() }; let err = match errno { errno::EINVAL => { Error::new(ErrorKind::InvalidInput("cannot join group twice")) } errno::ETERM => Error::new(ErrorKind::InvalidCtx), errno::EINTR => Error::new(ErrorKind::Interrupted), errno::ENOTSOCK => panic!("invalid socket"), errno::EMTHREAD => panic!("no i/o thread available"), _ => panic!(msg_from_errno(errno)), }; Err(err) } else { Ok(()) } } fn leave(socket_mut_ptr: *mut c_void, group: &GroupSlice) -> Result<(), Error> { let rc = unsafe { sys::zmq_leave(socket_mut_ptr, group.as_c_str().as_ptr()) }; if rc == -1 { let errno = unsafe { sys::zmq_errno() }; let err = match errno { errno::EINVAL => Error::new(ErrorKind::InvalidInput( "cannot leave a group that wasn't joined", )), errno::ETERM => Error::new(ErrorKind::InvalidCtx), errno::EINTR => Error::new(ErrorKind::Interrupted), errno::ENOTSOCK => panic!("invalid socket"), errno::EMTHREAD => panic!("no i/o thread available"), _ => panic!(msg_from_errno(errno)), }; Err(err) } else { Ok(()) } } #[derive(Debug, Clone)] pub struct Dish { inner: Arc<RawSocket>, groups: Arc<Mutex<Vec<Group>>>, } impl Dish { pub fn new() -> Result<Self, Error> { let inner = Arc::new(RawSocket::new(RawSocketType::Dish)?); Ok(Self { inner, groups: Arc::default(), }) } pub fn with_ctx(handle: CtxHandle) -> Result<Self, Error> { let inner = Arc::new(RawSocket::with_ctx(RawSocketType::Dish, handle)?); Ok(Self { inner, groups: Arc::default(), }) } pub fn ctx(&self) -> CtxHandle { self.inner.ctx() } pub fn join<G>(&self, group: G) -> Result<(), Error> where G: Into<Group>, { let mut guard 
= self.groups.lock().unwrap(); let group = group.into(); join(self.raw_socket().as_mut_ptr(), &group)?; guard.push(group); Ok(()) } pub fn joined(&self) -> Vec<Group> { self.groups.lock().unwrap().to_owned() } pub fn leave<G>(&self, group: G) -> Result<(), Error> where G: AsRef<GroupSlice>, { let mut guard = self.groups.lock().unwrap(); let group = group.as_ref(); leave(self.raw_socket().as_mut_ptr(), group)?; let position = guard.iter().position(|g| g == group).unwrap(); guard.remove(position); Ok(()) } } impl PartialEq for Dish { fn eq(&self, other: &Dish) -> bool { self.inner == other.inner } } impl Eq for Dish {} impl GetRawSocket for Dish { fn raw_socket(&self) -> &RawSocket { &self.inner } } impl Socket for Dish {} impl RecvMsg for Dish {} unsafe impl Send for Dish {} unsafe impl Sync for Dish {} #[derive(Debug, Default, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[serde(from = "FlatDishConfig")] #[serde(into = "FlatDishConfig")] pub struct DishConfig { socket_config: SocketConfig, recv_config: RecvConfig, groups: Option<Vec<Group>>, } impl DishConfig { pub fn new() -> Self { Self::default() } pub fn build(&self) -> Result<Dish, Error> { self.with_ctx(Ctx::global()) } pub f
pub fn groups(&self) -> Option<&[Group]> { self.groups.as_deref() } pub fn set_groups<I>(&mut self, maybe_groups: Option<I>) where I: IntoIterator<Item = Group>, { let groups = maybe_groups.map(|g| g.into_iter().collect()); self.groups = groups; } pub fn apply(&self, dish: &Dish) -> Result<(), Error> { if let Some(ref groups) = self.groups { for group in groups { dish.join(group)?; } } self.recv_config.apply(dish)?; self.socket_config.apply(dish)?; Ok(()) } } #[derive(Clone, Serialize, Deserialize)] struct FlatDishConfig { connect: Option<Vec<Endpoint>>, bind: Option<Vec<Endpoint>>, recv_hwm: HighWaterMark, recv_timeout: Period, groups: Option<Vec<Group>>, mechanism: Option<Mechanism>, } impl From<DishConfig> for FlatDishConfig { fn from(config: DishConfig) -> Self { let socket_config = config.socket_config; let recv_config = config.recv_config; Self { connect: socket_config.connect, bind: socket_config.bind, mechanism: socket_config.mechanism, recv_hwm: recv_config.recv_hwm, recv_timeout: recv_config.recv_timeout, groups: config.groups, } } } impl From<FlatDishConfig> for DishConfig { fn from(flat: FlatDishConfig) -> Self { let socket_config = SocketConfig { connect: flat.connect, bind: flat.bind, mechanism: flat.mechanism, }; let recv_config = RecvConfig { recv_hwm: flat.recv_hwm, recv_timeout: flat.recv_timeout, }; Self { socket_config, recv_config, groups: flat.groups, } } } impl GetSocketConfig for DishConfig { fn socket_config(&self) -> &SocketConfig { &self.socket_config } fn socket_config_mut(&mut self) -> &mut SocketConfig { &mut self.socket_config } } impl ConfigureSocket for DishConfig {} impl GetRecvConfig for DishConfig { fn recv_config(&self) -> &RecvConfig { &self.recv_config } fn recv_config_mut(&mut self) -> &mut RecvConfig { &mut self.recv_config } } impl ConfigureRecv for DishConfig {} #[derive(Debug, Default, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct DishBuilder { inner: DishConfig, } impl DishBuilder { pub fn new() -> Self 
{ Self::default() } pub fn build(&self) -> Result<Dish, Error> { self.inner.build() } pub fn with_ctx(&self, handle: CtxHandle) -> Result<Dish, Error> { self.inner.with_ctx(handle) } pub fn join<I, G>(&mut self, groups: I) -> &mut Self where I: IntoIterator<Item = G>, G: Into<Group>, { let groups: Vec<Group> = groups.into_iter().map(G::into).collect(); self.inner.set_groups(Some(groups)); self } } impl GetSocketConfig for DishBuilder { fn socket_config(&self) -> &SocketConfig { self.inner.socket_config() } fn socket_config_mut(&mut self) -> &mut SocketConfig { self.inner.socket_config_mut() } } impl BuildSocket for DishBuilder {} impl GetRecvConfig for DishBuilder { fn recv_config(&self) -> &RecvConfig { self.inner.recv_config() } fn recv_config_mut(&mut self) -> &mut RecvConfig { self.inner.recv_config_mut() } } impl BuildRecv for DishBuilder {} #[cfg(test)] mod test { use super::*; #[test] fn test_ser_de() { let config = DishConfig::new(); let ron = serde_yaml::to_string(&config).unwrap(); let de: DishConfig = serde_yaml::from_str(&ron).unwrap(); assert_eq!(config, de); } #[test] fn test_dish() { use crate::{prelude::*, TcpAddr, *}; use std::{thread, time::Duration}; let addr: TcpAddr = "127.0.0.1:*".try_into().unwrap(); let radio = RadioBuilder::new().bind(addr).build().unwrap(); let bound = radio.last_endpoint().unwrap(); let a: Group = "group a".try_into().unwrap(); let dish = DishBuilder::new().connect(bound).join(&a).build().unwrap(); thread::spawn(move || { let a: Group = "group a".try_into().unwrap(); let b: Group = "group b".try_into().unwrap(); let mut count = 0; loop { let mut msg = Msg::new(); let group = if count % 2 == 0 { &a } else { &b }; msg.set_group(group); radio.send(msg).unwrap(); std::thread::sleep(Duration::from_millis(1)); count += 1; } }); let msg = dish.recv_msg().unwrap(); assert_eq!(msg.group().unwrap(), &a); let msg = dish.recv_msg().unwrap(); assert_eq!(msg.group().unwrap(), &a); } }
n with_ctx(&self, handle: CtxHandle) -> Result<Dish, Error> { let dish = Dish::with_ctx(handle)?; self.apply(&dish)?; Ok(dish) }
function_block-function_prefixed
[ { "content": "fn connect(socket_ptr: *mut c_void, c_string: CString) -> Result<(), Error> {\n\n let rc = unsafe { sys::zmq_connect(socket_ptr, c_string.as_ptr()) };\n\n\n\n if rc == -1 {\n\n let errno = unsafe { sys::zmq_errno() };\n\n let err = match errno {\n\n errno::EINVAL => ...
Rust
rust/subproc/subproc.rs
ChristianVisintin/Brol
e53b663915697aa6543406db2f279e31826bff0c
/* * * Copyright (C) 2020 Christian Visintin - christian.visintin1997@gmail.com * * This file is part of "Pyc" * * Pyc is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Pyc is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Pyc. If not, see <http://www.gnu.org/licenses/>. * */ extern crate nix; extern crate tempfile; extern crate uuid; use super::pipe::Pipe; use std::ffi::{CStr, CString}; use std::os::unix::io::RawFd; use std::path::PathBuf; use std::time::{Duration, Instant}; #[derive(Copy, Clone, PartialEq, std::fmt::Debug)] pub enum SubProcState { Running, Terminated, Unknown } #[derive(Copy, Clone, PartialEq, std::fmt::Debug)] pub enum SubProcError { CouldNotStartProcess, InvalidData, IoTimeout, SubProcStillRunning, SubProcTerminated, CouldNotKill, PipeError(nix::errno::Errno) } #[derive(std::fmt::Debug)] pub struct ShellProc { pub state: SubProcState, pub pid: i32, rc: u8, stdout_cache: Option<String>, stdin_pipe: Pipe, stdout_pipe: Pipe, stderr_pipe: Pipe } impl ShellProc { pub fn start(argv: Vec<String>) -> Result<ShellProc, SubProcError> { if argv.len() == 0 { return Err(SubProcError::CouldNotStartProcess) } let tmpdir: tempfile::TempDir = tempfile::TempDir::new().unwrap(); let stdin_pipe: Pipe = match Pipe::open(&tmpdir.path().join("stdin.fifo")) { Ok(p) => p, Err(err) => return Err(err) }; let stderr_pipe: Pipe = match Pipe::open(&tmpdir.path().join("stderr.fifo")) { Ok(p) => p, Err(err) => return Err(err) }; let stdout_pipe: Pipe = match Pipe::open(&tmpdir.path().join("stdout.fifo")) { Ok(p) => p, Err(err) => return 
Err(err) }; match nix::unistd::fork() { Ok(nix::unistd::ForkResult::Parent { child, .. }) => { Ok(ShellProc { state: SubProcState::Running, pid: child.as_raw(), rc: 255, stdout_cache: None, stdin_pipe: stdin_pipe, stderr_pipe: stderr_pipe, stdout_pipe: stdout_pipe }) }, Ok(nix::unistd::ForkResult::Child) => { std::process::exit(ShellProc::run(argv, stdin_pipe.fd, stderr_pipe.fd, stdout_pipe.fd)); }, Err(_) => { return Err(SubProcError::CouldNotStartProcess) } } } pub fn cleanup(&mut self) -> Result<u8, SubProcError> { if self.read_state() != SubProcState::Terminated { return Err(SubProcError::SubProcStillRunning) } let _ = self.stdin_pipe.close(); let _ = self.stdout_pipe.close(); let _ = self.stderr_pipe.close(); Ok(self.rc) } pub fn raise(&self, signal: nix::sys::signal::Signal) -> Result<(), SubProcError> { match nix::sys::signal::kill(nix::unistd::Pid::from_raw(self.pid), signal) { Ok(_) => Ok(()), Err(_) => Err(SubProcError::CouldNotKill) } } pub fn kill(&self) -> Result<(), SubProcError> { self.raise(nix::sys::signal::Signal::SIGKILL) } pub fn read(&mut self) -> Result<(Option<String>, Option<String>), SubProcError> { let stdout: Option<String> = match self.stdout_pipe.read(50, false) { Ok(stdout) => stdout, Err(err) => return Err(err) }; let stderr: Option<String> = match self.stderr_pipe.read(50, false) { Ok(stderr) => match stderr { None => None, Some(stderr) => Some(stderr) }, Err(err) => return Err(err) }; Ok((stdout, stderr)) } pub fn write(&mut self, mut data: String) -> Result<(), SubProcError> { if self.read_state() == SubProcState::Terminated { return Err(SubProcError::SubProcTerminated) } self.stdin_pipe.write(data, 5000) } fn run(argv: Vec<String>, stdin: RawFd, stderr: RawFd, stdout: RawFd) -> i32 { if let Err(_) = nix::unistd::dup2(stdin, 0) { return 255 } if let Err(_) = nix::unistd::dup2(stdout, 1) { return 255 } if let Err(_) = nix::unistd::dup2(stderr, 2) { return 255 } let mut c_argv: Vec<CString> = Vec::with_capacity(argv.len()); for arg 
in argv.iter() { c_argv.push(CString::new(arg.as_str()).unwrap()); } let mut c_argv_refs: Vec<&CStr> = Vec::with_capacity(c_argv.len()); for arg in c_argv.iter() { c_argv_refs.push(arg); } if let Err(_) = nix::unistd::execvp(c_argv_refs.get(0).unwrap(), c_argv_refs.as_slice()) { return 255 } return 0 } pub fn read_state(&mut self) -> SubProcState { match nix::sys::wait::waitpid(nix::unistd::Pid::from_raw(self.pid), Some(nix::sys::wait::WaitPidFlag::WNOHANG)) { Err(_) => {}, Ok(status) => match status { nix::sys::wait::WaitStatus::Exited(_, rc) => { self.state = SubProcState::Terminated; self.rc = rc as u8; }, nix::sys::wait::WaitStatus::Signaled(_, signal, _) => { self.state = SubProcState::Terminated; self.rc = signal as u8; }, _ => {}, } }; self.state } } impl Drop for ShellProc { fn drop(&mut self) { if let Err(_) = self.cleanup() { let _ = self.kill(); let _ = self.cleanup(); } } } #[cfg(test)] mod tests { use super::*; use nix::NixPath; use std::time::Duration; use std::thread::sleep; #[test] fn test_process_start_stop() { let mut shell_proc: ShellProc = ShellProc::start(vec![String::from("sh")]).unwrap(); println!("A new subproc started with PID {}", shell_proc.pid); assert_eq!(shell_proc.state, SubProcState::Running); assert_ne!(shell_proc.pid, 0); assert_eq!(shell_proc.rc, 255); assert!(shell_proc.stdout_cache.is_none()); sleep(Duration::from_millis(500)); assert_eq!(shell_proc.read_state(), SubProcState::Running); assert!(shell_proc.kill().is_ok()); sleep(Duration::from_millis(500)); assert_eq!(shell_proc.read_state(), SubProcState::Terminated); assert_eq!(shell_proc.state, SubProcState::Terminated); assert_eq!(shell_proc.rc, 9); assert!(shell_proc.cleanup().is_ok()); } #[test] fn test_process_start_error() { let mut shell_proc: ShellProc = ShellProc::start(vec![String::from("piroporopero")]).unwrap(); println!("A new subproc started with PID {}", shell_proc.pid); sleep(Duration::from_millis(1000)); assert_eq!(shell_proc.read_state(), 
SubProcState::Terminated); assert_eq!(shell_proc.rc, 255); } #[test] fn test_process_raise() { let mut shell_proc: ShellProc = ShellProc::start(vec![String::from("sh")]).unwrap(); println!("A new subproc started with PID {}", shell_proc.pid); sleep(Duration::from_millis(500)); assert_eq!(shell_proc.read_state(), SubProcState::Running); assert!(shell_proc.raise(nix::sys::signal::Signal::SIGINT).is_ok()); sleep(Duration::from_millis(500)); assert_eq!(shell_proc.read_state(), SubProcState::Terminated); assert_eq!(shell_proc.rc, 2); } }
/* * * Copyright (C) 2020 Christian Visintin - christian.visintin1997@gmail.com * * This file is part of "Pyc" * * Pyc is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Pyc is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Pyc. If not, see <http://www.gnu.org/licenses/>. * */ extern crate nix; extern crate tempfile; extern crate uuid; use super::pipe::Pipe; use std::ffi::{CStr, CString}; use std::os::unix::io::RawFd; use std::path::PathBuf; use std::time::{Duration, Instant}; #[derive(Copy, Clone, PartialEq, std::fmt::Debug)] pub enum SubProcState { Running, Terminated, Unknown } #[derive(Copy, Clone, PartialEq, std::fmt::Debug)] pub enum SubProcError { CouldNotStartProcess, InvalidData, IoTimeout, SubProcStillRunning, SubProcTerminated, CouldNotKill, PipeError(nix::errno::Errno) } #[derive(std::fmt::Debug)] pub struct ShellProc { pub state: SubProcState, pub pid: i32, rc: u8, stdout_cache: Option<String>, stdin_pipe: Pipe, stdout_pipe: Pipe, stderr_pipe: Pipe } impl ShellProc { pub fn start(argv: Vec<String>) -> Result<ShellProc, SubProcError> { if argv.len() == 0 { return Err(SubProcError::CouldNotStartProcess) } let tmpdir: tempfile::TempDir = tempfile::TempDir::new().unwrap(); let stdin_pipe: Pipe = match Pipe::open(&tmpdir.path().join("stdin.fifo")) { Ok(p) => p, Err(err) => return Err(err) }; let stderr_pipe: Pipe = match Pipe::open(&tmpdir.path().join("stderr.fifo")) { Ok(p) => p, Err(err) => return Err(err) }; let stdout_pipe: Pipe = match Pipe::open(&tmpdir.path().join("stdout.fifo")) { Ok(p) => p, Err(err) => return 
Err(err) }; match nix::unistd::fork() { Ok(nix::unistd::ForkResult::Parent { child, .. }) => { Ok(ShellProc { state: SubProcState::Running, pid: child.as_raw(), rc: 255, stdout_cache: None, stdin_pipe: stdin_pipe, stderr_pipe: stderr_pipe, stdout_pipe: stdout_pipe }) }, Ok(nix::unistd::ForkResult::Child) => { std::process::exit(ShellProc::run(argv, stdin_pipe.fd, stderr_pipe.fd, stdout_pipe.fd)); }, Err(_) => { return Err(SubProcError::CouldNotStartProcess) } } } pub fn cleanup(&mut self) -> Result<u8, SubProcError> { if self.read_state() != SubProcState::Terminated { return Err(SubProcError::SubProcStillRunning) } let _ = self.stdin_pipe.close(); let _ = self.stdout_pipe.close(); let _ = self.stderr_pipe.close(); Ok(self.rc) } pub fn raise(&self, signal: nix::sys::signal::Signal) -> Result<(), SubProcError> { match nix::sys::signal::kill(nix::unistd::Pid::from_raw(self.pid), signal) { Ok(_) => Ok(()), Err(_) => Err(SubProcError::CouldNotKill) } } pub fn kill(&self) -> Result<(), SubProcError> { self.raise(nix::sys::signal::Signal::SIGKILL) } pub fn read(&mut self) -> Result<(Option<String>, Option<String>), SubProcError> { let stdout: Option<String> = match self.stdout_pipe.read(50, false) { Ok(stdout) => stdout, Err(err) => return Err(err) }; let stderr: Option<String> = match self.stderr_pipe.read(50, false) { Ok(stderr) => match stderr { None => None, Some(stderr) => Some(stderr) }, Err(err) => return Err(err) }; Ok((stdout, stderr)) } pub fn write(&mut self, mut data: String) -> Result<(), SubProcError> { if self.read_state() == SubProcState::Terminated { return Err(SubProcError::SubProcTerminated) } self.stdin_pipe.write(data, 5000) } fn run(argv: Vec<String>, stdin: RawFd, stderr: RawFd, stdout: RawFd) -> i32 { if let Err(_) = nix::unistd::dup2(stdin, 0) { return 255 } if let Err(_) = nix::unistd::dup2(stdout, 1) { return 255 } if let Err(_) = nix::unistd::dup2(stderr, 2) { return 255 } let mut c_argv: Vec<CString> = Vec::with_capacity(argv.len()); for arg 
in argv.iter() { c_argv.push(CString::new(arg.as_str()).unwrap()); } let mut c_argv_refs: Vec<&CStr> = Vec::with_capacity(c_argv.len()); for arg in c_argv.iter() { c_argv_refs.push(arg); } if let Err(_) = nix::unistd::execvp(c_argv_refs.get(0).unwrap(), c_argv_refs.as_slice()) { return 255 } return 0 } pub fn read_state(&mut self) -> SubProcState { match nix::sys::wait::waitpid(nix::unistd::Pid::from_raw(self.pid), Some(nix::sys::wait::WaitPidFlag::WNOHANG)) { Err(_) => {}, Ok(status) => match status { nix::sys::wait::WaitStatus::Exited(_, rc) => { self.state = SubProcState::Terminated; self.rc = rc as u8; }, nix::sys::wait::WaitStatus::Signaled(_, signal, _) => { self.state = SubProcState::Terminated; self.rc = signal as u8; }, _ => {}, } }; self.state } } impl Drop for ShellProc { fn drop(&mut self) { if let Err(_) = self.cleanup() { let _ = self.kill(); let _ = self.cleanup(); } } } #[cfg(test)] mod tests { use super::*; use nix::NixPath; use std::time::Duration; use std::thread::sleep; #[test]
#[test] fn test_process_start_error() { let mut shell_proc: ShellProc = ShellProc::start(vec![String::from("piroporopero")]).unwrap(); println!("A new subproc started with PID {}", shell_proc.pid); sleep(Duration::from_millis(1000)); assert_eq!(shell_proc.read_state(), SubProcState::Terminated); assert_eq!(shell_proc.rc, 255); } #[test] fn test_process_raise() { let mut shell_proc: ShellProc = ShellProc::start(vec![String::from("sh")]).unwrap(); println!("A new subproc started with PID {}", shell_proc.pid); sleep(Duration::from_millis(500)); assert_eq!(shell_proc.read_state(), SubProcState::Running); assert!(shell_proc.raise(nix::sys::signal::Signal::SIGINT).is_ok()); sleep(Duration::from_millis(500)); assert_eq!(shell_proc.read_state(), SubProcState::Terminated); assert_eq!(shell_proc.rc, 2); } }
fn test_process_start_stop() { let mut shell_proc: ShellProc = ShellProc::start(vec![String::from("sh")]).unwrap(); println!("A new subproc started with PID {}", shell_proc.pid); assert_eq!(shell_proc.state, SubProcState::Running); assert_ne!(shell_proc.pid, 0); assert_eq!(shell_proc.rc, 255); assert!(shell_proc.stdout_cache.is_none()); sleep(Duration::from_millis(500)); assert_eq!(shell_proc.read_state(), SubProcState::Running); assert!(shell_proc.kill().is_ok()); sleep(Duration::from_millis(500)); assert_eq!(shell_proc.read_state(), SubProcState::Terminated); assert_eq!(shell_proc.state, SubProcState::Terminated); assert_eq!(shell_proc.rc, 9); assert!(shell_proc.cleanup().is_ok()); }
function_block-full_function
[ { "content": "/// ### read_file\n\n/// \n\n/// Read entire file\n\npub fn read_file<P>(filename: P) -> io::Result<String> where P: AsRef<Path>, {\n\n std::fs::read_to_string(filename)\n\n}\n\n\n\n/// ### read_lines\n\n/// \n\n/// Read lines from file\n", "file_path": "rust/file-utils/file.rs", "rank"...
Rust
core/executor/runtime-test/src/lib.rs
HPIPS/HPIPS_Chain
9c4a5ee923af876c89bb2cc629fd96b11add8196
#![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(feature = "strict", deny(warnings))] #[cfg(feature = "std")] include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); use rstd::{vec::Vec, slice, vec}; use runtime_io::{ set_storage, storage, clear_prefix, print, blake2_128, blake2_256, twox_128, twox_256, ed25519_verify, sr25519_verify, enumerated_trie_root }; macro_rules! impl_stubs { ( $( $new_name:ident => $invoke:expr, )* ) => { $( impl_stubs!(@METHOD $new_name => $invoke); )* }; ( @METHOD $new_name:ident => $invoke:expr ) => { #[no_mangle] pub fn $new_name(input_data: *mut u8, input_len: usize) -> u64 { let input: &[u8] = if input_len == 0 { &[0u8; 0] } else { unsafe { slice::from_raw_parts(input_data, input_len) } }; let output: Vec<u8> = $invoke(input); let res = output.as_ptr() as u64 + ((output.len() as u64) << 32); rstd::mem::forget(output); res } }; } impl_stubs!( test_data_in => |input| { print("set_storage"); set_storage(b"input", input); print("storage"); let foo = storage(b"foo").unwrap(); print("set_storage"); set_storage(b"baz", &foo); print("finished!"); b"all ok!".to_vec() }, test_clear_prefix => |input| { clear_prefix(input); b"all ok!".to_vec() }, test_empty_return => |_| Vec::new(), test_exhaust_heap => |_| Vec::with_capacity(16777216), test_panic => |_| panic!("test panic"), test_conditional_panic => |input: &[u8]| { if input.len() > 0 { panic!("test panic") } input.to_vec() }, test_blake2_256 => |input| blake2_256(input).to_vec(), test_blake2_128 => |input| blake2_128(input).to_vec(), test_twox_256 => |input| twox_256(input).to_vec(), test_twox_128 => |input| twox_128(input).to_vec(), test_ed25519_verify => |input: &[u8]| { let mut pubkey = [0; 32]; let mut sig = [0; 64]; pubkey.copy_from_slice(&input[0..32]); sig.copy_from_slice(&input[32..96]); let msg = b"all ok!"; [ed25519_verify(&sig, &msg[..], &pubkey) as u8].to_vec() }, test_sr25519_verify => |input: &[u8]| { let mut pubkey = [0; 32]; let mut sig = [0; 64]; 
pubkey.copy_from_slice(&input[0..32]); sig.copy_from_slice(&input[32..96]); let msg = b"all ok!"; [sr25519_verify(&sig, &msg[..], &pubkey) as u8].to_vec() }, test_enumerated_trie_root => |_| { enumerated_trie_root::<primitives::Blake2Hasher>( &[ &b"zero"[..], &b"one"[..], &b"two"[..], ] ).as_ref().to_vec() }, test_sandbox => |code: &[u8]| { let ok = execute_sandboxed(code, &[]).is_ok(); [ok as u8].to_vec() }, test_sandbox_args => |code: &[u8]| { let ok = execute_sandboxed( code, &[ sandbox::TypedValue::I32(0x12345678), sandbox::TypedValue::I64(0x1234567887654321), ] ).is_ok(); [ok as u8].to_vec() }, test_sandbox_return_val => |code: &[u8]| { let ok = match execute_sandboxed( code, &[ sandbox::TypedValue::I32(0x1336), ] ) { Ok(sandbox::ReturnValue::Value(sandbox::TypedValue::I32(0x1337))) => true, _ => false, }; [ok as u8].to_vec() }, test_sandbox_instantiate => |code: &[u8]| { let env_builder = sandbox::EnvironmentDefinitionBuilder::new(); let code = match sandbox::Instance::new(code, &env_builder, &mut ()) { Ok(_) => 0, Err(sandbox::Error::Module) => 1, Err(sandbox::Error::Execution) => 2, Err(sandbox::Error::OutOfBounds) => 3, }; [code].to_vec() }, test_offchain_local_storage => |_| { let kind = primitives::offchain::StorageKind::PERSISTENT; assert_eq!(runtime_io::local_storage_get(kind, b"test"), None); runtime_io::local_storage_set(kind, b"test", b"asd"); assert_eq!(runtime_io::local_storage_get(kind, b"test"), Some(b"asd".to_vec())); let res = runtime_io::local_storage_compare_and_set(kind, b"test", Some(b"asd"), b""); assert_eq!(res, true); assert_eq!(runtime_io::local_storage_get(kind, b"test"), Some(b"".to_vec())); [0].to_vec() }, test_offchain_local_storage_with_none => |_| { let kind = primitives::offchain::StorageKind::PERSISTENT; assert_eq!(runtime_io::local_storage_get(kind, b"test"), None); let res = runtime_io::local_storage_compare_and_set(kind, b"test", None, b"value"); assert_eq!(res, true); assert_eq!(runtime_io::local_storage_get(kind, b"test"), 
Some(b"value".to_vec())); [0].to_vec() }, test_offchain_http => |_| { use primitives::offchain::HttpRequestStatus; let run = || -> Option<()> { let id = runtime_io::http_request_start("POST", "http://localhost:12345", &[]).ok()?; runtime_io::http_request_add_header(id, "X-Auth", "test").ok()?; runtime_io::http_request_write_body(id, &[1, 2, 3, 4], None).ok()?; runtime_io::http_request_write_body(id, &[], None).ok()?; let status = runtime_io::http_response_wait(&[id], None); assert!(status == vec![HttpRequestStatus::Finished(200)], "Expected Finished(200) status."); let headers = runtime_io::http_response_headers(id); assert_eq!(headers, vec![(b"X-Auth".to_vec(), b"hello".to_vec())]); let mut buffer = vec![0; 64]; let read = runtime_io::http_response_read_body(id, &mut buffer, None).ok()?; assert_eq!(read, 3); assert_eq!(&buffer[0..read], &[1, 2, 3]); let read = runtime_io::http_response_read_body(id, &mut buffer, None).ok()?; assert_eq!(read, 0); Some(()) }; [if run().is_some() { 0 } else { 1 }].to_vec() }, ); fn execute_sandboxed(code: &[u8], args: &[sandbox::TypedValue]) -> Result<sandbox::ReturnValue, sandbox::HostError> { struct State { counter: u32, } fn env_assert(_e: &mut State, args: &[sandbox::TypedValue]) -> Result<sandbox::ReturnValue, sandbox::HostError> { if args.len() != 1 { return Err(sandbox::HostError); } let condition = args[0].as_i32().ok_or_else(|| sandbox::HostError)?; if condition != 0 { Ok(sandbox::ReturnValue::Unit) } else { Err(sandbox::HostError) } } fn env_inc_counter(e: &mut State, args: &[sandbox::TypedValue]) -> Result<sandbox::ReturnValue, sandbox::HostError> { if args.len() != 1 { return Err(sandbox::HostError); } let inc_by = args[0].as_i32().ok_or_else(|| sandbox::HostError)?; e.counter += inc_by as u32; Ok(sandbox::ReturnValue::Value(sandbox::TypedValue::I32(e.counter as i32))) } let mut state = State { counter: 0 }; let env_builder = { let mut env_builder = sandbox::EnvironmentDefinitionBuilder::new(); 
env_builder.add_host_func("env", "assert", env_assert); env_builder.add_host_func("env", "inc_counter", env_inc_counter); let memory = match sandbox::Memory::new(1, Some(16)) { Ok(m) => m, Err(_) => unreachable!(" Memory::new() can return Err only if parameters are borked; \ We passing params here explicitly and they're correct; \ Memory::new() can't return a Error qed" ), }; env_builder.add_memory("env", "memory", memory.clone()); env_builder }; let mut instance = sandbox::Instance::new(code, &env_builder, &mut state)?; let result = instance.invoke(b"call", args, &mut state); result.map_err(|_| sandbox::HostError) }
#![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(feature = "strict", deny(warnings))] #[cfg(feature = "std")] include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); use rstd::{vec::Vec, slice, vec}; use runtime_io::{ set_storage, storage, clear_prefix, print, blake2_128, blake2_256, twox_128, twox_256, ed25519_verify, sr25519_verify, enumerated_trie_root }; macro_rules! impl_stubs { ( $( $new_name:ident => $invoke:expr, )* ) => { $( impl_stubs!(@METHOD $new_name => $invoke); )* }; ( @METHOD $new_name:ident => $invoke:expr ) => { #[no_mangle] pub fn $new_name(input_data: *mut u8, input_len: usize) -> u64 { let input: &[u8] = if input_len == 0 { &[0u8; 0] } else { unsafe { slice::from_raw_parts(input_data, input_len) } }; let output: Vec<u8> = $invoke(input); let res = output.as_ptr() as u64 + ((output.len() as u64) << 32); rstd::mem::forget(output); res } }; } impl_stubs!( test_data_in => |input| { print("set_storage"); set_storage(b"input", input); print("storage"); let foo = storage(b"foo").unwrap(); print("set_storage"); set_storage(b"baz", &foo); print("finished!"); b"all ok!".to_vec() }, test_clear_prefix => |input| { clear_prefix(input); b"all ok!".to_vec() }, test_empty_return => |_| Vec::new(), test_exhaust_heap => |_| Vec::with_capacity(16777216), test_panic => |_| panic!("test panic"), test_conditional_panic => |input: &[u8]| { if input.len() > 0 { panic!("test panic") } input.to_vec() }, test_blake2_256 => |input| blake2_256(input).to_vec(), test_blake2_128 => |input| blake2_128(input).to_vec(), test_twox_256 => |input| twox_256(input).to_vec(), test_twox_128 => |input| twox_128(input).to_vec(), test_ed25519_verify => |input: &[u8]| { let mut pubkey = [0; 32]; let mut sig = [0; 64]; pubkey.copy_from_slice(&input[0..32]); sig.copy_from_slice(&input[32..96]); let msg = b"all ok!"; [ed25519_verify(&sig, &msg[..], &pubkey) as u8].to_vec() }, test_sr25519_verify => |input: &[u8]| { let mut pubkey = [0; 32]; let mut sig = [0; 64]; 
pubkey.copy_from_slice(&input[0..32]); sig.copy_from_slice(&input[32..96]); let msg = b"all ok!"; [sr25519_verify(&sig, &msg[..], &pubkey) as u8].to_vec() }, test_enumerated_trie_root => |_| { enumerated_trie_root::<primitives::Blake2Hasher>( &[ &b"zero"[..], &b"one"[..], &b"two"[..], ] ).as_ref().to_vec() }, test_sandbox => |code: &[u8]| { let ok = execute_sandboxed(code, &[]).is_ok(); [ok as u8].to_vec() }, test_sandbox_args => |code: &[u8]| { let ok = execute_sandboxed( code, &[ sandbox::TypedValue::I32(0x12345678), sandbox::TypedValue::I64(0x1234567887654321), ] ).is_ok(); [ok as u8].to_vec() }, test_sandbox_return_val => |code: &[u8]| { let ok = match execute_sandboxed( code, &[ sandbox::TypedValue::I32(0x1336), ] ) { Ok(sandbox::ReturnValue::Value(sandbox::TypedValue::I32(0x1337))) => true, _ => false, }; [ok as u8].to_vec() }, test_sandbox_instantiate => |code: &[u8]| { let env_builder = sandbox::EnvironmentDefinitionBuilder::new(); let code = match sandbox::Instance::new(code, &env_builder, &mut ()) { Ok(_) => 0, Err(sandbox::Error::Module) => 1, Err(sandbox::Error::Execution) => 2, Err(sandbox::Error::OutOfBounds) => 3, }; [code].to_vec() }, test_offchain_local_storage => |_| { let kind = primitives::offchain::StorageKind::PERSISTENT; assert_eq!(runtime_io::local_storage_get(kind, b"test"), None); runtime_io::local_storage_set(kind, b"test", b"asd"); assert_eq!(runtime_io::local_storage_get(kind, b"test"), Some(b"asd".to_vec())); let res = runtime_io::local_storage_compare_and_set(kind, b"test", Some(b"asd"), b""); assert_eq!(res, true); assert_eq!(runtime_io::local_storage_get(kind, b"test"), Some(b"".to_vec())); [0].to_vec() }, test_offchain_local_storage_with_none => |_| { let kind = primitives::offchain::StorageKind::PERSISTENT; assert_eq!(runtime_io::local_storage_get(kind, b"test"), None); let res = runtime_io::local_storage_compare_and_set(kind, b"test", None, b"value"); assert_eq!(res, true); assert_eq!(runtime_io::local_storage_get(kind, b"test"), 
Some(b"value".to_vec())); [0].to_vec() }, test_offchain_http => |_| { use primitives::offchain::HttpRequestStatus; let run = || -> Option<()> { let id = runtime_io::http_request_start("POST", "http://localhost:12345", &[]).ok()?; runtime_io::http_request_add_header(id, "X-Auth", "test").ok()?; runtime_io::http_request_write_body(id, &[1, 2, 3, 4], None).ok()?; runtime_io::http_request_write_body(id, &[], None).ok()?; let status = runtime_io::http_response_wait(&[id], None); assert!(status == vec![HttpRequestStatus::Finished(200)], "Expected Finished(200) status."); let headers = runtime_io::http_response_headers(id); assert_eq!(headers, vec![(b"X-Auth".to_vec(), b"hello".to_vec())]); let mut buffer = vec![0; 64]; let read = runtime_io::http_response_read_body(id, &mut buffer, None).ok()?; assert_eq!(read, 3); assert_eq!(&buffer[0..read], &[1, 2, 3]); let read = runtime_io::http_response_read_body(id, &mut buffer, None).ok()?; assert_eq!(read, 0); Some(()) }; [if run().is_some() { 0 } else { 1 }].to_vec() }, ); fn execute_sandboxed(code: &[u8], args: &[sandbox::TypedValue]) -> Result<sandbox::ReturnValue, sandbox::HostError> { struct State { counter: u32, } fn env_assert(_e: &mut State, args: &[sandbox::TypedValue]) -> Result<sandbox::ReturnValue, sandbox::HostError> { if args.len() !=
fn env_inc_counter(e: &mut State, args: &[sandbox::TypedValue]) -> Result<sandbox::ReturnValue, sandbox::HostError> { if args.len() != 1 { return Err(sandbox::HostError); } let inc_by = args[0].as_i32().ok_or_else(|| sandbox::HostError)?; e.counter += inc_by as u32; Ok(sandbox::ReturnValue::Value(sandbox::TypedValue::I32(e.counter as i32))) } let mut state = State { counter: 0 }; let env_builder = { let mut env_builder = sandbox::EnvironmentDefinitionBuilder::new(); env_builder.add_host_func("env", "assert", env_assert); env_builder.add_host_func("env", "inc_counter", env_inc_counter); let memory = match sandbox::Memory::new(1, Some(16)) { Ok(m) => m, Err(_) => unreachable!(" Memory::new() can return Err only if parameters are borked; \ We passing params here explicitly and they're correct; \ Memory::new() can't return a Error qed" ), }; env_builder.add_memory("env", "memory", memory.clone()); env_builder }; let mut instance = sandbox::Instance::new(code, &env_builder, &mut state)?; let result = instance.invoke(b"call", args, &mut state); result.map_err(|_| sandbox::HostError) }
1 { return Err(sandbox::HostError); } let condition = args[0].as_i32().ok_or_else(|| sandbox::HostError)?; if condition != 0 { Ok(sandbox::ReturnValue::Unit) } else { Err(sandbox::HostError) } }
function_block-function_prefixed
[ { "content": "/// Run whatever tests we have.\n\npub fn run_tests(mut input: &[u8]) -> Vec<u8> {\n\n\tuse runtime_io::print;\n\n\n\n\tprint(\"run_tests...\");\n\n\tlet block = Block::decode(&mut input).unwrap();\n\n\tprint(\"deserialized block.\");\n\n\tlet stxs = block.extrinsics.iter().map(Encode::encode).col...
Rust
src/lib.rs
Ekleog/netsim-embed
980e43f530bc760a338b89bbba0b4037e43aed60
use futures::channel::mpsc; use futures::future::Future; use futures::sink::SinkExt; use futures::stream::StreamExt; pub use netsim_embed_core::Ipv4Range; use netsim_embed_core::*; use netsim_embed_machine::*; use netsim_embed_nat::*; use netsim_embed_router::*; pub use pnet_packet::*; use std::net::Ipv4Addr; pub fn run<F>(f: F) where F: Future<Output = ()> + Send + 'static, { env_logger::init(); namespace::unshare_user().unwrap(); smol::run(f); } #[derive(Debug)] pub struct Machine<C, E> { addr: Ipv4Addr, tx: mpsc::Sender<C>, rx: mpsc::Receiver<E>, } impl<C: Send + 'static, E: Send + 'static> Machine<C, E> { pub fn addr(&self) -> Ipv4Addr { self.addr } pub async fn send(&mut self, cmd: C) { self.tx.send(cmd).await.unwrap(); } pub async fn recv(&mut self) -> Option<E> { self.rx.next().await } } #[derive(Debug)] pub struct Network<C, E> { range: Ipv4Range, machines: Vec<Machine<C, E>>, networks: Vec<Network<C, E>>, } impl<C: Send + 'static, E: Send + 'static> Network<C, E> { pub fn range(&self) -> Ipv4Range { self.range } pub fn subnet(&mut self, i: usize) -> &mut Network<C, E> { self.networks.get_mut(i).unwrap() } pub fn machine(&mut self, i: usize) -> &mut Machine<C, E> { self.machines.get_mut(i).unwrap() } } #[derive(Clone, Copy, Debug)] pub struct NatConfig { pub hair_pinning: bool, pub symmetric: bool, pub blacklist_unrecognized_addrs: bool, pub restrict_endpoints: bool, } impl Default for NatConfig { fn default() -> Self { Self { hair_pinning: false, symmetric: false, blacklist_unrecognized_addrs: false, restrict_endpoints: false, } } } #[derive(Debug)] pub struct NetworkBuilder<C, E> { range: Ipv4Range, router: Ipv4Router, machines: Vec<Machine<C, E>>, networks: Vec<Network<C, E>>, } impl<C: Send + 'static, E: Send + 'static> NetworkBuilder<C, E> { pub fn new(range: Ipv4Range) -> Self { let router = Ipv4Router::new(range.gateway_addr()); Self { range, router, machines: Default::default(), networks: Default::default(), } } pub fn spawn_machine<B, F>(&mut self, 
builder: B) -> Ipv4Addr where B: Fn(mpsc::Receiver<C>, mpsc::Sender<E>) -> F + Send + 'static, F: Future<Output = ()> + Send + 'static, { let (iface_a, iface_b) = wire(); let (cmd_tx, cmd_rx) = mpsc::channel(0); let (event_tx, event_rx) = mpsc::channel(0); let addr = self.range.random_client_addr(); let mask = self.range.netmask_prefix_length(); smol::Task::blocking(async move { let join = machine(addr, mask, iface_b, builder(cmd_rx, event_tx)); join.join().unwrap(); }) .detach(); let machine = Machine { addr, tx: cmd_tx, rx: event_rx, }; self.machines.push(machine); self.router.add_connection(iface_a, vec![addr.into()]); addr } pub fn spawn_network(&mut self, config: Option<NatConfig>, mut builder: NetworkBuilder<C, E>) { let (net_a, net_b) = wire(); if let Some(config) = config { builder .router .add_connection(net_b, vec![Ipv4Range::global().into()]); let (nat_a, nat_b) = wire(); let nat_addr = self.range.random_client_addr(); let mut nat = Ipv4Nat::new(nat_b, net_a, nat_addr, builder.range); nat.set_hair_pinning(config.hair_pinning); nat.set_symmetric(config.symmetric); nat.set_blacklist_unrecognized_addrs(config.blacklist_unrecognized_addrs); nat.set_restrict_endpoints(config.restrict_endpoints); smol::Task::spawn(nat).detach(); self.router.add_connection(nat_a, vec![nat_addr.into()]); } else { builder .router .add_connection(net_b, vec![Ipv4Range::global().into()]); self.router .add_connection(net_a, vec![builder.range.into()]); } let network = builder.spawn(); self.networks.push(network); } pub fn spawn(self) -> Network<C, E> { let Self { range, router, machines, networks, } = self; smol::Task::spawn(router).detach(); Network { range, machines, networks, } } }
use futures::channel::mpsc; use futures::future::Future; use futures::sink::SinkExt; use futures::stream::StreamExt; pub use netsim_embed_core::Ipv4Range; use netsim_embed_core::*; use netsim_embed_machine::*; use netsim_embed_nat::*; use netsim_embed_router::*; pub use pnet_packet::*; use std::net::Ipv4Addr; pub fn run<F>(f: F) where F: Future<Output = ()> + Send + 'static, { env_logger::init(); namespace::unshare_user().unwrap(); smol::run(f); } #[derive(Debug)] pub struct Machine<C, E> { addr: Ipv4Addr, tx: mpsc::Sender<C>, rx: mpsc::Receiver<E>, } impl<C: Send + 'static, E: Send + 'static> Machine<C, E> { pub fn addr(&self) -> Ipv4Addr { self.addr } pub async fn send(&mut self, cmd: C) { self.tx.send(cmd).await.unwrap(); } pub async fn recv(&mut self) -> Option<E> { self.rx.next().await } } #[derive(Debug)] pub struct Network<C, E> { range: Ipv4Range, machines: Vec<Machine<C, E>>, networks: Vec<Network<C, E>>, } impl<C: Send + 'static, E: Send + 'static> Network<C, E> { pub fn range(&self) -> Ipv4Range { self.range } pub fn subnet(&mut self, i: usize) -> &mut Network<C, E> { self.networks.get_mut(i).unwrap() } pub fn machine(&mut self, i: usize) -> &mut Machine<C, E> { self.machines.get_mut(i).unwrap() } } #[derive(Clone, Copy, Debug)] pub struct NatConfig { pub hair_pinning: bool, pub symmetric: bool, pub blacklist_unrecognized_addrs: bool, pub restrict_endpoints: bool, } impl Default for NatConfig { fn default() -> Self { Self { hair_pinning: false, symmetric: false, blacklist_unrecognized_addrs: false, restrict_endpoints: false, } } } #[derive(Debug)] pub struct NetworkBuilder<C, E> { range: Ipv4Range, router: Ipv4Router, machines: Vec<Machine<C, E>>, networks: Vec<Network<C, E>>, } impl<C: Send + 'static, E: Send + 'static> NetworkBuilder<C, E> { pub fn new(range: Ipv4Range) -> Self { let router = Ipv4Router::new(range.gateway_addr()); Self { range, router, machines: Default::default(), networks: Default::default(), } } pub fn spawn_machine<B, F>(&mut self, 
builder: B) -> Ipv4Addr where B: Fn(mpsc::Receiver<C>, mpsc::Sender<E>) -> F + Send + 'static, F: Future<Output = ()> + Send + 'static, { let (iface_a, iface_b) = wire(); let (cmd_tx, cmd_rx) = mpsc::channel(0); let (event_tx, event_rx) = mpsc::channel(0); let addr = self.range.random_client_addr(); let mask = self.range.netmask_prefix_length(); smol::Task::blocking(async move { let join = machine(addr, mask, iface_b, builder(cmd_rx, event_tx)); join.join().unwrap(); }) .detach(); let machine = Machine { addr, tx: cmd_tx, rx: event_rx, }; self.machines.push(machine); self.router.add_connection(iface_a, vec![addr.into()]); addr } pub fn spawn_network(&mut self, config: Option<NatConfig>, mut builder: NetworkBuilder<C, E>) { let (net_a, net_b) = wire(); if let Some(config) = config { builder .router .add_connection(net_b, vec![Ipv4Range::global().into()]); let (nat_a, nat_b) = wire(); let nat_addr = self.range.random_client_addr(); let mut nat = Ipv4Nat::new(nat_b, net_a, nat_addr, builder.range); nat.set_hair_pinning(config.hair_pinning); nat.set_symmetric(config.symmetric); nat.set_blacklist_unrecognized_addrs(config.blacklist_unrecognized_addrs); nat.set_restrict_endpoints(config.restrict_endpoints); smol::Task::spawn(nat).detach(); self.router.add_connection(nat_a, vec![nat_addr.into()]); } else { builder .router .add_connection(net_b, vec![Ipv4Range::global().into()]); self.router .add_connection(net_a, vec![builder.range.into()]); } let network = builder.spawn(); self.networks.push(network); } pub fn spawn(self) -> Network<C, E> { let Self {
}
range, router, machines, networks, } = self; smol::Task::spawn(router).detach(); Network { range, machines, networks, } }
function_block-function_prefix_line
[ { "content": "/// Spawns a thread in a new network namespace and configures a TUN interface that sends and\n\n/// receives IP packets from the tx/rx channels and runs some UDP/TCP networking code in task.\n\npub fn machine<F>(addr: Ipv4Addr, mask: u8, plug: Plug, task: F) -> thread::JoinHandle<F::Output>\n\nwhe...
Rust
src/libstd/ffi/c_str.rs
jauhien/rust
f3092b1d58fd7fba154e40f6b2279d67663298a5
use fmt; use iter::IteratorExt; use libc; use mem; use ops::Deref; use slice::{self, SliceExt}; use string::String; use vec::Vec; #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] pub struct CString { inner: Vec<libc::c_char>, } impl CString { pub fn from_slice(v: &[u8]) -> CString { CString::from_vec(v.to_vec()) } pub fn from_vec(v: Vec<u8>) -> CString { assert!(!v.iter().any(|&x| x == 0)); unsafe { CString::from_vec_unchecked(v) } } pub unsafe fn from_vec_unchecked(mut v: Vec<u8>) -> CString { v.push(0); CString { inner: mem::transmute(v) } } pub fn as_slice_with_nul(&self) -> &[libc::c_char] { &self.inner } pub fn as_bytes(&self) -> &[u8] { unsafe { mem::transmute(&**self) } } pub fn as_bytes_with_nul(&self) -> &[u8] { unsafe { mem::transmute(self.as_slice_with_nul()) } } } impl Deref for CString { type Target = [libc::c_char]; fn deref(&self) -> &[libc::c_char] { &self.inner[..(self.inner.len() - 1)] } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for CString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(&String::from_utf8_lossy(self.as_bytes()), f) } } pub unsafe fn c_str_to_bytes<'a>(raw: &'a *const libc::c_char) -> &'a [u8] { let len = libc::strlen(*raw); slice::from_raw_buf(&*(raw as *const _ as *const *const u8), len as uint) } pub unsafe fn c_str_to_bytes_with_nul<'a>(raw: &'a *const libc::c_char) -> &'a [u8] { let len = libc::strlen(*raw) + 1; slice::from_raw_buf(&*(raw as *const _ as *const *const u8), len as uint) } #[cfg(test)] mod tests { use prelude::v1::*; use super::*; use libc; use mem; #[test] fn c_to_rust() { let data = b"123\0"; let ptr = data.as_ptr() as *const libc::c_char; unsafe { assert_eq!(c_str_to_bytes(&ptr), b"123"); assert_eq!(c_str_to_bytes_with_nul(&ptr), b"123\0"); } } #[test] fn simple() { let s = CString::from_slice(b"1234"); assert_eq!(s.as_bytes(), b"1234"); assert_eq!(s.as_bytes_with_nul(), b"1234\0"); unsafe { assert_eq!(&*s, mem::transmute::<_, &[libc::c_char]>(b"1234")); 
assert_eq!(s.as_slice_with_nul(), mem::transmute::<_, &[libc::c_char]>(b"1234\0")); } } #[should_fail] #[test] fn build_with_zero1() { CString::from_slice(b"\0"); } #[should_fail] #[test] fn build_with_zero2() { CString::from_vec(vec![0]); } #[test] fn build_with_zero3() { unsafe { let s = CString::from_vec_unchecked(vec![0]); assert_eq!(s.as_bytes(), b"\0"); } } #[test] fn formatted() { let s = CString::from_slice(b"12"); assert_eq!(format!("{:?}", s), "\"12\""); } }
use fmt; use iter::IteratorExt; use libc; use mem; use ops::Deref; use slice::{self, SliceExt}; use string::String; use vec::Vec; #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] pub struct CString { inner: Vec<libc::c_char>, } impl CString { pub fn from_slice(v: &[u8]) -> CString { CString::from_vec(v.to_vec()) } pub fn from_vec(v: Vec<u8>) -> CString { assert!(!v.iter().any(|&x| x == 0)); unsafe { CString::from_vec_unchecked(v) } } pub unsafe fn from_vec_unchecked(mut v: Vec<u8>) -> CString { v.push(0); CString { inner: mem::transmute(v) } } pub fn as_slice_with_nul(&self) -> &[libc::c_char] { &self.inner } pub fn as_bytes(&self) -> &[u8] { unsafe { mem::transmute(&**self) } } pub fn as_bytes_with_nul(&self) -> &[u8] { unsafe { mem::transmute(self.as_slice_with_nul()) } } } impl Deref for CString { type Target = [libc::c_char]; fn deref(&self) -> &[libc::c_char] { &self.inner[..(self.inner.len() - 1)] } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for CString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(&String::from_utf8_lossy(self.as_bytes()), f) } } pub unsafe fn c_str_to_bytes<'a>(raw: &'a *const libc::c_char) -> &'a [u8] { let len = libc::strlen(*raw); slice::from_raw_buf(&*(raw as *const _ as *const *const u8), len as uint) } pub unsafe fn c_str_to_bytes_with_nul<'a>(raw: &'a *const libc::c_char) -> &'a [u8] { let len = libc::strlen(*raw) + 1; slice::from_raw_buf(&*(raw as *const _ as *const *const u8), len as uint) } #[cfg(test)] mod tests { use prelude::v1::*; use super::*; use libc; use mem; #[test] fn c_to_rust() { let da
#[test] fn simple() { let s = CString::from_slice(b"1234"); assert_eq!(s.as_bytes(), b"1234"); assert_eq!(s.as_bytes_with_nul(), b"1234\0"); unsafe { assert_eq!(&*s, mem::transmute::<_, &[libc::c_char]>(b"1234")); assert_eq!(s.as_slice_with_nul(), mem::transmute::<_, &[libc::c_char]>(b"1234\0")); } } #[should_fail] #[test] fn build_with_zero1() { CString::from_slice(b"\0"); } #[should_fail] #[test] fn build_with_zero2() { CString::from_vec(vec![0]); } #[test] fn build_with_zero3() { unsafe { let s = CString::from_vec_unchecked(vec![0]); assert_eq!(s.as_bytes(), b"\0"); } } #[test] fn formatted() { let s = CString::from_slice(b"12"); assert_eq!(format!("{:?}", s), "\"12\""); } }
ta = b"123\0"; let ptr = data.as_ptr() as *const libc::c_char; unsafe { assert_eq!(c_str_to_bytes(&ptr), b"123"); assert_eq!(c_str_to_bytes_with_nul(&ptr), b"123\0"); } }
function_block-function_prefixed
[ { "content": "// same as cci_iter_lib, more-or-less, but not marked inline\n\npub fn iter<F>(v: Vec<uint> , mut f: F) where F: FnMut(uint) {\n\n let mut i = 0u;\n\n let n = v.len();\n\n while i < n {\n\n f(v[i]);\n\n i += 1u;\n\n }\n\n}\n", "file_path": "src/test/auxiliary/cci_no_i...
Rust
rs/crypto/internal/crypto_lib/threshold_sig/tecdsa/tests/complaints.rs
pipe-blockchain/ic
69d3263702ac3b52fd18c35dd1cc46de08d92073
use rand::Rng; use std::collections::BTreeMap; use tecdsa::*; fn corrupt_ciphertext_single( ctext: &[EccScalar], corruption_target: usize, ) -> ThresholdEcdsaResult<Vec<EccScalar>> { let mut ctext = ctext.to_vec(); let curve_type = ctext[corruption_target].curve_type(); let randomizer = EccScalar::one(curve_type); ctext[corruption_target] = ctext[corruption_target].add(&randomizer)?; Ok(ctext) } fn corrupt_ciphertext_pairs( ctext: &[(EccScalar, EccScalar)], corruption_target: usize, ) -> ThresholdEcdsaResult<Vec<(EccScalar, EccScalar)>> { let mut ctext = ctext.to_vec(); let curve_type = ctext[corruption_target].0.curve_type(); let randomizer = EccScalar::one(curve_type); ctext[corruption_target].0 = ctext[corruption_target].0.add(&randomizer)?; Ok(ctext) } fn corrupt_dealing( dealing: &IDkgDealingInternal, corruption_target: usize, ) -> ThresholdEcdsaResult<IDkgDealingInternal> { let ciphertext = match &dealing.ciphertext { MEGaCiphertext::Single(c) => MEGaCiphertext::Single(MEGaCiphertextSingle { ephemeral_key: c.ephemeral_key, ctexts: corrupt_ciphertext_single(&c.ctexts, corruption_target)?, }), MEGaCiphertext::Pairs(c) => MEGaCiphertext::Pairs(MEGaCiphertextPair { ephemeral_key: c.ephemeral_key, ctexts: corrupt_ciphertext_pairs(&c.ctexts, corruption_target)?, }), }; Ok(IDkgDealingInternal { ciphertext, commitment: dealing.commitment.clone(), proof: dealing.proof.clone(), }) } #[test] fn should_complaint_system_work() -> ThresholdEcdsaResult<()> { let curve = EccCurveType::K256; let associated_data = b"assoc_data_test"; let mut rng = rand::thread_rng(); let sk0 = MEGaPrivateKey::generate(curve, &mut rng)?; let pk0 = sk0.public_key()?; let sk1 = MEGaPrivateKey::generate(curve, &mut rng)?; let pk1 = sk1.public_key()?; let dealer_index = 0; let threshold = 1; let dealing = IDkgDealingInternal::new( &SecretShares::Random, curve, Seed::from_rng(&mut rng), threshold, &[pk0, pk1], dealer_index, associated_data, )?; let mut dealings = BTreeMap::new(); let 
corruption_target = 0; dealings.insert( dealer_index, corrupt_dealing(&dealing, corruption_target as usize)?, ); let complaints = generate_complaints( &dealings, associated_data, corruption_target, &sk0, &pk0, Seed::from_rng(&mut rng), ) .expect("failed to generate complaints"); assert_eq!(complaints.len(), 1); for complaint in complaints.values() { complaint .verify( dealings.get(&dealer_index).unwrap(), dealer_index, corruption_target, &pk0, associated_data, ) .unwrap(); assert!(complaint .verify( dealings.get(&dealer_index).unwrap(), dealer_index, corruption_target, &pk0, &rng.gen::<[u8; 32]>(), ) .is_err()); assert!(complaint .verify( dealings.get(&dealer_index).unwrap(), dealer_index, corruption_target, &pk1, associated_data, ) .is_err()); assert!(complaint .verify( dealings.get(&dealer_index).unwrap(), dealer_index + 1, corruption_target, &pk0, associated_data, ) .is_err()); } let modified_ephemeral_key = MEGaCiphertextPair { ephemeral_key: EccPoint::hash_to_point(curve, &rng.gen::<[u8; 32]>(), "ad".as_bytes())?, ctexts: vec![ ( EccScalar::random(curve, &mut rng)?, EccScalar::random(curve, &mut rng)?, ), ( EccScalar::random(curve, &mut rng)?, EccScalar::random(curve, &mut rng)?, ), ], }; let bad_key_dealing = IDkgDealingInternal { ciphertext: modified_ephemeral_key.into(), commitment: dealing.commitment.clone(), proof: dealing.proof, }; assert_eq!( complaints .get(&0) .unwrap() .verify( &bad_key_dealing, dealer_index, corruption_target, &pk0, associated_data, ) .unwrap_err(), ThresholdEcdsaError::InvalidProof ); Ok(()) } #[test] fn should_complaint_verification_reject_spurious_complaints() -> ThresholdEcdsaResult<()> { let curve = EccCurveType::K256; let associated_data = b"assoc_data_test"; let mut rng = rand::thread_rng(); let sk = MEGaPrivateKey::generate(curve, &mut rng)?; let pk = sk.public_key()?; let dealer_index = 0; let receiver_index = 0; let threshold = 1; let dealing = IDkgDealingInternal::new( &SecretShares::Random, curve, Seed::from_rng(&mut 
rng), threshold, &[pk], dealer_index, associated_data, )?; let complaint = IDkgComplaintInternal::new( Seed::from_rng(&mut rng), &dealing, dealer_index, receiver_index, &sk, &pk, associated_data, )?; assert!(complaint .verify(&dealing, dealer_index, 0, &pk, associated_data) .is_err()); Ok(()) }
use rand::Rng; use std::collections::BTreeMap; use tecdsa::*; fn corrupt_ciphertext_single( ctext: &[EccScalar], corruption_target: usize, ) -> ThresholdEcdsaResult<Vec<EccScalar>> { let mut ctext = ctext.to_vec(); let curve_type = ctext[corruption_target].curve_type(); let randomizer = EccScalar::one(curve_type); ctext[corruption_target] = ctext[corruption_target].add(&randomizer)?; Ok(ctext) } fn corrupt_ciphertext_pairs( ctext: &[(EccScalar, EccScalar)], corruption_target: usize, ) -> ThresholdEcdsaResult<Vec<(EccScalar, EccScalar)>> { let mut ctext = ctext.to_vec(); let curve_type = ctext[corruption_target].0.curve_type(); let randomizer = EccScalar::one(curve_type); ctext[corruption_target].0 = ctext[corruption_target].0.add(&randomizer)?; Ok(ctext) } fn corrupt_dealing( dealing: &IDkgDealingInternal, corruption_target: usize, ) -> ThresholdEcdsaResult<IDkgDealingInternal> { let ciphertext = match &dealing.ciphertext { MEGaCiphertext::Single(c) => MEGaCiphertext::Single(MEGaCiphertextSingle { ephemeral_key: c.ephemeral_key, ctexts: corrupt_ciphertext_single(&c.ctexts, corruption_target)?, }), MEGaCiphertext::Pairs(c) => MEGaCiphertext::Pairs(MEGaCiphertextPair { ephemeral_key: c.ephemeral_key, ctexts: corrupt_ciphertext_pairs(&c.ctexts, corruption_target)?, }), };
} #[test] fn should_complaint_system_work() -> ThresholdEcdsaResult<()> { let curve = EccCurveType::K256; let associated_data = b"assoc_data_test"; let mut rng = rand::thread_rng(); let sk0 = MEGaPrivateKey::generate(curve, &mut rng)?; let pk0 = sk0.public_key()?; let sk1 = MEGaPrivateKey::generate(curve, &mut rng)?; let pk1 = sk1.public_key()?; let dealer_index = 0; let threshold = 1; let dealing = IDkgDealingInternal::new( &SecretShares::Random, curve, Seed::from_rng(&mut rng), threshold, &[pk0, pk1], dealer_index, associated_data, )?; let mut dealings = BTreeMap::new(); let corruption_target = 0; dealings.insert( dealer_index, corrupt_dealing(&dealing, corruption_target as usize)?, ); let complaints = generate_complaints( &dealings, associated_data, corruption_target, &sk0, &pk0, Seed::from_rng(&mut rng), ) .expect("failed to generate complaints"); assert_eq!(complaints.len(), 1); for complaint in complaints.values() { complaint .verify( dealings.get(&dealer_index).unwrap(), dealer_index, corruption_target, &pk0, associated_data, ) .unwrap(); assert!(complaint .verify( dealings.get(&dealer_index).unwrap(), dealer_index, corruption_target, &pk0, &rng.gen::<[u8; 32]>(), ) .is_err()); assert!(complaint .verify( dealings.get(&dealer_index).unwrap(), dealer_index, corruption_target, &pk1, associated_data, ) .is_err()); assert!(complaint .verify( dealings.get(&dealer_index).unwrap(), dealer_index + 1, corruption_target, &pk0, associated_data, ) .is_err()); } let modified_ephemeral_key = MEGaCiphertextPair { ephemeral_key: EccPoint::hash_to_point(curve, &rng.gen::<[u8; 32]>(), "ad".as_bytes())?, ctexts: vec![ ( EccScalar::random(curve, &mut rng)?, EccScalar::random(curve, &mut rng)?, ), ( EccScalar::random(curve, &mut rng)?, EccScalar::random(curve, &mut rng)?, ), ], }; let bad_key_dealing = IDkgDealingInternal { ciphertext: modified_ephemeral_key.into(), commitment: dealing.commitment.clone(), proof: dealing.proof, }; assert_eq!( complaints .get(&0) .unwrap() .verify( 
&bad_key_dealing, dealer_index, corruption_target, &pk0, associated_data, ) .unwrap_err(), ThresholdEcdsaError::InvalidProof ); Ok(()) } #[test] fn should_complaint_verification_reject_spurious_complaints() -> ThresholdEcdsaResult<()> { let curve = EccCurveType::K256; let associated_data = b"assoc_data_test"; let mut rng = rand::thread_rng(); let sk = MEGaPrivateKey::generate(curve, &mut rng)?; let pk = sk.public_key()?; let dealer_index = 0; let receiver_index = 0; let threshold = 1; let dealing = IDkgDealingInternal::new( &SecretShares::Random, curve, Seed::from_rng(&mut rng), threshold, &[pk], dealer_index, associated_data, )?; let complaint = IDkgComplaintInternal::new( Seed::from_rng(&mut rng), &dealing, dealer_index, receiver_index, &sk, &pk, associated_data, )?; assert!(complaint .verify(&dealing, dealer_index, 0, &pk, associated_data) .is_err()); Ok(()) }
Ok(IDkgDealingInternal { ciphertext, commitment: dealing.commitment.clone(), proof: dealing.proof.clone(), })
call_expression
[ { "content": "fn dealings(c: &mut Criterion) {\n\n c.bench_function(\"create_dealing(Random, 3/5)\", |b| {\n\n b.iter(|| create_random_dealing(3, 5))\n\n });\n\n\n\n c.bench_function(\"create_dealing(Random, 5/9)\", |b| {\n\n b.iter(|| create_random_dealing(5, 9))\n\n });\n\n}\n\n\n\nc...
Rust
src/ir/item.rs
dbdr/cargo-call-stack
681d8edad0d27f7cd25aac064c173968a08f304a
use nom::{types::CompleteStr, *}; use crate::ir::{define::Define, FnSig}; #[derive(Clone, Debug, PartialEq)] pub enum Item<'a> { Alias(&'a str, &'a str), Comment, SourceFilename, Target, Global, Type, Define(Define<'a>), Declare(Declare<'a>), Attributes, Metadata, } #[derive(Clone, Debug, PartialEq)] pub struct Declare<'a> { pub name: &'a str, pub sig: Option<FnSig<'a>>, } named!(comment<CompleteStr, Item>, map!(super::comment, |_| Item::Comment)); named!(source_filename<CompleteStr, Item>, do_parse!( tag!("source_filename") >> space >> char!('=') >> not_line_ending >> (Item::SourceFilename) )); named!(target<CompleteStr, Item>, do_parse!( tag!("target") >> space >> alt!(tag!("datalayout") | tag!("triple")) >> space >> char!('=') >> not_line_ending >> (Item::Target) )); named!(alias<CompleteStr, Item>, do_parse!( name: call!(super::function) >> space >> char!('=') >> space >> many0!(do_parse!(call!(super::attribute) >> space >> (()))) >> tag!("alias") >> space >> call!(super::type_) >> space0 >> char!(',') >> space >> call!(super::type_) >> space >> alias: call!(super::function) >> (Item::Alias(name.0, alias.0)) )); named!(global<CompleteStr, Item>, do_parse!( call!(super::global) >> space >> char!('=') >> space >> many0!(do_parse!(call!(super::attribute) >> space >> (()))) >> alt!(tag!("global") | tag!("constant")) >> space >> not_line_ending >> (Item::Global) )); named!(type_<CompleteStr, Item>, do_parse!( call!(super::alias) >> space >> char!('=') >> not_line_ending >> (Item::Type) )); fn declare(input: CompleteStr) -> IResult<CompleteStr, Item> { let (rest, (output, name)) = do_parse!( input, tag!("declare") >> space >> many0!(do_parse!(call!(super::attribute) >> space >> (()))) >> output: alt!(map!(call!(super::type_), Some) | map!(tag!("void"), |_| None)) >> space >> name: call!(super::function) >> char!('(') >> ((output, name.0)) )?; if name.starts_with("llvm.") { do_parse!( rest, not_line_ending >> (Item::Declare(Declare { name, sig: None })) ) } else { 
do_parse!( rest, inputs: separated_list!( do_parse!(char!(',') >> space >> (())), do_parse!( ty: call!(super::type_) >> many0!(do_parse!(space >> call!(super::attribute) >> (()))) >> (ty) ) ) >> char!(')') >> not_line_ending >> (Item::Declare(Declare { name, sig: Some(FnSig { output: output.map(Box::new), inputs }) })) ) } } named!(attributes<CompleteStr, Item>, do_parse!( tag!("attributes") >> space >> char!('#') >> not_line_ending >> (Item::Attributes) )); named!(metadata<CompleteStr, Item>, do_parse!( tag!("!") >> not_line_ending >> (Item::Metadata) )); named!(pub item<CompleteStr, Item>, alt!( comment | source_filename | target | type_ | global | alias | map!(call!(super::define::parse), Item::Define) | declare | attributes | metadata )); #[cfg(test)] mod tests { use nom::types::CompleteStr as S; use crate::ir::{Declare, FnSig, Item, Type}; #[test] fn alias() { assert_eq!( super::alias(S( r#"@__pre_init = unnamed_addr alias void (), void ()* @DefaultPreInit"# )), Ok((S(""), Item::Alias("__pre_init", "DefaultPreInit"))) ); } #[test] fn declare() { assert_eq!( super::declare(S(r#"declare noalias i8* @malloc(i64) unnamed_addr #3"#)), Ok(( S(""), Item::Declare(Declare { name: "malloc", sig: Some(FnSig { inputs: vec![Type::Integer(64)], output: Some(Box::new(Type::Pointer(Box::new(Type::Integer(8))))) }) }) )) ); } #[test] fn global() { assert_eq!( super::global(S( "@0 = private constant <{ [0 x i8] }> zeroinitializer, align 4, !dbg !0" )), Ok((S(""), Item::Global)) ); assert_eq!( super::global(S( "@DEVICE_PERIPHERALS = local_unnamed_addr global <{ [1 x i8] }> zeroinitializer, align 1, !dbg !175" )), Ok((S(""), Item::Global)) ); } #[test] fn type_() { assert_eq!( super::type_(S("%\"blue_pill::ItmLogger\" = type {}")), Ok((S(""), Item::Type)) ); } }
use nom::{types::CompleteStr, *}; use crate::ir::{define::Define, FnSig}; #[derive(Clone, Debug, PartialEq)] pub enum Item<'a> { Alias(&'a str, &'a str), Comment, SourceFilename, Target, Global, Type, Define(Define<'a>), Declare(Declare<'a>), Attributes, Metadata, } #[derive(Clone, Debug, PartialEq)] pub struct Declare<'a> { pub name: &'a str, pub sig: Option<FnSig<'a>>, } named!(comment<CompleteStr, Item>, map!(super::comment, |_| Item::Comment)); named!(source_filename<CompleteStr, Item>, do_parse!( tag!("source_filename") >> space >> char!('=') >> not_line_ending >> (Item::SourceFilename) )); named!(target<CompleteStr, Item>, do_parse!( tag!("target") >> space >> alt!(tag!("datalayout") | tag!("triple")) >> space >> char!('=') >> not_line_ending >> (Item::Target) )); named!(alias<CompleteStr, Item>, do_parse!( name: call!(super::function) >> space >> char!('=') >> space >> many0!(do_parse!(call!(super::attribute) >> space >> (()))) >> tag!("alias") >> space >> call!(super::type_) >> space0 >> char!(',') >> space >> call!(super::type_) >> space >> alias: call!(super::function) >> (Item::Alias(name.0, alias.0)) )); named!(global<CompleteStr, Item>, do_parse!( call!(super::global) >> space >> char!('=') >> space >> many0!(do_parse!(call!(super::attribute) >> space >> (()))) >> alt!(tag!("global") | tag!("constant")) >> space >> not_line_ending >> (Item::Global) )); named!(type_<CompleteStr, Item>, do_parse!( call!(super::alias) >> space >> char!('=') >> not_line_ending >> (Item::Type) )); fn declare(input: CompleteStr) -> IResult<CompleteStr, Item> {
if name.starts_with("llvm.") { do_parse!( rest, not_line_ending >> (Item::Declare(Declare { name, sig: None })) ) } else { do_parse!( rest, inputs: separated_list!( do_parse!(char!(',') >> space >> (())), do_parse!( ty: call!(super::type_) >> many0!(do_parse!(space >> call!(super::attribute) >> (()))) >> (ty) ) ) >> char!(')') >> not_line_ending >> (Item::Declare(Declare { name, sig: Some(FnSig { output: output.map(Box::new), inputs }) })) ) } } named!(attributes<CompleteStr, Item>, do_parse!( tag!("attributes") >> space >> char!('#') >> not_line_ending >> (Item::Attributes) )); named!(metadata<CompleteStr, Item>, do_parse!( tag!("!") >> not_line_ending >> (Item::Metadata) )); named!(pub item<CompleteStr, Item>, alt!( comment | source_filename | target | type_ | global | alias | map!(call!(super::define::parse), Item::Define) | declare | attributes | metadata )); #[cfg(test)] mod tests { use nom::types::CompleteStr as S; use crate::ir::{Declare, FnSig, Item, Type}; #[test] fn alias() { assert_eq!( super::alias(S( r#"@__pre_init = unnamed_addr alias void (), void ()* @DefaultPreInit"# )), Ok((S(""), Item::Alias("__pre_init", "DefaultPreInit"))) ); } #[test] fn declare() { assert_eq!( super::declare(S(r#"declare noalias i8* @malloc(i64) unnamed_addr #3"#)), Ok(( S(""), Item::Declare(Declare { name: "malloc", sig: Some(FnSig { inputs: vec![Type::Integer(64)], output: Some(Box::new(Type::Pointer(Box::new(Type::Integer(8))))) }) }) )) ); } #[test] fn global() { assert_eq!( super::global(S( "@0 = private constant <{ [0 x i8] }> zeroinitializer, align 4, !dbg !0" )), Ok((S(""), Item::Global)) ); assert_eq!( super::global(S( "@DEVICE_PERIPHERALS = local_unnamed_addr global <{ [1 x i8] }> zeroinitializer, align 1, !dbg !175" )), Ok((S(""), Item::Global)) ); } #[test] fn type_() { assert_eq!( super::type_(S("%\"blue_pill::ItmLogger\" = type {}")), Ok((S(""), Item::Type)) ); } }
let (rest, (output, name)) = do_parse!( input, tag!("declare") >> space >> many0!(do_parse!(call!(super::attribute) >> space >> (()))) >> output: alt!(map!(call!(super::type_), Some) | map!(tag!("void"), |_| None)) >> space >> name: call!(super::function) >> char!('(') >> ((output, name.0)) )?;
assignment_statement
[ { "content": "pub fn type_(input: CompleteStr) -> IResult<CompleteStr, Type> {\n\n let (rest, void) = opt!(input, tag!(\"void\"))?;\n\n\n\n if void.is_some() {\n\n // this must be a function\n\n let (mut rest, inputs) = do_parse!(rest, space >> inputs: fn_inputs >> (inputs))?;\n\n let...
Rust
sqlx-core/src/query.rs
rage311/sqlx
249efbd36b07ce609b6d946c3fcd50653a6eccd0
use std::marker::PhantomData; use async_stream::try_stream; use either::Either; use futures_core::stream::BoxStream; use futures_util::{future, StreamExt, TryFutureExt, TryStreamExt}; use crate::arguments::{Arguments, IntoArguments}; use crate::database::{Database, HasArguments}; use crate::encode::Encode; use crate::error::Error; use crate::executor::{Execute, Executor}; #[must_use = "query must be executed to affect database"] pub struct Query<'q, DB: Database, A> { pub(crate) query: &'q str, pub(crate) arguments: Option<A>, pub(crate) database: PhantomData<DB>, } #[must_use = "query must be executed to affect database"] pub struct Map<'q, DB: Database, F, A> { inner: Query<'q, DB, A>, mapper: F, } impl<'q, DB, A> Execute<'q, DB> for Query<'q, DB, A> where DB: Database, A: Send + IntoArguments<'q, DB>, { #[inline] fn query(&self) -> &'q str { self.query } #[inline] fn take_arguments(&mut self) -> Option<<DB as HasArguments<'q>>::Arguments> { self.arguments.take().map(IntoArguments::into_arguments) } } impl<'q, DB: Database> Query<'q, DB, <DB as HasArguments<'q>>::Arguments> { pub fn bind<T: 'q + Encode<'q, DB>>(mut self, value: T) -> Self { if let Some(arguments) = &mut self.arguments { arguments.add(value); } self } } impl<'q, DB, A: Send> Query<'q, DB, A> where DB: Database, A: 'q + IntoArguments<'q, DB>, { #[inline] pub fn map<F, O>(self, f: F) -> Map<'q, DB, impl Fn(DB::Row) -> Result<O, Error>, A> where F: Fn(DB::Row) -> O, { self.try_map(move |row| Ok(f(row))) } #[inline] pub fn try_map<F, O>(self, f: F) -> Map<'q, DB, F, A> where F: Fn(DB::Row) -> Result<O, Error>, { Map { inner: self, mapper: f, } } #[inline] pub async fn execute<'e, 'c: 'e, E>(self, executor: E) -> Result<u64, Error> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.execute(self).await } #[inline] pub async fn execute_many<'e, 'c: 'e, E>(self, executor: E) -> BoxStream<'e, Result<u64, Error>> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.execute_many(self) 
} #[inline] pub fn fetch<'e, 'c: 'e, E>(self, executor: E) -> BoxStream<'e, Result<DB::Row, Error>> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.fetch(self) } #[inline] pub fn fetch_many<'e, 'c: 'e, E>( self, executor: E, ) -> BoxStream<'e, Result<Either<u64, DB::Row>, Error>> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.fetch_many(self) } #[inline] pub async fn fetch_all<'e, 'c: 'e, E>(self, executor: E) -> Result<Vec<DB::Row>, Error> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.fetch_all(self).await } #[inline] pub async fn fetch_one<'e, 'c: 'e, E>(self, executor: E) -> Result<DB::Row, Error> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.fetch_one(self).await } #[inline] pub async fn fetch_optional<'e, 'c: 'e, E>(self, executor: E) -> Result<Option<DB::Row>, Error> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.fetch_optional(self).await } } impl<'q, DB, F: Send, A: Send> Execute<'q, DB> for Map<'q, DB, F, A> where DB: Database, A: IntoArguments<'q, DB>, { #[inline] fn query(&self) -> &'q str { self.inner.query() } #[inline] fn take_arguments(&mut self) -> Option<<DB as HasArguments<'q>>::Arguments> { self.inner.take_arguments() } } impl<'q, DB, F, O, A> Map<'q, DB, F, A> where DB: Database, F: Send + Sync + Fn(DB::Row) -> Result<O, Error>, O: Send + Unpin, A: 'q + Send + IntoArguments<'q, DB>, { pub fn fetch<'e, 'c: 'e, E>(self, executor: E) -> BoxStream<'e, Result<O, Error>> where 'q: 'e, E: 'e + Executor<'c, Database = DB>, DB: 'e, F: 'e, O: 'e, { self.fetch_many(executor) .try_filter_map(|step| async move { Ok(match step { Either::Left(_) => None, Either::Right(o) => Some(o), }) }) .boxed() } pub fn fetch_many<'e, 'c: 'e, E>( self, executor: E, ) -> BoxStream<'e, Result<Either<u64, O>, Error>> where 'q: 'e, E: 'e + Executor<'c, Database = DB>, DB: 'e, F: 'e, O: 'e, { Box::pin(try_stream! 
{ let mut s = executor.fetch_many(self.inner); while let Some(v) = s.try_next().await? { match v { Either::Left(v) => yield Either::Left(v), Either::Right(row) => { let mapped = (self.mapper)(row)?; yield Either::Right(mapped); } } } }) } pub async fn fetch_all<'e, 'c: 'e, E>(self, executor: E) -> Result<Vec<O>, Error> where 'q: 'e, E: 'e + Executor<'c, Database = DB>, DB: 'e, F: 'e, O: 'e, { self.fetch(executor).try_collect().await } pub async fn fetch_one<'e, 'c: 'e, E>(self, executor: E) -> Result<O, Error> where 'q: 'e, E: 'e + Executor<'c, Database = DB>, DB: 'e, F: 'e, O: 'e, { self.fetch_optional(executor) .and_then(|row| match row { Some(row) => future::ok(row), None => future::err(Error::RowNotFound), }) .await } pub async fn fetch_optional<'e, 'c: 'e, E>(self, executor: E) -> Result<Option<O>, Error> where 'q: 'e, E: 'e + Executor<'c, Database = DB>, DB: 'e, F: 'e, O: 'e, { let row = executor.fetch_optional(self.inner).await?; if let Some(row) = row { (self.mapper)(row).map(Some) } else { Ok(None) } } } #[inline] pub fn query<DB>(sql: &str) -> Query<'_, DB, <DB as HasArguments<'_>>::Arguments> where DB: Database, { Query { database: PhantomData, arguments: Some(Default::default()), query: sql, } } #[inline] pub fn query_with<'q, DB, A>(sql: &'q str, arguments: A) -> Query<'q, DB, A> where DB: Database, A: IntoArguments<'q, DB>, { Query { database: PhantomData, arguments: Some(arguments), query: sql, } }
use std::marker::PhantomData; use async_stream::try_stream; use either::Either; use futures_core::stream::BoxStream; use futures_util::{future, StreamExt, TryFutureExt, TryStreamExt}; use crate::arguments::{Arguments, IntoArguments}; use crate::database::{Database, HasArguments}; use crate::encode::Encode; use crate::error::Error; use crate::executor::{Execute, Executor}; #[must_use = "query must be executed to affect database"] pub struct Query<'q, DB: Database, A> { pub(crate) query: &'q str, pub(crate) arguments: Option<A>, pub(crate) database: PhantomData<DB>, } #[must_use = "query must be executed to affect database"] pub struct Map<'q, DB: Database, F, A> { inner: Query<'q, DB, A>, mapper: F, } impl<'q, DB, A> Execute<'q, DB> for Query<'q, DB, A> where DB: Database, A: Send + IntoArguments<'q, DB>, { #[inline] fn query(&self) -> &'q str { self.query } #[inline] fn take_arguments(&mut self) -> Option<<DB as HasArguments<'q>>::Arguments> { self.arguments.take().map(IntoArguments::into_arguments) } } impl<'q, DB: Database> Query<'q, DB, <DB as HasArguments<'q>>::Arguments> { pub fn bind<T: 'q + Encode<'q, DB>>(mut self, value: T) -> Self { if let Some(arguments) = &mut self.arguments { arguments.add(value); } self } } impl<'q, DB, A: Send> Query<'q, DB, A> where DB: Database, A: 'q + IntoArguments<'q, DB>, { #[inline] pub fn map<F, O>(self, f: F) -> Map<'q, DB, impl Fn(DB::Row) -> Result<O, Error>, A> where F: Fn(DB::Row) -> O, { self.try_map(move |row| Ok(f(row))) } #[inline] pub fn try_map<F, O>(self, f: F) -> Map<'q, DB, F, A> where F: Fn(DB::Row) -> Result<O, Error>, { Map { inner: self, mapper: f, } } #[inline] pub async fn execute<'e, 'c: 'e, E>(self, executor: E) -> Result<u64, Error> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.execute(self).await } #[inline] pub async fn execute_many<'e, 'c: 'e, E>(self, executor: E) -> BoxStream<'e, Result<u64, Error>> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.execute_many(self) 
} #[inline] pub fn fetch<'e, 'c: 'e, E>(self, executor: E) -> BoxStream<'e, Result<DB::Row, Error>> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.fetch(self) } #[inline] pub fn fetch_many<'e, 'c: 'e, E>( self, executor: E, ) -> BoxStream<'e, Result<Either<u64, DB::Row>, Error>> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.fetch_many(self) } #[inline] pub async fn fetch_all<'e, 'c: 'e, E>(self, executor: E) -> Result<Vec<DB::Row>, Error> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.fetch_all(self).await } #[inline] pub async fn fetch_one<'e, 'c: 'e, E>(self, executor: E) -> Result<DB::Row, Error> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.fetch_one(self).await } #[inline] pub async fn fetch_optional<'e, 'c: 'e, E>(self, executor: E) -> Result<Option<DB::Row>, Error> where 'q: 'e, A: 'e, E: Executor<'c, Database = DB>, { executor.fetch_optional(self).await } } impl<'q, DB, F: Send, A: Send> Execute<'q, DB> for Map<'q, DB, F, A> where DB: Database, A: IntoArguments<'q, DB>, { #[inline] fn query(&self) -> &'q str { self.inner.query() } #[inline] fn take_arguments(&mut self) -> Option<<DB as HasArguments<'q>>::Arguments> { self.inner.take_arguments() } } impl<'q, DB, F, O, A> Map<'q, DB, F, A> where DB: Database, F: Send + Sync + Fn(DB::Row) -> Result<O, Error>, O: Send + Unpin, A: 'q + Send + IntoArguments<'q, DB>, { pub fn fetch<'e, 'c: 'e, E>(self, executor: E) -> BoxStream<'e, Result<O, Error>> where 'q: 'e, E: 'e + Executor<'c, Database = DB>, DB: 'e, F: 'e, O: 'e, { self.fetch_many(executor) .try_fi
ither::Right(mapped); } } } }) } pub async fn fetch_all<'e, 'c: 'e, E>(self, executor: E) -> Result<Vec<O>, Error> where 'q: 'e, E: 'e + Executor<'c, Database = DB>, DB: 'e, F: 'e, O: 'e, { self.fetch(executor).try_collect().await } pub async fn fetch_one<'e, 'c: 'e, E>(self, executor: E) -> Result<O, Error> where 'q: 'e, E: 'e + Executor<'c, Database = DB>, DB: 'e, F: 'e, O: 'e, { self.fetch_optional(executor) .and_then(|row| match row { Some(row) => future::ok(row), None => future::err(Error::RowNotFound), }) .await } pub async fn fetch_optional<'e, 'c: 'e, E>(self, executor: E) -> Result<Option<O>, Error> where 'q: 'e, E: 'e + Executor<'c, Database = DB>, DB: 'e, F: 'e, O: 'e, { let row = executor.fetch_optional(self.inner).await?; if let Some(row) = row { (self.mapper)(row).map(Some) } else { Ok(None) } } } #[inline] pub fn query<DB>(sql: &str) -> Query<'_, DB, <DB as HasArguments<'_>>::Arguments> where DB: Database, { Query { database: PhantomData, arguments: Some(Default::default()), query: sql, } } #[inline] pub fn query_with<'q, DB, A>(sql: &'q str, arguments: A) -> Query<'q, DB, A> where DB: Database, A: IntoArguments<'q, DB>, { Query { database: PhantomData, arguments: Some(arguments), query: sql, } }
lter_map(|step| async move { Ok(match step { Either::Left(_) => None, Either::Right(o) => Some(o), }) }) .boxed() } pub fn fetch_many<'e, 'c: 'e, E>( self, executor: E, ) -> BoxStream<'e, Result<Either<u64, O>, Error>> where 'q: 'e, E: 'e + Executor<'c, Database = DB>, DB: 'e, F: 'e, O: 'e, { Box::pin(try_stream! { let mut s = executor.fetch_many(self.inner); while let Some(v) = s.try_next().await? { match v { Either::Left(v) => yield Either::Left(v), Either::Right(row) => { let mapped = (self.mapper)(row)?; yield E
random
[ { "content": "#[inline]\n\npub fn query_as<'q, DB, O>(sql: &'q str) -> QueryAs<'q, DB, O, <DB as HasArguments<'q>>::Arguments>\n\nwhere\n\n DB: Database,\n\n O: for<'r> FromRow<'r, DB::Row>,\n\n{\n\n QueryAs {\n\n inner: query(sql),\n\n output: PhantomData,\n\n }\n\n}\n\n\n\n/// Make a...
Rust
src/base.rs
KeenS/kappaLisp
9f6726d49e278f40270994939d126285cd9b8d3b
use std::ops::Deref; use env::Env; use eval::funcall; use expr::{Error as E, Expr, Kfloat, Kint, Result, Type}; use util::*; macro_rules! expr { ($e:expr) => { $e }; } macro_rules! def_arith_op { ($name: ident, $op: tt, $init: expr) => { pub fn $name(env: &mut Env, args: &Expr) -> Result<Expr> { let (init, args) = match args { Expr::Cons(hd, tl) => match tl.deref() { tl @ &Expr::Cons(_, _) => (hd.deref().clone(), tl), _ => ($init, args), }, args => ($init, args), }; f_foldl( env, &|_, x, y| match (x, y) { (&Expr::Int(x), &Expr::Int(y)) => Ok(kint(expr!(x $op y))), (&Expr::Float(x), &Expr::Int(y)) => Ok(kfloat(expr!(x $op (y as Kfloat)))), (&Expr::Int(x), &Expr::Float(y)) => Ok(kfloat(expr!((x as Kfloat) $op y))), (&Expr::Float(x), &Expr::Float(y)) => Ok(kfloat(expr!(x $op y))), (&Expr::Int(_), y) => Err(E::Type(Type::Int, y.clone())), (x, _) => Err(E::Type(Type::Int, x.clone())), }, &init, args, ) } }; } def_arith_op!(k_add, +, kint(0)); def_arith_op!(k_sub, -, kint(0)); def_arith_op!(k_mul, *, kint(1)); def_arith_op!(k_div, /, kint(1)); macro_rules! 
def_arith_cmp { ($name: ident, $op: tt) => { pub fn $name(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, (x, Int)(y, Int)); Ok(kbool(expr!(x $op y))) } }; } def_arith_cmp!(k_gt, >); def_arith_cmp!(k_ge, >=); def_arith_cmp!(k_lt, <); def_arith_cmp!(k_le, <=); def_arith_cmp!(k_eq, ==); def_arith_cmp!(k_neq, !=); pub fn k_concat(env: &mut Env, args: &Expr) -> Result<Expr> { let res = f_foldl( env, &|_, acc, x| match (acc, x) { (&Expr::Str(ref acc), &Expr::Str(ref x)) => Ok(kstr(format!("{}{}", acc, x))), (_, y) => Err(E::Type(Type::Str, y.clone())), }, &kstr(""), &args, ); Ok(res?.clone()) } pub fn k_funcall(env: &mut Env, args: &Expr) -> Result<Expr> { match args { &Expr::Cons(ref f, ref args) => match f.deref() { &Expr::Proc(ref f) => funcall(env, f, args.deref()), f => Err(E::NotFunction(f.clone())), }, args => Err(E::Form(args.clone())), } } pub fn k_cons(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, (car, Any)(cdr, Any)); Ok(kcons(car.clone(), cdr.clone())) } pub fn k_car(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, ((car, _), Cons)); Ok(car.clone()) } pub fn k_cdr(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, ((_, cdr), Cons)); Ok(cdr.clone()) } pub fn k_equal_p(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, (x, Any)(y, Any)); if x == y { Ok(ksym("t")) } else { Ok(knil()) } } pub fn k_string_to_number(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, (s, Str)); match s.parse() { Ok(i) => Ok(Expr::Int(i)), Err(_) => Err(E::InvalidArgument(args.clone())), } } pub fn k_substring(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, (s, Str) & optional(start, Int)(end, Int)); let len = s.len(); let ilen = len as Kint; let start = start.unwrap_or(0); let end = end.unwrap_or(ilen); if 0 <= start && start <= end && end < ilen { let start = start as usize; let end = end as usize; Ok(kstr((&s[start..end]).to_owned())) } else { Err(E::InvalidArgument(args.clone())) } } pub fn init(env: 
&mut Env) -> Result<()> { env.fregister("+", kprim("k_add", k_add)); env.fregister("-", kprim("k_sub", k_sub)); env.fregister("/", kprim("k_div", k_div)); env.fregister("*", kprim("k_mul", k_mul)); env.fregister(">", kprim("k_gt", k_gt)); env.fregister(">=", kprim("k_ge", k_ge)); env.fregister("<", kprim("k_lt", k_lt)); env.fregister("<=", kprim("k_le", k_le)); env.fregister("=", kprim("k_eq", k_eq)); env.fregister("/=", kprim("k_neq", k_neq)); env.fregister("concat", kprim("k_concat", k_concat)); env.fregister("funcall", kprim("k_funcall", k_funcall)); env.fregister("cons", kprim("k_cons", k_cons)); env.fregister("car", kprim("k_car", k_car)); env.fregister("cdr", kprim("k_cdr", k_cdr)); env.fregister("equalp", kprim("k_equal_p", k_equal_p)); env.fregister( "string-to-number", kprim("k_string_to_number", k_string_to_number), ); env.fregister("substring", kprim("k_substring", k_substring)); env.register("t", ksym("t")); Ok(()) }
use std::ops::Deref; use env::Env; use eval::funcall; use expr::{Error as E, Expr, Kfloat, Kint, Result, Type}; use util::*; macro_rules! expr { ($e:expr) => { $e }; } macro_rules! def_arith_op { ($name: ident, $op: tt, $init: expr) => { pub fn $name(env: &mut Env, args: &Expr) -> Result<Expr> { let (init, args) = match args { Expr::Cons(hd, tl) => match tl.deref() { tl @ &Expr::Cons(_, _) => (hd.deref().clone(), tl), _ => ($init, args), }, args => ($init, args), }; f_foldl( env, &|_, x, y| match (x, y) { (&Expr::Int(x), &Expr::Int(y)) => Ok(kint(expr!(x $op y))), (&Expr::Float(x), &Expr::Int(y)) => Ok(kfloat(expr!(x $op (y as Kfloat)))), (&Expr::Int(x), &Expr::Float(y)) => Ok(kfloat(expr!((x as Kfloat) $op y))), (&Expr::Float(x), &Expr::Float(y)) => Ok(kfloat(expr!(x $op y))), (&Expr::Int(_), y) => Err(E::Type(Type::Int, y.clone())), (x, _) => Err(E::Type(Type::Int, x.clone())), }, &init, args, ) } }; } def_arith_op!(k_add, +, kint(0)); def_arith_op!(k_sub, -, kint(0)); def_arith_op!(k_mul, *, kint(1)); def_arith_op!(k_div, /, kint(1)); macro_rules! 
def_arith_cmp { ($name: ident, $op: tt) => { pub fn $name(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, (x, Int)(y, Int)); Ok(kbool(expr!(x $op y))) } }; } def_arith_cmp!(k_gt, >); def_arith_cmp!(k_ge, >=); def_arith_cmp!(k_lt, <); def_arith_cmp!(k_le, <=); def_arith_cmp!(k_eq, ==); def_arith_cmp!(k_neq, !=); pub fn k_concat(env: &mut Env, args: &Expr) -> Result<Expr> { let res = f_foldl( env, &|_, acc, x| match (acc, x) { (&Expr::Str(ref acc), &Expr::Str(ref x)) => Ok(kstr(format!("{}{}", acc, x))), (_, y) => Err(E::Type(Type::Str, y.clone())), }, &kstr(""), &args, ); Ok(res?.clone()) } pub fn k_funcall(env: &mut Env, args: &Expr) -> Result<Expr> { match args { &Expr::Cons(ref f, ref args) => match f.deref() { &Expr::Proc(ref f) => funcall(env, f, args.deref()), f => Err(E::NotFunction(f.clone())), }, args => Err(E::Form(args.clone())), } } pub fn k_cons(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, (car, Any)(cdr, Any)); Ok(kcons(car.clone(), cdr.clone())) } pub fn k_car(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, ((car, _), Cons)); Ok(car.clone()) } pub fn k_cdr(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, ((_, cdr), Cons)); Ok(cdr.clone()) } pub fn k_equal_p(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, (x, Any)(y, Any)); if x == y { Ok(ksym("t")) } else { Ok(knil()) } } pub fn k_string_to_number(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, (s, Str)); match s.parse() { Ok(i) => Ok(Expr::Int(i)), Err(_) => Err(E::InvalidArgument(args.clone())), } } pub fn k_substring(_: &mut Env, args: &Expr) -> Result<Expr> { get_args!(args, (s, Str) & optional(start, Int)(end, Int)); let len = s.len(); let ilen = len as Kint; let start = start.unwrap_or(0); let end = end.unwrap_or(ilen); if 0 <= start && start <= end && end < ilen { let start = start as usize; let end = end as usize; Ok(kstr((&s[start..end]).to_owned())) } else { Err(E::InvalidArgument(args.clone())) } } pub fn init(env: 
&mut Env) -> Result<()> { env.fregister("+", kprim("k_add", k_add)); env.fregister("-", kprim("k_sub", k_sub)); env.fregister("/", kprim("k_div", k_div)); env.fregister("*", kprim("k_mul", k_mul)); env.fregister(">", kprim("k_gt", k_gt)); env.fregister(">=", kprim("k_ge", k_ge)); env.fregister("<", kprim("k_lt", k_lt)); env.fregister("<=", kprim("k_le", k_le)); env.fregister("=", kprim("k_eq", k_eq)); env.fregister("/=", kprim("k_neq", k_neq)); env.fregister("concat", kprim("k_concat", k_concat)); en
v.fregister("funcall", kprim("k_funcall", k_funcall)); env.fregister("cons", kprim("k_cons", k_cons)); env.fregister("car", kprim("k_car", k_car)); env.fregister("cdr", kprim("k_cdr", k_cdr)); env.fregister("equalp", kprim("k_equal_p", k_equal_p)); env.fregister( "string-to-number", kprim("k_string_to_number", k_string_to_number), ); env.fregister("substring", kprim("k_substring", k_substring)); env.register("t", ksym("t")); Ok(()) }
function_block-function_prefixed
[ { "content": "pub fn f_foldl<F>(env: &mut Env, f: &F, init: &Expr, args: &Expr) -> Result<Expr>\n\nwhere\n\n F: Fn(&mut Env, &Expr, &Expr) -> Result<Expr>,\n\n{\n\n let mut res = init.clone();\n\n let mut head = args;\n\n let nil = &Expr::Nil;\n\n while head != nil {\n\n match head {\n\n ...
Rust
sleighcraft/build.rs
ioo0s/sleighcraft
ad8024574d83ee109c1172b021f4a7438b95b1a1
use filetime::FileTime; use std::env; use std::fs; use std::path::{Path, PathBuf}; const DECOMPILER_SOURCE_BASE_CXX: &[&str] = &[ "space.cc", "float.cc", "address.cc", "pcoderaw.cc", "translate.cc", "opcodes.cc", "globalcontext.cc", "capability.cc", "architecture.cc", "options.cc", "graph.cc", "cover.cc", "block.cc", "cast.cc", "typeop.cc", "database.cc", "cpool.cc", "comment.cc", "fspec.cc", "action.cc", "loadimage.cc", "varnode.cc", "op.cc", "type.cc", "variable.cc", "varmap.cc", "jumptable.cc", "emulate.cc", "emulateutil.cc", "flow.cc", "userop.cc", "funcdata.cc", "funcdata_block.cc", "funcdata_varnode.cc", "funcdata_op.cc", "pcodeinject.cc", "heritage.cc", "prefersplit.cc", "rangeutil.cc", "ruleaction.cc", "subflow.cc", "blockaction.cc", "merge.cc", "double.cc", "coreaction.cc", "condexe.cc", "override.cc", "dynamic.cc", "crc32.cc", "prettyprint.cc", "printlanguage.cc", "printc.cc", "printjava.cc", "memstate.cc", "opbehavior.cc", "paramid.cc", "transform.cc", "stringmanage.cc", "string_ghidra.cc", "ghidra_arch.cc", "typegrp_ghidra.cc", "cpool_ghidra.cc", "loadimage_ghidra.cc", "inject_ghidra.cc", "database_ghidra.cc", "inject_sleigh.cc", "ghidra_translate.cc", "ghidra_context.cc", "comment_ghidra.cc", "sleigh_arch.cc", "sleigh.cc", "filemanage.cc", "semantics.cc", "slghsymbol.cc", "context.cc", "sleighbase.cc", "slghpatexpress.cc", "slghpattern.cc", "pcodecompile.cc", ]; /* const DECOMPILER_SOURCE_BASE_YACC: [&'static str; 1] = [ "xml.y" ]; const SLEIGH_COMPILER_SOURCE_CXX: [&'static str; 1] = [ "slghparse.y" ]; const SLEIGH_COMPILER_SOURCE_FLEX: [&'static str; 1] = [ "slghscan.l" ]; */ const DECOMPILER_SOURCE_SLEIGH_YACC: &[&str] = &["pcodeparse.y", "grammar.y", "xml.y"]; const PROXIES: &[&str] = &[ "address_proxy.cc", "addrspace_proxy.cc", "cover_proxy.cc", "funcdata_proxy.cc", "loadimage_proxy.cc", "opbehavior_proxy.cc", "opcode_proxy.cc", "opcodes_proxy.cc", "typeop_proxy.cc", "block_proxy.cc", "varnode_proxy.cc", "varnodedata_proxy.cc", 
"variable_proxy.cc", ]; struct CompileOptions { sources: Vec<PathBuf>, objects: Vec<PathBuf>, } fn need_recompile(source: &Path) -> bool { let outdir = env::var("OUT_DIR").unwrap(); let path = Path::new(&outdir).join(source); let mut path = path; path.set_extension("o"); let metadata = match fs::metadata(path) { Ok(m) => m, Err(_) => return true, }; let object_mtime = FileTime::from_last_modification_time(&metadata); let metadata = fs::metadata(source).unwrap_or_else(|_| panic!("source code {:?} not found", source)); let source_mtime = FileTime::from_last_modification_time(&metadata); source_mtime > object_mtime } fn obj_path_from_src_path(src_path: &Path) -> PathBuf { let outdir = env::var("OUT_DIR").unwrap(); let mut path = Path::new(&outdir).join(src_path); path.set_extension("o"); path } fn prepare() -> CompileOptions { let mut objects = vec![]; let mut sources = vec![]; for src in DECOMPILER_SOURCE_BASE_CXX.iter() { let path = Path::new("src").join("cpp").join(src); if need_recompile(&path) { sources.push(path); } else { objects.push(obj_path_from_src_path(&path)); } } for src in DECOMPILER_SOURCE_SLEIGH_YACC.iter() { let name = src.split('.').next().unwrap(); let path = Path::new("src") .join("cpp") .join("gen") .join("bison") .join(&format!("{}.cpp", name)); if need_recompile(&path) { sources.push(path); } else { objects.push(obj_path_from_src_path(&path)); } } for src in PROXIES.iter() { let path = Path::new("src") .join("cpp") .join("bridge") .join("proxies") .join(src); if need_recompile(&path) { sources.push(path); } else { objects.push(obj_path_from_src_path(&path)); } } CompileOptions { sources, objects } } fn main() { let compile_opts = prepare(); let sleigh_src_file = Path::new("src").join("sleigh.rs"); let mut target = cxx_build::bridge(sleigh_src_file); for obj in &compile_opts.objects { target.object(obj); } let disasm_src_path= Path::new("src").join("cpp").join("bridge").join("disasm.cpp"); let src_cpp = Path::new("src").join("cpp"); let 
src_cpp_gen_bison = Path::new("src").join("cpp").join("gen").join("bison"); let src_cpp_gen_flex = Path::new("src").join("cpp").join("gen").join("flex"); #[cfg(target_os = "windows")] { target.define("_WINDOWS", "1"); } target .cpp(true) .warnings(false) .file(disasm_src_path) .files(compile_opts.sources) .flag_if_supported("-std=c++14") .include(src_cpp) .include(src_cpp_gen_bison) .include(src_cpp_gen_flex) .compile("sleigh"); }
use filetime::FileTime; use std::env; use std::fs; use std::path::{Path, PathBuf}; const DECOMPILER_SOURCE_BASE_CXX: &[&str] = &[ "space.cc", "float.cc", "address.cc", "pcoderaw.cc", "translate.cc", "opcodes.cc", "globalcontext.cc", "capability.cc", "architecture.cc", "options.cc", "graph.cc", "cover.cc", "block.cc", "cast.cc", "typeop.cc", "database.cc", "cpool.cc", "comment.cc", "fspec.cc", "action.cc", "loadimage.cc", "varnode.cc", "op.cc", "type.cc", "variable.cc", "varmap.cc", "jumptable.cc", "emulate.cc", "emulateutil.cc", "flow.cc", "userop.cc", "funcdata.cc", "funcdata_block.cc", "funcdata_varnode.cc", "funcdata_op.cc", "pcodeinject.cc", "heritage.cc", "prefersplit.cc", "rangeutil.cc", "ruleaction.cc", "subflow.cc", "blockaction.cc", "merge.cc", "double.cc", "coreaction.cc", "condexe.cc", "override.cc", "dynamic.cc", "crc32.cc", "prettyprint.cc", "printlanguage.cc", "printc.cc", "printjava.cc", "memstate.cc", "opbehavior.cc", "paramid.cc", "transform.cc", "stringmanage.cc", "string_ghidra.cc", "ghidra_arch.cc", "typegrp_ghidra.cc", "cpool_ghidra.cc", "loadimage_ghidra.cc", "inject_ghidra.cc", "database_ghidra.cc", "inject_sleigh.cc", "ghidra_translate.cc", "ghidra_context.cc", "comment_ghidra.cc", "sleigh_arch.cc", "sleigh.cc", "filemanage.cc", "semantics.cc", "slghsymbol.cc", "context.cc", "sleighbase.cc", "slghpatexpress.cc", "slghpattern.cc", "pcodecompile.cc", ]; /* const DECOMPILER_SOURCE_BASE_YACC: [&'static str; 1] = [ "xml.y" ]; const SLEIGH_COMPILER_SOURCE_CXX: [&'static str; 1] = [ "slghparse.y" ]; const SLEIGH_COMPILER_SOURCE_FLEX: [&'static str; 1] = [ "slghscan.l" ]; */ const DECOMPILER_SOURCE_SLEIGH_YACC: &[&str] = &["pcodeparse.y", "grammar.y", "xml.y"]; const PROXIES: &[&str] = &[ "address_proxy.cc", "addrspace_proxy.cc", "cover_proxy.cc", "funcdata_proxy.cc", "loadimage_proxy.cc", "opbehavior_proxy.cc", "opcode_proxy.cc", "opcodes_proxy.cc", "typeop_proxy.cc", "block_proxy.cc", "varnode_proxy.cc", "varnodedata_proxy.cc", 
"variable_proxy.cc", ]; struct CompileOptions { sources: Vec<PathBuf>, objects: Vec<PathBuf>, } fn need_recompile(source: &Path) -> bool { let outdir = env::var("OUT_DIR").unwrap(); let path = Path::new(&outdir).join(source); let mut path = path; path.set_extension("o"); let metadata = match fs::metadata(path) { Ok(m) => m, Err(_) => return true, }; let object_mtime = FileTime::from_last_modification_time(&metadata); let metadata = fs::metadata(source).unwrap_or_else(|_| panic!("source code {:?} not found", source)); let source_mtime = FileTime::from_last_modification_time(&metadata); source_mtime > object_mtime } fn obj_path_from_src_path(src_path: &Path) -> PathBuf { let outdir = env::var("OUT_DIR").unwrap(); let mut p
.warnings(false) .file(disasm_src_path) .files(compile_opts.sources) .flag_if_supported("-std=c++14") .include(src_cpp) .include(src_cpp_gen_bison) .include(src_cpp_gen_flex) .compile("sleigh"); }
ath = Path::new(&outdir).join(src_path); path.set_extension("o"); path } fn prepare() -> CompileOptions { let mut objects = vec![]; let mut sources = vec![]; for src in DECOMPILER_SOURCE_BASE_CXX.iter() { let path = Path::new("src").join("cpp").join(src); if need_recompile(&path) { sources.push(path); } else { objects.push(obj_path_from_src_path(&path)); } } for src in DECOMPILER_SOURCE_SLEIGH_YACC.iter() { let name = src.split('.').next().unwrap(); let path = Path::new("src") .join("cpp") .join("gen") .join("bison") .join(&format!("{}.cpp", name)); if need_recompile(&path) { sources.push(path); } else { objects.push(obj_path_from_src_path(&path)); } } for src in PROXIES.iter() { let path = Path::new("src") .join("cpp") .join("bridge") .join("proxies") .join(src); if need_recompile(&path) { sources.push(path); } else { objects.push(obj_path_from_src_path(&path)); } } CompileOptions { sources, objects } } fn main() { let compile_opts = prepare(); let sleigh_src_file = Path::new("src").join("sleigh.rs"); let mut target = cxx_build::bridge(sleigh_src_file); for obj in &compile_opts.objects { target.object(obj); } let disasm_src_path= Path::new("src").join("cpp").join("bridge").join("disasm.cpp"); let src_cpp = Path::new("src").join("cpp"); let src_cpp_gen_bison = Path::new("src").join("cpp").join("gen").join("bison"); let src_cpp_gen_flex = Path::new("src").join("cpp").join("gen").join("flex"); #[cfg(target_os = "windows")] { target.define("_WINDOWS", "1"); } target .cpp(true)
random
[ { "content": "fn load_preset() -> HashMap<&'static str, &'static str> {\n\n let mut map = HashMap::new();\n\n macro_rules! def_arch {\n\n ($name: expr) => {\n\n // presets are used across the whole lifetime, it's safe to ignore\n\n // the lifetime by leaking its names' memory\...
Rust
tests/integration_test.rs
danieldulaney/rusync
992bb083699da5cec2e547044e49675677058ab9
extern crate filetime; extern crate tempdir; extern crate rusync; use std::fs; use std::fs::File; use std::io; use std::os::unix; use std::os::unix::fs::PermissionsExt; use std::path::Path; use std::path::PathBuf; use std::process::Command; use filetime::FileTime; use tempdir::TempDir; use rusync::progress::ProgressInfo; fn assert_same_contents(a: &Path, b: &Path) { assert!(a.exists(), "{:?} does not exist", a); assert!(b.exists(), "{:?} does not exist", b); let status = Command::new("diff") .args(&[a, b]) .status() .expect("Failed to execute process"); assert!(status.success(), "{:?} and {:?} differ", a, b) } fn is_executable(path: &Path) -> bool { let metadata = std::fs::metadata(&path).expect(&format!("Could not get metadata of {:?}", path)); let permissions = metadata.permissions(); let mode = permissions.mode(); mode & 0o111 != 0 } fn assert_executable(path: &Path) { assert!( is_executable(&path), "{:?} does not appear to be executable", path ); } fn assert_not_executable(path: &Path) { assert!(!is_executable(&path), "{:?} appears to be executable", path); } fn setup_test(tmp_path: &Path) -> (PathBuf, PathBuf) { let src_path = tmp_path.join("src"); let dest_path = tmp_path.join("dest"); let status = Command::new("cp") .args(&["-R", "tests/data", &src_path.to_string_lossy()]) .status() .expect("Failed to execute process"); assert!(status.success()); (src_path, dest_path) } fn make_recent(path: &Path) -> io::Result<()> { let metadata = fs::metadata(&path)?; let atime = FileTime::from_last_access_time(&metadata); let mtime = FileTime::from_last_modification_time(&metadata); let mut epoch = mtime.seconds_relative_to_1970(); epoch += 1; let mtime = FileTime::from_seconds_since_1970(epoch, 0); filetime::set_file_times(&path, atime, mtime)?; Ok(()) } struct DummyProgressInfo {} impl ProgressInfo for DummyProgressInfo {} fn new_test_syncer(src: &Path, dest: &Path) -> rusync::Syncer { let dummy_progress_info = DummyProgressInfo {}; let options = 
rusync::SyncOptions::new(); rusync::Syncer::new(&src, &dest, options, Box::new(dummy_progress_info)) } #[test] fn fresh_copy() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let syncer = new_test_syncer(&src_path, &dest_path); let outcome = syncer.sync(); assert!( outcome.is_ok(), "sync::sync failed with: {}", outcome.err().expect("") ); let src_top = src_path.join("top.txt"); let dest_top = dest_path.join("top.txt"); assert_same_contents(&src_top, &dest_top); let link_dest = dest_path.join("a_dir/link_to_one"); let target = fs::read_link(link_dest).expect("failed to read metada"); assert_eq!(target.to_string_lossy(), "one.txt"); } #[test] fn skip_up_to_date_files() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let syncer = new_test_syncer(&src_path, &dest_path); let stats = syncer.sync().unwrap(); assert_eq!(stats.up_to_date, 0); let src_top_txt = src_path.join("top.txt"); make_recent(&src_top_txt).expect("could not make top.txt recent"); let syncer = new_test_syncer(&src_path, &dest_path); let stats = syncer.sync().expect(""); assert_eq!(stats.copied, 1); } #[test] fn preserve_permissions() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let syncer = new_test_syncer(&src_path, &dest_path); syncer.sync().unwrap(); let dest_exe = &dest_path.join("a_dir/foo.exe"); assert_executable(&dest_exe); } #[test] fn do_not_preserve_permissions() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let mut options = rusync::SyncOptions::new(); options.preserve_permissions = false; let syncer = rusync::Syncer::new( &src_path, &dest_path, options, Box::new(DummyProgressInfo {}), ); syncer.sync().expect(""); let dest_exe = 
&dest_path.join("a_dir/foo.exe"); assert_not_executable(&dest_exe); } #[test] fn rewrite_partially_written_files() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let src_top = src_path.join("top.txt"); let expected = fs::read_to_string(&src_top).expect(""); let syncer = new_test_syncer(&src_path, &dest_path); syncer.sync().expect(""); let dest_top = dest_path.join("top.txt"); fs::write(&dest_top, "this is").expect(""); let syncer = new_test_syncer(&src_path, &dest_path); syncer.sync().expect(""); let actual = fs::read_to_string(&dest_top).expect(""); assert_eq!(actual, expected); } #[test] fn dest_read_only() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); fs::create_dir_all(&dest_path).expect(""); let dest_top = dest_path.join("top.txt"); fs::write(&dest_top, "this is read only").expect(""); let top_file = File::open(dest_top).expect(""); let metadata = top_file.metadata().unwrap(); let mut permissions = metadata.permissions(); permissions.set_readonly(true); top_file.set_permissions(permissions).unwrap(); let src_top = src_path.join("top.txt"); make_recent(&src_top).expect("could not make top.txt recent"); let syncer = new_test_syncer(&src_path, &dest_path); let result = syncer.sync(); assert!(result.is_err()); } #[test] fn broken_link_in_src() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let src_broken_link = &src_path.join("broken"); unix::fs::symlink("no-such", &src_broken_link).expect(""); let syncer = new_test_syncer(&src_path, &dest_path); let result = syncer.sync(); let dest_broken_link = &dest_path.join("broken"); assert!(!dest_broken_link.exists()); assert_eq!( dest_broken_link.read_link().unwrap().to_string_lossy(), "no-such" ); assert!(result.is_ok()); }
extern crate filetime; extern crate tempdir; extern crate rusync; use std::fs; use std::fs::File; use std::io; use std::os::unix; use std::os::unix::fs::PermissionsExt; use std::path::Path; use std::path::PathBuf; use std::process::Command; use filetime::FileTime; use tempdir::TempDir; use rusync::progress::ProgressInfo; fn assert_same_contents(a: &Path, b: &Path) { assert!(a.exists(), "{:?} does not exist", a); assert!(b.exists(), "{:?} does not exist", b); let status = Command::new("diff") .args(&[a, b]) .status() .expect("Failed to execute process"); assert!(status.success(), "{:?} and {:?} differ", a, b) } fn is_executable(path: &Path) -> bool { let metadata = std::fs::metadata(&path).expect(&format!("Could not get metadata of {:?}", path)); let permissions = metadata.permissions(); let mode = permissions.mode(); mode & 0o111 != 0 } fn assert_executable(path: &Path) { assert!( is_executable(&path), "{:?} does not appear to be executable", path ); } fn assert_not_executable(path: &Path) { assert!(!is_executable(&path), "{:?} appears to be executable", path); } fn setup_test(tmp_path: &Path) -> (PathBuf, PathBuf) { let src_path = tmp_path.join("src"); let dest_path = tmp_path.join("dest"); let status = Command::new("cp") .args(&["-R", "tests/data", &src_path.to_string_lossy()]) .statu
create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let syncer = new_test_syncer(&src_path, &dest_path); let outcome = syncer.sync(); assert!( outcome.is_ok(), "sync::sync failed with: {}", outcome.err().expect("") ); let src_top = src_path.join("top.txt"); let dest_top = dest_path.join("top.txt"); assert_same_contents(&src_top, &dest_top); let link_dest = dest_path.join("a_dir/link_to_one"); let target = fs::read_link(link_dest).expect("failed to read metada"); assert_eq!(target.to_string_lossy(), "one.txt"); } #[test] fn skip_up_to_date_files() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let syncer = new_test_syncer(&src_path, &dest_path); let stats = syncer.sync().unwrap(); assert_eq!(stats.up_to_date, 0); let src_top_txt = src_path.join("top.txt"); make_recent(&src_top_txt).expect("could not make top.txt recent"); let syncer = new_test_syncer(&src_path, &dest_path); let stats = syncer.sync().expect(""); assert_eq!(stats.copied, 1); } #[test] fn preserve_permissions() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let syncer = new_test_syncer(&src_path, &dest_path); syncer.sync().unwrap(); let dest_exe = &dest_path.join("a_dir/foo.exe"); assert_executable(&dest_exe); } #[test] fn do_not_preserve_permissions() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let mut options = rusync::SyncOptions::new(); options.preserve_permissions = false; let syncer = rusync::Syncer::new( &src_path, &dest_path, options, Box::new(DummyProgressInfo {}), ); syncer.sync().expect(""); let dest_exe = &dest_path.join("a_dir/foo.exe"); assert_not_executable(&dest_exe); } #[test] fn rewrite_partially_written_files() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp 
dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let src_top = src_path.join("top.txt"); let expected = fs::read_to_string(&src_top).expect(""); let syncer = new_test_syncer(&src_path, &dest_path); syncer.sync().expect(""); let dest_top = dest_path.join("top.txt"); fs::write(&dest_top, "this is").expect(""); let syncer = new_test_syncer(&src_path, &dest_path); syncer.sync().expect(""); let actual = fs::read_to_string(&dest_top).expect(""); assert_eq!(actual, expected); } #[test] fn dest_read_only() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); fs::create_dir_all(&dest_path).expect(""); let dest_top = dest_path.join("top.txt"); fs::write(&dest_top, "this is read only").expect(""); let top_file = File::open(dest_top).expect(""); let metadata = top_file.metadata().unwrap(); let mut permissions = metadata.permissions(); permissions.set_readonly(true); top_file.set_permissions(permissions).unwrap(); let src_top = src_path.join("top.txt"); make_recent(&src_top).expect("could not make top.txt recent"); let syncer = new_test_syncer(&src_path, &dest_path); let result = syncer.sync(); assert!(result.is_err()); } #[test] fn broken_link_in_src() { let tmp_dir = TempDir::new("test-rusync").expect("failed to create temp dir"); let (src_path, dest_path) = setup_test(&tmp_dir.path()); let src_broken_link = &src_path.join("broken"); unix::fs::symlink("no-such", &src_broken_link).expect(""); let syncer = new_test_syncer(&src_path, &dest_path); let result = syncer.sync(); let dest_broken_link = &dest_path.join("broken"); assert!(!dest_broken_link.exists()); assert_eq!( dest_broken_link.read_link().unwrap().to_string_lossy(), "no-such" ); assert!(result.is_ok()); }
s() .expect("Failed to execute process"); assert!(status.success()); (src_path, dest_path) } fn make_recent(path: &Path) -> io::Result<()> { let metadata = fs::metadata(&path)?; let atime = FileTime::from_last_access_time(&metadata); let mtime = FileTime::from_last_modification_time(&metadata); let mut epoch = mtime.seconds_relative_to_1970(); epoch += 1; let mtime = FileTime::from_seconds_since_1970(epoch, 0); filetime::set_file_times(&path, atime, mtime)?; Ok(()) } struct DummyProgressInfo {} impl ProgressInfo for DummyProgressInfo {} fn new_test_syncer(src: &Path, dest: &Path) -> rusync::Syncer { let dummy_progress_info = DummyProgressInfo {}; let options = rusync::SyncOptions::new(); rusync::Syncer::new(&src, &dest, options, Box::new(dummy_progress_info)) } #[test] fn fresh_copy() { let tmp_dir = TempDir::new("test-rusync").expect("failed to
random
[ { "content": "pub fn get_rel_path(a: &Path, b: &Path) -> FSResult<PathBuf> {\n\n let rel_path = pathdiff::diff_paths(&a, &b);\n\n if rel_path.is_none() {\n\n let desc = format!(\n\n \"Could not get relative path from {} to {}\",\n\n &a.to_string_lossy(),\n\n &b.to_s...
Rust
src/caret/movement.rs
jamessral/sodium
f98a942348861398e74457ded693e57b86d31fd5
use edit::buffer::TextBuffer; use state::editor::Editor; impl Editor { #[inline] pub fn goto(&mut self, (x, y): (usize, usize)) { self.cursor_mut().y = y; self.cursor_mut().x = x; } #[inline] pub fn previous(&self, n: usize) -> Option<(usize, usize)> { self.before(n, self.pos()) } #[inline] pub fn next(&self, n: usize) -> Option<(usize, usize)> { self.after(n, self.pos()) } #[inline] pub fn after(&self, n: usize, (x, y): (usize, usize)) -> Option<(usize, usize)> { if x + n < self.buffers.current_buffer()[y].len() { Some((x + n, y)) } else { if y + 1 >= self.buffers.current_buffer().len() { None } else { let mut mv = n + x - self.buffers.current_buffer()[y].len(); let mut ry = y + 1; loop { if mv < self.buffers.current_buffer()[ry].len() { return Some((mv, ry)); } else { if ry + 1 < self.buffers.current_buffer().len() { mv -= self.buffers.current_buffer()[ry].len(); ry += 1; } else { return None; } } } } } } #[inline] pub fn before(&self, n: usize, (x, y): (usize, usize)) -> Option<(usize, usize)> { if x >= n { Some((x - n, y)) } else { if y == 0 { None } else { let mut mv = n - x - 1; let mut ry = y - 1; loop { if mv <= self.buffers.current_buffer()[ry].len() { return Some((self.buffers.current_buffer()[ry].len() - mv, ry)); } else { if ry > 0 && mv >= self.buffers.current_buffer()[ry].len() { mv -= self.buffers.current_buffer()[ry].len(); ry -= 1; } else if ry == 0 { return None; } } } } } } #[inline] pub fn right(&self, n: usize, tight: bool) -> (usize, usize) { self.bound_hor((self.x() + n, self.y()), tight) } #[inline] pub fn right_unbounded(&self, n: usize) -> (isize, isize) { ((self.x() + n) as isize, self.y() as isize) } #[inline] pub fn left(&self, n: usize) -> (usize, usize) { if n <= self.x() { (self.x() - n, self.y()) } else { (0, self.y()) } } #[inline] pub fn left_unbounded(&self, n: usize) -> (isize, isize) { (self.x() as isize - n as isize, self.y() as isize) } #[inline] pub fn up(&self, n: usize) -> (usize, usize) { if n <= self.y() { 
(self.cursor().x, self.y() - n) } else { (self.cursor().x, 0) } } #[inline] pub fn up_unbounded(&self, n: usize) -> (isize, isize) { (self.cursor().x as isize, self.y() as isize - n as isize) } #[inline] pub fn down(&self, n: usize) -> (usize, usize) { self.bound_ver((self.cursor().x, self.y() + n)) } #[inline] pub fn down_unbounded(&self, n: usize) -> (isize, isize) { (self.cursor().x as isize, self.y() as isize + n as isize) } pub fn next_ocur(&self, c: char, n: usize) -> Option<usize> { let mut dn = 0; let mut x = self.x(); for (i, ch) in self.buffers.current_buffer()[self.y()] .chars() .skip(x) .enumerate() { if ch == c { if i > 0 { dn += 1; if dn == n { x += i; return Some(x); } } } } None } pub fn previous_ocur(&self, c: char, n: usize) -> Option<usize> { let mut dn = 0; let mut x = self.x(); let y = self.y(); for (i, ch) in self.buffers.current_buffer()[y] .chars() .rev() .skip(self.buffers.current_buffer()[y].len() - x) .enumerate() { if ch == c { dn += 1; if dn == n { x -= i + 1; return Some(x); } } } None } pub fn _next_word_forward(&self, n: usize) -> Option<usize> { let mut dn = 0; let mut x = self.x(); for (i, ch) in self.buffers.current_buffer()[self.y()] .chars() .skip(x) .enumerate() { if ch.is_whitespace() { dn += 1; if dn == n { x += i + 1; return Some(x); } } } None } }
use edit::buffer::TextBuffer; use state::editor::Editor; impl Editor { #[inline] pub fn goto(&mut self, (x, y): (usize, usize)) { self.cursor_mut().y = y; self.cursor_mut().x = x; } #[inline] pub fn previous(&self, n: usize) -> Option<(usize, usize)> { self.before(n, self.pos()) } #[inline] pub fn next(&self, n: usize) -> Option<(usize, usize)> { self.after(n, self.pos()) } #[inline] pub fn after(&self, n: usize, (x, y): (usize, usize)) -> Option<(usize, usize)> { if x + n < self.buffers.current_buffer()[y].len() { Some((x + n, y)) } else { if y + 1 >= self.buffers.current_buffer().len() { None } else { let mut mv = n + x - self.buffers.current_buffer()[y].len(); let mut ry = y + 1; loop { if mv < self.buffers.current_buffer()[ry].len() { return Some((mv, ry)); } else { if ry + 1 < self.buffers.current_buffer().len() {
#[inline] pub fn before(&self, n: usize, (x, y): (usize, usize)) -> Option<(usize, usize)> { if x >= n { Some((x - n, y)) } else { if y == 0 { None } else { let mut mv = n - x - 1; let mut ry = y - 1; loop { if mv <= self.buffers.current_buffer()[ry].len() { return Some((self.buffers.current_buffer()[ry].len() - mv, ry)); } else { if ry > 0 && mv >= self.buffers.current_buffer()[ry].len() { mv -= self.buffers.current_buffer()[ry].len(); ry -= 1; } else if ry == 0 { return None; } } } } } } #[inline] pub fn right(&self, n: usize, tight: bool) -> (usize, usize) { self.bound_hor((self.x() + n, self.y()), tight) } #[inline] pub fn right_unbounded(&self, n: usize) -> (isize, isize) { ((self.x() + n) as isize, self.y() as isize) } #[inline] pub fn left(&self, n: usize) -> (usize, usize) { if n <= self.x() { (self.x() - n, self.y()) } else { (0, self.y()) } } #[inline] pub fn left_unbounded(&self, n: usize) -> (isize, isize) { (self.x() as isize - n as isize, self.y() as isize) } #[inline] pub fn up(&self, n: usize) -> (usize, usize) { if n <= self.y() { (self.cursor().x, self.y() - n) } else { (self.cursor().x, 0) } } #[inline] pub fn up_unbounded(&self, n: usize) -> (isize, isize) { (self.cursor().x as isize, self.y() as isize - n as isize) } #[inline] pub fn down(&self, n: usize) -> (usize, usize) { self.bound_ver((self.cursor().x, self.y() + n)) } #[inline] pub fn down_unbounded(&self, n: usize) -> (isize, isize) { (self.cursor().x as isize, self.y() as isize + n as isize) } pub fn next_ocur(&self, c: char, n: usize) -> Option<usize> { let mut dn = 0; let mut x = self.x(); for (i, ch) in self.buffers.current_buffer()[self.y()] .chars() .skip(x) .enumerate() { if ch == c { if i > 0 { dn += 1; if dn == n { x += i; return Some(x); } } } } None } pub fn previous_ocur(&self, c: char, n: usize) -> Option<usize> { let mut dn = 0; let mut x = self.x(); let y = self.y(); for (i, ch) in self.buffers.current_buffer()[y] .chars() .rev() 
.skip(self.buffers.current_buffer()[y].len() - x) .enumerate() { if ch == c { dn += 1; if dn == n { x -= i + 1; return Some(x); } } } None } pub fn _next_word_forward(&self, n: usize) -> Option<usize> { let mut dn = 0; let mut x = self.x(); for (i, ch) in self.buffers.current_buffer()[self.y()] .chars() .skip(x) .enumerate() { if ch.is_whitespace() { dn += 1; if dn == n { x += i + 1; return Some(x); } } } None } }
mv -= self.buffers.current_buffer()[ry].len(); ry += 1; } else { return None; } } } } } }
function_block-function_prefix_line
[ { "content": "/// Convert a usize tuple to isize\n\npub fn to_signed_pos((x, y): (usize, usize)) -> (isize, isize) {\n\n (x as isize, y as isize)\n\n}\n\n\n\nimpl Editor {\n\n /// Get the position of the current cursor, bounded\n\n #[inline]\n\n pub fn pos(&self) -> (usize, usize) {\n\n let c...
Rust
serde-generate/tests/ocaml_runtime.rs
zefchain/serde-reflection
3bc9fa7422a2e725960ae8a1166f6929961f6128
use serde_generate::{ ocaml, test_utils, test_utils::{Choice, Runtime, Test}, CodeGeneratorConfig, SourceInstaller, }; use std::{fs::File, io::Write, process::Command}; use tempfile::tempdir; fn quote_bytes(bytes: &[u8]) -> String { format!( "\"{}\"", bytes .iter() .map(|x| format!("\\{:03}", x)) .collect::<Vec<_>>() .join("") ) } #[test] fn test_ocaml_bcs_runtime_on_simple_data() { test_ocaml_runtime_on_simple_data(Runtime::Bcs); } #[test] fn test_ocaml_bincode_runtime_on_simple_data() { test_ocaml_runtime_on_simple_data(Runtime::Bincode); } fn test_ocaml_runtime_on_simple_data(runtime: Runtime) { let registry = test_utils::get_simple_registry().unwrap(); let dir0 = tempdir().unwrap(); let dir = dir0.path(); let installer = ocaml::Installer::new(dir.to_path_buf()); let runtime_str = match runtime { Runtime::Bcs => { installer.install_bcs_runtime().unwrap(); "bcs" } Runtime::Bincode => { installer.install_bincode_runtime().unwrap(); "bincode" } }; let config = CodeGeneratorConfig::new("testing".to_string()).with_encodings(vec![runtime.into()]); let dir_path = dir.join(&config.module_name()); std::fs::create_dir_all(&dir_path).unwrap(); let dune_project_source_path = dir.join("dune-project"); let mut dune_project_file = std::fs::File::create(dune_project_source_path).unwrap(); writeln!(dune_project_file, "(lang dune 3.0)").unwrap(); let dune_source_path = dir_path.join("dune"); let mut dune_file = std::fs::File::create(dune_source_path).unwrap(); writeln!( dune_file, r#" (env (_ (flags (:standard -w -30-42)))) (library (name testing) (modules testing) (preprocess (pps ppx)) (libraries {}_runtime)) (executable (name main) (modules main) (libraries serde testing)) "#, runtime_str ) .unwrap(); let lib_path = dir_path.join("testing.ml"); let mut lib = File::create(&lib_path).unwrap(); let generator = ocaml::CodeGenerator::new(&config); generator.output(&mut lib, &registry).unwrap(); let exe_path = dir_path.join("main.ml"); let mut exe = File::create(&exe_path).unwrap(); 
let reference = runtime.serialize(&Test { a: vec![4, 6], b: (-3, 5), c: Choice::C { x: 7 }, }); let reference_bytes = quote_bytes(&reference); writeln!( exe, r#" open Serde open Stdint exception Unexpected_success let () = let input = Bytes.of_string {0} in let value = Deserialize.apply Testing.test_de input in let a = List.map Uint32.of_int [4; 6] in let b = -3L, Uint64.of_int 5 in let c = Testing.Choice_C {{ x = Uint8.of_int 7 }} in let value2 = {{Testing.a; b; c}} in assert (value = value2); let output = Serialize.apply Testing.test_ser value2 in assert (input = output); let input2 = Bytes.of_string ({0} ^ "\001") in try let _ = Deserialize.apply Testing.test_de input2 in raise Unexpected_success with | Unexpected_success -> assert false | _ -> () "#, reference_bytes ) .unwrap(); let status = Command::new("dune") .arg("exec") .arg("testing/main.exe") .arg("--root") .arg(dir) .status() .unwrap(); assert!(status.success()); } #[test] fn test_ocaml_bcs_runtime_on_supported_types() { test_ocaml_runtime_on_supported_types(Runtime::Bcs); } #[test] fn test_ocaml_bincode_runtime_on_supported_types() { test_ocaml_runtime_on_supported_types(Runtime::Bincode); } fn test_ocaml_runtime_on_supported_types(runtime: Runtime) { let registry = test_utils::get_registry().unwrap(); let dir0 = tempdir().unwrap(); let dir = dir0.path(); let installer = ocaml::Installer::new(dir.to_path_buf()); let runtime_str = match runtime { Runtime::Bcs => { installer.install_bcs_runtime().unwrap(); "bcs" } Runtime::Bincode => { installer.install_bincode_runtime().unwrap(); "bincode" } }; let config = CodeGeneratorConfig::new("testing".to_string()).with_encodings(vec![runtime.into()]); let dir_path = dir.join(&config.module_name()); std::fs::create_dir_all(&dir_path).unwrap(); let dune_project_source_path = dir.join("dune-project"); let mut dune_project_file = std::fs::File::create(dune_project_source_path).unwrap(); writeln!(dune_project_file, "(lang dune 3.0)").unwrap(); let dune_source_path = 
dir_path.join("dune"); let mut dune_file = std::fs::File::create(dune_source_path).unwrap(); writeln!( dune_file, r#" (env (_ (flags (:standard -w -30-42)))) (executable (name test) (modules test) (preprocess (pps ppx)) (libraries {}_runtime)) "#, runtime_str ) .unwrap(); let source_path = dir_path.join("test.ml"); println!("{:?}", source_path); let mut source = File::create(&source_path).unwrap(); let generator = ocaml::CodeGenerator::new(&config); generator.output(&mut source, &registry).unwrap(); let positive_encodings: Vec<_> = runtime .get_positive_samples_quick() .iter() .map(|bytes| quote_bytes(bytes)) .collect(); let negative_encodings: Vec<_> = runtime .get_negative_samples() .iter() .map(|bytes| quote_bytes(bytes)) .collect(); writeln!( source, r#" open Serde exception Unexpected_success let () = List.iter (fun s -> let b = Bytes.of_string s in let sd = Deserialize.apply serde_data_de b in let b2 = Serialize.apply serde_data_ser sd in assert (b = b2)) [{}]; List.iter (fun s -> let b = Bytes.of_string s in try let _ = Deserialize.apply serde_data_de b in raise Unexpected_success with | Unexpected_success -> assert false | _ -> ()) [{}] "#, positive_encodings.join("; "), negative_encodings.join("; ") ) .unwrap(); let status = Command::new("dune") .arg("exec") .arg("testing/test.exe") .arg("--root") .arg(dir) .status() .unwrap(); assert!(status.success()); }
use serde_generate::{ ocaml, test_utils, test_utils::{Choice, Runtime, Test}, CodeGeneratorConfig, SourceInstaller, }; use std::{fs::File, io::Write, process::Command}; use tempfile::tempdir; fn quote_bytes(bytes: &[u8]) -> String { format!( "\"{}\"", bytes .iter() .map(|x| format!("\\{:03}", x)) .collect::<Vec<_>>() .join("") ) } #[test] fn test_ocaml_bcs_runtime_on_simple_data() { test_ocaml_runtime_on_simple_data(Runtime::Bcs); } #[test] fn test_ocaml_bincode_runtime_on_simple_data() { test_ocaml_runtime_on_simple_data(Runtime::Bincode); } fn test_ocaml_runtime_on_simple_data(runtime: Runtime) { let registry = test_utils::get_simple_registry().unwrap(); let dir0 = tempdir().unwrap(); let dir = dir0.path(); let installer = ocaml::Installer::new(dir.to_path_buf()); let runtime_str =
; let config = CodeGeneratorConfig::new("testing".to_string()).with_encodings(vec![runtime.into()]); let dir_path = dir.join(&config.module_name()); std::fs::create_dir_all(&dir_path).unwrap(); let dune_project_source_path = dir.join("dune-project"); let mut dune_project_file = std::fs::File::create(dune_project_source_path).unwrap(); writeln!(dune_project_file, "(lang dune 3.0)").unwrap(); let dune_source_path = dir_path.join("dune"); let mut dune_file = std::fs::File::create(dune_source_path).unwrap(); writeln!( dune_file, r#" (env (_ (flags (:standard -w -30-42)))) (library (name testing) (modules testing) (preprocess (pps ppx)) (libraries {}_runtime)) (executable (name main) (modules main) (libraries serde testing)) "#, runtime_str ) .unwrap(); let lib_path = dir_path.join("testing.ml"); let mut lib = File::create(&lib_path).unwrap(); let generator = ocaml::CodeGenerator::new(&config); generator.output(&mut lib, &registry).unwrap(); let exe_path = dir_path.join("main.ml"); let mut exe = File::create(&exe_path).unwrap(); let reference = runtime.serialize(&Test { a: vec![4, 6], b: (-3, 5), c: Choice::C { x: 7 }, }); let reference_bytes = quote_bytes(&reference); writeln!( exe, r#" open Serde open Stdint exception Unexpected_success let () = let input = Bytes.of_string {0} in let value = Deserialize.apply Testing.test_de input in let a = List.map Uint32.of_int [4; 6] in let b = -3L, Uint64.of_int 5 in let c = Testing.Choice_C {{ x = Uint8.of_int 7 }} in let value2 = {{Testing.a; b; c}} in assert (value = value2); let output = Serialize.apply Testing.test_ser value2 in assert (input = output); let input2 = Bytes.of_string ({0} ^ "\001") in try let _ = Deserialize.apply Testing.test_de input2 in raise Unexpected_success with | Unexpected_success -> assert false | _ -> () "#, reference_bytes ) .unwrap(); let status = Command::new("dune") .arg("exec") .arg("testing/main.exe") .arg("--root") .arg(dir) .status() .unwrap(); assert!(status.success()); } #[test] fn 
test_ocaml_bcs_runtime_on_supported_types() { test_ocaml_runtime_on_supported_types(Runtime::Bcs); } #[test] fn test_ocaml_bincode_runtime_on_supported_types() { test_ocaml_runtime_on_supported_types(Runtime::Bincode); } fn test_ocaml_runtime_on_supported_types(runtime: Runtime) { let registry = test_utils::get_registry().unwrap(); let dir0 = tempdir().unwrap(); let dir = dir0.path(); let installer = ocaml::Installer::new(dir.to_path_buf()); let runtime_str = match runtime { Runtime::Bcs => { installer.install_bcs_runtime().unwrap(); "bcs" } Runtime::Bincode => { installer.install_bincode_runtime().unwrap(); "bincode" } }; let config = CodeGeneratorConfig::new("testing".to_string()).with_encodings(vec![runtime.into()]); let dir_path = dir.join(&config.module_name()); std::fs::create_dir_all(&dir_path).unwrap(); let dune_project_source_path = dir.join("dune-project"); let mut dune_project_file = std::fs::File::create(dune_project_source_path).unwrap(); writeln!(dune_project_file, "(lang dune 3.0)").unwrap(); let dune_source_path = dir_path.join("dune"); let mut dune_file = std::fs::File::create(dune_source_path).unwrap(); writeln!( dune_file, r#" (env (_ (flags (:standard -w -30-42)))) (executable (name test) (modules test) (preprocess (pps ppx)) (libraries {}_runtime)) "#, runtime_str ) .unwrap(); let source_path = dir_path.join("test.ml"); println!("{:?}", source_path); let mut source = File::create(&source_path).unwrap(); let generator = ocaml::CodeGenerator::new(&config); generator.output(&mut source, &registry).unwrap(); let positive_encodings: Vec<_> = runtime .get_positive_samples_quick() .iter() .map(|bytes| quote_bytes(bytes)) .collect(); let negative_encodings: Vec<_> = runtime .get_negative_samples() .iter() .map(|bytes| quote_bytes(bytes)) .collect(); writeln!( source, r#" open Serde exception Unexpected_success let () = List.iter (fun s -> let b = Bytes.of_string s in let sd = Deserialize.apply serde_data_de b in let b2 = Serialize.apply serde_data_ser 
sd in assert (b = b2)) [{}]; List.iter (fun s -> let b = Bytes.of_string s in try let _ = Deserialize.apply serde_data_de b in raise Unexpected_success with | Unexpected_success -> assert false | _ -> ()) [{}] "#, positive_encodings.join("; "), negative_encodings.join("; ") ) .unwrap(); let status = Command::new("dune") .arg("exec") .arg("testing/test.exe") .arg("--root") .arg(dir) .status() .unwrap(); assert!(status.success()); }
match runtime { Runtime::Bcs => { installer.install_bcs_runtime().unwrap(); "bcs" } Runtime::Bincode => { installer.install_bincode_runtime().unwrap(); "bincode" } }
if_condition
[ { "content": "fn quote_bytes(bytes: &[u8]) -> String {\n\n format!(\n\n \"{{{}}}\",\n\n bytes\n\n .iter()\n\n .map(|x| format!(\"{}\", *x as i8))\n\n .collect::<Vec<_>>()\n\n .join(\", \")\n\n )\n\n}\n\n\n", "file_path": "serde-generate/tests/j...
Rust
generate-assets/src/lib.rs
BlackPhlox/bevy-website
3a84400990d4d85e40303ba4bc3e1e85f63c991d
use cratesio_dbdump_csvtab::rusqlite::Connection; use cratesio_dbdump_lookup::{get_versions, CrateDependency, CrateLookup}; use rand::{thread_rng, Rng}; use serde::Deserialize; use std::{fs, io, path::PathBuf, str::FromStr}; #[derive(Deserialize, Debug, Clone)] #[serde(deny_unknown_fields)] pub struct Asset { pub name: String, pub link: String, pub description: String, pub order: Option<usize>, pub image: Option<String>, pub color: Option<String>, pub emoji: Option<String>, #[serde(skip)] pub original_path: Option<PathBuf>, #[serde(skip)] pub tags: Vec<String>, #[serde(skip)] pub dependencies: Vec<CrateDependency>, #[serde(skip)] pub downloads: u32, #[serde(skip)] pub repo_url: Option<String>, #[serde(skip)] pub homepage_url: Option<String>, #[serde(skip)] pub last_update: i64, #[serde(skip)] pub latest_version: String, #[serde(skip)] pub license: String, } #[derive(Debug, Clone)] pub struct Section { pub name: String, pub content: Vec<AssetNode>, pub template: Option<String>, pub header: Option<String>, pub order: Option<usize>, pub sort_order_reversed: bool, } #[derive(Debug, Clone)] pub enum AssetNode { Section(Section), Asset(Asset), } impl AssetNode { pub fn name(&self) -> String { match self { AssetNode::Section(content) => content.name.clone(), AssetNode::Asset(content) => content.name.clone(), } } pub fn order(&self) -> usize { match self { AssetNode::Section(content) => content.order.unwrap_or(99999), AssetNode::Asset(content) => content.order.unwrap_or(99999), } } } fn visit_dirs(dir: PathBuf, section: &mut Section, db: &Connection) -> io::Result<()> { if dir.is_dir() { for entry in fs::read_dir(dir)? 
{ let entry = entry?; let path = entry.path(); if path.file_name().unwrap() == ".git" || path.file_name().unwrap() == ".github" { continue; } if path.is_dir() { let folder = path.file_name().unwrap(); let (order, sort_order_reversed) = if path.join("_category.toml").exists() { let from_file: toml::Value = toml::de::from_str( &fs::read_to_string(path.join("_category.toml")).unwrap(), ) .unwrap(); ( from_file .get("order") .and_then(|v| v.as_integer()) .map(|v| v as usize), from_file .get("sort_order_reversed") .and_then(|v| v.as_bool()) .unwrap_or(false), ) } else { (None, false) }; let mut new_section = Section { name: folder.to_str().unwrap().to_string(), content: vec![], template: None, header: None, order, sort_order_reversed, }; visit_dirs(path.clone(), &mut new_section, db)?; section.content.push(AssetNode::Section(new_section)); } else { if path.file_name().unwrap() == "_category.toml" || path.extension().unwrap() != "toml" { continue; } let mut asset: Asset = toml::de::from_str(&fs::read_to_string(&path).unwrap()).unwrap(); asset.original_path = Some(path); populate_with_crate_io_data(db, &mut asset); section.content.push(AssetNode::Asset(asset)); } } } Ok(()) } fn populate_with_crate_io_data(db: &Connection, asset: &mut Asset) { if asset.image.is_none() && asset.emoji.is_none() { let emoji_code: u32 = thread_rng().gen_range(0x1F600..0x1F64F); let emoji = char::from_u32(emoji_code).unwrap_or('💔'); asset.emoji = Some(emoji.to_string()); } let co = db.get_crate(&asset.name); if let Ok(Some(c)) = co { let latest_version = &get_versions(db, asset.name.to_string(), true).unwrap()[0]; asset.latest_version = latest_version.1.clone(); let license = &latest_version.2; asset.license = license.to_string(); if asset.description.is_empty() { asset.description = c.description; } asset.homepage_url = c.homepage_url; let dt = chrono::NaiveDateTime::parse_from_str(c.last_update.as_str(), "%Y-%m-%d %H:%M:%S%.6f"); if let Ok(n_date_time) = dt { asset.last_update = 
n_date_time.format("%s").to_string().parse().unwrap(); } else { println!("{:?}", dt.unwrap_err()); } asset.downloads = c.downloads; asset.tags = c .keywords .into_iter() .filter(|s| !(s.eq("bevy") || s.eq("bevyengine") || s.eq("gamedev") || s.eq("game"))) .collect(); asset.repo_url = c.repo_url; let mut crate_dependencies = c.dependencies; crate_dependencies.dedup_by_key(|cd| format!("{}{}", cd.crate_id, cd.version)); asset.dependencies = crate_dependencies .into_iter() .map(|f| { let is_bevy = (f.crate_id.eq("bevy") || f.crate_id.eq("bevy_app")) && f.version.ends_with(".0"); let v = if is_bevy { f.version[..f.version.len() - 2].to_string() } else { f.version } .replace("^", ""); CrateDependency { crate_id: f.crate_id, version: v, kind: f.kind, } }) .collect() } } pub fn parse_assets(asset_dir: &str, db: &Connection) -> io::Result<Section> { let mut asset_root_section = Section { name: "Assets".to_string(), content: vec![], template: Some("assets.html".to_string()), header: Some("Assets".to_string()), order: None, sort_order_reversed: false, }; visit_dirs( PathBuf::from_str(asset_dir).unwrap(), &mut asset_root_section, db, )?; Ok(asset_root_section) }
use cratesio_dbdump_csvtab::rusqlite::Connection; use cratesio_dbdump_lookup::{get_versions, CrateDependency, CrateLookup}; use rand::{thread_rng, Rng}; use serde::Deserialize; use std::{fs, io, path::PathBuf, str::FromStr}; #[derive(Deserialize, Debug, Clone)] #[serde(deny_unknown_fields)] pub struct Asset { pub name: String, pub link: String, pub description: String, pub order: Option<usize>, pub image: Option<String>, pub color: Option<String>, pub emoji: Option<String>, #[serde(skip)] pub original_path: Option<PathBuf>, #[serde(skip)] pub tags: Vec<String>, #[serde(skip)] pub dependencies: Vec<CrateDependency>, #[serde(skip)] pub downloads: u32, #[serde(skip)] pub repo_url: Option<String>, #[serde(skip)] pub homepage_url: Option<String>, #[serde(skip)] pub last_update: i64, #[serde(skip)] pub latest_version: String, #[serde(skip)] pub license: String, } #[derive(Debug, Clone)] pub struct Section { pub name: String, pub content: Vec<AssetNode>, pub template: Option<String>, pub header: Option<String>, pub order: Option<usize>, pub sort_order_reversed: bool, } #[derive(Debug, Clone)] pub enum AssetNode { Section(Section), Asset(Asset), } impl AssetNode { pub fn name(&self) -> String { match self { AssetNode::Section(content) => content.name.clone(), AssetNode::Asset(content) => content.name.clone(), } } pub fn order(&self) -> usize { match self { AssetNode::Section(content) => content.order.unwrap_or(99999), AssetNode::Asset(content) => content.order.unwrap_or(99999), } } } fn visit_dirs(dir: PathBuf, section: &mut Section, db: &Connection) -> io::Result<()> { if dir.is_dir() { for entry in fs::read_dir(dir)? 
{ let entry = entry?; let path = entry.path(); if path.file_name().unwrap() == ".git" || path.file_name().unwrap() == ".github" { continue; } if path.is_dir() { let folder = path.file_name().unwrap(); let (order, sort_order_reversed) = if path.join("_category.toml").exists() { let from_file: toml::Value = toml::de::from_str( &fs::read_to_string(path.join("_category.toml")).unwrap(), ) .unwrap(); ( from_file .get("order") .and_then(|v| v.as_integer()) .map(|v| v as usize), from_file .get("sort_order_reversed") .and_then(|v| v.as_bool()) .unwrap_or(false), ) } else { (None, false) }; let mut new_section = Section { name: folder.to_str().unwrap().to_string(), content: vec![], template: None, header: None, order, sort_order_reversed, }; visit_dirs(path.clone(), &mut new_section, db)?; section.content.push(AssetNode::Section(new_section)); } else { if path.file_name().unwrap() == "_category.toml" || path.extension().unwrap() != "toml" {
fn populate_with_crate_io_data(db: &Connection, asset: &mut Asset) { if asset.image.is_none() && asset.emoji.is_none() { let emoji_code: u32 = thread_rng().gen_range(0x1F600..0x1F64F); let emoji = char::from_u32(emoji_code).unwrap_or('💔'); asset.emoji = Some(emoji.to_string()); } let co = db.get_crate(&asset.name); if let Ok(Some(c)) = co { let latest_version = &get_versions(db, asset.name.to_string(), true).unwrap()[0]; asset.latest_version = latest_version.1.clone(); let license = &latest_version.2; asset.license = license.to_string(); if asset.description.is_empty() { asset.description = c.description; } asset.homepage_url = c.homepage_url; let dt = chrono::NaiveDateTime::parse_from_str(c.last_update.as_str(), "%Y-%m-%d %H:%M:%S%.6f"); if let Ok(n_date_time) = dt { asset.last_update = n_date_time.format("%s").to_string().parse().unwrap(); } else { println!("{:?}", dt.unwrap_err()); } asset.downloads = c.downloads; asset.tags = c .keywords .into_iter() .filter(|s| !(s.eq("bevy") || s.eq("bevyengine") || s.eq("gamedev") || s.eq("game"))) .collect(); asset.repo_url = c.repo_url; let mut crate_dependencies = c.dependencies; crate_dependencies.dedup_by_key(|cd| format!("{}{}", cd.crate_id, cd.version)); asset.dependencies = crate_dependencies .into_iter() .map(|f| { let is_bevy = (f.crate_id.eq("bevy") || f.crate_id.eq("bevy_app")) && f.version.ends_with(".0"); let v = if is_bevy { f.version[..f.version.len() - 2].to_string() } else { f.version } .replace("^", ""); CrateDependency { crate_id: f.crate_id, version: v, kind: f.kind, } }) .collect() } } pub fn parse_assets(asset_dir: &str, db: &Connection) -> io::Result<Section> { let mut asset_root_section = Section { name: "Assets".to_string(), content: vec![], template: Some("assets.html".to_string()), header: Some("Assets".to_string()), order: None, sort_order_reversed: false, }; visit_dirs( PathBuf::from_str(asset_dir).unwrap(), &mut asset_root_section, db, )?; Ok(asset_root_section) }
continue; } let mut asset: Asset = toml::de::from_str(&fs::read_to_string(&path).unwrap()).unwrap(); asset.original_path = Some(path); populate_with_crate_io_data(db, &mut asset); section.content.push(AssetNode::Asset(asset)); } } } Ok(()) }
function_block-function_prefix_line
[ { "content": "fn visit_dirs(dir: PathBuf, section: &mut Section) -> io::Result<()> {\n\n if !dir.is_dir() {\n\n // Todo: after the 0.6 release, remove this if statement\n\n // For now we will allow this to be able to point to the `latest` branch (0.5)\n\n // which does not yet include er...
Rust
tensorflow-sys/build.rs
andrewcsmith/rust-1
18db708a1893ae96dad207392bd721dcd7bc83f3
extern crate curl; extern crate flate2; extern crate pkg_config; extern crate semver; extern crate tar; use std::error::Error; use std::fs::File; use std::io::BufWriter; use std::io::Write; use std::path::{Path, PathBuf}; use std::process; use std::process::Command; use std::{env, fs}; use curl::easy::Easy; use flate2::read::GzDecoder; use semver::Version; use tar::Archive; const FRAMEWORK_LIBRARY: &'static str = "tensorflow_framework"; const LIBRARY: &'static str = "tensorflow"; const REPOSITORY: &'static str = "https://github.com/tensorflow/tensorflow.git"; const FRAMEWORK_TARGET: &'static str = "tensorflow:libtensorflow_framework.so"; const TARGET: &'static str = "tensorflow:libtensorflow.so"; const VERSION: &'static str = "1.12.0"; const TAG: &'static str = "v1.12.0"; const MIN_BAZEL: &'static str = "0.5.4"; macro_rules! get(($name:expr) => (ok!(env::var($name)))); macro_rules! ok(($expression:expr) => ($expression.unwrap())); macro_rules! log { ($fmt:expr) => (println!(concat!("libtensorflow-sys/build.rs:{}: ", $fmt), line!())); ($fmt:expr, $($arg:tt)*) => (println!(concat!("libtensorflow-sys/build.rs:{}: ", $fmt), line!(), $($arg)*)); } macro_rules! 
log_var(($var:ident) => (log!(concat!(stringify!($var), " = {:?}"), $var))); fn main() { if check_windows_lib() { log!("Returning early because {} was already found", LIBRARY); return; } if pkg_config::find_library(LIBRARY).is_ok() { log!("Returning early because {} was already found", LIBRARY); return; } let force_src = match env::var("TF_RUST_BUILD_FROM_SRC") { Ok(s) => s == "true", Err(_) => false, }; if !force_src && env::consts::ARCH == "x86_64" && (env::consts::OS == "linux" || env::consts::OS == "macos") { install_prebuilt(); } else { build_from_src(); } } #[cfg(not(target_env = "msvc"))] fn check_windows_lib() -> bool { false } #[cfg(target_env = "msvc")] fn check_windows_lib() -> bool { let windows_lib: &str = &format!("{}.lib", LIBRARY); if let Ok(path) = env::var("PATH") { for p in path.split(";") { let path = Path::new(p).join(windows_lib); if path.exists() { println!("cargo:rustc-link-lib=dylib={}", LIBRARY); println!("cargo:rustc-link-search=native={}", p); return true } } } false } fn remove_suffix(value: &mut String, suffix: &str) { if value.ends_with(suffix) { let n = value.len(); value.truncate(n - suffix.len()); } } fn extract<P: AsRef<Path>, P2: AsRef<Path>>(archive_path: P, extract_to: P2) { let file = File::open(archive_path).unwrap(); let unzipped = GzDecoder::new(file); let mut a = Archive::new(unzipped); a.unpack(extract_to).unwrap(); } fn install_prebuilt() { let os = match env::consts::OS { "macos" => "darwin", x => x, }; let proc_type = if cfg!(feature = "tensorflow_gpu") {"gpu"} else {"cpu"}; let binary_url = format!( "https://storage.googleapis.com/tensorflow/libtensorflow/libtensorflow-{}-{}-{}-{}.tar.gz", proc_type, os, env::consts::ARCH, VERSION); log_var!(binary_url); let short_file_name = binary_url.split("/").last().unwrap(); let mut base_name = short_file_name.to_string(); remove_suffix(&mut base_name, ".tar.gz"); log_var!(base_name); let download_dir = match env::var("TF_RUST_DOWNLOAD_DIR") { Ok(s) => PathBuf::from(s), Err(_) 
=> PathBuf::from(&get!("CARGO_MANIFEST_DIR")).join("target"), }; if !download_dir.exists() { fs::create_dir(&download_dir).unwrap(); } let file_name = download_dir.join(short_file_name); log_var!(file_name); if !file_name.exists() { let f = File::create(&file_name).unwrap(); let mut writer = BufWriter::new(f); let mut easy = Easy::new(); easy.url(&binary_url).unwrap(); easy.write_function(move |data| { Ok(writer.write(data).unwrap()) }).unwrap(); easy.perform().unwrap(); let response_code = easy.response_code().unwrap(); if response_code != 200 { panic!("Unexpected response code {} for {}", response_code, binary_url); } } let unpacked_dir = download_dir.join(base_name); let lib_dir = unpacked_dir.join("lib"); let framework_library_file = format!("lib{}.so", FRAMEWORK_LIBRARY); let library_file = format!("lib{}.so", LIBRARY); let framework_library_full_path = lib_dir.join(&framework_library_file); let library_full_path = lib_dir.join(&library_file); if !framework_library_full_path.exists() || !library_full_path.exists() { extract(file_name, &unpacked_dir); } println!("cargo:rustc-link-lib=dylib={}", FRAMEWORK_LIBRARY); println!("cargo:rustc-link-lib=dylib={}", LIBRARY); let output = PathBuf::from(&get!("OUT_DIR")); let new_framework_library_full_path = output.join(&framework_library_file); if new_framework_library_full_path.exists() { log!("File {} already exists, deleting.", new_framework_library_full_path.display()); std::fs::remove_file(&new_framework_library_full_path).unwrap(); } let new_library_full_path = output.join(&library_file); if new_library_full_path.exists() { log!("File {} already exists, deleting.", new_library_full_path.display()); std::fs::remove_file(&new_library_full_path).unwrap(); } log!("Copying {} to {}...", library_full_path.display(), new_library_full_path.display()); std::fs::copy(&library_full_path, &new_library_full_path).unwrap(); log!("Copying {} to {}...", framework_library_full_path.display(), 
new_framework_library_full_path.display()); std::fs::copy(&framework_library_full_path, &new_framework_library_full_path).unwrap(); println!("cargo:rustc-link-search={}", output.display()); } fn build_from_src() { let output = PathBuf::from(&get!("OUT_DIR")); log_var!(output); let source = PathBuf::from(&get!("CARGO_MANIFEST_DIR")).join(format!("target/source-{}", TAG)); log_var!(source); let lib_dir = output.join(format!("lib-{}", TAG)); log_var!(lib_dir); if lib_dir.exists() { log!("Directory {:?} already exists", lib_dir); } else { log!("Creating directory {:?}", lib_dir); fs::create_dir(lib_dir.clone()).unwrap(); } let framework_library_path = lib_dir.join(format!("lib{}.so", FRAMEWORK_LIBRARY)); log_var!(framework_library_path); let library_path = lib_dir.join(format!("lib{}.so", LIBRARY)); log_var!(library_path); if library_path.exists() && framework_library_path.exists() { log!("{:?} and {:?} already exist, not building", library_path, framework_library_path); } else { if let Err(e) = check_bazel() { println!("cargo:error=Bazel must be installed at version {} or greater. 
(Error: {})", MIN_BAZEL, e); process::exit(1); } let framework_target_path = &FRAMEWORK_TARGET.replace(":", "/"); log_var!(framework_target_path); let target_path = &TARGET.replace(":", "/"); log_var!(target_path); if !Path::new(&source.join(".git")).exists() { run("git", |command| { command.arg("clone") .arg(format!("--branch={}", TAG)) .arg("--recursive") .arg(REPOSITORY) .arg(&source) }); } let configure_hint_file_pb = source.join(".rust-configured"); let configure_hint_file = Path::new(&configure_hint_file_pb); if !configure_hint_file.exists() { run("bash", |command| command.current_dir(&source) .env("TF_NEED_CUDA", if cfg!(feature = "tensorflow_gpu") {"1"} else {"0"}) .arg("-c") .arg("yes ''|./configure")); File::create(configure_hint_file).unwrap(); } let bazel_args_string = if let Ok(args) = env::var("TF_RUST_BAZEL_OPTS") { args } else { "".to_string() }; run("bazel", |command| { command.current_dir(&source) .arg("build") .arg(format!("--jobs={}", get!("NUM_JOBS"))) .arg("--compilation_mode=opt") .arg("--copt=-march=native") .args(bazel_args_string.split_whitespace()) .arg(TARGET) }); let framework_target_bazel_bin = source.join("bazel-bin").join(framework_target_path); log!("Copying {:?} to {:?}", framework_target_bazel_bin, framework_library_path); fs::copy(framework_target_bazel_bin, framework_library_path).unwrap(); let target_bazel_bin = source.join("bazel-bin").join(target_path); log!("Copying {:?} to {:?}", target_bazel_bin, library_path); fs::copy(target_bazel_bin, library_path).unwrap(); } println!("cargo:rustc-link-lib=dylib={}", FRAMEWORK_LIBRARY); println!("cargo:rustc-link-lib=dylib={}", LIBRARY); println!("cargo:rustc-link-search={}", lib_dir.display()); } fn run<F>(name: &str, mut configure: F) where F: FnMut(&mut Command) -> &mut Command { let mut command = Command::new(name); let configured = configure(&mut command); log!("Executing {:?}", configured); if !ok!(configured.status()).success() { panic!("failed to execute {:?}", configured); } 
log!("Command {:?} finished successfully", configured); } fn check_bazel() -> Result<(), Box<Error>> { let mut command = Command::new("bazel"); command.arg("version"); log!("Executing {:?}", command); let out = command.output()?; log!("Command {:?} finished successfully", command); let stdout = String::from_utf8(out.stdout)?; let mut found_version = false; for line in stdout.lines() { if line.starts_with("Build label:") { found_version = true; let mut version_str = line.split(":") .nth(1) .unwrap() .split(" ") .nth(1) .unwrap() .trim(); if version_str.ends_with('-') { version_str = &version_str[..version_str.len() - 1]; } let version = Version::parse(version_str)?; let want = Version::parse(MIN_BAZEL)?; if version < want { return Err(format!("Installed version {} is less than required version {}", version_str, MIN_BAZEL) .into()); } } } if !found_version { return Err("Did not find version number in `bazel version` output.".into()); } Ok(()) }
extern crate curl; extern crate flate2; extern crate pkg_config; extern crate semver; extern crate tar; use std::error::Error; use std::fs::File; use std::io::BufWriter; use std::io::Write; use std::path::{Path, PathBuf}; use std::process; use std::process::Command; use std::{env, fs}; use curl::easy::Easy; use flate2::read::GzDecoder; use semver::Version; use tar::Archive; const FRAMEWORK_LIBRARY: &'static str = "tensorflow_framework"; const LIBRARY: &'static str = "tensorflow"; const REPOSITORY: &'static str = "https://github.com/tensorflow/tensorflow.git"; const FRAMEWORK_TARGET: &'static str = "tensorflow:libtensorflow_framework.so"; const TARGET: &'static str = "tensorflow:libtensorflow.so"; const VERSION: &'static str = "1.12.0"; const TAG: &'static str = "v1.12.0"; const MIN_BAZEL: &'static str = "0.5.4"; macro_rules! get(($name:expr) => (ok!(env::var($name)))); macro_rules! ok(($expression:expr) => ($expression.unwrap())); macro_rules! log { ($fmt:expr) => (println!(concat!("libtensorflow-sys/build.rs:{}: ", $fmt), line!())); ($fmt:expr, $($arg:tt)*) => (println!(concat!("libtensorflow-sys/build.rs:{}: ", $fmt), line!(), $($arg)*)); } macro_rules! 
log_var(($var:ident) => (log!(concat!(stringify!($var), " = {:?}"), $var))); fn main() { if check_windows_lib() { log!("Returning early because {} was already found", LIBRARY); return; } if pkg_config::find_library(LIBRARY).is_ok() { log!("Returning early because {} was already found", LIBRARY); return; } let force_src = match env::var("TF_RUST_BUILD_FROM_SRC") { Ok(s) => s == "true", Err(_) => false, }; if !force_src && env::consts::ARCH == "x86_64" && (env::consts::OS == "linux" || env::consts::OS == "macos") { install_prebuilt(); } else { build_from_src(); } } #[cfg(not(target_env = "msvc"))] fn check_windows_lib() -> bool { false } #[cfg(target_env = "msvc")] fn check_windows_lib() -> bool { let windows_lib: &str = &format!("{}.lib", LIBRARY); if let Ok(path) = env::var("PATH") { for p in path.split(";") { let path = Path::new(p).join(windows_lib); if path.exists() { println!("cargo:rustc-link-lib=dylib={}", LIBRARY); println!("cargo:rustc-link-search=native={}", p); return true } } } false } fn remove_suffix(value: &mut String, suffix: &str) { if value.ends_with(suffix) { let n = value.len(); value.truncate(n - suffix.len()); } } fn extract<P: AsRef<Path>, P2: AsRef<Path>>(archive_path: P, extract_to: P2) { let file = File::open(archive_path).unwrap(); let unzipped = GzDecoder::new(file); let mut a = Archive::new(unzipped); a.unpack(extract_to).unwrap(); } fn install_prebuilt() { let os = match env::consts::OS { "macos" => "darwin", x => x, }; let proc_type = if cfg!(feature = "tensorflow_gpu") {"gpu"} else {"cpu"}; let binary_url = format!( "https://storage.googleapis.com/tensorflow/libtensorflow/libtensorflow-{}-{}-{}-{}.tar.gz", proc_type, os, env::consts::ARCH, VERSION); log_var!(binary_url); let short_file_name = binary_url.split("/").last().unwrap(); let mut base_name = short_file_name.to_string(); remove_suffix(&mut base_name, ".tar.gz"); log_var!(base_name); let download_dir = match env::var("TF_RUST_DOWNLOAD_DIR") { Ok(s) => PathBuf::from(s), Err(_) 
=> PathBuf::from(&get!("CARGO_MANIFEST_DIR")).join("target"), }; if !download_dir.exists() { fs::create_dir(&download_dir).unwrap(); } let file_name = download_dir.join(short_file_name); log_var!(file_name); if !file_name.exists() { let f = File::create(&file_name).unwrap(); let mut writer = BufWriter::new(f); let mut easy = Easy::new(); easy.url(&binary_url).unwrap(); easy.write_function(move |data| { Ok(writer.write(data).unwrap()) }).unwrap(); easy.perform().unwrap(); let response_code = easy.response_code().unwrap(); if response_code != 200 { panic!("Unexpected response code {} for {}", response_code, binary_url); } } let unpacked_dir = download_dir.join(base_name); let lib_dir = unpacked_dir.join("lib"); let framework_library_file = format!("lib{}.so", FRAMEWORK_LIBRARY); let library_file = format!("lib{}.so", LIBRARY); let framework_library_full_path = lib_dir.join(&framework_library_file); let library_full_path = lib_dir.join(&library_file); if !framework_library_full_path.exists() || !library_full_path.exists() { extract(file_name, &unpacked_dir); } println!("cargo:rustc-link-lib=dylib={}", FRAMEWORK_LIBRARY); println!("cargo:rustc-link-lib=dylib={}", LIBRARY); let output = PathBuf::from(&get!("OUT_DIR")); let new_framework_library_full_path = output.join(&framework_library_file); if new_framework_library_full_path.exists() { log!("File {} already exists, deleting.", new_framework_library_full_path.display()); std::fs::remove_file(&new_framework_library_full_path).unwrap(); } let new_library_full_path = output.join(&library_file); if new_library_full_path.exists() { log!("File {} already exists, deleting.", new_library_full_path.display()); std::fs::remove_file(&new_library_full_path).unwrap(); } log!("Copying {} to {}...", library_full_path.display(), new_library_full_path.display()); std::fs::copy(&library_full_path, &new_library_full_path).unwrap(); log!("Copying {} to {}...", framework_library_full_path.display(), 
new_framework_library_full_path.display()); std::fs::copy(&framework_library_full_path, &new_framework_library_full_path).unwrap(); println!("cargo:rustc-link-search={}", output.display()); } fn build_from_src() { let output = PathBuf::from(&get!("OUT_DIR")); log_var!(output); let source = PathBuf::from(&get!("CARGO_MANIFEST_DIR")).join(format!("target/source-{}", TAG)); log_var!(source); let lib_dir = output.join(format!("lib-{}", TAG)); log_var!(lib_dir); if lib_dir.exists() { log!("Directory {:?} already exists", lib_dir); } else { log!("Creating directory {:?}", lib_dir); fs::create_dir(lib_dir.clone()).unwrap(); } let framework_library_path = lib_dir.join(format!("lib{}.so", FRAMEWORK_LIBRARY)); log_var!(framework_library_path); let library_path = lib_dir.join(format!("lib{}.so", LIBRARY)); log_var!(library_path); if library_path.exists() && framework_library_path.exists() { log!("{:?} and {:?} already exist, not building", library_path, framework_library_path); } else { if let Err(e) = check_bazel() { println!("cargo:error=Bazel must be installed at version {} or greater. 
(Error: {})", MIN_BAZEL, e); process::exit(1); } let framework_target_path = &FRAMEWORK_TARGET.replace(":", "/"); log_var!(framework_target_path); let target_path = &TARGET.replace(":", "/"); log_var!(target_path); if !Path::new(&source.join(".git")).exists() { run("git", |command| { command.arg("clone") .arg(format!("--branch={}", TAG)) .arg("--recursive") .arg(REPOSITORY) .arg(&source) }); } let configure_hint_file_pb = source.join(".rust-configured"); let configure_hint_file = Path::new(&configure_hint_file_pb); if !configure_hint_file.exists() { run("bash", |command| command.current_dir(&source) .env("TF_NEED_CUDA", if cfg!(feature = "tensorflow_gpu") {"1"} else {"0"}) .arg("-c") .arg("yes ''|./configure")); File::create(configure_hint_file).unwrap(); } let bazel_args_string = if let Ok(args) = env::var("TF_RUST_BAZEL_OPTS") { args } else { "".to_string() }; run("bazel", |command| { command.current_dir(&source) .arg("build") .arg(format!("--jobs={}", get!("NUM_JOBS"))) .arg("--compilation_mode=opt") .arg("--copt=-march=native") .args(bazel_args_string.split_whitespace()) .arg(TARGET) }); let framework_target_bazel_bin = source.join("bazel-bin").join(framework_target_path); log!("Copying {:?} to {:?}", framework_target_bazel_bin, framework_library_path); fs::copy(framework_target_bazel_bin, framework_library_path).unwrap(); let target_bazel_bin = source.join("bazel-bin").join(target_path); log!("Copying {:?} to {:?}", target_bazel_bin, library_path); fs::copy(target_bazel_bin, library_path).unwrap(); } println!("cargo:rustc-link-lib=dylib={}", FRAMEWORK_LIBRARY); println!("cargo:rustc-link-lib=dylib={}", LIBRARY); println!("cargo:rustc-link-search={}", lib_dir.display()); } fn run<F>(name: &str, mut configure: F) where F: FnMut(&mut Command) -> &mut Command { let mut command = Command::new(name); let configured = configure(&mut command); log!("Executing {:?}", configured); if !ok!(configured.status()).success() { panic!("failed to execute {:?}", configured); } 
log!("Command {:?} finished successfully", configured); } fn check_bazel() -> Result<(), Box<Error>> { let mut command = Command::new("bazel"); command.arg("version"); log!("Executing {:?}", command); let out = command.output()?; log!("Command {:?} finished successfully", command); let stdout = String::from_utf8(out.stdout)?; let mut found_version = false; for line in stdout.lines() { if line.starts_with("Build label:") { found_version = true; let mut version_str = line.split(":") .nth(1) .unwrap() .split(" ") .nth(1) .unwrap() .trim(); if version_str.ends_with('-') { version_str = &version_str[..version_str.len() - 1]; } let version = Version::parse(version_str)?; let want = Version::parse(MIN_BAZEL)?; if vers
ion < want { return Err(format!("Installed version {} is less than required version {}", version_str, MIN_BAZEL) .into()); } } } if !found_version { return Err("Did not find version number in `bazel version` output.".into()); } Ok(()) }
function_block-function_prefixed
[ { "content": "fn log_env_var(log: &mut Write, var: &str) -> Result<(), io::Error> {\n\n match env::var(var) {\n\n Ok(s) => writeln!(log, \"{}={}\", var, s),\n\n Err(env::VarError::NotPresent) => writeln!(log, \"{} is not present\", var),\n\n Err(env::VarError::NotUnicode(_)) => writeln!(...
Rust
src/mock_server.rs
pactflow/pact-protobuf-plugin
1d57edee9e5fe34b76d32ab4f72ea8935eab4bc2
use std::collections::HashMap; use std::future::Future; use std::net::SocketAddr; use std::pin::Pin; use std::sync::Mutex; use std::task::{Context, Poll}; use std::thread; use anyhow::anyhow; use bytes::Bytes; use http::Method; use hyper::{http, Request, Response}; use hyper::server::accept; use lazy_static::lazy_static; use maplit::hashmap; use pact_matching::BodyMatchResult; use pact_models::content_types::ContentType; use pact_models::json_utils::json_to_string; use pact_models::plugins::PluginData; use pact_models::prelude::v4::V4Pact; use pact_models::v4::sync_message::SynchronousMessage; use prost::Message; use prost_types::{FileDescriptorSet, MethodDescriptorProto}; use serde_json::Value; use tokio::net::TcpListener; use tokio::runtime::Handle; use tokio::sync::oneshot::{channel, Sender}; use tonic::body::{BoxBody, empty_body}; use tonic::metadata::MetadataMap; use tower::make::Shared; use tower::ServiceBuilder; use tower_http::compression::CompressionLayer; use tower_http::ServiceBuilderExt; use tower_service::Service; use tracing::{debug, error, Instrument, instrument, trace, trace_span}; use uuid::Uuid; use crate::dynamic_message::PactCodec; use crate::mock_service::MockService; use crate::tcp::TcpIncoming; use crate::utils::{find_message_type_by_name, last_name}; lazy_static! 
{ pub static ref MOCK_SERVER_STATE: Mutex<HashMap<String, (Sender<()>, Vec<(String, BodyMatchResult)>)>> = Mutex::new(hashmap!{}); } #[derive(Debug, Clone)] pub struct GrpcMockServer { pact: V4Pact, plugin_config: PluginData, descriptors: HashMap<String, FileDescriptorSet>, routes: HashMap<String, (FileDescriptorSet, MethodDescriptorProto, SynchronousMessage)>, pub server_key: String } impl GrpcMockServer { pub fn new(pact: V4Pact, plugin_config: &PluginData) -> Self { GrpcMockServer { pact, plugin_config: plugin_config.clone(), descriptors: Default::default(), routes: Default::default(), server_key: Uuid::new_v4().to_string() } } #[instrument] pub async fn start_server(mut self, host_interface: &str, port: u32, tls: bool) -> anyhow::Result<SocketAddr> { for (key, value) in &self.plugin_config.configuration { if let Value::Object(map) = value { if let Some(descriptor) = map.get("protoDescriptors") { let bytes = base64::decode(json_to_string(descriptor))?; let buffer = Bytes::from(bytes); let fds = FileDescriptorSet::decode(buffer)?; self.descriptors.insert(key.clone(), fds); } } } if self.descriptors.is_empty() { return Err(anyhow!("Pact file does not contain any Protobuf descriptors")); } self.routes = self.pact.interactions.iter() .filter_map(|i| i.as_v4_sync_message()) .filter_map(|i| i.plugin_config.get("protobuf").map(|p| (p.clone(), i.clone()))) .filter_map(|(c, i)| { if let Some(key) = c.get("descriptorKey") { if let Some(descriptors) = self.descriptors.get(json_to_string(key).as_str()) { if let Some(service) = c.get("service") { if let Some((service_name, method_name)) = json_to_string(service).split_once('/') { descriptors.file.iter().filter_map(|d| { d.service.iter().find(|s| s.name.clone().unwrap_or_default() == service_name) }).next() .and_then(|d| { d.method.iter() .find(|m| m.name.clone().unwrap_or_default() == method_name) .map(|m| (format!("{service_name}/{method_name}"), (descriptors.clone(), m.clone(), i.clone()))) }) } else { None } } else { None 
} } else { None } } else { None } }).collect(); let interface = if host_interface.is_empty() { "[::1]" } else { host_interface }; let addr: SocketAddr = format!("{interface}:{port}").parse()?; trace!("setting up mock server {addr}"); let (snd, rcr) = channel::<()>(); { let mut guard = MOCK_SERVER_STATE.lock().unwrap(); guard.insert(self.server_key.clone(), (snd, vec![])); } let listener = TcpListener::bind(addr).await?; let address = listener.local_addr()?; let handle = Handle::current(); let key = self.server_key.clone(); let key2 = self.server_key.clone(); let result = thread::spawn(move || { let incoming_stream = TcpIncoming { inner: listener }; let incoming = accept::from_stream(incoming_stream); trace!("setting up middleware"); let service = ServiceBuilder::new() .trace_for_grpc() .layer(CompressionLayer::new()) .service(self); trace!("setting up HTTP server"); let server = hyper::Server::builder(incoming) .http2_only(true) .serve(Shared::new(service)) .with_graceful_shutdown(async move { let _ = rcr.await; trace!("Received shutdown signal for server {}", key); }) .instrument(tracing::trace_span!("mock server", key = key2.as_str(), port = address.port())); trace!("spawning server onto runtime"); handle.spawn(server); trace!("spawning server onto runtime - done"); }).join(); if result.is_err() { Err(anyhow!("Failed to start mock server thread")) } else { trace!("Mock server setup OK"); Ok(address) } } } impl Service<hyper::Request<hyper::Body>> for GrpcMockServer { type Response = hyper::Response<BoxBody>; type Error = anyhow::Error; type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>; fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { Poll::Ready(Ok(())) } #[instrument] fn call(&mut self, req: Request<hyper::Body>) -> Self::Future { let routes = self.routes.clone(); let server_key = self.server_key.clone(); Box::pin(async move { trace!("Got request {req:?}"); let headers = req.headers(); let 
metadata = MetadataMap::from_headers(headers.clone()); let content_type = if let Some(content_type) = metadata.get("content-type") { ContentType::parse(content_type.to_str().unwrap_or_default()) .map_err(|err| anyhow!(err)) } else { Err(anyhow!("no content type was provided")) }; match content_type { Ok(content_type) => if content_type.base_type().to_string().starts_with("application/grpc") { let method = req.method(); if method == Method::POST { let request_path = req.uri().path(); debug!("gRPC request received {}", request_path); if let Some((service, method)) = request_path[1..].split_once('/') { let service_name = last_name(service); let lookup = format!("{service_name}/{method}"); if let Some((file, method_descriptor, message)) = routes.get(lookup.as_str()) { trace!(message = message.description.as_str(), "Found route for service call"); let input_message_name = method_descriptor.input_type.clone().unwrap_or_default(); let input_message = find_message_type_by_name(last_name(input_message_name.as_str()), file); let output_message_name = method_descriptor.output_type.clone().unwrap_or_default(); let output_message = find_message_type_by_name(last_name(output_message_name.as_str()), file); if let Ok(input_message) = input_message { if let Ok(output_message) = output_message { let codec = PactCodec::new(file, &input_message, &output_message, message); let mock_service = MockService::new(file, service_name, method_descriptor, &input_message, &output_message, message, server_key.as_str()); let mut grpc = tonic::server::Grpc::new(codec); Ok(grpc.unary(mock_service, req).await) } else { error!("Did not find the descriptor for the output message {}", output_message_name); Ok(failed_precondition()) } } else { error!("Did not find the descriptor for the input message {}", input_message_name); Ok(failed_precondition()) } } else { Ok(invalid_path()) } } else { Ok(invalid_path()) } } else { Ok(invalid_method()) } } else { Ok(invalid_media()) } Err(err) => { error!("Failed 
to parse the content type - {err}"); Ok(invalid_media()) } } }.instrument(trace_span!("mock_server_handler", key = self.server_key.as_str()))) } } fn invalid_media() -> Response<BoxBody> { http::Response::builder() .status(415) .body(empty_body()) .unwrap() } fn invalid_method() -> Response<BoxBody> { http::Response::builder() .status(405) .body(empty_body()) .unwrap() } fn invalid_path() -> Response<BoxBody> { http::Response::builder() .status(200) .header("grpc-status", "12") .header("content-type", "application/grpc") .body(empty_body()) .unwrap() } fn failed_precondition() -> Response<BoxBody> { http::Response::builder() .status(200) .header("grpc-status", "9") .header("content-type", "application/grpc") .body(empty_body()) .unwrap() }
use std::collections::HashMap; use std::future::Future; use std::net::SocketAddr; use std::pin::Pin; use std::sync::Mutex; use std::task::{Context, Poll}; use std::thread; use anyhow::anyhow; use bytes::Bytes; use http::Method; use hyper::{http, Request, Response}; use hyper::server::accept; use lazy_static::lazy_static; use maplit::hashmap; use pact_matching::BodyMatchResult; use pact_models::content_types::ContentType; use pact_models::json_utils::json_to_string; use pact_models::plugins::PluginData; use pact_models::prelude::v4::V4Pact; use pact_models::v4::sync_message::SynchronousMessage; use prost::Message; use prost_types::{FileDescriptorSet, MethodDescriptorProto}; use serde_json::Value; use tokio::net::TcpListener; use tokio::runtime::Handle; use tokio::sync::oneshot::{channel, Sender}; use tonic::body::{BoxBody, empty_body}; use tonic::metadata::MetadataMap; use tower::make::Shared; use tower::ServiceBuilder; use tower_http::compression::CompressionLayer; use tower_http::ServiceBuilderExt; use tower_service::Service; use tracing::{debug, error, Instrument, instrument, trace, trace_span}; use uuid::Uuid; use crate::dynamic_message::PactCodec; use crate::mock_service::MockService; use crate::tcp::TcpIncoming; use crate::utils::{find_message_type_by_name, last_name}; lazy_static! { pub static ref MOCK_SERVER_STATE: Mutex<HashMap<String, (Sender<()>, Vec<(String, BodyMatchResult)>)>> = Mutex::new(hashmap!{}); } #[derive(Debug, Clone)] pub struct GrpcMockServer { pact: V4Pact, plugin_config: PluginData, descriptors: HashMap<String, FileDescriptorSet>, routes: HashMap<String, (FileDescriptorSet, MethodDescriptorProto, SynchronousMessage)>, pub server_key: String } impl GrpcMockServer { pub f
} #[instrument] pub async fn start_server(mut self, host_interface: &str, port: u32, tls: bool) -> anyhow::Result<SocketAddr> { for (key, value) in &self.plugin_config.configuration { if let Value::Object(map) = value { if let Some(descriptor) = map.get("protoDescriptors") { let bytes = base64::decode(json_to_string(descriptor))?; let buffer = Bytes::from(bytes); let fds = FileDescriptorSet::decode(buffer)?; self.descriptors.insert(key.clone(), fds); } } } if self.descriptors.is_empty() { return Err(anyhow!("Pact file does not contain any Protobuf descriptors")); } self.routes = self.pact.interactions.iter() .filter_map(|i| i.as_v4_sync_message()) .filter_map(|i| i.plugin_config.get("protobuf").map(|p| (p.clone(), i.clone()))) .filter_map(|(c, i)| { if let Some(key) = c.get("descriptorKey") { if let Some(descriptors) = self.descriptors.get(json_to_string(key).as_str()) { if let Some(service) = c.get("service") { if let Some((service_name, method_name)) = json_to_string(service).split_once('/') { descriptors.file.iter().filter_map(|d| { d.service.iter().find(|s| s.name.clone().unwrap_or_default() == service_name) }).next() .and_then(|d| { d.method.iter() .find(|m| m.name.clone().unwrap_or_default() == method_name) .map(|m| (format!("{service_name}/{method_name}"), (descriptors.clone(), m.clone(), i.clone()))) }) } else { None } } else { None } } else { None } } else { None } }).collect(); let interface = if host_interface.is_empty() { "[::1]" } else { host_interface }; let addr: SocketAddr = format!("{interface}:{port}").parse()?; trace!("setting up mock server {addr}"); let (snd, rcr) = channel::<()>(); { let mut guard = MOCK_SERVER_STATE.lock().unwrap(); guard.insert(self.server_key.clone(), (snd, vec![])); } let listener = TcpListener::bind(addr).await?; let address = listener.local_addr()?; let handle = Handle::current(); let key = self.server_key.clone(); let key2 = self.server_key.clone(); let result = thread::spawn(move || { let incoming_stream = TcpIncoming 
{ inner: listener }; let incoming = accept::from_stream(incoming_stream); trace!("setting up middleware"); let service = ServiceBuilder::new() .trace_for_grpc() .layer(CompressionLayer::new()) .service(self); trace!("setting up HTTP server"); let server = hyper::Server::builder(incoming) .http2_only(true) .serve(Shared::new(service)) .with_graceful_shutdown(async move { let _ = rcr.await; trace!("Received shutdown signal for server {}", key); }) .instrument(tracing::trace_span!("mock server", key = key2.as_str(), port = address.port())); trace!("spawning server onto runtime"); handle.spawn(server); trace!("spawning server onto runtime - done"); }).join(); if result.is_err() { Err(anyhow!("Failed to start mock server thread")) } else { trace!("Mock server setup OK"); Ok(address) } } } impl Service<hyper::Request<hyper::Body>> for GrpcMockServer { type Response = hyper::Response<BoxBody>; type Error = anyhow::Error; type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>; fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { Poll::Ready(Ok(())) } #[instrument] fn call(&mut self, req: Request<hyper::Body>) -> Self::Future { let routes = self.routes.clone(); let server_key = self.server_key.clone(); Box::pin(async move { trace!("Got request {req:?}"); let headers = req.headers(); let metadata = MetadataMap::from_headers(headers.clone()); let content_type = if let Some(content_type) = metadata.get("content-type") { ContentType::parse(content_type.to_str().unwrap_or_default()) .map_err(|err| anyhow!(err)) } else { Err(anyhow!("no content type was provided")) }; match content_type { Ok(content_type) => if content_type.base_type().to_string().starts_with("application/grpc") { let method = req.method(); if method == Method::POST { let request_path = req.uri().path(); debug!("gRPC request received {}", request_path); if let Some((service, method)) = request_path[1..].split_once('/') { let service_name = 
last_name(service); let lookup = format!("{service_name}/{method}"); if let Some((file, method_descriptor, message)) = routes.get(lookup.as_str()) { trace!(message = message.description.as_str(), "Found route for service call"); let input_message_name = method_descriptor.input_type.clone().unwrap_or_default(); let input_message = find_message_type_by_name(last_name(input_message_name.as_str()), file); let output_message_name = method_descriptor.output_type.clone().unwrap_or_default(); let output_message = find_message_type_by_name(last_name(output_message_name.as_str()), file); if let Ok(input_message) = input_message { if let Ok(output_message) = output_message { let codec = PactCodec::new(file, &input_message, &output_message, message); let mock_service = MockService::new(file, service_name, method_descriptor, &input_message, &output_message, message, server_key.as_str()); let mut grpc = tonic::server::Grpc::new(codec); Ok(grpc.unary(mock_service, req).await) } else { error!("Did not find the descriptor for the output message {}", output_message_name); Ok(failed_precondition()) } } else { error!("Did not find the descriptor for the input message {}", input_message_name); Ok(failed_precondition()) } } else { Ok(invalid_path()) } } else { Ok(invalid_path()) } } else { Ok(invalid_method()) } } else { Ok(invalid_media()) } Err(err) => { error!("Failed to parse the content type - {err}"); Ok(invalid_media()) } } }.instrument(trace_span!("mock_server_handler", key = self.server_key.as_str()))) } } fn invalid_media() -> Response<BoxBody> { http::Response::builder() .status(415) .body(empty_body()) .unwrap() } fn invalid_method() -> Response<BoxBody> { http::Response::builder() .status(405) .body(empty_body()) .unwrap() } fn invalid_path() -> Response<BoxBody> { http::Response::builder() .status(200) .header("grpc-status", "12") .header("content-type", "application/grpc") .body(empty_body()) .unwrap() } fn failed_precondition() -> Response<BoxBody> { 
http::Response::builder() .status(200) .header("grpc-status", "9") .header("content-type", "application/grpc") .body(empty_body()) .unwrap() }
n new(pact: V4Pact, plugin_config: &PluginData) -> Self { GrpcMockServer { pact, plugin_config: plugin_config.clone(), descriptors: Default::default(), routes: Default::default(), server_key: Uuid::new_v4().to_string() }
function_block-random_span
[ { "content": "/// Get the name of the enum value\n\npub fn enum_name(enum_value: i32, descriptor: &EnumDescriptorProto) -> String {\n\n descriptor.value.iter().find(|v| v.number.unwrap_or(-1) == enum_value)\n\n .map(|v| v.name.clone().unwrap_or_else(|| format!(\"enum {}\", enum_value)))\n\n .unwrap_or_el...
Rust
src/infrastructure/arranging/sequencing.rs
SpeedyNinja/laminar
bf65f8ad2a8d168a265d0bc302b232912ce0bca4
use super::{Arranging, ArrangingSystem}; use crate::packet::SequenceNumber; use std::{collections::HashMap, marker::PhantomData}; pub struct SequencingSystem<T> { streams: HashMap<u8, SequencingStream<T>>, } impl<T> SequencingSystem<T> { pub fn new() -> SequencingSystem<T> { SequencingSystem { streams: HashMap::with_capacity(32), } } } impl<T> ArrangingSystem for SequencingSystem<T> { type Stream = SequencingStream<T>; fn stream_count(&self) -> usize { self.streams.len() } fn get_or_create_stream(&mut self, stream_id: u8) -> &mut Self::Stream { self.streams .entry(stream_id) .or_insert_with(|| SequencingStream::new(stream_id)) } } pub struct SequencingStream<T> { _stream_id: u8, top_index: usize, phantom: PhantomData<T>, unique_item_identifier: u16, } impl<T> SequencingStream<T> { pub fn new(stream_id: u8) -> SequencingStream<T> { SequencingStream { _stream_id: stream_id, top_index: 0, phantom: PhantomData, unique_item_identifier: 0, } } #[cfg(test)] pub fn stream_id(&self) -> u8 { self._stream_id } pub fn new_item_identifier(&mut self) -> SequenceNumber { self.unique_item_identifier = self.unique_item_identifier.wrapping_add(1); self.unique_item_identifier } } impl<T> Arranging for SequencingStream<T> { type ArrangingItem = T; fn arrange( &mut self, incoming_index: usize, item: Self::ArrangingItem, ) -> Option<Self::ArrangingItem> { if incoming_index > self.top_index { self.top_index = incoming_index; return Some(item); } None } } #[cfg(test)] mod tests { use super::{Arranging, ArrangingSystem, SequencingSystem}; #[derive(Debug, PartialEq, Clone)] struct Packet { pub sequence: usize, pub ordering_stream: u8, } impl Packet { fn new(sequence: usize, ordering_stream: u8) -> Packet { Packet { sequence, ordering_stream, } } } #[test] fn create_stream() { let mut system: SequencingSystem<Packet> = SequencingSystem::new(); let stream = system.get_or_create_stream(1); assert_eq!(stream.stream_id(), 1); } #[test] fn create_existing_stream() { let mut system: 
SequencingSystem<Packet> = SequencingSystem::new(); system.get_or_create_stream(1); let stream = system.get_or_create_stream(1); assert_eq!(stream.stream_id(), 1); } macro_rules! assert_sequence { ( [$( $x:expr ),*], [$( $y:expr),*], $stream_id:expr) => { { let mut before: Vec<usize> = Vec::new(); $( before.push($x); )* let mut after: Vec<usize> = Vec::new(); $( after.push($y); )* let mut packets = Vec::new(); for (_, v) in before.iter().enumerate() { packets.push(Packet::new(*v, $stream_id)); } let mut sequence_system = SequencingSystem::<Packet>::new(); let stream = sequence_system.get_or_create_stream(1); let mut sequenced_packets = Vec::new(); for packet in packets.into_iter() { match stream.arrange(packet.sequence, packet.clone()) { Some(packet) => { sequenced_packets.push(packet.sequence);}, None => {} }; } assert_eq!(after, sequenced_packets); } }; } #[test] fn can_sequence() { assert_sequence!([1, 3, 5, 4, 2], [1, 3, 5], 1); assert_sequence!([1, 5, 4, 3, 2], [1, 5], 1); assert_sequence!([5, 3, 4, 2, 1], [5], 1); assert_sequence!([4, 3, 2, 1, 5], [4, 5], 1); assert_sequence!([2, 1, 4, 3, 5], [2, 4, 5], 1); assert_sequence!([5, 2, 1, 4, 3], [5], 1); assert_sequence!([3, 2, 4, 1, 5], [3, 4, 5], 1); } #[test] fn sequence_on_multiple_streams() { assert_sequence!([1, 3, 5, 4, 2], [1, 3, 5], 1); assert_sequence!([1, 5, 4, 3, 2], [1, 5], 2); assert_sequence!([5, 3, 4, 2, 1], [5], 3); assert_sequence!([4, 3, 2, 1, 5], [4, 5], 4); assert_sequence!([2, 1, 4, 3, 5], [2, 4, 5], 5); assert_sequence!([5, 2, 1, 4, 3], [5], 6); assert_sequence!([3, 2, 4, 1, 5], [3, 4, 5], 7); } }
use super::{Arranging, ArrangingSystem}; use crate::packet::SequenceNumber; use std::{collections::HashMap, marker::PhantomData}; pub struct SequencingSystem<T> { streams: HashMap<u8, SequencingStream<T>>, } impl<T> SequencingSystem<T> { pub fn new() -> SequencingSystem<T> { SequencingSystem { streams: HashMap::with_capacity(32), } } } impl<T> ArrangingSystem for SequencingSystem<T> { type Stream = SequencingStream<T>; fn stream_count(&self) -> usize { self.streams.len() } fn get_or_create_stream(&mut self, stream_id: u8) -> &mut Self::Stream { self.streams .entry(stream_id) .or_insert_with(|| SequencingStream::new(stream_id)) } } pub struct SequencingStream<T> { _stream_id: u8, top_index: usize, phantom: PhantomData<T>, unique_item_identifier: u16, } impl<T> SequencingStream<T> { pub fn new(stream_id: u8) -> SequencingStream<T> { SequencingStream { _stream_id: stream_id, top_index: 0, phantom: PhantomData, unique_item_identifier: 0, } } #[cfg(test)] pub fn stream_id(&self) -> u8 { self._stream_id } pub fn new_item_identifier(&mut self) -> SequenceNumber { self.unique_item_identifier = self.unique_item_identifier.wrapping_add(1); self.unique_item_identifier } } impl<T> Arranging for SequencingStream<T> { type ArrangingItem = T; fn arrange( &mut self, incoming_index: usize, item: Self::ArrangingItem, ) -> Option<Self::ArrangingItem> { if incoming_index > self.top_index { self.top_index = incoming_index; return Some(item); } None } } #[cfg(test)] mod tests { use super::{Arranging, ArrangingSystem, SequencingSystem}; #[derive(Debug, PartialEq, Clone)] struct Packet { pub sequence: usize, pub ordering_stream: u8, } impl Packet { fn new(sequence: usize, ordering_stream: u8) -> Packet { Packet { sequence, ordering_stream, } } } #[test]
#[test] fn create_existing_stream() { let mut system: SequencingSystem<Packet> = SequencingSystem::new(); system.get_or_create_stream(1); let stream = system.get_or_create_stream(1); assert_eq!(stream.stream_id(), 1); } macro_rules! assert_sequence { ( [$( $x:expr ),*], [$( $y:expr),*], $stream_id:expr) => { { let mut before: Vec<usize> = Vec::new(); $( before.push($x); )* let mut after: Vec<usize> = Vec::new(); $( after.push($y); )* let mut packets = Vec::new(); for (_, v) in before.iter().enumerate() { packets.push(Packet::new(*v, $stream_id)); } let mut sequence_system = SequencingSystem::<Packet>::new(); let stream = sequence_system.get_or_create_stream(1); let mut sequenced_packets = Vec::new(); for packet in packets.into_iter() { match stream.arrange(packet.sequence, packet.clone()) { Some(packet) => { sequenced_packets.push(packet.sequence);}, None => {} }; } assert_eq!(after, sequenced_packets); } }; } #[test] fn can_sequence() { assert_sequence!([1, 3, 5, 4, 2], [1, 3, 5], 1); assert_sequence!([1, 5, 4, 3, 2], [1, 5], 1); assert_sequence!([5, 3, 4, 2, 1], [5], 1); assert_sequence!([4, 3, 2, 1, 5], [4, 5], 1); assert_sequence!([2, 1, 4, 3, 5], [2, 4, 5], 1); assert_sequence!([5, 2, 1, 4, 3], [5], 1); assert_sequence!([3, 2, 4, 1, 5], [3, 4, 5], 1); } #[test] fn sequence_on_multiple_streams() { assert_sequence!([1, 3, 5, 4, 2], [1, 3, 5], 1); assert_sequence!([1, 5, 4, 3, 2], [1, 5], 2); assert_sequence!([5, 3, 4, 2, 1], [5], 3); assert_sequence!([4, 3, 2, 1, 5], [4, 5], 4); assert_sequence!([2, 1, 4, 3, 5], [2, 4, 5], 5); assert_sequence!([5, 2, 1, 4, 3], [5], 6); assert_sequence!([3, 2, 4, 1, 5], [3, 4, 5], 7); } }
fn create_stream() { let mut system: SequencingSystem<Packet> = SequencingSystem::new(); let stream = system.get_or_create_stream(1); assert_eq!(stream.stream_id(), 1); }
function_block-function_prefix_line
[ { "content": "pub fn payload() -> Vec<u8> {\n\n vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n\n}\n", "file_path": "tests/unreliable_packets_test.rs", "rank": 0, "score": 186202.72506258375 }, { "content": "pub fn payload() -> Vec<u8> {\n\n vec![0; 4000]\n\n}\n", "file_path": "tests/fragment...
Rust
lib/src/kvstore.rs
crashlabs-io/moiradb
a94642077d4deea192558b7112f291db82b8f8d6
use super::*; use rocksdb::{WriteBatch, DB}; use std::fmt::Debug; use tokio::sync::mpsc; use tokio::sync::oneshot; use tokio::task::JoinHandle; type DBValue<V> = Arc<Option<V>>; #[derive(Debug)] pub enum KVCommand<K, V> where K: Send, V: Send, { Read(K, oneshot::Sender<DBValue<V>>), Write(Vec<(K, DBValue<V>)>), } #[derive(Clone)] pub struct KVAdapter<K, V> where K: Send, V: Send, { pub sender: mpsc::UnboundedSender<KVCommand<K, V>>, } impl<K, V> KVAdapter<K, V> where K: Debug + Send, V: Debug + Send, { pub async fn read(&mut self, key: K) -> DBValue<V> { let (send, response) = oneshot::channel(); if let Err(e) = self.sender.send(KVCommand::Read(key, send)) { panic!(format!("SendError: {:?}", e)); } response.await.unwrap() } pub fn read_to_fut(&mut self, key: K, fut: oneshot::Sender<DBValue<V>>) { if let Err(e) = self.sender.send(KVCommand::Read(key, fut)) { panic!(format!("SendError: {:?}", e)); } } pub fn write(&mut self, write_set: Vec<(K, DBValue<V>)>) { if let Err(e) = self.sender.send(KVCommand::Write(write_set)) { panic!(format!("SendError: {:?}", e)); } } } /* pub struct KVBackend<K,V> { pub kvstore : DB, pub cache : HashMap<K,V>, pub receiver : mpsc::Receiver<KVCommand<K,V>>, } */ pub fn init<K, V>(db: DB) -> (KVAdapter<K, V>, JoinHandle<()>) where K: 'static + Send + Serialize + Debug, V: 'static + Send + Sync + Serialize + DeserializeOwned + Debug, { let (tx, mut rx) = mpsc::unbounded_channel(); let handle = tokio::spawn(async move { while let Some(command) = rx.recv().await { match command { KVCommand::Read(key, response) => { let key_bytes = bincode::serialize(&key).unwrap(); let value = db.get_pinned(key_bytes).unwrap(); if let Some(value_bytes) = value { let parsed_value: V = bincode::deserialize(&value_bytes[..]).unwrap(); match response.send(Arc::new(Some(parsed_value))) { Err(e) => println!( "Error {:?} sending parsed response value for key: {:?}", e, key ), _ => (), }; } else { match response.send(Arc::new(None)) { Err(e) => { println!("Error {:?} 
sending empty response for key: {:?}", e, key) } _ => (), }; } } KVCommand::Write(mut write_set) => { let mut batch = WriteBatch::default(); while let Some((key, value)) = write_set.pop() { let key_bytes = bincode::serialize(&key).unwrap(); match &*value { None => batch.delete(key_bytes), Some(value) => { let value_bytes = bincode::serialize(&value).unwrap(); batch.put(key_bytes, value_bytes); } } } match db.write(batch) { Ok(_) => {} Err(e) => println!("Error writing to db: {:?}", e), }; } } } match db.flush() { Ok(_) => {} Err(e) => println!("Error flushing database: {:?}", e), }; }); let adapter: KVAdapter<K, V> = KVAdapter { sender: tx }; (adapter, handle) } #[cfg(test)] mod tests { use rocksdb::Options; use super::*; #[tokio::test(flavor = "multi_thread")] async fn kvstore_read_and_write() { let path = "/tmp/test_kvstore_read_and_write.rocksdb"; let _ = DB::destroy(&Options::default(), path); let store = DB::open_default(path).expect("database barfed on open"); let (mut kv_adapter, _) = init::<String, String>(store); let v = kv_adapter.read("A".to_string()).await; assert_eq!(None, *v); kv_adapter.write(vec![("A".to_string(), Arc::new(Some("VA".to_string())))]); let v = kv_adapter.read("A".to_string()).await; assert_eq!(Some("VA".to_string()), *v); kv_adapter.write(vec![("A".to_string(), Arc::new(None))]); let v = kv_adapter.read("A".to_string()).await; assert_eq!(None, *v); } }
use super::*; use rocksdb::{WriteBatch, DB}; use std::fmt::Debug; use tokio::sync::mpsc; use tokio::sync::oneshot; use tokio::task::JoinHandle; type DBValue<V> = Arc<Option<V>>; #[derive(Debug)] pub enum KVCommand<K, V> where K: Send, V: Send, { Read(K, oneshot::Sender<DBValue<V>>), Write(Vec<(K, DBValue<V>)>), } #[derive(Clone)] pub struct KVAdapter<K, V> where K: Send, V: Send, { pub sender: mpsc::UnboundedSender<KVCommand<K, V>>, } impl<K, V> KVAdapter<K, V> where K: Debug + Send, V: Debug + Send, { pub async fn read(&mut self, key: K) -> DBValue<V> { let (send, response) = oneshot::channel(); if let Err(e) = self.sender.send(KVCommand::Read(key, send)) { panic!(format!("SendError: {:?}", e)); } response.await.unwrap() } pub fn read_to_fut(&mut self, key: K, fut: oneshot::Sender<DBValue<V>>) { if let Err(e) = self.sender.send(KVCommand::Read(key, fut)) { panic!(format!("SendError: {:?}", e)); } } pub fn write(&mut self, write_set: Vec<(K, DBValue<V>)>) { if let Err(e) = self.sender.send(KVCommand::Write(write_set)) { panic!(format!("SendError: {:?}", e)); } } } /* pub struct KVBackend<K,V> { pub kvstore : DB, pub cache : HashMap<K,V>, pub receiver : mpsc::Receiver<KVCommand<K,V>>, } */ pub fn init<K, V>(db: DB) -> (KVAdapter<K, V>, JoinHandle<()>) where K: 'static + Send + Serialize + Debug, V: 'static + Send + Sync + Serialize + DeserializeOwned + Debug, { let (tx, mut rx) = mpsc::unbounded_channel(); let handle = tokio::spawn(async move { while let Some(command) = rx.recv().await { match command { KVCommand::Read(key, response) => { let key_bytes = bincode::serialize(&key).unwrap(); let value = db.get_pinned(key_bytes).unwrap(); if let Some(value_bytes) = value { let parsed_value: V = bincode::deserialize(&value_bytes[..]).unwrap(); match response.send(Arc::new(Some(parsed_value))) { Err(e) => println!( "Error {:?} sending parsed response value for key: {:?}", e, key ), _ => (), }; } else { match response.send(Arc::new(None)) { Err(e) => { println!("Error {:?} 
sending empty response for key: {:?}", e, key) } _ => (), }; } } KVCommand::Write(mut write_set) => { let mut batch = WriteBatch::default(); while let Some((key, value)) = write_set.pop() { let key_bytes = bincode::serialize(&key).unwrap(); match &*value { None => batch.delete(key_bytes), Some(value) => { let value_bytes = bincode::serialize(&value).unwrap(); batch.put(key_bytes, value_bytes); } } } match db.write(batch) { Ok(_) => {} Err(e) => println!("Error writing to db: {:?}", e), }; } } } match db.flush() { Ok(_) => {} Err(e) => println!("Error flushing database: {:?}", e), }; }); let adapter: KVAdapter<K, V> = KVAdapter { sender: tx }; (adapter, handle) } #[cfg(test)] mod tests { use rocksdb::Options; use super::*; #[tokio::test(flavor = "multi_thread")] async fn kvstore_read_and_write() { let path = "/tmp/test_kvstore_read_and_write.rocksdb"; let _ = DB::destroy(&Options::default(), path); let store = DB::open_default(path).expect("database barfed on open"); let (mut kv_adapter, _) = init::<String, String>(store); let v = kv_adapter.read("A".to_string()).await; assert_eq!(None, *v);
}
kv_adapter.write(vec![("A".to_string(), Arc::new(Some("VA".to_string())))]); let v = kv_adapter.read("A".to_string()).await; assert_eq!(Some("VA".to_string()), *v); kv_adapter.write(vec![("A".to_string(), Arc::new(None))]); let v = kv_adapter.read("A".to_string()).await; assert_eq!(None, *v); }
function_block-function_prefix_line
[ { "content": "pub trait MergeCommand<K, V>: Command<K, V>\n\nwhere\n\n K: Debug,\n\n V: Debug,\n\n Self: Debug,\n\n{\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub enum ExecState {\n\n Abort,\n\n Commit,\n\n Merge,\n\n NoWrite,\n\n Pending,\n\n Reschedule,\n\n}\n\n\n\n#[d...
Rust
common/functions/src/scalars/udfs/in_basic.rs
pymongo/databend
c349ea00478df90c6b9c6438bec9b3643b4072e4
use std::collections::HashSet; use std::fmt; use common_datavalues::columns::DataColumn; use common_datavalues::prelude::DataColumnsWithField; use common_datavalues::prelude::MutableArrayBuilder; use common_datavalues::prelude::MutableBooleanArrayBuilder; use common_datavalues::types::merge_types; use common_datavalues::DataType; use common_datavalues::DataTypeAndNullable; use common_datavalues::DataValue; use common_exception::ErrorCode; use common_exception::Result; use crate::scalars::function_factory::FunctionDescription; use crate::scalars::function_factory::FunctionFeatures; use crate::scalars::Function; #[derive(Clone)] pub struct InFunction<const NEGATED: bool>; impl<const NEGATED: bool> InFunction<NEGATED> { pub fn try_create(_display_name: &str) -> Result<Box<dyn Function>> { Ok(Box::new(InFunction::<NEGATED> {})) } pub fn desc() -> FunctionDescription { FunctionDescription::creator(Box::new(Self::try_create)).features( FunctionFeatures::default() .bool_function() .variadic_arguments(2, usize::MAX), ) } } macro_rules! 
basic_contains { ($INPUT_DT: expr, $INPUT_ARRAY: expr, $CHECK_ARRAY: expr, $NEGATED: expr, $BUILDER: expr, $CAST_TYPE: ident, bool) => { let mut vals_set = HashSet::new(); for array in $CHECK_ARRAY { let array = array.column().cast_with_type($INPUT_DT)?; match array { DataColumn::Constant(DataValue::$CAST_TYPE(Some(val)), _) => { vals_set.insert(val); } DataColumn::Constant(DataValue::$CAST_TYPE(None), _) => { continue; } _ => { return Err(ErrorCode::LogicalError("it's a bug")); } } } let arr = $INPUT_ARRAY.bool()?; for val in arr.into_no_null_iter() { let contains = vals_set.contains(&val); $BUILDER.push((contains && !NEGATED) || (!contains && NEGATED)); } }; ($INPUT_DT: expr, $INPUT_ARRAY: expr, $CHECK_ARRAY: expr, $NEGATED: expr, $BUILDER: expr, $CAST_TYPE: ident, $PRIMITIVE_TYPE: ident) => { let mut vals_set = HashSet::new(); for array in $CHECK_ARRAY { let array = array.column().cast_with_type($INPUT_DT)?; match array { DataColumn::Constant(DataValue::$CAST_TYPE(Some(val)), _) => { vals_set.insert(val); } DataColumn::Constant(DataValue::$CAST_TYPE(None), _) => { continue; } _ => { return Err(ErrorCode::LogicalError("it's a bug")); } } } let arr = $INPUT_ARRAY.$PRIMITIVE_TYPE()?; for val in arr.into_no_null_iter() { let contains = vals_set.contains(val); $BUILDER.push((contains && !NEGATED) || (!contains && NEGATED)); } }; } macro_rules! 
float_contains { ($INPUT_DT: expr, $INPUT_ARRAY: expr, $CHECK_ARRAY: expr, $NEGATED: expr, $BUILDER: expr, $CAST_TYPE: ident, $PRIMITIVE_TYPE: ident) => { let mut vals_set = Vec::new(); for array in $CHECK_ARRAY { let array = array.column().cast_with_type($INPUT_DT)?; match array { DataColumn::Constant(DataValue::$CAST_TYPE(Some(val)), _) => { vals_set.push(val); } DataColumn::Constant(DataValue::$CAST_TYPE(None), _) => { continue; } _ => { return Err(ErrorCode::LogicalError("it's a bug")); } } } let arr = $INPUT_ARRAY.$PRIMITIVE_TYPE()?; for val in arr.into_no_null_iter() { let contains = vals_set.contains(val); $BUILDER.push((contains && !NEGATED) || (!contains && NEGATED)); } }; } impl<const NEGATED: bool> Function for InFunction<NEGATED> { fn name(&self) -> &str { "InFunction" } fn return_type(&self, args: &[DataTypeAndNullable]) -> Result<DataTypeAndNullable> { let input_dt = args[0].data_type(); if input_dt == &DataType::Null { return Ok(DataTypeAndNullable::create(input_dt, false)); } let dt = DataType::Boolean; Ok(DataTypeAndNullable::create(&dt, false)) } fn eval(&self, columns: &DataColumnsWithField, _input_rows: usize) -> Result<DataColumn> { let input_column = columns[0].column(); let input_array = match input_column { DataColumn::Array(array) => array.to_owned(), DataColumn::Constant(scalar, _) => scalar.to_array()?, }; let input_dt = input_array.data_type(); if input_dt == &DataType::Null { let mut array = MutableBooleanArrayBuilder::<false>::with_capacity(input_array.len()); for _ in 0..input_array.len() { array.push_null(); } return Ok(DataColumn::Array(array.as_series())); } let mut builder = MutableBooleanArrayBuilder::<false>::with_capacity(input_column.len()); let check_arrays = &columns[1..]; let mut least_super_dt = input_dt.clone(); for array in check_arrays { least_super_dt = merge_types(&least_super_dt, array.data_type())?; } let input_array = input_array.cast_with_type(&least_super_dt)?; match least_super_dt { DataType::Boolean => { 
basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Boolean, bool ); } DataType::UInt8 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, UInt8, u8 ); } DataType::UInt16 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, UInt16, u16 ); } DataType::UInt32 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, UInt32, u32 ); } DataType::UInt64 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, UInt64, u64 ); } DataType::Int8 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Int8, i8 ); } DataType::Int16 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Int16, i16 ); } DataType::Int32 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Int32, i32 ); } DataType::Int64 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Int64, i64 ); } DataType::Float32 => { float_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Float32, f32 ); } DataType::Float64 => { float_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Float64, f64 ); } DataType::String => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, String, string ); } DataType::Struct(_) => {} _ => { unimplemented!() } } Ok(DataColumn::Array(builder.as_series())) } } impl<const NEGATED: bool> fmt::Display for InFunction<NEGATED> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if NEGATED { write!(f, "NOT IN") } else { write!(f, "IN") } } }
use std::collections::HashSet; use std::fmt; use common_datavalues::columns::DataColumn; use common_datavalues::prelude::DataColumnsWithField; use common_datavalues::prelude::MutableArrayBuilder; use common_datavalues::prelude::MutableBooleanArrayBuilder; use common_datavalues::types::merge_types; use common_datavalues::DataType; use common_datavalues::DataTypeAndNullable; use common_datavalues::DataValue; use common_exception::ErrorCode; use common_exception::Result; use crate::scalars::function_factory::FunctionDescription; use crate::scalars::function_factory::FunctionFeatures; use crate::scalars::Function; #[derive(Clone)] pub struct InFunction<const NEGATED: bool>; impl<const NEGATED: bool> InFunction<NEGATED> { pub fn try_create(_display_name: &str) -> Result<Box<dyn Function>> { Ok(Box::new(InFunction::<NEGATED> {})) } pub fn desc() -> FunctionDescription { FunctionDescription::creator(Box::new(Self::try_create)).features( FunctionFeatures::default() .bool_function() .variadic_arguments(2, usize::MAX), ) } } macro_rules! 
basic_contains { ($INPUT_DT: expr, $INPUT_ARRAY: expr, $CHECK_ARRAY: expr, $NEGATED: expr, $BUILDER: expr, $CAST_TYPE: ident, bool) => { let mut vals_set = HashSet::new(); for array in $CHECK_ARRAY { let array = array.column().cast_with_type($INPUT_DT)?; match array { DataColumn::Constant(DataValue::$CAST_TYPE(Some(val)), _) => { vals_set.insert(val); } DataColumn::Constant(DataValue::$CAST_TYPE(None), _) => { continue; } _ => { return Err(ErrorCode::LogicalError("it's a bug")); } } } let arr = $INPUT_ARRAY.bool()?; for val in arr.into_no_null_iter() { let contains = vals_set.contains(&val); $BUILDER.push((contains && !NEGATED) || (!contains && NEGATED)); } }; ($INPUT_DT: expr, $INPUT_ARRAY: expr, $CHECK_ARRAY: expr, $NEGATED: expr, $BUILDER: expr, $CAST_TYPE: ident, $PRIMITIVE_TYPE: ident) => { let mut vals_set = HashSet::new(); for array in $CHECK_ARRAY { let array = array.column().cast_with_type($INPUT_DT)?; match array { DataColumn::Constant(DataValue::$CAST_TYPE(Some(val)), _) => { vals_set.insert(val); } DataColumn::Constant(DataValue::$CAST_TYPE(None), _) => { continue; } _ => { return Err(ErrorCode::LogicalError("it's a bug")); } } } let arr = $INPUT_ARRAY.$PRIMITIVE_TYPE()?; for val in arr.into_no_null_iter() { let contains = vals_set.contains(val); $BUILDER.push((contains && !NEGATED) || (!contains && NEGATED)); } }; } macro_rules! 
float_contains { ($INPUT_DT: expr, $INPUT_ARRAY: expr, $CHECK_ARRAY: expr, $NEGATED: expr, $BUILDER: expr, $CAST_TYPE: ident, $PRIMITIVE_TYPE: ident) => { let mut vals_set = Vec::new(); for array in $CHECK_ARRAY { let array = array.column().cast_with_type($INPUT_DT)?; match array { DataColumn::Constant(DataValue::$CAST_TYPE(Some(val)), _) => { vals_set.push(val); } DataColumn::Constant(DataValue::$CAST_TYPE(None), _) => { continue; } _ => { return Err(ErrorCode::LogicalError("it's a bug")); } } } let arr = $INPUT_ARRAY.$PRIMITIVE_TYPE()?; for val in arr.into_no_null_iter() { let contains = vals_set.contains(val); $BUILDER.push((contains && !NEGATED) || (!contains && NEGATED)); } }; } impl<const NEGATED: bool> Function for InFunction<NEGATED> { fn name(&self) -> &str { "InFunction" } fn return_type(&self, args: &[DataTypeAndNullable]) -> Result<DataTypeAndNullable> { let input_dt = args[0].data_type(); if input_dt == &DataType::Null { return Ok(DataTypeAndNullable::create(input_dt, false)); } let dt = DataType::Boolean; Ok(DataTypeAndNullable::create(&dt, false)) }
} impl<const NEGATED: bool> fmt::Display for InFunction<NEGATED> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if NEGATED { write!(f, "NOT IN") } else { write!(f, "IN") } } }
fn eval(&self, columns: &DataColumnsWithField, _input_rows: usize) -> Result<DataColumn> { let input_column = columns[0].column(); let input_array = match input_column { DataColumn::Array(array) => array.to_owned(), DataColumn::Constant(scalar, _) => scalar.to_array()?, }; let input_dt = input_array.data_type(); if input_dt == &DataType::Null { let mut array = MutableBooleanArrayBuilder::<false>::with_capacity(input_array.len()); for _ in 0..input_array.len() { array.push_null(); } return Ok(DataColumn::Array(array.as_series())); } let mut builder = MutableBooleanArrayBuilder::<false>::with_capacity(input_column.len()); let check_arrays = &columns[1..]; let mut least_super_dt = input_dt.clone(); for array in check_arrays { least_super_dt = merge_types(&least_super_dt, array.data_type())?; } let input_array = input_array.cast_with_type(&least_super_dt)?; match least_super_dt { DataType::Boolean => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Boolean, bool ); } DataType::UInt8 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, UInt8, u8 ); } DataType::UInt16 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, UInt16, u16 ); } DataType::UInt32 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, UInt32, u32 ); } DataType::UInt64 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, UInt64, u64 ); } DataType::Int8 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Int8, i8 ); } DataType::Int16 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Int16, i16 ); } DataType::Int32 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Int32, i32 ); } DataType::Int64 => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Int64, i64 ); } DataType::Float32 => { float_contains!( &least_super_dt, 
input_array, check_arrays, NEGATED, builder, Float32, f32 ); } DataType::Float64 => { float_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, Float64, f64 ); } DataType::String => { basic_contains!( &least_super_dt, input_array, check_arrays, NEGATED, builder, String, string ); } DataType::Struct(_) => {} _ => { unimplemented!() } } Ok(DataColumn::Array(builder.as_series())) }
function_block-full_function
[ { "content": "pub fn string_literal(val: &str) -> Expression {\n\n Expression::create_literal(DataValue::String(Some(val.as_bytes().to_vec())))\n\n}\n\n\n", "file_path": "query/src/storages/fuse/table_functions/table_arg_util.rs", "rank": 0, "score": 452972.13957462483 }, { "content": "pu...
Rust
research/query_service/ir/integrated/tests/expand_test.rs
wuyueandrew/GraphScope
9e2d77d83378f85f001b555d06e4dcbf9a6a4260
mod common; #[cfg(test)] mod test { use std::sync::Arc; use graph_proxy::{create_demo_graph, SimplePartition}; use graph_store::ldbc::LDBCVertexParser; use graph_store::prelude::DefaultId; use ir_common::expr_parse::str_to_expr_pb; use ir_common::generated::algebra as pb; use ir_common::generated::common as common_pb; use pegasus::api::{Map, Sink}; use pegasus::result::ResultStream; use pegasus::JobConf; use runtime::graph::element::{Element, GraphElement}; use runtime::graph::property::Details; use runtime::process::operator::flatmap::FlatMapFuncGen; use runtime::process::operator::map::MapFuncGen; use runtime::process::operator::source::SourceOperator; use runtime::process::record::Record; use crate::common::test::*; fn source_gen(alias: Option<common_pb::NameOrId>) -> Box<dyn Iterator<Item = Record> + Send> { create_demo_graph(); let scan_opr_pb = pb::Scan { scan_opt: 0, alias, params: None, idx_predicate: None }; let mut source_opr_pb = pb::logical_plan::Operator { opr: Some(pb::logical_plan::operator::Opr::Scan(scan_opr_pb)) }; let source = SourceOperator::new(&mut source_opr_pb, 1, 1, Arc::new(SimplePartition { num_servers: 1 })) .unwrap(); source.gen_source(0).unwrap() } fn expand_test(expand: pb::EdgeExpand) -> ResultStream<Record> { let conf = JobConf::new("expand_test"); let result = pegasus::run(conf, || { let expand = expand.clone(); |input, output| { let mut stream = input.input_from(source_gen(None))?; let flatmap_func = expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); result } fn expand_test_with_source_tag( source_tag: common_pb::NameOrId, expand: pb::EdgeExpand, ) -> ResultStream<Record> { let conf = JobConf::new("expand_test"); let result = pegasus::run(conf, || { let source_tag = source_tag.clone(); let expand = expand.clone(); |input, output| { let mut stream = input.input_from(source_gen(Some(source_tag)))?; let flatmap_func = 
expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); result } #[test] fn expand_outv_test() { let expand_opr_pb = pb::EdgeExpand { v_tag: None, direction: 0, params: None, is_edge: false, alias: None }; let mut result = expand_test(expand_opr_pb); let mut result_ids = vec![]; let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let v3: DefaultId = LDBCVertexParser::to_global_id(3, 1); let v4: DefaultId = LDBCVertexParser::to_global_id(4, 0); let v5: DefaultId = LDBCVertexParser::to_global_id(5, 1); let mut expected_ids = vec![v2, v3, v3, v3, v4, v5]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize) } } result_ids.sort(); expected_ids.sort(); assert_eq!(result_ids, expected_ids) } #[test] fn expand_oute_with_label_test() { let query_param = query_params(vec!["knows".into()], vec![], None); let expand_opr_pb = pb::EdgeExpand { v_tag: None, direction: 0, params: Some(query_param), is_edge: true, alias: None, }; let mut result = expand_test(expand_opr_pb); let mut result_edges = vec![]; let v1: DefaultId = LDBCVertexParser::to_global_id(1, 0); let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let v4: DefaultId = LDBCVertexParser::to_global_id(4, 0); let expected_edges = vec![(v1, v4), (v1, v2)]; while let Some(Ok(record)) = result.next() { if let Some(e) = record.get(None).unwrap().as_graph_edge() { result_edges.push((e.src_id as usize, e.dst_id as usize)); } } assert_eq!(result_edges, expected_edges) } #[test] fn expand_oute_with_many_labels_test() { let query_param = query_params(vec!["knows".into(), "created".into()], vec![], None); let expand_opr_pb = pb::EdgeExpand { v_tag: None, direction: 0, params: Some(query_param), is_edge: true, alias: None, }; let mut result = expand_test(expand_opr_pb); let mut result_edges = vec![]; let v1: DefaultId = 
LDBCVertexParser::to_global_id(1, 0); let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let v3: DefaultId = LDBCVertexParser::to_global_id(3, 1); let v4: DefaultId = LDBCVertexParser::to_global_id(4, 0); let v5: DefaultId = LDBCVertexParser::to_global_id(5, 1); let v6: DefaultId = LDBCVertexParser::to_global_id(6, 0); let mut expected_edges = vec![(v1, v2), (v1, v3), (v1, v4), (v4, v3), (v4, v5), (v6, v3)]; expected_edges.sort(); while let Some(Ok(record)) = result.next() { if let Some(e) = record.get(None).unwrap().as_graph_edge() { result_edges.push((e.src_id as usize, e.dst_id as usize)); } } result_edges.sort(); assert_eq!(result_edges, expected_edges) } #[test] fn expand_inv_with_label_property_test() { let query_param = query_params(vec!["knows".into()], vec!["name".into()], None); let expand_opr_pb = pb::EdgeExpand { v_tag: None, direction: 1, params: Some(query_param), is_edge: false, alias: None, }; let mut result = expand_test(expand_opr_pb); let mut result_ids_with_prop = vec![]; let v1: DefaultId = LDBCVertexParser::to_global_id(1, 0); let expected_ids_with_prop = vec![(v1, "marko".to_string().into()), (v1, "marko".to_string().into())]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids_with_prop.push(( element.id() as usize, element .details() .unwrap() .get_property(&"name".into()) .unwrap() .try_to_owned() .unwrap(), )) } } assert_eq!(result_ids_with_prop, expected_ids_with_prop) } #[test] fn expand_bothv_test() { let query_param = query_params(vec![], vec![], None); let expand_opr_pb = pb::EdgeExpand { v_tag: None, direction: 2, params: Some(query_param), is_edge: false, alias: None, }; let mut result = expand_test(expand_opr_pb); let mut cnt = 0; let expected_result_num = 12; while let Some(_) = result.next() { cnt += 1; } assert_eq!(cnt, expected_result_num) } #[test] fn expand_outv_from_tag_as_tag_test() { let query_param = query_params(vec!["knows".into()], vec![], 
None); let expand_opr_pb = pb::EdgeExpand { v_tag: Some("a".into()), direction: 0, params: Some(query_param), is_edge: false, alias: Some("b".into()), }; let mut result = expand_test_with_source_tag("a".into(), expand_opr_pb); let mut result_ids = vec![]; let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let v4: DefaultId = LDBCVertexParser::to_global_id(4, 0); let mut expected_ids = vec![v2, v4]; while let Some(Ok(record)) = result.next() { if let Some(element) = record .get(Some(&"b".into())) .unwrap() .as_graph_vertex() { result_ids.push(element.id() as usize) } } result_ids.sort(); expected_ids.sort(); assert_eq!(result_ids, expected_ids) } #[test] fn expand_outv_from_select_tag_test() { let query_param = query_params(vec!["knows".into()], vec![], None); let project = pb::Project { mappings: vec![pb::project::ExprAlias { expr: str_to_expr_pb("@a".to_string()).ok(), alias: None, }], is_append: false, }; let expand = pb::EdgeExpand { v_tag: None, direction: 0, params: Some(query_param), is_edge: false, alias: None, }; let conf = JobConf::new("expand_test"); let mut result = pegasus::run(conf, || { let project = project.clone(); let expand = expand.clone(); |input, output| { let mut stream = input.input_from(source_gen(Some("a".into())))?; let map_func = project.gen_map().unwrap(); stream = stream.map(move |input| map_func.exec(input))?; let flatmap_func = expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); let mut result_ids = vec![]; let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let v4: DefaultId = LDBCVertexParser::to_global_id(4, 0); let mut expected_ids = vec![v2, v4]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize) } } result_ids.sort(); expected_ids.sort(); assert_eq!(result_ids, expected_ids) } #[test] fn 
expand_outv_filter_test() { let query_param = query_params(vec!["knows".into()], vec![], str_to_expr_pb("@.id == 2".to_string()).ok()); let expand_opr_pb = pb::EdgeExpand { v_tag: None, direction: 0, params: Some(query_param), is_edge: false, alias: None, }; let mut result = expand_test(expand_opr_pb); let mut result_ids = vec![]; let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let expected_ids = vec![v2]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize) } } assert_eq!(result_ids, expected_ids) } #[test] fn expand_oute_inv_test() { let expand_opr = pb::EdgeExpand { v_tag: None, direction: 0, params: Some(query_params(vec!["knows".into()], vec![], None)), is_edge: true, alias: None, }; let getv_opr = pb::GetV { tag: None, opt: 1, params: Some(query_params(vec![], vec![], None)), alias: None, }; let conf = JobConf::new("expand_oute_inv_test"); let mut result = pegasus::run(conf, || { let expand = expand_opr.clone(); let getv = getv_opr.clone(); |input, output| { let mut stream = input.input_from(source_gen(None))?; let flatmap_func = expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; let map_func = getv.gen_map().unwrap(); stream = stream.map(move |input| map_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); let expected_ids = vec![2, 4]; let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize); assert!(element .details() .unwrap() .get_property(&"name".into()) .is_none()) } } result_ids.sort(); assert_eq!(result_ids, expected_ids) } #[test] fn expand_ine_outv_test() { let expand_opr = pb::EdgeExpand { v_tag: None, direction: 1, params: Some(query_params(vec!["created".into()], vec![], None)), is_edge: true, alias: None, }; let getv_opr = pb::GetV { tag: 
None, opt: 0, params: Some(query_params(vec![], vec![], None)), alias: None, }; let conf = JobConf::new("expand_ine_outv_test"); let mut result = pegasus::run(conf, || { let expand = expand_opr.clone(); let getv = getv_opr.clone(); |input, output| { let mut stream = input.input_from(source_gen(None))?; let flatmap_func = expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; let map_func = getv.gen_map().unwrap(); stream = stream.map(move |input| map_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); let expected_ids = vec![1, 4, 4, 6]; let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize); assert!(element .details() .unwrap() .get_property(&"name".into()) .is_none()) } } result_ids.sort(); assert_eq!(result_ids, expected_ids) } #[test] fn expand_bothe_otherv_test() { let expand_opr = pb::EdgeExpand { v_tag: None, direction: 2, params: Some(query_params(vec!["knows".into()], vec![], None)), is_edge: true, alias: None, }; let getv_opr = pb::GetV { tag: None, opt: 2, params: Some(query_params(vec![], vec![], None)), alias: None, }; let conf = JobConf::new("expand_bothe_otherv_test"); let mut result = pegasus::run(conf, || { let expand = expand_opr.clone(); let getv = getv_opr.clone(); |input, output| { let mut stream = input.input_from(source_gen(None))?; let flatmap_func = expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; let map_func = getv.gen_map().unwrap(); stream = stream.map(move |input| map_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); let expected_ids = vec![1, 1, 2, 4]; let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize); assert!(element .details() 
.unwrap() .get_property(&"name".into()) .is_none()) } } result_ids.sort(); assert_eq!(result_ids, expected_ids) } }
mod common; #[cfg(test)] mod test { use std::sync::Arc; use graph_proxy::{create_demo_graph, SimplePartition}; use graph_store::ldbc::LDBCVertexParser; use graph_store::prelude::DefaultId; use ir_common::expr_parse::str_to_expr_pb; use ir_common::generated::algebra as pb; use ir_common::generated::common as common_pb; use pegasus::api::{Map, Sink}; use pegasus::result::ResultStream; use pegasus::JobConf; use runtime::graph::element::{Element, GraphElement}; use runtime::graph::property::Details; use runtime::process::operator::flatmap::FlatMapFuncGen; use runtime::process::operator::map::MapFuncGen; use runtime::process::operator::source::SourceOperator; use runtime::process::record::Record; use crate::common::test::*; fn source_gen(alias: Option<common_pb::NameOrId>) -> Box<dyn Iterator<Item = Record> + Send> { create_demo_graph(); let scan_opr_pb = pb::Scan { scan_opt: 0, alias, params: None, idx_predicate: None }; let mut source_opr_pb = pb::logical_plan::Operator { opr: Some(pb::logical_plan::operator::Opr::Scan(scan_opr_pb)) }; let source = SourceOperator::new(&mut source_opr_pb, 1, 1, Arc::new(SimplePartition { num_servers: 1 })) .unwrap(); source.gen_source(0).unwrap() } fn expand_test(expand: pb::EdgeExpand) -> ResultStream<Record> { let conf = JobConf::new("expand_test"); let result = pegasus::run(conf, || { let expand = expand.clone(); |input, output| { let mut stream = input.input_from(source_gen(None))?; let flatmap_func = expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); result } fn expand_test_with_source_tag( source_tag: common_pb::NameOrId, expand: pb::EdgeExpand, ) -> ResultStream<Record> { let conf = JobConf::new("expand_test"); let result = pegasus::run(conf, || { let source_tag = source_tag.clone(); let expand = expand.clone(); |input, output| { let mut stream = input.input_from(source_gen(Some(source_tag)))?; let flatmap_func = 
expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); result } #[test] fn expand_outv_test() { let expand_opr_pb = pb::EdgeExpand { v_tag: None, direction: 0, params: None, is_edge: false, alias: None }; let mut result = expand_test(expand_opr_pb); let mut result_ids = vec![]; let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let v3: DefaultId = LDBCVertexParser::to_global_id(3, 1); let v4: DefaultId = LDBCVertexParser::to_global_id(4, 0); let v5: DefaultId = LDBCVertexParser::to_global_id(5, 1); let mut expected_ids = vec![v2, v3, v3, v3, v4, v5]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize) } } result_ids.sort(); expected_ids.sort(); assert_eq!(result_ids, expected_ids) } #[test]
#[test] fn expand_oute_with_many_labels_test() { let query_param = query_params(vec!["knows".into(), "created".into()], vec![], None); let expand_opr_pb = pb::EdgeExpand { v_tag: None, direction: 0, params: Some(query_param), is_edge: true, alias: None, }; let mut result = expand_test(expand_opr_pb); let mut result_edges = vec![]; let v1: DefaultId = LDBCVertexParser::to_global_id(1, 0); let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let v3: DefaultId = LDBCVertexParser::to_global_id(3, 1); let v4: DefaultId = LDBCVertexParser::to_global_id(4, 0); let v5: DefaultId = LDBCVertexParser::to_global_id(5, 1); let v6: DefaultId = LDBCVertexParser::to_global_id(6, 0); let mut expected_edges = vec![(v1, v2), (v1, v3), (v1, v4), (v4, v3), (v4, v5), (v6, v3)]; expected_edges.sort(); while let Some(Ok(record)) = result.next() { if let Some(e) = record.get(None).unwrap().as_graph_edge() { result_edges.push((e.src_id as usize, e.dst_id as usize)); } } result_edges.sort(); assert_eq!(result_edges, expected_edges) } #[test] fn expand_inv_with_label_property_test() { let query_param = query_params(vec!["knows".into()], vec!["name".into()], None); let expand_opr_pb = pb::EdgeExpand { v_tag: None, direction: 1, params: Some(query_param), is_edge: false, alias: None, }; let mut result = expand_test(expand_opr_pb); let mut result_ids_with_prop = vec![]; let v1: DefaultId = LDBCVertexParser::to_global_id(1, 0); let expected_ids_with_prop = vec![(v1, "marko".to_string().into()), (v1, "marko".to_string().into())]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids_with_prop.push(( element.id() as usize, element .details() .unwrap() .get_property(&"name".into()) .unwrap() .try_to_owned() .unwrap(), )) } } assert_eq!(result_ids_with_prop, expected_ids_with_prop) } #[test] fn expand_bothv_test() { let query_param = query_params(vec![], vec![], None); let expand_opr_pb = pb::EdgeExpand { v_tag: None, 
direction: 2, params: Some(query_param), is_edge: false, alias: None, }; let mut result = expand_test(expand_opr_pb); let mut cnt = 0; let expected_result_num = 12; while let Some(_) = result.next() { cnt += 1; } assert_eq!(cnt, expected_result_num) } #[test] fn expand_outv_from_tag_as_tag_test() { let query_param = query_params(vec!["knows".into()], vec![], None); let expand_opr_pb = pb::EdgeExpand { v_tag: Some("a".into()), direction: 0, params: Some(query_param), is_edge: false, alias: Some("b".into()), }; let mut result = expand_test_with_source_tag("a".into(), expand_opr_pb); let mut result_ids = vec![]; let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let v4: DefaultId = LDBCVertexParser::to_global_id(4, 0); let mut expected_ids = vec![v2, v4]; while let Some(Ok(record)) = result.next() { if let Some(element) = record .get(Some(&"b".into())) .unwrap() .as_graph_vertex() { result_ids.push(element.id() as usize) } } result_ids.sort(); expected_ids.sort(); assert_eq!(result_ids, expected_ids) } #[test] fn expand_outv_from_select_tag_test() { let query_param = query_params(vec!["knows".into()], vec![], None); let project = pb::Project { mappings: vec![pb::project::ExprAlias { expr: str_to_expr_pb("@a".to_string()).ok(), alias: None, }], is_append: false, }; let expand = pb::EdgeExpand { v_tag: None, direction: 0, params: Some(query_param), is_edge: false, alias: None, }; let conf = JobConf::new("expand_test"); let mut result = pegasus::run(conf, || { let project = project.clone(); let expand = expand.clone(); |input, output| { let mut stream = input.input_from(source_gen(Some("a".into())))?; let map_func = project.gen_map().unwrap(); stream = stream.map(move |input| map_func.exec(input))?; let flatmap_func = expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); let mut result_ids = vec![]; let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let 
v4: DefaultId = LDBCVertexParser::to_global_id(4, 0); let mut expected_ids = vec![v2, v4]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize) } } result_ids.sort(); expected_ids.sort(); assert_eq!(result_ids, expected_ids) } #[test] fn expand_outv_filter_test() { let query_param = query_params(vec!["knows".into()], vec![], str_to_expr_pb("@.id == 2".to_string()).ok()); let expand_opr_pb = pb::EdgeExpand { v_tag: None, direction: 0, params: Some(query_param), is_edge: false, alias: None, }; let mut result = expand_test(expand_opr_pb); let mut result_ids = vec![]; let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let expected_ids = vec![v2]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize) } } assert_eq!(result_ids, expected_ids) } #[test] fn expand_oute_inv_test() { let expand_opr = pb::EdgeExpand { v_tag: None, direction: 0, params: Some(query_params(vec!["knows".into()], vec![], None)), is_edge: true, alias: None, }; let getv_opr = pb::GetV { tag: None, opt: 1, params: Some(query_params(vec![], vec![], None)), alias: None, }; let conf = JobConf::new("expand_oute_inv_test"); let mut result = pegasus::run(conf, || { let expand = expand_opr.clone(); let getv = getv_opr.clone(); |input, output| { let mut stream = input.input_from(source_gen(None))?; let flatmap_func = expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; let map_func = getv.gen_map().unwrap(); stream = stream.map(move |input| map_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); let expected_ids = vec![2, 4]; let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize); assert!(element .details() 
.unwrap() .get_property(&"name".into()) .is_none()) } } result_ids.sort(); assert_eq!(result_ids, expected_ids) } #[test] fn expand_ine_outv_test() { let expand_opr = pb::EdgeExpand { v_tag: None, direction: 1, params: Some(query_params(vec!["created".into()], vec![], None)), is_edge: true, alias: None, }; let getv_opr = pb::GetV { tag: None, opt: 0, params: Some(query_params(vec![], vec![], None)), alias: None, }; let conf = JobConf::new("expand_ine_outv_test"); let mut result = pegasus::run(conf, || { let expand = expand_opr.clone(); let getv = getv_opr.clone(); |input, output| { let mut stream = input.input_from(source_gen(None))?; let flatmap_func = expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; let map_func = getv.gen_map().unwrap(); stream = stream.map(move |input| map_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); let expected_ids = vec![1, 4, 4, 6]; let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize); assert!(element .details() .unwrap() .get_property(&"name".into()) .is_none()) } } result_ids.sort(); assert_eq!(result_ids, expected_ids) } #[test] fn expand_bothe_otherv_test() { let expand_opr = pb::EdgeExpand { v_tag: None, direction: 2, params: Some(query_params(vec!["knows".into()], vec![], None)), is_edge: true, alias: None, }; let getv_opr = pb::GetV { tag: None, opt: 2, params: Some(query_params(vec![], vec![], None)), alias: None, }; let conf = JobConf::new("expand_bothe_otherv_test"); let mut result = pegasus::run(conf, || { let expand = expand_opr.clone(); let getv = getv_opr.clone(); |input, output| { let mut stream = input.input_from(source_gen(None))?; let flatmap_func = expand.gen_flat_map().unwrap(); stream = stream.flat_map(move |input| flatmap_func.exec(input))?; let map_func = getv.gen_map().unwrap(); stream = stream.map(move 
|input| map_func.exec(input))?; stream.sink_into(output) } }) .expect("build job failure"); let expected_ids = vec![1, 1, 2, 4]; let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id() as usize); assert!(element .details() .unwrap() .get_property(&"name".into()) .is_none()) } } result_ids.sort(); assert_eq!(result_ids, expected_ids) } }
fn expand_oute_with_label_test() { let query_param = query_params(vec!["knows".into()], vec![], None); let expand_opr_pb = pb::EdgeExpand { v_tag: None, direction: 0, params: Some(query_param), is_edge: true, alias: None, }; let mut result = expand_test(expand_opr_pb); let mut result_edges = vec![]; let v1: DefaultId = LDBCVertexParser::to_global_id(1, 0); let v2: DefaultId = LDBCVertexParser::to_global_id(2, 0); let v4: DefaultId = LDBCVertexParser::to_global_id(4, 0); let expected_edges = vec![(v1, v4), (v1, v2)]; while let Some(Ok(record)) = result.next() { if let Some(e) = record.get(None).unwrap().as_graph_edge() { result_edges.push((e.src_id as usize, e.dst_id as usize)); } } assert_eq!(result_edges, expected_edges) }
function_block-full_function
[ { "content": "fn create_src(id: u32, source: &mut Source<i32>) -> Result<(Stream<i32>, Stream<i32>), BuildJobError> {\n\n let src1 = if id == 0 { source.input_from(1..5)? } else { source.input_from(8..10)? };\n\n let (src1, src2) = src1.copied()?;\n\n let src2 = src2.map(|x| Ok(x + 1))?;\n\n Ok((src...
Rust
src/config/request.rs
initprism/isahc
9ddf6b4de16f3b90608dcce0abf2ddd8c51ef2e6
use super::{proxy::Proxy, *}; use curl::easy::Easy2; #[doc(hidden)] pub trait WithRequestConfig: Sized { fn with_config(self, f: impl FnOnce(&mut RequestConfig)) -> Self; } pub(crate) trait SetOpt { fn set_opt<H>(&self, easy: &mut Easy2<H>) -> Result<(), curl::Error>; } macro_rules! define_request_config { ($($field:ident: $t:ty,)*) => { #[derive(Clone, Debug, Default)] pub struct RequestConfig { $( pub(crate) $field: $t, )* } impl RequestConfig { pub(crate) fn client_defaults() -> Self { Self { version_negotiation: Some(VersionNegotiation::default()), automatic_decompression: Some(true), authentication: Some(Authentication::default()), ..Default::default() } } pub(crate) fn merge(&mut self, defaults: &Self) { $( if self.$field.is_none() { if let Some(value) = defaults.$field.as_ref() { self.$field = Some(value.clone()); } } )* } } }; } define_request_config! { timeout: Option<Duration>, connect_timeout: Option<Duration>, low_speed_timeout: Option<(u32, Duration)>, version_negotiation: Option<VersionNegotiation>, automatic_decompression: Option<bool>, expect_continue: Option<ExpectContinue>, authentication: Option<Authentication>, credentials: Option<Credentials>, tcp_keepalive: Option<Duration>, tcp_nodelay: Option<bool>, interface: Option<NetworkInterface>, ip_version: Option<IpVersion>, dial: Option<Dialer>, proxy: Option<Option<http::Uri>>, proxy_blacklist: Option<proxy::Blacklist>, proxy_authentication: Option<Proxy<Authentication>>, proxy_credentials: Option<Proxy<Credentials>>, max_upload_speed: Option<u64>, max_download_speed: Option<u64>, ssl_client_certificate: Option<ClientCertificate>, ssl_ca_certificate: Option<CaCertificate>, ssl_ciphers: Option<ssl::Ciphers>, ssl_options: Option<SslOption>, enable_metrics: Option<bool>, redirect_policy: Option<RedirectPolicy>, auto_referer: Option<bool>, title_case_headers: Option<bool>, } impl SetOpt for RequestConfig { fn set_opt<H>(&self, easy: &mut Easy2<H>) -> Result<(), curl::Error> { if let Some(timeout) = 
self.timeout { easy.timeout(timeout)?; } if let Some((low_speed, timeout)) = self.low_speed_timeout { easy.low_speed_limit(low_speed)?; easy.low_speed_time(timeout)?; } if let Some(timeout) = self.connect_timeout { easy.connect_timeout(timeout)?; } if let Some(negotiation) = self.version_negotiation.as_ref() { negotiation.set_opt(easy)?; } #[allow(unsafe_code)] { if let Some(enable) = self.automatic_decompression { if enable { easy.accept_encoding("")?; } else { unsafe { match curl_sys::curl_easy_setopt( easy.raw(), curl_sys::CURLOPT_ACCEPT_ENCODING, 0, ) { curl_sys::CURLE_OK => {} code => return Err(curl::Error::new(code)), } } } } } if let Some(expect_continue) = self.expect_continue.as_ref() { expect_continue.set_opt(easy)?; } if let Some(auth) = self.authentication.as_ref() { auth.set_opt(easy)?; } if let Some(credentials) = self.credentials.as_ref() { credentials.set_opt(easy)?; } if let Some(interval) = self.tcp_keepalive { easy.tcp_keepalive(true)?; easy.tcp_keepintvl(interval)?; } if let Some(enable) = self.tcp_nodelay { easy.tcp_nodelay(enable)?; } if let Some(interface) = self.interface.as_ref() { interface.set_opt(easy)?; } if let Some(version) = self.ip_version.as_ref() { version.set_opt(easy)?; } if let Some(dialer) = self.dial.as_ref() { dialer.set_opt(easy)?; } if let Some(proxy) = self.proxy.as_ref() { match proxy { Some(uri) => easy.proxy(&format!("{}", uri))?, None => easy.proxy("")?, } } if let Some(blacklist) = self.proxy_blacklist.as_ref() { blacklist.set_opt(easy)?; } if let Some(auth) = self.proxy_authentication.as_ref() { auth.set_opt(easy)?; } if let Some(credentials) = self.proxy_credentials.as_ref() { credentials.set_opt(easy)?; } if let Some(max) = self.max_upload_speed { easy.max_send_speed(max)?; } if let Some(max) = self.max_download_speed { easy.max_recv_speed(max)?; } if let Some(cert) = self.ssl_client_certificate.as_ref() { cert.set_opt(easy)?; } if let Some(cert) = self.ssl_ca_certificate.as_ref() { cert.set_opt(easy)?; } if let 
Some(ciphers) = self.ssl_ciphers.as_ref() { ciphers.set_opt(easy)?; } if let Some(options) = self.ssl_options.as_ref() { options.set_opt(easy)?; } if let Some(enable) = self.enable_metrics { easy.progress(enable)?; } Ok(()) } }
use super::{proxy::Proxy, *}; use curl::easy::Easy2; #[doc(hidden)] pub trait WithRequestConfig: Sized { fn with_config(self, f: impl FnOnce(&mut RequestConfig)) -> Self; } pub(crate) trait SetOpt { fn set_opt<H>(&self, easy: &mut Easy2<H>) -> Result<(), curl::Error>; } macro_rules! define_request_config { ($($field:ident: $t:ty,)*) => { #[derive(Clone, Debug, Default)] pub struct RequestConfig { $( pub(crate) $field: $t, )* } impl RequestConfig { pub(crate) fn client_defaults() -> Self { Self { version_negotiation: Some(VersionNegotiation::default()), automatic_decompression: Some(true), authentication: Some(Authentication::default()), ..Default::default() } } pub(crate) fn merge(&mut self, defaults: &Self) { $( if self.$field.is_none() { if let Some(value) = defaults.$field.as_ref() { self.$field = Some(value.clone()); } } )* } } }; } define_request_config! { timeout: Option<Duration>, connect_timeout: Option<Duration>, low_speed_timeout: Option<(u32, Duration)>, version_negotiation: Option<VersionNegotiation>, automatic_decompression: Option<bool>, expect_continue: Option<ExpectContinue>, authent
peed, timeout)) = self.low_speed_timeout { easy.low_speed_limit(low_speed)?; easy.low_speed_time(timeout)?; } if let Some(timeout) = self.connect_timeout { easy.connect_timeout(timeout)?; } if let Some(negotiation) = self.version_negotiation.as_ref() { negotiation.set_opt(easy)?; } #[allow(unsafe_code)] { if let Some(enable) = self.automatic_decompression { if enable { easy.accept_encoding("")?; } else { unsafe { match curl_sys::curl_easy_setopt( easy.raw(), curl_sys::CURLOPT_ACCEPT_ENCODING, 0, ) { curl_sys::CURLE_OK => {} code => return Err(curl::Error::new(code)), } } } } } if let Some(expect_continue) = self.expect_continue.as_ref() { expect_continue.set_opt(easy)?; } if let Some(auth) = self.authentication.as_ref() { auth.set_opt(easy)?; } if let Some(credentials) = self.credentials.as_ref() { credentials.set_opt(easy)?; } if let Some(interval) = self.tcp_keepalive { easy.tcp_keepalive(true)?; easy.tcp_keepintvl(interval)?; } if let Some(enable) = self.tcp_nodelay { easy.tcp_nodelay(enable)?; } if let Some(interface) = self.interface.as_ref() { interface.set_opt(easy)?; } if let Some(version) = self.ip_version.as_ref() { version.set_opt(easy)?; } if let Some(dialer) = self.dial.as_ref() { dialer.set_opt(easy)?; } if let Some(proxy) = self.proxy.as_ref() { match proxy { Some(uri) => easy.proxy(&format!("{}", uri))?, None => easy.proxy("")?, } } if let Some(blacklist) = self.proxy_blacklist.as_ref() { blacklist.set_opt(easy)?; } if let Some(auth) = self.proxy_authentication.as_ref() { auth.set_opt(easy)?; } if let Some(credentials) = self.proxy_credentials.as_ref() { credentials.set_opt(easy)?; } if let Some(max) = self.max_upload_speed { easy.max_send_speed(max)?; } if let Some(max) = self.max_download_speed { easy.max_recv_speed(max)?; } if let Some(cert) = self.ssl_client_certificate.as_ref() { cert.set_opt(easy)?; } if let Some(cert) = self.ssl_ca_certificate.as_ref() { cert.set_opt(easy)?; } if let Some(ciphers) = self.ssl_ciphers.as_ref() { 
ciphers.set_opt(easy)?; } if let Some(options) = self.ssl_options.as_ref() { options.set_opt(easy)?; } if let Some(enable) = self.enable_metrics { easy.progress(enable)?; } Ok(()) } }
ication: Option<Authentication>, credentials: Option<Credentials>, tcp_keepalive: Option<Duration>, tcp_nodelay: Option<bool>, interface: Option<NetworkInterface>, ip_version: Option<IpVersion>, dial: Option<Dialer>, proxy: Option<Option<http::Uri>>, proxy_blacklist: Option<proxy::Blacklist>, proxy_authentication: Option<Proxy<Authentication>>, proxy_credentials: Option<Proxy<Credentials>>, max_upload_speed: Option<u64>, max_download_speed: Option<u64>, ssl_client_certificate: Option<ClientCertificate>, ssl_ca_certificate: Option<CaCertificate>, ssl_ciphers: Option<ssl::Ciphers>, ssl_options: Option<SslOption>, enable_metrics: Option<bool>, redirect_policy: Option<RedirectPolicy>, auto_referer: Option<bool>, title_case_headers: Option<bool>, } impl SetOpt for RequestConfig { fn set_opt<H>(&self, easy: &mut Easy2<H>) -> Result<(), curl::Error> { if let Some(timeout) = self.timeout { easy.timeout(timeout)?; } if let Some((low_s
random
[ { "content": "/// Creates an interceptor from an arbitrary closure or function.\n\npub fn from_fn<F, E>(f: F) -> InterceptorFn<F>\n\nwhere\n\n F: for<'a> private::AsyncFn2<Request<AsyncBody>, Context<'a>, Output = InterceptorResult<E>>\n\n + Send\n\n + Sync\n\n + 'static,\n\n E: Error...
Rust
src/barcodeservice.rs
kalkspace/getraenkekassengeraete
39248dc3a0c6a3ae1b285e8a3f964cfe81775460
use async_stream::stream; use futures_core::stream::Stream; use libc::ioctl; use log::{error, warn}; use std::convert::TryFrom; use std::error::Error; use std::fs::File; use std::os::unix::io::AsRawFd; use std::path::{Path, PathBuf}; use tokio::io::AsyncReadExt; use tokio::time::{sleep, Duration}; use tokio_fd::AsyncFd; fn u8_8(u: &[u8]) -> [u8; 8] { [u[0], u[1], u[2], u[3], u[4], u[5], u[6], u[7]] } fn u8_4(u: &[u8]) -> [u8; 4] { [u[0], u[1], u[2], u[3]] } fn u8_2(u: &[u8]) -> [u8; 2] { [u[0], u[1]] } const EVIOCGRAB: u64 = 1074021776; fn create_input_event(buf: &[u8]) -> libc::input_event { libc::input_event { time: libc::timeval { tv_sec: i64::from_le_bytes(u8_8(&buf[0..8])), tv_usec: i64::from_le_bytes(u8_8(&buf[8..16])), }, type_: u16::from_le_bytes(u8_2(&buf[16..18])), code: u16::from_le_bytes(u8_2(&buf[18..20])), value: i32::from_le_bytes(u8_4(&buf[20..24])), } } struct KeyboardFile { _file: File, fd: AsyncFd, } impl KeyboardFile { pub fn new(dev: &Path) -> Result<KeyboardFile, Box<dyn Error>> { let file = File::open(dev)?; let fd = file.as_raw_fd(); unsafe { ioctl(fd, EVIOCGRAB, 1); } Ok(KeyboardFile { _file: file, fd: AsyncFd::try_from(fd)?, }) } pub fn fd_mut(&mut self) -> &mut AsyncFd { &mut self.fd } } struct BarcodeScanner { dev: PathBuf, keyboard_file: Option<KeyboardFile>, first_sleep_secs: Option<u64>, } impl BarcodeScanner { pub fn new(dev: impl Into<PathBuf>) -> BarcodeScanner { BarcodeScanner { dev: dev.into(), keyboard_file: None, first_sleep_secs: Some(0), } } pub async fn acquire_keyboard_fd(&mut self) -> &mut AsyncFd { if self.keyboard_file.is_none() { let mut sleep_secs = self.first_sleep_secs.take().unwrap_or(1); while self.keyboard_file.is_none() { sleep(Duration::from_secs(sleep_secs)).await; self.keyboard_file = match KeyboardFile::new(&self.dev) { Ok(fd) => Some(fd), Err(e) => { error!("Error accessing keyboard {}", e); if sleep_secs == 0 { sleep_secs = 1; } else { sleep_secs = sleep_secs * 2; if sleep_secs > 4 { sleep_secs = 4; } } 
None } } } } self.keyboard_file.as_mut().unwrap().fd_mut() } pub async fn try_read_barcode(&mut self) -> Result<String, Box<dyn Error>> { let input_event_size = std::mem::size_of::<libc::input_event>(); let mut buf = [0u8; 2048]; let mut s = String::new(); let fd = self.acquire_keyboard_fd().await; loop { let r = fd.read(&mut buf).await?; if r == 0 { continue; } for event_buf in buf.chunks_exact(input_event_size) { let event = create_input_event(&event_buf); if event.type_ != 1 { continue; } if event.value != 0 { continue; } match event.code { 2 => s += "1", 3 => s += "2", 4 => s += "3", 5 => s += "4", 6 => s += "5", 7 => s += "6", 8 => s += "7", 9 => s += "8", 10 => s += "9", 11 => s += "0", 28 => { if s.len() > 0 { return Ok(s); } warn!("Tried submitting empty barcode. Skipping."); s.clear(); } _ => { warn!("Invalid scancode {}", event.code); s.clear(); } } } } } pub async fn read_barcode(&mut self) -> String { loop { match self.try_read_barcode().await { Ok(s) => return s, Err(e) => { error!("Error reading barcode {}", e); self.keyboard_file = None } } } } } pub fn run(dev: impl Into<PathBuf>) -> impl Stream<Item = String> { stream! { let mut scanner = BarcodeScanner::new(dev); loop { yield scanner.read_barcode().await; } } }
use async_stream::stream; use futures_core::stream::Stream; use libc::ioctl; use log::{error, warn}; use std::convert::TryFrom; use std::error::Error; use std::fs::File; use std::os::unix::io::AsRawFd; use std::path::{Path, PathBuf}; use tokio::io::AsyncReadExt; use tokio::time::{sleep, Duration}; use tokio_fd::AsyncFd; fn u8_8(u: &[u8]) -> [u8; 8] { [u[0], u[1], u[2], u[3], u[4], u[5], u[6], u[7]] } fn u8_4(u: &[u8]) -> [u8; 4] { [u[0], u[1], u[2], u[3]] } fn u8_2(u: &[u8]) -> [u8; 2] { [u[0], u[1]] } const EVIOCGRAB: u64 = 1074021776; fn create_input_event(buf: &[u8]) -> libc::input_event { libc::input_event { time: libc::timeval { tv_sec: i64::from_le_bytes(u8_8(&buf[0..8])), tv_usec: i64::from_le_bytes(u8_8(&buf[8..16])), }, type_: u16::from_le_bytes(u8_2(&buf[16..18])), code: u16::from_le_bytes(u8_2(&buf[18..20])), value: i32::from_le_bytes(u8_4(&buf[20..24])), } } struct KeyboardFile { _file: File, fd: AsyncFd, } impl KeyboardFile { pub fn new(dev: &Path) -> Result<KeyboardFile, Box<dyn Error>> { let file = File::open(dev)?; let fd = file.as_raw_fd(); unsafe { ioctl(fd, EVIOCGRAB, 1); } Ok(KeyboardFile { _file: file, fd: AsyncFd::try_from(fd)?, }) } pub fn fd_mut(&mut self) -> &mut AsyncFd { &mut self.fd } } struct BarcodeScanner { dev: PathBuf, keyboard_file: Option<KeyboardFile>, first_sleep_secs: Option<u64>, } impl BarcodeScanner { pub fn new(dev: impl Into<PathBuf>) -> BarcodeScanner { BarcodeScanner { dev: dev.into(), keyboard_file: None, first_sleep_secs: Some(0), } } pub async fn acquire_keyboard_fd(&mut self) -> &mut AsyncFd { if self.keyboard_file.is_none() { let mut sleep_secs = self.first_sleep_secs.take().unwrap_or(1); while self.keyboard_file.is_none() { sleep(Duration::from_secs(sleep_secs)).await; self.keyboard_file = match KeyboardFile::new(&self.dev) { Ok(fd) => Some(fd), Err(e) => { error!("Error accessing keyboard {}", e); if sleep_secs == 0 { sleep_secs = 1; } else { sleep_secs = sleep_secs * 2; if sleep_secs > 4 { sleep_secs = 4; } } 
None } } } } self.keyboard_file.as_mut().unwrap().fd_mut() } pub async fn try_read_barcode(&mut self) -> Result<String, Box<dyn Error>> { let input_event_size = std::mem::size_of::<libc::input_event>(); let mut buf = [0u8; 2048]; let mut s = String::new(); let fd = self.acquire_keyboard_fd().await; loop { let r = fd.read(&mut buf).await?; if r == 0 { continue; } for event_buf in buf.chunks_exact(input_event_size) { let event = create_input_event(&event_buf); if event.type_ != 1 { continue; } if event.value != 0 { continue; } match event.code { 2 => s += "1", 3 => s += "2", 4 => s += "3", 5 => s += "4", 6 => s += "5", 7 => s += "6", 8 => s += "7", 9 => s += "8", 10 => s += "9", 11 => s += "0", 28 => { if s.len() > 0 { return Ok(s); } warn!("Tried submitting empty barcode. Skipping."); s.clear(); } _ => { warn!("Invalid scancode {}", event.code); s.clear(); } } } } } pub async fn read_barcode(&mut self) -> String { loop {
} } } pub fn run(dev: impl Into<PathBuf>) -> impl Stream<Item = String> { stream! { let mut scanner = BarcodeScanner::new(dev); loop { yield scanner.read_barcode().await; } } }
match self.try_read_barcode().await { Ok(s) => return s, Err(e) => { error!("Error reading barcode {}", e); self.keyboard_file = None } }
if_condition
[ { "content": "pub fn run(dev: impl Into<PathBuf>) -> impl Stream<Item = ()> {\n\n stream! {\n\n let mut reader = StornoReader::new(dev);\n\n loop {\n\n yield reader.read_storno().await;\n\n }\n\n }\n\n}\n", "file_path": "src/stornoservice.rs", "rank": 1, "score"...
Rust
crates/holochain_sqlite/src/db/p2p_agent_store.rs
the-a-man-006/holochain
ace36de1c9021dd4f49e4bba226db81f671b818e
use crate::prelude::*; use crate::sql::*; use kitsune_p2p::agent_store::{AgentInfo, AgentInfoSigned}; use kitsune_p2p::dht_arc::DhtArc; use kitsune_p2p::KitsuneAgent; use rusqlite::*; pub trait AsP2pStateConExt { fn p2p_get(&mut self, agent: &KitsuneAgent) -> DatabaseResult<Option<AgentInfoSigned>>; fn p2p_list(&mut self) -> DatabaseResult<Vec<AgentInfoSigned>>; fn p2p_gossip_query( &mut self, since_ms: u64, until_ms: u64, within_arc: DhtArc, ) -> DatabaseResult<Vec<KitsuneAgent>>; } pub trait AsP2pStateTxExt { fn p2p_get(&self, agent: &KitsuneAgent) -> DatabaseResult<Option<AgentInfoSigned>>; fn p2p_list(&self) -> DatabaseResult<Vec<AgentInfoSigned>>; fn p2p_gossip_query( &self, since_ms: u64, until_ms: u64, within_arc: DhtArc, ) -> DatabaseResult<Vec<KitsuneAgent>>; } impl AsP2pStateConExt for crate::db::PConn { fn p2p_get(&mut self, agent: &KitsuneAgent) -> DatabaseResult<Option<AgentInfoSigned>> { self.with_reader(move |reader| reader.p2p_get(agent)) } fn p2p_list(&mut self) -> DatabaseResult<Vec<AgentInfoSigned>> { self.with_reader(move |reader| reader.p2p_list()) } fn p2p_gossip_query( &mut self, since_ms: u64, until_ms: u64, within_arc: DhtArc, ) -> DatabaseResult<Vec<KitsuneAgent>> { self.with_reader(move |reader| reader.p2p_gossip_query(since_ms, until_ms, within_arc)) } } pub async fn p2p_put(db: &DbWrite, signed: &AgentInfoSigned) -> DatabaseResult<()> { let record = P2pRecord::from_signed(signed)?; db.async_commit(move |txn| tx_p2p_put(txn, record)).await } pub async fn p2p_put_all( db: &DbWrite, signed: impl Iterator<Item = &AgentInfoSigned>, ) -> DatabaseResult<()> { let mut records = Vec::new(); for s in signed { records.push(P2pRecord::from_signed(s)?); } db.async_commit(move |txn| { for record in records { tx_p2p_put(txn, record)?; } Ok(()) }) .await } fn tx_p2p_put(txn: &mut Transaction, record: P2pRecord) -> DatabaseResult<()> { txn.execute( sql_p2p_agent_store::INSERT, named_params! 
{ ":agent": &record.agent.0, ":encoded": &record.encoded, ":signed_at_ms": &record.signed_at_ms, ":expires_at_ms": &record.expires_at_ms, ":storage_center_loc": &record.storage_center_loc, ":storage_start_1": &record.storage_start_1, ":storage_end_1": &record.storage_end_1, ":storage_start_2": &record.storage_start_2, ":storage_end_2": &record.storage_end_2, }, )?; Ok(()) } pub async fn p2p_prune(db: &DbWrite) -> DatabaseResult<()> { db.async_commit(move |txn| { let now = std::time::SystemTime::now() .duration_since(std::time::SystemTime::UNIX_EPOCH) .unwrap() .as_millis() as u64; txn.execute(sql_p2p_agent_store::PRUNE, named_params! { ":now": now })?; DatabaseResult::Ok(()) }) .await?; Ok(()) } impl AsP2pStateTxExt for Transaction<'_> { fn p2p_get(&self, agent: &KitsuneAgent) -> DatabaseResult<Option<AgentInfoSigned>> { use std::convert::TryFrom; let mut stmt = self .prepare(sql_p2p_agent_store::SELECT) .map_err(|e| rusqlite::Error::ToSqlConversionFailure(e.into()))?; Ok(stmt .query_row(named_params! { ":agent": &agent.0 }, |r| { let r = r.get_ref(0)?; let r = r.as_blob()?; let signed = AgentInfoSigned::try_from(r) .map_err(|e| rusqlite::Error::ToSqlConversionFailure(e.into()))?; Ok(signed) }) .optional()?) } fn p2p_list(&self) -> DatabaseResult<Vec<AgentInfoSigned>> { use std::convert::TryFrom; let mut stmt = self .prepare(sql_p2p_agent_store::SELECT_ALL) .map_err(|e| rusqlite::Error::ToSqlConversionFailure(e.into()))?; let mut out = Vec::new(); for r in stmt.query_map([], |r| { let r = r.get_ref(0)?; let r = r.as_blob()?; let signed = AgentInfoSigned::try_from(r) .map_err(|e| rusqlite::Error::ToSqlConversionFailure(e.into()))?; Ok(signed) })? 
{ out.push(r?); } Ok(out) } fn p2p_gossip_query( &self, since_ms: u64, until_ms: u64, within_arc: DhtArc, ) -> DatabaseResult<Vec<KitsuneAgent>> { let mut stmt = self .prepare(sql_p2p_agent_store::GOSSIP_QUERY) .map_err(|e| rusqlite::Error::ToSqlConversionFailure(e.into()))?; let (storage_1, storage_2) = split_arc(&within_arc); let mut out = Vec::new(); for r in stmt.query_map( named_params! { ":since_ms": clamp64(since_ms), ":until_ms": clamp64(until_ms), ":storage_start_1": storage_1.map(|s| s.0), ":storage_end_1": storage_1.map(|s| s.1), ":storage_start_2": storage_2.map(|s| s.0), ":storage_end_2": storage_2.map(|s| s.1), }, |r| { let agent: Vec<u8> = r.get(0)?; Ok(KitsuneAgent(agent)) }, )? { out.push(r?); } Ok(out) } } #[derive(Debug)] struct P2pRecord { agent: KitsuneAgent, encoded: Vec<u8>, signed_at_ms: i64, expires_at_ms: i64, storage_center_loc: u32, storage_start_1: Option<u32>, storage_end_1: Option<u32>, storage_start_2: Option<u32>, storage_end_2: Option<u32>, } pub type SplitRange = (u32, u32); pub fn split_arc(arc: &DhtArc) -> (Option<SplitRange>, Option<SplitRange>) { let mut storage_1 = None; let mut storage_2 = None; use std::ops::{Bound, RangeBounds}; let r = arc.range(); let s = r.start_bound(); let e = r.end_bound(); match (s, e) { (Bound::Excluded(_), Bound::Excluded(_)) => (), (Bound::Included(s), Bound::Included(e)) => { if s > e { storage_1 = Some((u32::MIN, *e)); storage_2 = Some((*s, u32::MAX)); } else { storage_1 = Some((*s, *e)); } } _ => unreachable!(), } (storage_1, storage_2) } fn clamp64(u: u64) -> i64 { if u > i64::MAX as u64 { i64::MAX } else { u as i64 } } impl P2pRecord { pub fn from_signed(signed: &AgentInfoSigned) -> DatabaseResult<Self> { use std::convert::TryFrom; let info = AgentInfo::try_from(signed).map_err(|e| anyhow::anyhow!(e))?; let agent = info.as_agent_ref().clone(); let encoded = <Vec<u8>>::try_from(signed).map_err(|e| anyhow::anyhow!(e))?; let signed_at_ms = info.signed_at_ms(); let expires_at_ms = signed_at_ms + 
info.expires_after_ms(); let arc = info.dht_arc().map_err(|e| anyhow::anyhow!(e))?; let storage_center_loc = arc.center_loc.into(); let (storage_1, storage_2) = split_arc(&arc); Ok(Self { agent, encoded, signed_at_ms: clamp64(signed_at_ms), expires_at_ms: clamp64(expires_at_ms), storage_center_loc, storage_start_1: storage_1.map(|s| s.0), storage_end_1: storage_1.map(|s| s.1), storage_start_2: storage_2.map(|s| s.0), storage_end_2: storage_2.map(|s| s.1), }) } } #[cfg(test)] mod p2p_test;
use crate::prelude::*; use crate::sql::*; use kitsune_p2p::agent_store::{AgentInfo, AgentInfoSigned}; use kitsune_p2p::dht_arc::DhtArc; use kitsune_p2p::KitsuneAgent; use rusqlite::*; pub trait AsP2pStateConExt { fn p2p_get(&mut self, agent: &KitsuneAgent) -> DatabaseResult<Option<AgentInfoSigned>>; fn p2p_list(&mut self) -> DatabaseResult<Vec<AgentInfoSigned>>; fn p2p_gossip_query( &mut self, since_ms: u64, until_ms: u64, within_arc: DhtArc, ) -> DatabaseResult<Vec<KitsuneAgent>>; } pub trait AsP2pStateTxExt { fn p2p_get(&self, agent: &KitsuneAgent) -> DatabaseResult<Option<AgentInfoSigned>>; fn p2p_list(&self) -> DatabaseResult<Vec<AgentInfoSigned>>; fn p2p_gossip_query( &self, since_ms: u64, until_ms: u64, within_arc: DhtArc, ) -> DatabaseResult<Vec<KitsuneAgent>>; } impl AsP2pStateConExt for crate::db::PConn { fn p2p_get(&mut self, agent: &KitsuneAgent) -> DatabaseResult<Option<AgentInfoSigned>> { self.with_reader(move |reader| reader.p2p_get(agent)) } fn p2p_list(&mut self) -> DatabaseResult<Vec<AgentInfoSigned>> { self.with_reader(move |reader| reader.p2p_list()) } fn p2p_gossip_query( &mut self, since_ms: u64, until_ms: u64, within_arc: DhtArc, ) -> DatabaseResult<Vec<KitsuneAgent>> { self.with_reader(move |reader| reader.p2p_gossip_query(since_ms, until_ms, within_arc)) } } pub async fn p2p_put(db: &DbWrite, signed: &AgentInfoSigned) -> DatabaseResult<()> { let record = P2pRecord::from_signed(signed)?; db.async_commit(move |txn| tx_p2p_put(txn, record)).await } pub async fn p2p_put_all( db: &DbWrite, signed: impl Iterator<Item = &AgentInfoSigned>, ) -> DatabaseResult<()> { let mut records = Vec::new(); for s in signed { records.push(P2pRecord::from_signed(s)?); } db.async_commit(move |txn| { for record in records { tx_p2p_put(txn, record)?; } Ok(()) }) .await } fn tx_p2p_put(txn: &mut Transaction, record: P2pRecord) -> DatabaseResult<()> { txn.execute( sql_p2p_agent_store::INSERT, named_params! 
{ ":agent": &record.agent.0, ":encoded": &record.encoded, ":signed_at_ms": &record.signed_at_ms, ":expires_at_ms": &record.expires_at_ms, ":storage_center_loc": &record.storage_center_loc, ":storage_start_1": &record.storage_start_1, ":storage_end_1": &record.storage_end_1, ":storage_start_2": &record.storage_start_2, ":storage_end_2": &record.storage_end_2, }, )?; Ok(()) } pub async fn p2p_prune(db: &DbWrite) -> DatabaseResult<()> { db.async_commit(move |txn| { let now = std::time::SystemTime::now() .duration_since(std::time::SystemTime::UNIX_EPOCH) .unwrap() .as_millis() as u64; txn.execute(sql_p2p_agent_store::PRUNE, named_params! { ":now": now })?; DatabaseResult::Ok(()) }) .await?; Ok(()) } impl AsP2pStateTxExt for Transaction<'_> {
fn p2p_list(&self) -> DatabaseResult<Vec<AgentInfoSigned>> { use std::convert::TryFrom; let mut stmt = self .prepare(sql_p2p_agent_store::SELECT_ALL) .map_err(|e| rusqlite::Error::ToSqlConversionFailure(e.into()))?; let mut out = Vec::new(); for r in stmt.query_map([], |r| { let r = r.get_ref(0)?; let r = r.as_blob()?; let signed = AgentInfoSigned::try_from(r) .map_err(|e| rusqlite::Error::ToSqlConversionFailure(e.into()))?; Ok(signed) })? { out.push(r?); } Ok(out) } fn p2p_gossip_query( &self, since_ms: u64, until_ms: u64, within_arc: DhtArc, ) -> DatabaseResult<Vec<KitsuneAgent>> { let mut stmt = self .prepare(sql_p2p_agent_store::GOSSIP_QUERY) .map_err(|e| rusqlite::Error::ToSqlConversionFailure(e.into()))?; let (storage_1, storage_2) = split_arc(&within_arc); let mut out = Vec::new(); for r in stmt.query_map( named_params! { ":since_ms": clamp64(since_ms), ":until_ms": clamp64(until_ms), ":storage_start_1": storage_1.map(|s| s.0), ":storage_end_1": storage_1.map(|s| s.1), ":storage_start_2": storage_2.map(|s| s.0), ":storage_end_2": storage_2.map(|s| s.1), }, |r| { let agent: Vec<u8> = r.get(0)?; Ok(KitsuneAgent(agent)) }, )? 
{ out.push(r?); } Ok(out) } } #[derive(Debug)] struct P2pRecord { agent: KitsuneAgent, encoded: Vec<u8>, signed_at_ms: i64, expires_at_ms: i64, storage_center_loc: u32, storage_start_1: Option<u32>, storage_end_1: Option<u32>, storage_start_2: Option<u32>, storage_end_2: Option<u32>, } pub type SplitRange = (u32, u32); pub fn split_arc(arc: &DhtArc) -> (Option<SplitRange>, Option<SplitRange>) { let mut storage_1 = None; let mut storage_2 = None; use std::ops::{Bound, RangeBounds}; let r = arc.range(); let s = r.start_bound(); let e = r.end_bound(); match (s, e) { (Bound::Excluded(_), Bound::Excluded(_)) => (), (Bound::Included(s), Bound::Included(e)) => { if s > e { storage_1 = Some((u32::MIN, *e)); storage_2 = Some((*s, u32::MAX)); } else { storage_1 = Some((*s, *e)); } } _ => unreachable!(), } (storage_1, storage_2) } fn clamp64(u: u64) -> i64 { if u > i64::MAX as u64 { i64::MAX } else { u as i64 } } impl P2pRecord { pub fn from_signed(signed: &AgentInfoSigned) -> DatabaseResult<Self> { use std::convert::TryFrom; let info = AgentInfo::try_from(signed).map_err(|e| anyhow::anyhow!(e))?; let agent = info.as_agent_ref().clone(); let encoded = <Vec<u8>>::try_from(signed).map_err(|e| anyhow::anyhow!(e))?; let signed_at_ms = info.signed_at_ms(); let expires_at_ms = signed_at_ms + info.expires_after_ms(); let arc = info.dht_arc().map_err(|e| anyhow::anyhow!(e))?; let storage_center_loc = arc.center_loc.into(); let (storage_1, storage_2) = split_arc(&arc); Ok(Self { agent, encoded, signed_at_ms: clamp64(signed_at_ms), expires_at_ms: clamp64(expires_at_ms), storage_center_loc, storage_start_1: storage_1.map(|s| s.0), storage_end_1: storage_1.map(|s| s.1), storage_start_2: storage_2.map(|s| s.0), storage_end_2: storage_2.map(|s| s.1), }) } } #[cfg(test)] mod p2p_test;
fn p2p_get(&self, agent: &KitsuneAgent) -> DatabaseResult<Option<AgentInfoSigned>> { use std::convert::TryFrom; let mut stmt = self .prepare(sql_p2p_agent_store::SELECT) .map_err(|e| rusqlite::Error::ToSqlConversionFailure(e.into()))?; Ok(stmt .query_row(named_params! { ":agent": &agent.0 }, |r| { let r = r.get_ref(0)?; let r = r.as_blob()?; let signed = AgentInfoSigned::try_from(r) .map_err(|e| rusqlite::Error::ToSqlConversionFailure(e.into()))?; Ok(signed) }) .optional()?) }
function_block-full_function
[ { "content": "/// Insert a [`Header`] into the database.\n\npub fn insert_header(txn: &mut Transaction, header: SignedHeaderHashed) -> StateMutationResult<()> {\n\n let (header, signature) = header.into_header_and_signature();\n\n let (header, hash) = header.into_inner();\n\n let header_type = header.h...
Rust
src/resources/automata.rs
Luminoth/remix-exploration
491e4b3bc1447eae35baa9a2599e182b743cdb41
use bevy::prelude::*; use crate::game::dna::*; use crate::game::stats::*; use crate::resources::*; pub trait AutomataStats { fn stats(&self) -> &StatSet; fn modify(&mut self, statid: StatId, amount: isize) -> bool; } macro_rules! impl_modify_stats { () => { fn modify(&mut self, statid: StatId, amount: isize) -> bool { if self.points - amount < 0 { return false; } match statid { StatId::Constitution => { if self.stats.constitution() + amount < 0 { return false; } self.stats .set_constitution(self.stats.constitution() + amount); } StatId::Dexterity => { if self.stats.dexterity() + amount < 0 { return false; } self.stats.set_dexterity(self.stats.dexterity() + amount); } StatId::Strength => { if self.stats.strength() + amount < 0 { return false; } self.stats.set_strength(self.stats.strength() + amount); } StatId::Fortitude => { if self.stats.fortitude() + amount < 0 { return false; } self.stats.set_fortitude(self.stats.fortitude() + amount); } StatId::Aggression => { if self.stats.aggression() + amount < 0 { return false; } self.stats.set_aggression(self.stats.aggression() + amount); } StatId::Intellect => { if self.stats.intellect() + amount < 0 { return false; } self.stats.set_intellect(self.stats.intellect() + amount); } } self.points -= amount; true } }; } #[derive(Debug, Default, Clone, Copy)] pub struct PlayerAutomataStats { points: isize, pub stats: StatSet, } impl AutomataStats for PlayerAutomataStats { fn stats(&self) -> &StatSet { &self.stats } impl_modify_stats!(); } impl PlayerAutomataStats { pub fn new(points: isize) -> Self { Self { points, ..Default::default() } } pub fn points(&self) -> isize { self.points } pub fn value(&self, statid: StatId) -> isize { match statid { StatId::Constitution => self.stats.constitution(), StatId::Dexterity => self.stats.dexterity(), StatId::Strength => self.stats.strength(), StatId::Fortitude => self.stats.fortitude(), StatId::Aggression => self.stats.aggression(), StatId::Intellect => self.stats.intellect(), } } } 
#[derive(Debug, Default, Clone, Copy)] pub struct AIAutomataStats { points: isize, pub stats: StatSet, } impl AutomataStats for AIAutomataStats { fn stats(&self) -> &StatSet { &self.stats } impl_modify_stats!(); } impl AIAutomataStats { pub fn new(points: isize, random: &mut Random) -> Self { Self { points, stats: StatSet::random(points, random), } } } #[derive(Debug)] pub struct AIAutomataPopulation { mutation_rate: f64, population: Vec<AIAutomataStats>, mating_pool: Vec<Dna>, } impl AIAutomataPopulation { pub fn new(mutation_rate: f64, rounds: usize, points: isize, random: &mut Random) -> Self { let mut population = Vec::with_capacity(rounds); for _ in 0..population.capacity() { population.push(AIAutomataStats::new(points, random)); } Self { mutation_rate, population, mating_pool: vec![], } } pub fn round_stats(&self, round: usize) -> &AIAutomataStats { self.population.get(round).unwrap() } } pub struct AutomataColors { pub cell: Color, pub player_automata: Color, pub ai_automata: Color, }
use bevy::prelude::*; use crate::game::dna::*; use crate::game::stats::*; use crate::resources::*; pub trait AutomataStats { fn stats(&self) -> &StatSet; fn modify(&mut self, statid: StatId, amount: isize) -> bool; } macro_rules! impl_modify_stats { () => { fn modify(&mut self, statid: StatId, amount: isize) -> bool { if self.points - amount < 0 { return false; } match statid { StatId::Constitution => { if self.stats.constitution() + amount < 0 { return false; } self.stats .set_constitution(self.stats.constitution() + amount); } StatId::Dexterity => { if self.stats.dexterity() + amount < 0 { return false; } self.stats.set_dexterity(self.stats.dexterity() + amount); } StatId::Strength => { if self.stats.strength() + amount < 0 { return false; } self.stats.set_strength(self.stats.strength() + amount); } StatId::Fortitude => { if self.stats.fortitude() + amount < 0 { return false; } self.stats.set_fortitude(self.stats.fortitude() + amount); } StatId::Aggression => { if self.stats.aggression() + amount < 0 { return false; } self.stats.set_aggression(self.stats.aggression() + amount); } StatId::Intellect => { if self.stats.intellect() + amount < 0 { return false; } self.stats.set_intellect(self.stats.intellect() + amount); } } self.points -= amount; true } }; } #[derive(Debug, Default, Clone, Copy)] pub struct PlayerAutomataStats { points: isize, pub stats: StatSet, } impl AutomataStats for PlayerAutomataStats { fn stats(&self) -> &StatSet { &self.stats } impl_modify_stats!(); } impl PlayerAutomataStats { pub fn new(points: isize) -> Self { Self { points, ..Default::default() } } pub fn points(&self) -> isize { self.points } pub fn value(&self, statid: StatId) -> isize { match statid { StatId::Constitution => self.stats.constitution(), StatId::Dexterity => self.stats.dexterity(), StatId::Strength => self.stats.strength(), StatId::Fortitude => self.stats.fortitude(), StatId::Aggression => self.stats.aggression(), StatId::Intellect => self.stats.intellect(), } } } 
#[derive(Debug, Default, Clone, Copy)] pub struct AIAutomataStats { points: isize, pub stats: StatSet, } impl AutomataStats for AIAutomataStats { fn stats(&self) -> &StatSet { &self.stats } impl_modify_stats!(); } impl AIAutomataStats { pub fn new(points: isize, random: &mut Random) -> Self { Self { points, stats: StatSet::random(points, random), } } } #[derive(Debug)] pub struct AIAutomataPopulation { mutation_rate: f64, population: Vec<AIAutomataStats>, mating_pool: Vec<Dna>, } impl AIAutomataPopulation { pub fn new(mutation_rate: f64, rounds: usize, points: isize, random: &mut Random) -> Self { let mut populat
, } } pub fn round_stats(&self, round: usize) -> &AIAutomataStats { self.population.get(round).unwrap() } } pub struct AutomataColors { pub cell: Color, pub player_automata: Color, pub ai_automata: Color, }
ion = Vec::with_capacity(rounds); for _ in 0..population.capacity() { population.push(AIAutomataStats::new(points, random)); } Self { mutation_rate, population, mating_pool: vec![]
function_block-random_span
[ { "content": "/// Stat modified event handler\n\npub fn stat_modified_event_handler(\n\n stats: ResMut<PlayerAutomataStats>,\n\n button_colors: Res<ButtonColors>,\n\n mut events: EventReader<StatModifiedEvent>,\n\n mut text_query: Query<(&mut Text, &StatModifierText), Without<PointsText>>,\n\n mu...
Rust
fusestore/store/src/api/rpc/flight_service_test.rs
tisonkun/datafuse
c0371078750cad9fbf4b3770c1c581dbe10c4b20
use common_arrow::arrow::array::ArrayRef; use common_datablocks::DataBlock; use common_datavalues::DataColumnarValue; use common_flights::GetTableActionResult; use common_flights::StoreClient; use common_planners::ScanPlan; use log::info; use pretty_assertions::assert_eq; use test_env_log::test; #[test(tokio::test)] async fn test_flight_create_database() -> anyhow::Result<()> { use common_planners::CreateDatabasePlan; use common_planners::DatabaseEngineType; let addr = crate::tests::start_store_server().await?; let mut client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; { let plan = CreateDatabasePlan { if_not_exists: false, db: "db1".to_string(), engine: DatabaseEngineType::Local, options: Default::default(), }; let res = client.create_database(plan.clone()).await; info!("create database res: {:?}", res); let res = res.unwrap(); assert_eq!(0, res.database_id, "first database id is 0"); } { let plan = CreateDatabasePlan { if_not_exists: false, db: "db2".to_string(), engine: DatabaseEngineType::Local, options: Default::default(), }; let res = client.create_database(plan.clone()).await; info!("create database res: {:?}", res); let res = res.unwrap(); assert_eq!(1, res.database_id, "second database id is 1"); } Ok(()) } #[test(tokio::test)] async fn test_flight_create_get_table() -> anyhow::Result<()> { use std::sync::Arc; use common_arrow::arrow::datatypes::DataType; use common_datavalues::DataField; use common_datavalues::DataSchema; use common_flights::StoreClient; use common_planners::CreateDatabasePlan; use common_planners::CreateTablePlan; use common_planners::DatabaseEngineType; use common_planners::TableEngineType; info!("init logging"); let addr = crate::tests::start_store_server().await?; let mut client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; { let plan = CreateDatabasePlan { if_not_exists: false, db: "db1".to_string(), engine: DatabaseEngineType::Local, options: Default::default(), }; let res = 
client.create_database(plan.clone()).await; info!("create database res: {:?}", res); let res = res.unwrap(); assert_eq!(0, res.database_id, "first database id is 0"); } { let schema = Arc::new(DataSchema::new(vec![DataField::new( "number", DataType::UInt64, false, )])); let mut plan = CreateTablePlan { if_not_exists: false, db: "db1".to_string(), table: "tb2".to_string(), schema: schema.clone(), options: maplit::hashmap! {"opt‐1".into() => "val-1".into()}, engine: TableEngineType::JsonEachRaw, }; { let res = client.create_table(plan.clone()).await.unwrap(); assert_eq!(1, res.table_id, "table id is 1"); let got = client.get_table("db1".into(), "tb2".into()).await.unwrap(); let want = GetTableActionResult { table_id: 1, db: "db1".into(), name: "tb2".into(), schema: schema.clone(), }; assert_eq!(want, got, "get created table"); } { plan.if_not_exists = true; let res = client.create_table(plan.clone()).await.unwrap(); assert_eq!(1, res.table_id, "new table id"); let got = client.get_table("db1".into(), "tb2".into()).await.unwrap(); let want = GetTableActionResult { table_id: 1, db: "db1".into(), name: "tb2".into(), schema: schema.clone(), }; assert_eq!(want, got, "get created table"); } { plan.if_not_exists = false; let res = client.create_table(plan.clone()).await; info!("create table res: {:?}", res); let status = res.err().unwrap(); assert_eq!( "status: Some entity that we attempted to create already exists: table exists", status.to_string() ); let got = client.get_table("db1".into(), "tb2".into()).await.unwrap(); let want = GetTableActionResult { table_id: 1, db: "db1".into(), name: "tb2".into(), schema: schema.clone(), }; assert_eq!(want, got, "get old table"); } } Ok(()) } #[test(tokio::test)] async fn test_do_append() -> anyhow::Result<()> { use std::sync::Arc; use common_arrow::arrow::datatypes::DataType; use common_datavalues::DataField; use common_datavalues::DataSchema; use common_datavalues::Int64Array; use common_datavalues::StringArray; use 
common_flights::StoreClient; use common_planners::CreateDatabasePlan; use common_planners::CreateTablePlan; use common_planners::DatabaseEngineType; use common_planners::TableEngineType; let addr = crate::tests::start_store_server().await?; let schema = Arc::new(DataSchema::new(vec![ DataField::new("col_i", DataType::Int64, false), DataField::new("col_s", DataType::Utf8, false), ])); let db_name = "test_db"; let tbl_name = "test_tbl"; let col0: ArrayRef = Arc::new(Int64Array::from(vec![0, 1, 2])); let col1: ArrayRef = Arc::new(StringArray::from(vec!["str1", "str2", "str3"])); let expected_rows = col0.data().len() * 2; let expected_cols = 2; let block = DataBlock::create_by_array(schema.clone(), vec![col0, col1]); let batches = vec![block.clone(), block]; let num_batch = batches.len(); let stream = futures::stream::iter(batches); let mut client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; { let plan = CreateDatabasePlan { if_not_exists: false, db: db_name.to_string(), engine: DatabaseEngineType::Local, options: Default::default(), }; client.create_database(plan.clone()).await?; let plan = CreateTablePlan { if_not_exists: false, db: db_name.to_string(), table: tbl_name.to_string(), schema: schema.clone(), options: maplit::hashmap! 
{"opt‐1".into() => "val-1".into()}, engine: TableEngineType::Parquet, }; client.create_table(plan.clone()).await?; } let res = client .append_data( db_name.to_string(), tbl_name.to_string(), schema, Box::pin(stream), ) .await?; log::info!("append res is {:?}", res); let summary = res.summary; assert_eq!(summary.rows, expected_rows); assert_eq!(res.parts.len(), num_batch); res.parts.iter().for_each(|p| { assert_eq!(p.rows, expected_rows / num_batch); assert_eq!(p.cols, expected_cols); }); Ok(()) } #[test(tokio::test)] async fn test_scan_partition() -> anyhow::Result<()> { use std::sync::Arc; use common_arrow::arrow::datatypes::DataType; use common_datavalues::DataField; use common_datavalues::DataSchema; use common_datavalues::Int64Array; use common_datavalues::StringArray; use common_flights::StoreClient; use common_planners::CreateDatabasePlan; use common_planners::CreateTablePlan; use common_planners::DatabaseEngineType; use common_planners::TableEngineType; let addr = crate::tests::start_store_server().await?; let schema = Arc::new(DataSchema::new(vec![ DataField::new("col_i", DataType::Int64, false), DataField::new("col_s", DataType::Utf8, false), ])); let db_name = "test_db"; let tbl_name = "test_tbl"; let col0: ArrayRef = Arc::new(Int64Array::from(vec![0, 1, 2])); let col1: ArrayRef = Arc::new(StringArray::from(vec!["str1", "str2", "str3"])); let expected_rows = col0.data().len() * 2; let expected_cols = 2; let block = DataBlock::create(schema.clone(), vec![ DataColumnarValue::Array(col0), DataColumnarValue::Array(col1), ]); let batches = vec![block.clone(), block]; let num_batch = batches.len(); let stream = futures::stream::iter(batches); let mut client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; { let plan = CreateDatabasePlan { if_not_exists: false, db: db_name.to_string(), engine: DatabaseEngineType::Local, options: Default::default(), }; client.create_database(plan.clone()).await?; let plan = CreateTablePlan { if_not_exists: false, 
db: db_name.to_string(), table: tbl_name.to_string(), schema: schema.clone(), options: maplit::hashmap! {"opt‐1".into() => "val-1".into()}, engine: TableEngineType::Parquet, }; client.create_table(plan.clone()).await?; } let res = client .append_data( db_name.to_string(), tbl_name.to_string(), schema, Box::pin(stream), ) .await?; log::info!("append res is {:?}", res); let summary = res.summary; assert_eq!(summary.rows, expected_rows); assert_eq!(res.parts.len(), num_batch); res.parts.iter().for_each(|p| { assert_eq!(p.rows, expected_rows / num_batch); assert_eq!(p.cols, expected_cols); }); let plan = ScanPlan { schema_name: tbl_name.to_string(), ..ScanPlan::empty() }; let res = client .scan_partition(db_name.to_string(), tbl_name.to_string(), &plan) .await; println!("scan res is {:?}", res); Ok(()) }
use common_arrow::arrow::array::ArrayRef; use common_datablocks::DataBlock; use common_datavalues::DataColumnarValue; use common_flights::GetTableActionResult; use common_flights::StoreClient; use common_planners::ScanPlan; use log::info; use pretty_assertions::assert_eq; use test_env_log::test; #[test(tokio::test)] async fn test_flight_create_database() -> anyhow::Result<()> { use common_planners::CreateDatabasePlan; use common_planners::DatabaseEngineType; let addr = crate::tests::start_store_server().await?; let mut client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; { let plan = CreateDatabasePlan { if_not_exists: false, db: "db1".to_string(), engine: DatabaseEngineType::Local, options: Default::default(), }; let res = client.create_database(plan.clone()).await; info!("create database res: {:?}", res); let res = res.unwrap(); assert_eq!(0, res.database_id, "first database id is 0"); } { let plan = CreateDatabasePlan { if_not_exists: false, db: "db2".to_string(), engine: DatabaseEngineType::Local, options: Default::default(), }; let res = client.create_database(plan.clone()).await; info!("create database res: {:?}", res); let res = res.unwrap(); assert_eq!(1, res.database_id, "second database id is 1"); } Ok(()) } #[test(tokio::test)] async fn test_flight_create_get_table() -> anyhow::Result<()> { use std::sync::Arc; use common_arrow::arrow::datatypes::DataType; use common_datavalues::DataField; use common_datavalues::DataSchema; use common_flights::StoreClient; use common_planners::CreateDatabasePlan; use common_planners::CreateTablePlan; use common_planners::DatabaseEngineType; use common_planners::TableEngineType; info!("init logging"); let addr = crate::tests::start_store_server().await?; let mut client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; { let plan = CreateDatabasePlan { if_not_exists: false, db: "db1".to_string(), engine: DatabaseEngineType::Local, options: Default::default(), }; let res = 
client.create_database(plan.clone()).await; info!("create database res: {:?}", res); let res = res.unwrap(); assert_eq!(0, res.database_id, "first database id is 0"); } { let schema = Arc::new(DataSchema::new(vec![DataField::new( "number", DataType::UInt64, false, )])); let mut plan = CreateTablePlan { if_not_exists: false, db: "db1".to_string(), table: "tb2".to_string(), schema: schema.clone(), options: maplit::hashmap! {"opt‐1".into() => "val-1".into()}, engine: TableEngineType::JsonEachRaw, }; { let res = client.create_table(plan.clone()).await.unwrap(); assert_eq!(1, res.table_id, "table id is 1"); let got = client.get_table("db1".into(), "tb2".into()).await.unwrap(); let want = GetTableActionResult { table_id: 1, db: "db1".into(), name: "tb2".into(), schema: schema.clone(), }; assert_eq!(want, got, "get created table"); } { plan.if_not_exists = true; let res = client.create_table(plan.clone()).await.unwrap(); assert_eq!(1, res.table_id, "new table id"); let got = client.get_table("db1".into(), "tb2".into()).await.unwrap(); let want = GetTableActionResult { table_id: 1, db: "db1".into(), name: "tb2".into(), schema: schema.clone(), }; assert_eq!(want, got, "get created table"); } { plan.if_not_exists = false; let res = client.create_table(plan.clone()).await; info!("create table res: {:?}", res); let status = res.err().unwrap(); assert_eq!( "status: Some entity that we attempted to create already exists: table exists", status.to_string() ); let got = client.get_table("db1".into(), "tb2".into()).await.unwrap(); let want = GetTableActionResult { table_id: 1, db: "db1".into(), name: "tb2".into(), schema: schema.clone(), }; assert_eq!(want, got, "get old table"); } } Ok(()) } #[test(tokio::test)] async fn test_do_append() -> anyhow::Result<()> { use std::sync::Arc; use common_arrow::arrow::datatypes::DataType; use common_datavalues::DataField; use common_datavalues::DataSchema; use common_datavalues::Int64Array; use common_datavalues::StringArray; use 
common_flights::StoreClient; use common_planners::CreateDatabasePlan; use common_planners::CreateTablePlan; use common_planners::DatabaseEngineType; use common_planners::TableEngineType; let addr = crate::tests::start_store_server().await?; let schema = Arc::new(DataSchema::new(vec![ DataField::new("col_i", DataType::Int64, false), DataField::new("col_s", DataType::Utf8, false), ]));
#[test(tokio::test)] async fn test_scan_partition() -> anyhow::Result<()> { use std::sync::Arc; use common_arrow::arrow::datatypes::DataType; use common_datavalues::DataField; use common_datavalues::DataSchema; use common_datavalues::Int64Array; use common_datavalues::StringArray; use common_flights::StoreClient; use common_planners::CreateDatabasePlan; use common_planners::CreateTablePlan; use common_planners::DatabaseEngineType; use common_planners::TableEngineType; let addr = crate::tests::start_store_server().await?; let schema = Arc::new(DataSchema::new(vec![ DataField::new("col_i", DataType::Int64, false), DataField::new("col_s", DataType::Utf8, false), ])); let db_name = "test_db"; let tbl_name = "test_tbl"; let col0: ArrayRef = Arc::new(Int64Array::from(vec![0, 1, 2])); let col1: ArrayRef = Arc::new(StringArray::from(vec!["str1", "str2", "str3"])); let expected_rows = col0.data().len() * 2; let expected_cols = 2; let block = DataBlock::create(schema.clone(), vec![ DataColumnarValue::Array(col0), DataColumnarValue::Array(col1), ]); let batches = vec![block.clone(), block]; let num_batch = batches.len(); let stream = futures::stream::iter(batches); let mut client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; { let plan = CreateDatabasePlan { if_not_exists: false, db: db_name.to_string(), engine: DatabaseEngineType::Local, options: Default::default(), }; client.create_database(plan.clone()).await?; let plan = CreateTablePlan { if_not_exists: false, db: db_name.to_string(), table: tbl_name.to_string(), schema: schema.clone(), options: maplit::hashmap! 
{"opt‐1".into() => "val-1".into()}, engine: TableEngineType::Parquet, }; client.create_table(plan.clone()).await?; } let res = client .append_data( db_name.to_string(), tbl_name.to_string(), schema, Box::pin(stream), ) .await?; log::info!("append res is {:?}", res); let summary = res.summary; assert_eq!(summary.rows, expected_rows); assert_eq!(res.parts.len(), num_batch); res.parts.iter().for_each(|p| { assert_eq!(p.rows, expected_rows / num_batch); assert_eq!(p.cols, expected_cols); }); let plan = ScanPlan { schema_name: tbl_name.to_string(), ..ScanPlan::empty() }; let res = client .scan_partition(db_name.to_string(), tbl_name.to_string(), &plan) .await; println!("scan res is {:?}", res); Ok(()) }
let db_name = "test_db"; let tbl_name = "test_tbl"; let col0: ArrayRef = Arc::new(Int64Array::from(vec![0, 1, 2])); let col1: ArrayRef = Arc::new(StringArray::from(vec!["str1", "str2", "str3"])); let expected_rows = col0.data().len() * 2; let expected_cols = 2; let block = DataBlock::create_by_array(schema.clone(), vec![col0, col1]); let batches = vec![block.clone(), block]; let num_batch = batches.len(); let stream = futures::stream::iter(batches); let mut client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; { let plan = CreateDatabasePlan { if_not_exists: false, db: db_name.to_string(), engine: DatabaseEngineType::Local, options: Default::default(), }; client.create_database(plan.clone()).await?; let plan = CreateTablePlan { if_not_exists: false, db: db_name.to_string(), table: tbl_name.to_string(), schema: schema.clone(), options: maplit::hashmap! {"opt‐1".into() => "val-1".into()}, engine: TableEngineType::Parquet, }; client.create_table(plan.clone()).await?; } let res = client .append_data( db_name.to_string(), tbl_name.to_string(), schema, Box::pin(stream), ) .await?; log::info!("append res is {:?}", res); let summary = res.summary; assert_eq!(summary.rows, expected_rows); assert_eq!(res.parts.len(), num_batch); res.parts.iter().for_each(|p| { assert_eq!(p.rows, expected_rows / num_batch); assert_eq!(p.cols, expected_cols); }); Ok(()) }
function_block-function_prefix_line
[ { "content": "#[test]\n\nfn test_mem_engine_create_get_table() -> anyhow::Result<()> {\n\n // TODO check generated ver\n\n let eng = MemEngine::create();\n\n\n\n let mut eng = eng.lock().unwrap();\n\n\n\n let cmdfoo = CmdCreateDatabase {\n\n db_name: \"foo\".into(),\n\n db: Some(Db {\n...
Rust
vrp-scientific/src/solomon/reader.rs
andrewgy8/vrp
c94574ad555c6ca06480f678f52850caf9aa71cb
#[cfg(test)] #[path = "../../tests/unit/solomon/reader_test.rs"] mod reader_test; use crate::common::*; use crate::utils::MatrixFactory; use std::io::{BufReader, Read}; use std::sync::Arc; use vrp_core::construction::constraints::*; use vrp_core::models::common::{TimeSpan, TimeWindow}; use vrp_core::models::problem::*; use vrp_core::models::Problem; use vrp_core::utils::TryCollect; pub fn read_solomon_format<R: Read>(reader: BufReader<R>) -> Result<Problem, String> { SolomonReader { buffer: String::new(), reader, matrix: MatrixFactory::default() }.read_problem() } pub trait SolomonProblem { fn read_solomon(self) -> Result<Problem, String>; } impl<R: Read> SolomonProblem for BufReader<R> { fn read_solomon(self) -> Result<Problem, String> { read_solomon_format(self) } } impl SolomonProblem for String { fn read_solomon(self) -> Result<Problem, String> { read_solomon_format(BufReader::new(self.as_bytes())) } } struct VehicleLine { number: usize, capacity: usize, } struct JobLine { id: usize, location: (i32, i32), demand: usize, tw: TimeWindow, service: usize, } struct SolomonReader<R: Read> { buffer: String, reader: BufReader<R>, matrix: MatrixFactory, } impl<R: Read> TextReader for SolomonReader<R> { fn read_fleet(&mut self) -> Result<Fleet, String> { self.skip_lines(4)?; let vehicle = self.read_vehicle()?; self.skip_lines(4)?; let depot = self.read_customer()?; Ok(create_fleet_with_distance_costs( vehicle.number, vehicle.capacity, self.matrix.collect(depot.location), depot.tw.clone(), )) } fn read_jobs(&mut self) -> Result<Vec<Job>, String> { let mut jobs: Vec<Job> = Default::default(); loop { match self.read_customer() { Ok(customer) => { let mut dimens = create_dimens_with_id("", customer.id); dimens.set_demand(Demand::<i32> { pickup: (0, 0), delivery: (customer.demand as i32, 0) }); jobs.push(Job::Single(Arc::new(Single { places: vec![Place { location: Some(self.matrix.collect(customer.location)), duration: customer.service as f64, times: 
vec![TimeSpan::Window(customer.tw.clone())], }], dimens, }))); } Err(error) => { if self.buffer.is_empty() { break; } else { return Err(error); } } } } Ok(jobs) } fn create_transport(&self) -> Result<Arc<dyn TransportCost + Send + Sync>, String> { self.matrix.create_transport() } } impl<R: Read> SolomonReader<R> { fn read_vehicle(&mut self) -> Result<VehicleLine, String> { read_line(&mut self.reader, &mut self.buffer)?; let (number, capacity) = self .buffer .split_whitespace() .map(|line| line.parse::<usize>().unwrap()) .try_collect() .ok_or_else(|| "Cannot parse vehicle number or/and capacity".to_string())?; Ok(VehicleLine { number, capacity }) } fn read_customer(&mut self) -> Result<JobLine, String> { read_line(&mut self.reader, &mut self.buffer)?; let (id, x, y, demand, start, end, service) = self .buffer .split_whitespace() .map(|line| line.parse::<i32>().unwrap()) .try_collect() .ok_or_else(|| "Cannot read customer line".to_string())?; Ok(JobLine { id: id as usize, location: (x, y), demand: demand as usize, tw: TimeWindow::new(start as f64, end as f64), service: service as usize, }) } fn skip_lines(&mut self, count: usize) -> Result<(), String> { for _ in 0..count { read_line(&mut self.reader, &mut self.buffer).map_err(|_| "Cannot skip lines")?; } Ok(()) } }
#[cfg(test)] #[path = "../../tests/unit/solomon/reader_test.rs"] mod reader_test; use crate::common::*; use crate::utils::MatrixFactory; use std::io::{BufReader, Read}; use std::sync::Arc; use vrp_core::construction::constraints::*; use vrp_core::models::common::{TimeSpan, TimeWindow}; use vrp_core::models::problem::*; use vrp_core::models::Problem; use vrp_core::utils::TryCollect; pub fn read_solomon_format<R: Read>(reader: BufReader<R>) -> Result<Problem, String> { SolomonReader { buffer: String::new(), reader, matrix: MatrixFactory::default() }.read_problem() } pub trait SolomonProblem { fn read_solomon(self) -> Result<Problem, String>; } impl<R: Read> SolomonProblem for BufReader<R> { fn read_solomon(self) -> Result<Problem, String> { read_solomon_format(self) } } impl SolomonProblem for String { fn read_solomon(self) -> Result<Problem, String> { read_solomon_format(BufReader::new(self.as_bytes())) } } struct VehicleLine { number: usize, capacity: usize, } struct JobLine { id: usize, location: (i32, i32), demand: usize, tw: TimeWindow, service: usize, } struct SolomonReader<R: Read> { buffer: String, reader: BufReader<R>, matrix: MatrixFactory, } impl<R: Read> TextReader for SolomonReader<R> { fn read_fleet(&mut self) -> Result<Fleet, String> { self.skip_lines(4)?; let vehicle = self.read_vehicle()?; self.skip_lines(4)?; let depot = self.read_customer()?; Ok(create_fleet_with_distance_costs( vehicle.number, vehicle.capacity, self.matrix.collect(depot.location), depot.tw.clone(), )) } fn read_jobs(&mut self) -> Result<Vec<Job>, String> { let mut jobs: Vec<Job> = Default::default(); loop { match self.read_customer() { Ok(customer) => { let mut dimens = create_dimens_with_id("", customer.id); dimens.set_demand(Demand::<i32> { pickup: (0, 0), delivery: (customer.demand as i32, 0) }); jobs.push(Job::Single(Arc::new(Single { places: vec![Place { location: Some(self.matrix.collect(customer.location)), duration: customer.service as f64, times: 
vec![TimeSpan::Window(customer.tw.clone())], }], dimens, }))); } Err(error) => { if self.buffer.is_empty() { break; } else { return Err(error); } } } } Ok(jobs) } fn create_transport(&self) -> Result<Arc<dyn TransportCost + Send + Sync>, String> { self.matrix.create_transport() } } impl<R: Read> SolomonReader<R> { fn read_vehicle(&mut self) -> Result<VehicleLine, String> { read_line(&mut self.reader, &mut self.buffer)?; let (number, capacity) = self .buffer .split_whitespace() .map(|line| line.parse::<usize>().unwrap()) .try_collect() .ok_or_else(|| "Cannot parse vehicle number or/and capacity".to_string())?; Ok(VehicleLine { number, capacity }) } fn read_customer(&mut self) -> Result<JobLine, String> { read_line(&mut self.reader, &mut self.buffer)?; let (id, x, y, demand, start, end, service) = self .buffer .split_whitespace() .map(|line| line.parse::<i32>().unwrap()) .try_collect() .ok_or_else(|| "Cannot read customer line".to_string())?; Ok(JobLine { id: id as usize, location: (x, y), demand: demand as usize, tw: TimeWindow::new(start as f64, end as f64), service: service as usize, }) } fn skip_lines(&mut self, count: usize) -> Result<(), String> {
}
for _ in 0..count { read_line(&mut self.reader, &mut self.buffer).map_err(|_| "Cannot skip lines")?; } Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn read_line<R: Read>(reader: &mut BufReader<R>, mut buffer: &mut String) -> Result<usize, String> {\n\n buffer.clear();\n\n reader.read_line(&mut buffer).map_err(|err| err.to_string())\n\n}\n", "file_path": "vrp-scientific/src/common/text_reader.rs", "rank": 0, "score": 6467...
Rust
src/boxed/api.rs
sharksforarms/bitvec
293e670d5b6fe89da595edccb3f93cafb75d8835
use crate::{ boxed::BitBox, order::BitOrder, pointer::BitPtr, slice::BitSlice, store::BitStore, vec::BitVec, }; use core::{ marker::Unpin, mem::ManuallyDrop, pin::Pin, }; use wyz::pipe::Pipe; impl<O, T> BitBox<O, T> where O: BitOrder, T: BitStore, { #[cfg_attr(not(tarpaulin), inline(always))] #[deprecated(since = "0.18.0", note = "Prefer `::from_bitslice`")] pub fn new(x: &BitSlice<O, T>) -> Self { Self::from_bitslice(x) } #[inline] pub fn pin(x: &BitSlice<O, T>) -> Pin<Self> where O: Unpin, T: Unpin, { x.pipe(Self::from_bitslice).pipe(Pin::new) } #[inline] pub unsafe fn from_raw(raw: *mut BitSlice<O, T>) -> Self { raw.pipe(BitPtr::from_bitslice_ptr_mut) .to_nonnull() .pipe(|pointer| Self { pointer }) } #[cfg_attr(not(tarpaulin), inline(always))] pub fn into_raw(b: Self) -> *mut BitSlice<O, T> { Self::leak(b) } #[inline] pub fn leak<'a>(b: Self) -> &'a mut BitSlice<O, T> where T: 'a { b.pipe(ManuallyDrop::new).bitptr().to_bitslice_mut() } #[inline] pub fn into_bitvec(self) -> BitVec<O, T> { let bitptr = self.bitptr(); let raw = self .pipe(ManuallyDrop::new) .with_box(|b| unsafe { ManuallyDrop::take(b) }) .into_vec() .pipe(ManuallyDrop::new); /* The distribution claims that `[T]::into_vec(Box<[T]>) -> Vec<T>` does not alter the address of the heap allocation, and only modifies the buffer handle. Since the address does not change, the `BitPtr` does not need to be updated; the only change is that buffer capacity is now carried locally, rather than frozen in the allocator’s state. Inspection of the distribution’s implementation shows that the conversion from `(buf, len)` to `(buf, cap, len)` is done by using the slice length as the buffer capacity. However, this is *not* a behavior guaranteed by the distribution, and so the pipeline above must remain in place in the event that this behavior ever changes. It should compile away to nothing, as it is almost entirely typesystem manipulation. 
*/ unsafe { BitVec::from_raw_parts(bitptr.to_bitslice_ptr_mut(), raw.capacity()) } } }
use crate::{ boxed::BitBox, order::BitOrder, pointer::BitPtr, slice::BitSlice, store::BitStore, vec::BitVec, }; use core::{ marker::Unpin, mem::ManuallyDrop, pin::Pin, }; use wyz::pipe::Pipe; impl<O, T> BitBox<O, T> where O: BitOrder, T: BitStore, { #[cfg_attr(not(tarpaulin), inline(always))] #[deprecated(since = "0.18.0", note = "Prefer `::from_bitslice`")] pub fn new(x: &BitSlice<O, T>) -> Self { Self::from_bitslice(x) } #[inline] pub fn pin(x: &BitSlice<O, T>) -> Pin<Self> where O: Unpin, T: Unpin, { x.pipe(Self::from_bitslice).pipe(Pin::new) } #[inline] pub unsafe fn from_raw(raw: *mut BitSlice<O, T>) -> Self { raw.pipe(BitPtr::from_bitslice_ptr_mut) .to_nonnull() .pipe(|pointer| Self { pointer }) } #[cfg_attr(not(tarpaulin), inline(always))] pub fn into_raw(b: Self) -> *mut BitSlice<O, T> { Self::leak(b) } #[inline] pub fn leak<'a>(b: Self) -> &'a mut BitSlice<O, T> where T: 'a { b.pipe(ManuallyDrop::new).bitptr().to_bitslice_mut() } #[inline] pub fn into_bitvec(self) -> BitVec<O, T> { let bitptr = self.bitptr();
/* The distribution claims that `[T]::into_vec(Box<[T]>) -> Vec<T>` does not alter the address of the heap allocation, and only modifies the buffer handle. Since the address does not change, the `BitPtr` does not need to be updated; the only change is that buffer capacity is now carried locally, rather than frozen in the allocator’s state. Inspection of the distribution’s implementation shows that the conversion from `(buf, len)` to `(buf, cap, len)` is done by using the slice length as the buffer capacity. However, this is *not* a behavior guaranteed by the distribution, and so the pipeline above must remain in place in the event that this behavior ever changes. It should compile away to nothing, as it is almost entirely typesystem manipulation. */ unsafe { BitVec::from_raw_parts(bitptr.to_bitslice_ptr_mut(), raw.capacity()) } } }
let raw = self .pipe(ManuallyDrop::new) .with_box(|b| unsafe { ManuallyDrop::take(b) }) .into_vec() .pipe(ManuallyDrop::new);
assignment_statement
[ { "content": "#[inline(always)]\n\n#[cfg(not(tarpaulin_include))]\n\npub fn mem_mut<T>(x: &mut T) -> &mut T::Mem\n\nwhere T: BitStore {\n\n\tunsafe { &mut *(x as *mut _ as *mut _) }\n\n}\n\n\n\n/// Removes the `::Alias` marker from a register value’s type.\n", "file_path": "src/devel.rs", "rank": 0, ...
Rust
src/display.rs
adi-g15/Ludo-The_Game-rs
d4c9f776982b0a2dd0f1ab53b192ccd2d41cd0f8
use crossterm::{ self, cursor, style::{self, Color, Stylize}, terminal, ExecutableCommand, QueueableCommand, }; use std::{io::{stdout, Write, stdin}, thread, time::Duration}; mod parts; pub struct Display { player_name: String, } impl Display { pub fn new() -> Self { let display = Display { player_name: String::new(), }; stdout().execute(terminal::SetTitle("Ludo-The_Game")).unwrap(); Display::splash_screen("Namaste from Ludo-The_Game 🙏", None); thread::sleep(Duration::from_millis(1200)); display } pub fn get_player_names(&self) -> [String; 4] { let hor_char = "─"; let vert_char = "│"; let message = "Enter names of the Players (Leave empty if not playing)"; let mut names = [String::new(), String::new(), String::new(), String::new()]; let colors = ["🔴","🟢","🟡","🔵"]; let (columns, rows) = terminal::size().unwrap(); let original_position = cursor::position().unwrap(); let mut stdout = stdout(); self.header(); stdout .queue(cursor::MoveTo(1,3)).unwrap() .queue(style::Print(format!("┌{}┐", hor_char.repeat(columns as usize -3)))).unwrap(); for i in 4..rows { stdout .queue(cursor::MoveTo(1,i)).unwrap() .queue(style::Print(vert_char)).unwrap() .queue(cursor::MoveToColumn(columns)).unwrap() .queue(style::Print(vert_char)).unwrap() .queue(cursor::MoveToNextLine(1)).unwrap() ; } stdout .queue(cursor::MoveTo(1,rows-1)).unwrap() .queue(style::Print(format!("└{}┘", hor_char.repeat(columns as usize -3)))).unwrap() ; stdout .queue(cursor::MoveTo(((columns as usize - message.len()) as u16)/2, (rows - 5)/2)).unwrap(); stdout.queue(style::PrintStyledContent(message.bold())).unwrap(); for (i,name) in names.iter_mut().enumerate() { stdout .queue(cursor::MoveToNextLine(1)).unwrap() /* reason for -4-1 = 2 emoji ka, 1 space, 1 ':', baaki 1 aise hi*/ .queue(cursor::MoveToColumn((columns/2) - "Player".len() as u16 -4 -1 )).unwrap() .queue(style::Print(format!("{} Player{} : ", colors[i], i+1))).unwrap() ; stdout.flush().unwrap(); if stdin().read_line(name).is_err() { panic!("Failed to read 
name"); } name.clone_from(&name.trim().to_string()); stdout.queue(cursor::MoveToPreviousLine(1)).unwrap(); } stdout.queue(cursor::MoveTo(original_position.0, original_position.1)).unwrap(); stdout.flush().unwrap(); names } pub fn ensure_terminal_size() { let (mut curr_cols, mut curr_rows) = terminal::size().unwrap(); while curr_cols < 100 || curr_rows < 44 { Display::splash_screen("Please Zoom Out or Stretch to make the terminal window larger.", Some(Color::Red)); thread::sleep(Duration::from_millis(50)); (curr_cols, curr_rows) = terminal::size().unwrap(); } } pub fn set_player(&mut self, name: &str) { self.player_name = name.to_string(); } pub fn update_display(&self, board_contents: Vec<((u8,u8), String)> ) { Display::ensure_terminal_size(); let player_name = &self.player_name; let mut stdout = stdout(); stdout .queue(terminal::Clear(terminal::ClearType::All)) .unwrap() .queue(cursor::Hide) .unwrap(); let (columns, _rows) = match terminal::size() { Ok(size) => (size.0 as usize, size.1 as usize), Err(e) => panic!("{:?}", e), }; let h_scale: u16 = 3; let v_scale: u16 = 1; self.header(); let ((board_start_col, board_start_row),(board_end_col, board_end_row)) = self.board_design(h_scale, v_scale); self.update_according_to_ludo_board(board_start_col, board_start_row, h_scale, v_scale, board_contents); stdout .queue(cursor::MoveTo(board_end_col, board_end_row)).unwrap() ; if !player_name.is_empty() { stdout .queue(cursor::MoveToNextLine(1)) .unwrap() .queue(style::Print(format!( "{}{}", " ".repeat((columns - player_name.len()) / 2), player_name ))) .unwrap() .queue(cursor::MoveToNextLine(1)) .unwrap() .queue(style::Print("±".repeat(columns))) .unwrap(); } stdout.queue(cursor::MoveToNextLine(1)).unwrap(); if stdout.flush().is_err() { terminal::disable_raw_mode().unwrap(); panic!("Couldn't print board"); } stdout.execute(cursor::Show).unwrap(); } pub fn end_display(&mut self) { if terminal::disable_raw_mode().is_err() { /* Ignore */ }; } }
use crossterm::{ self, cursor, style::{self, Color, Stylize}, terminal, ExecutableCommand, QueueableCommand, }; use std::{io::{stdout, Write, stdin}, thread, time::Duration}; mod parts; pub struct Display { player_name: String, } impl Display { pub fn new() -> Self { let display = Display { player_name: String::new(), }; stdout().execute(terminal::SetTitle("Ludo-The_Game")).unwrap(); Display::splash_screen("Namaste from Ludo-The_Game 🙏", None); thread::sleep(Duration::from_millis(1200)); display } pub fn get_player_names(&self) -> [String; 4] { let hor_char = "─"; let vert_char = "│"; let message = "Enter names of the Players (Leave empty if not playing)"; let mut names = [String::new(), String::new(), String::new(), String::new()]; let colors = ["🔴","🟢","🟡","🔵"]; let (columns, rows) = terminal::size().unwrap(); let original_position = cursor::position().unwrap(); let mut stdout = stdout(); self.header(); stdout .queue(cursor::MoveTo(1,3)).unwrap() .queue(style::Print(format!("┌{}┐", hor_char.repeat(columns as usize -3)))).unwrap(); for i in 4..rows { stdout .queue(cursor::MoveTo(1,i)).unwrap() .queue(style::Print(vert_char)).unwrap() .queue(cursor::MoveToColumn(columns)).unwrap() .queue(style::Print(vert_char)).unwrap() .queue(cursor::MoveToNextLine(1)).unwrap() ; } stdout .queue(cursor::MoveTo(1,rows-1)).unwrap() .queue(style::Print(format!("└{}┘", hor_char.repeat(columns as usize -3)))).unwrap() ; stdout .queue(cursor::MoveTo(((columns as usize - message.len()) as u16)/2, (rows - 5)/2)).unwrap(); stdout.queue(style::PrintStyledContent(message.bold())).unwrap(); for (i,name) in names.iter_mut().enumerate() { stdout .queue(cursor::MoveToNextLine(1)).unwrap() /* reason for -4-1 = 2 emoji ka, 1 space, 1 ':', baaki 1 aise hi*/ .queue(cursor::MoveToColumn((columns/2) - "Player".len() as u16 -4 -1 )).unwrap() .queue(style::Print(format!("{} Player{} : ", colors[i], i+1))).unwrap() ; stdout.flush().unwrap(); if stdin().read_line(name).is_err() { panic!("Failed to read 
name"); } name.clone_from(&name.trim().to_string()); stdout.queue(cursor::MoveToPreviousLine(1)).unwrap(); } stdout.queue(cursor::MoveTo(original_position.0, original_position.1)).unwrap(); stdout.flush().unwrap(); names } pub fn ensure_terminal_size() { let (mut curr_cols, mut curr_rows) = terminal::size().unwrap(); while curr_cols < 100 || curr_rows < 44 { Display::splash_screen("Please Zoom Out or Stretch to make the terminal window larger.", Some(Color::Red)); thread::sleep(Duration::from_millis(50)); (curr_cols, curr_rows) = terminal::size().unwrap(); } } pub fn set_player(&mut self, name: &str) { self.player_name = name.to_string(); } pub fn update_display(&self, board_contents: Vec<((u8,u8), String)> ) { Display::ensure_terminal_size(); let player_name = &self.player_name; let mut stdout = stdout(); stdout .queue(terminal::Clear(terminal::ClearType:
ap() .queue(style::Print("±".repeat(columns))) .unwrap(); } stdout.queue(cursor::MoveToNextLine(1)).unwrap(); if stdout.flush().is_err() { terminal::disable_raw_mode().unwrap(); panic!("Couldn't print board"); } stdout.execute(cursor::Show).unwrap(); } pub fn end_display(&mut self) { if terminal::disable_raw_mode().is_err() { /* Ignore */ }; } }
:All)) .unwrap() .queue(cursor::Hide) .unwrap(); let (columns, _rows) = match terminal::size() { Ok(size) => (size.0 as usize, size.1 as usize), Err(e) => panic!("{:?}", e), }; let h_scale: u16 = 3; let v_scale: u16 = 1; self.header(); let ((board_start_col, board_start_row),(board_end_col, board_end_row)) = self.board_design(h_scale, v_scale); self.update_according_to_ludo_board(board_start_col, board_start_row, h_scale, v_scale, board_contents); stdout .queue(cursor::MoveTo(board_end_col, board_end_row)).unwrap() ; if !player_name.is_empty() { stdout .queue(cursor::MoveToNextLine(1)) .unwrap() .queue(style::Print(format!( "{}{}", " ".repeat((columns - player_name.len()) / 2), player_name ))) .unwrap() .queue(cursor::MoveToNextLine(1)) .unwr
random
[ { "content": "pub fn roll() -> u8 {\n\n rand::thread_rng().gen_range(1..7)\n\n}\n", "file_path": "src/engine/dice.rs", "rank": 0, "score": 47329.51763547484 }, { "content": "use std::io::{stdout, Write};\n\n\n\nuse crate::display::Display;\n\nuse crossterm::{\n\n cursor,\n\n style::...
Rust
pallets/kitties/src/lib.rs
pillarBoy/advance-lesson-2
f5dd1e736ec1c2e073c7e3093e252f004ffda68e
#![cfg_attr(not(feature = "std"), no_std)]

//! Kitties pallet: create, transfer and breed collectible kitties, reserving
//! a fixed deposit per kitty and recording a family tree in `KittyNodeStorage`.

use codec::{Encode, Decode};
use frame_support::{
    Parameter, RuntimeDebug, StorageDoubleMap, StorageValue,
    decl_error, decl_event, decl_module, decl_storage,
    dispatch::{ DispatchError, DispatchResult },
    ensure, traits::Get,
    traits::{ Currency, ExistenceRequirement::AllowDeath, ReservableCurrency, Randomness },
};
use sp_io::hashing::{blake2_128};
use frame_system::{self as system, ensure_signed};
use sp_runtime::traits::{AtLeast32BitUnsigned, Bounded, One, CheckedAdd};
use sp_std::prelude::*;

/// A kitty is just its 16-byte DNA.
#[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq)]
pub struct Kitty(pub [u8; 16]);

/// Family-tree node for one kitty.
#[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq)]
pub struct KittyNode<T: Trait> {
    // The kitty this node describes.
    _self: T::KittyIndex,
    // The two parents, if this kitty was bred (None for created kitties).
    companion: Option<(T::KittyIndex, T::KittyIndex)>,
    // Offspring bred from this kitty.
    children: Vec<T::KittyIndex>,
}

type BalanceOf<T> = <<T as Trait>::Currency as Currency<<T as system::Trait>::AccountId>>::Balance;

pub trait Trait: frame_system::Trait {
    type Event: From<Event<Self>> + Into<<Self as frame_system::Trait>::Event>;
    /// Source of on-chain randomness for DNA generation.
    type Randomness: Randomness<Self::Hash>;
    type KittyIndex: Parameter + AtLeast32BitUnsigned + Bounded + Default + Copy;
    type Currency: Currency<Self::AccountId> + ReservableCurrency<Self::AccountId>;
    /// Deposit reserved from the owner for each kitty they hold.
    type KittyReserveFunds: Get<BalanceOf<Self>>;
}

decl_storage! {
    trait Store for Module<T: Trait> as Kitties {
        // Owner -> kitty id -> kitty DNA.
        pub Kitties get(fn kitties): double_map hasher(blake2_128_concat) T::AccountId, hasher(blake2_128_concat) T::KittyIndex => Option<Kitty>;
        // NOTE(review): despite the name this stores the most recently
        // assigned id (see insert_kitty), not a running count — confirm.
        pub KittiesCount get(fn kitties_count): T::KittyIndex;
        // Reverse index: kitty id -> current owner.
        pub KittyOwners get(fn kitty_owner): map hasher(blake2_128_concat) T::KittyIndex => Option<T::AccountId>;
        // Per-account list of owned kitties (id, DNA).
        pub AccountKitties get(fn account_kitties): map hasher(blake2_128_concat) T::AccountId => Vec<(T::KittyIndex, Kitty)>;
        // Deposit reserved for each kitty, moved between owners on transfer.
        pub KittyLockAmount get(fn lock_amount): map hasher(blake2_128_concat) T::KittyIndex => Option<BalanceOf<T>>;
        // Flat list of family-tree nodes, one per kitty ever created.
        pub KittyNodeStorage get(fn get_kitty_from_node): Vec<KittyNode<T>>;
    }
}

decl_event! {
    pub enum Event<T> where
        <T as frame_system::Trait>::AccountId,
        <T as Trait>::KittyIndex,
        Balance = BalanceOf<T>,
        BlockNumber = <T as system::Trait>::BlockNumber,
    {
        /// A kitty was created or bred. [owner, kitty_id]
        Created(AccountId, KittyIndex),
        /// A kitty changed owner. [from, to, kitty_id]
        Transfered(AccountId, AccountId, KittyIndex),
        /// Deposit reserved. [who, amount, block]
        LockFunds(AccountId, Balance, BlockNumber),
        /// Deposit released. [who, amount, block]
        UnlockFunds(AccountId, Balance, BlockNumber),
        /// Unreserved funds moved between accounts. [from, to, amount, block]
        TransferFunds(AccountId, AccountId, Balance, BlockNumber),
    }
}

decl_error! {
    pub enum Error for Module<T: Trait> {
        KittiesCountOverflow,
        InvalidaKittyId,
        RequireDifferentParent,
        AccountNotExist,
        BalanceNotEnough,
    }
}

decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
        type Error = Error<T>;
        fn deposit_event() = default;

        /// Reserve `amount` from `locker`'s free balance.
        /// NOTE(review): any signed origin may reserve from any `locker`;
        /// intended for internal use by create/transfer/breed — confirm.
        #[weight = 0]
        pub fn reserve_funds(origin, locker: T::AccountId, amount: BalanceOf<T>) -> DispatchResult {
            let _sender = ensure_signed(origin)?;
            T::Currency::reserve(&locker, amount)
                .map_err(|_| Error::<T>::BalanceNotEnough)?;
            let now = <system::Module<T>>::block_number();
            Self::deposit_event(RawEvent::LockFunds(locker, amount, now));
            Ok(())
        }

        /// Unreserve `collateral` from `to_punish` and transfer whatever was
        /// actually unreserved (collateral minus any shortfall) to `dest`.
        #[weight = 10_000]
        pub fn unreserve_and_transfer(
            origin,
            to_punish: T::AccountId,
            dest: T::AccountId,
            collateral: BalanceOf<T>
        ) -> DispatchResult {
            let _ = ensure_signed(origin)?;
            // `overdraft` is the part that could NOT be unreserved.
            let overdraft = T::Currency::unreserve(&to_punish, collateral);
            T::Currency::transfer(&to_punish, &dest, collateral - overdraft, AllowDeath)?;
            let now = <system::Module<T>>::block_number();
            Self::deposit_event(RawEvent::TransferFunds(to_punish, dest, collateral - overdraft, now));
            Ok(())
        }

        /// Create a new kitty with random DNA, reserve the deposit from the
        /// caller, and append a parent-less node to the family tree.
        #[weight = 1000]
        pub fn create(origin) -> DispatchResult {
            let sender = ensure_signed(origin.clone())?;
            let kitty_id = Self::next_kitty_id()?;
            let dna = Self::random_value(&sender);
            let kitty = Kitty(dna);
            Self::insert_kitty(&sender, kitty_id, kitty)?;
            let amount = T::KittyReserveFunds::get();
            KittyLockAmount::<T>::insert(kitty_id, amount);
            Self::reserve_funds(origin, sender.clone(), amount)?;
            Self::deposit_event(RawEvent::Created(sender.clone(), kitty_id));
            // take() + put() rewrites the whole node list with the new node appended.
            let mut node_vec = KittyNodeStorage::<T>::take();
            let node = KittyNode {
                _self: kitty_id,
                children: Vec::new(),
                companion: None,
            };
            node_vec.push(node);
            KittyNodeStorage::<T>::put(node_vec);
            Ok(())
        }

        /// Transfer `kitty_id` from the caller to `to`, moving the reserved
        /// deposit: released from the sender, re-reserved from the receiver.
        #[weight = 0]
        pub fn transfer(origin, to: T::AccountId, kitty_id: T::KittyIndex) -> DispatchResult {
            let sender = ensure_signed(origin.clone())?;
            // take() both verifies ownership and removes the (sender, id) entry.
            let kitty = Kitties::<T>::take(&sender, kitty_id).ok_or(Error::<T>::InvalidaKittyId)?;
            let sender_kitty_vec = AccountKitties::<T>::take(&sender);
            let mut to_kitty_vec = AccountKitties::<T>::take(&to);
            // Rebuild the sender's list without this kitty, moving it to `to`.
            let mut new_sender_k_vec = Vec::new();
            for (kid, kt) in sender_kitty_vec.iter() {
                if kid != &kitty_id {
                    new_sender_k_vec.push((*kid, kt));
                } else {
                    to_kitty_vec.push((*kid, kitty.clone()));
                }
            }
            AccountKitties::<T>::insert(&sender, new_sender_k_vec);
            AccountKitties::<T>::insert(&to, to_kitty_vec);
            KittyOwners::<T>::insert(&kitty_id, to.clone());
            let amount = Self::lock_amount(kitty_id).ok_or(Error::<T>::InvalidaKittyId)?;
            Self::unreserve_and_transfer(origin.clone(), sender.clone(), to.clone(), amount)?;
            Self::reserve_funds(origin, to.clone(), amount)?;
            Self::deposit_event(RawEvent::Transfered(sender, to, kitty_id));
            Ok(())
        }

        /// Breed a new kitty from two of the caller's kitties, reserve its
        /// deposit, and record the child under both parents' tree nodes.
        #[weight = 0]
        pub fn breed(origin, kitty_id_1: T::KittyIndex, kitty_id_2: T::KittyIndex) {
            let sender = ensure_signed(origin.clone())?;
            let amount = T::KittyReserveFunds::get();
            let new_kitty_id = Self::do_breed(&sender, kitty_id_1, kitty_id_2)?;
            KittyLockAmount::<T>::insert(&new_kitty_id, amount.clone());
            Self::reserve_funds(origin, sender.clone(), amount)?;
            // Register the child with both parent nodes, then append its own node.
            let mut node_vec = KittyNodeStorage::<T>::take();
            for k in &mut node_vec.iter_mut() {
                if k._self == kitty_id_1 {
                    k.children.push(new_kitty_id);
                } else if k._self == kitty_id_2 {
                    k.children.push(new_kitty_id);
                }
            }
            let node = KittyNode {
                _self: new_kitty_id,
                children: Vec::new(),
                companion: Some((kitty_id_1, kitty_id_2)),
            };
            node_vec.push(node);
            KittyNodeStorage::<T>::put(node_vec);
            Self::deposit_event(RawEvent::Created(sender, new_kitty_id));
        }
    }
}

/// Per-bit DNA mix: where `selector` has a 1 take the bit from `dna1`,
/// otherwise from `dna2`.
fn combine_dna(dna1: u8, dna2: u8, selector: u8) -> u8 {
    (selector & dna1) | (!selector & dna2)
}

impl<T: Trait> Module<T> {
    /// Next id = last assigned id + 1; errors on index-type overflow.
    fn next_kitty_id() -> sp_std::result::Result<T::KittyIndex, DispatchError> {
        let kitty_id = Self::kitties_count().checked_add(&One::one()).ok_or(Error::<T>::KittiesCountOverflow)?;
        Ok(kitty_id)
    }

    /// 16 pseudo-random bytes derived from the randomness source, the
    /// sender and the extrinsic index within the block.
    fn random_value(sender: &T::AccountId) -> [u8;16] {
        let payload = (
            T::Randomness::random_seed(),
            &sender,
            <frame_system::Module<T>>::extrinsic_index(),
        );
        payload.using_encoded(blake2_128)
    }

    /// Write a kitty into all ownership indexes and bump KittiesCount.
    fn insert_kitty(owner: &T::AccountId, kitty_id: T::KittyIndex, kitty: Kitty) -> DispatchResult {
        Kitties::<T>::insert(&owner, kitty_id, kitty.clone());
        KittyOwners::<T>::insert(kitty_id, &owner);
        let mut kitty_vec = AccountKitties::<T>::take(&owner);
        kitty_vec.push((kitty_id, kitty));
        AccountKitties::<T>::insert(&owner, kitty_vec);
        // Stores the id just used (see next_kitty_id).
        KittiesCount::<T>::put(kitty_id);
        Ok(())
    }

    /// Validate both parents belong to `sender`, are distinct, and mint a
    /// child whose DNA mixes the parents' bit-by-bit via a random selector.
    fn do_breed(sender: &T::AccountId, kitty_id_1: T::KittyIndex, kitty_id_2: T::KittyIndex) -> sp_std::result::Result<T::KittyIndex, DispatchError> {
        let kitty1 = Self::kitties(&sender, kitty_id_1).ok_or(Error::<T>::InvalidaKittyId)?;
        let kitty2 = Self::kitties(&sender, kitty_id_2).ok_or(Error::<T>::InvalidaKittyId)?;
        ensure!(kitty_id_1 != kitty_id_2, Error::<T>::RequireDifferentParent);
        let kitty_id = Self::next_kitty_id()?;
        let kitty1_dna = kitty1.0;
        let kitty2_dna = kitty2.0;
        let selector = Self::random_value(&sender);
        let mut new_dna = [0u8; 16];
        for i in 0..kitty1_dna.len() {
            new_dna[i] = combine_dna(kitty1_dna[i], kitty2_dna[i], selector[i]);
        }
        Self::insert_kitty(sender, kitty_id, Kitty(new_dna))?;
        Ok(kitty_id)
    }
}
#![cfg_attr(not(feature = "std"), no_std)] use codec::{Encode, Decode}; use frame_support::{ Parameter, RuntimeDebug, StorageDoubleMap, StorageValue, decl_error, decl_event, decl_module, decl_storage, dispatch::{ DispatchError, DispatchResult }, ensure, traits::Get, traits::{ Currency, ExistenceRequirement::AllowDeath, ReservableCurrency, Randomness }, }; use sp_io::hashing::{blake2_128}; use frame_system::{self as system, ensure_signed}; use sp_runtime::traits::{AtLeast32BitUnsigned, Bounded, One, CheckedAdd}; use sp_std::prelude::*; #[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq)] pub struct Kitty(pub [u8; 16]); #[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq)] pub struct KittyNode<T: Trait> { _self: T::KittyIndex, companion: Option<(T::KittyIndex, T::KittyIndex)>, children: Vec<T::KittyIndex>, } type BalanceOf<T> = <<T as Trait>::Currency as Currency<<T as system::Trait>::AccountId>>::Balance; pub trait Trait: frame_system::Trait { type Event: From<Event<Self>> + Into<<Self as frame_system::Trait>::Event>; type Randomness: Randomness<Self::Hash>; type KittyIndex: Parameter + AtLeast32BitUnsigned + Bounded + Default + Copy; type Currency: Currency<Self::AccountId> + ReservableCurrency<Self::AccountId>; type KittyReserveFunds: Get<BalanceOf<Self>>; } decl_storage! { trait Store for Module<T: Trait> as Kitties { pub Kitties get(fn kitties): double_map hasher(blake2_128_concat) T::AccountId, hasher(blake2_128_concat) T::KittyIndex => Option<Kitty>; pub KittiesCount get(fn kitties_count): T::KittyIndex; pub KittyOwners get(fn kitty_owner): map hasher(blake2_128_concat) T::KittyIndex => Option<T::AccountId>; pub AccountKitties get(fn account_kitties): map hasher(blake2_128_concat) T::AccountId => Vec<(T::KittyIndex, Kitty)>; pub KittyLockAmount get(fn lock_amount): map hasher(blake2_128_concat) T::KittyIndex => Option<BalanceOf<T>>; pub KittyNodeStorage get(fn get_kitty_from_node): Vec<KittyNode<T>>; } } decl_event! 
{ pub enum Event<T> where <T as frame_system::Trait>::AccountId, <T as Trait>::KittyIndex, Balance = BalanceOf<T>, BlockNumber = <T as system::Trait>::BlockNumber, { Created(AccountId, KittyIndex), Transfered(AccountId, AccountId, KittyIndex), LockFunds(AccountId, Balance, BlockNumber), UnlockFunds(AccountId, Balance, BlockNumber), TransferFunds(AccountId, AccountId, Balance, BlockNumber), } } decl_error! { pub enum Error for Module<T: Trait> { KittiesCountOverflow, InvalidaKittyId, RequireDifferentParent, AccountNotExist, BalanceNotEnough, } } decl_module! { pub struct Module<T: Trait> for enum Call where origin: T::Origin { type Error = Error<T>; fn deposit_event() = default; #[weight = 0] pub fn reserve_funds(origin, locker: T::AccountId, amount: BalanceOf<T>) -> DispatchResult { let _sender = ensure_signed(origin)?; T::Currency::reserve(&locker, amount) .map_err(|_| Error::<T>::BalanceNotEnough)?; let now = <system::Module<T>>::block_number(); Self::deposit_event(RawEvent::LockFunds(locker, amount, now)); Ok(()) } #[weight = 10_000] pub fn unreserve_and_transfer( origin, to_punish: T::AccountId, dest: T::AccountId, collateral: BalanceOf<T> ) -> DispatchResult { let _ = ensure_signed(origin)?; let overdraft = T::Currency::unreserve(&to_punish, collateral); T::Currency::transfer(&to_punish, &dest, collateral - overdraft, AllowDeath)?; let now = <system::Module<T>>::block_number(); Self::deposit_event(RawEvent::TransferFunds(to_punish, dest, collateral - overdraft, now)); Ok(()) } #[weight = 1000] pub fn create(origin) -> DispatchResult { let sender = ensure_signed(origin.clone())?; let kitty_id = Self::next_kitty_id()?; let dna = Self::random_value(&sender); let kitty = Kitty(dna); Self::insert_kitty(&sender, kitty_id, kitty)?; let amount = T::KittyReserveFunds::get(); KittyLockAmount::<T>::insert(kitty_id, amount); Self::reserve_funds(origin, sender.clone(), amount)?; Self::deposit_event(RawEvent::Created(sender.clone(), kitty_id)); let mut node_vec = 
KittyNodeStorage::<T>::take(); let node = KittyNode { _self: kitty_id, children: Vec::new(), companion: None, }; node_vec.push(node); KittyNodeStorage::<T>::put(node_vec); Ok(()) } #[weight = 0] pub fn transfer(origin, to: T::AccountId, kitty_id: T::KittyIndex) -> DispatchResult { let sender = ensure_signed(origin.clone())?; let kitty = Kitties::<T>::take(&sender, kitty_id).ok_or(Error::<T>::InvalidaKittyId)?; let sender_kitty_vec = AccountKitties::<T>::take(&sender); let mut to_kitty_vec = AccountKitties::<T>::take(&to); let mut new_sender_k_vec = Vec::new(); for (kid, kt) in sender_kitty_vec.iter() { if kid != &kitty_id { new_sender_k_vec.push((*kid, kt)); } else { to_kitty_vec.push((*kid, kitty.clone())); } } AccountKitties::<T>::insert(&sender, new_sender_k_vec); AccountKitties::<T>::insert(&to, to_kitty_vec); KittyOwners::<T>::insert(&kitty_id, to.clone()); let amount = Self::lock_amount(kitty_id).ok_or(Error::<T>::InvalidaKittyId)?; Self::unreserve_and_transfer(origin.clone(), sender.clone(), to.clone(), amount)?; Self::reserve_funds(origin, to.clone(), amount)?; Self::deposit_event(RawEvent::Transfered(sender, to, kitty_id)); Ok(()) } #[weight = 0] pub fn breed(origin, kitty_id_1: T::KittyIndex, kitty_id_2: T::KittyIndex) { let sender = ensure_signed(origin.clone())?; let amount = T::KittyReserveFunds::get(); let new_kitty_id = Self::do_breed(&sender, kitty_id_1, kitty_id_2)?; KittyLockAmount::<T>::insert(&new_kitty_id, amount.clone()); Self::reserve_funds(origin, sender.clone(), amount)?; let mut node_vec = KittyNodeStorage::<T>::take(); for k in &mut node_vec.iter_mut() { if k._self == kitty_id_1 { k.children.push(new_kitty_id); } else if k._self == kitty_id_2 { k.children.push(new_kitty_id); } } let node = KittyNode { _self: new_kitty_id, children: Vec::new(), companion: Some((kitty_id_1, kitty_id_2)), }; node_vec.push(node); KittyNodeStorage::<T>::put(node_vec); Self::deposit_event(RawEvent::Created(sender, new_kitty_id)); } } } fn combine_dna(dna1: u8, 
dna2: u8, selector: u8) -> u8 { (selector & dna1) | (!selector & dna2) } impl<T: Trait> Module<T> { fn next_kitty_id() -> sp_std::result::Result<T::KittyIndex, DispatchError> { let kitty_id = Self::kitties_count().checked_add(&One::one()).ok_or(Error::<T>::KittiesCountOverflow)?; Ok(kitty_id) } fn random_value(sender: &T::AccountId) -> [u8;16] { let payload = ( T::Randomness::random_see
fn insert_kitty(owner: &T::AccountId, kitty_id: T::KittyIndex, kitty: Kitty) -> DispatchResult { Kitties::<T>::insert(&owner, kitty_id, kitty.clone()); KittyOwners::<T>::insert(kitty_id, &owner); let mut kitty_vec = AccountKitties::<T>::take(&owner); kitty_vec.push((kitty_id, kitty)); AccountKitties::<T>::insert(&owner, kitty_vec); KittiesCount::<T>::put(kitty_id); Ok(()) } fn do_breed(sender: &T::AccountId, kitty_id_1: T::KittyIndex, kitty_id_2: T::KittyIndex) -> sp_std::result::Result<T::KittyIndex, DispatchError> { let kitty1 = Self::kitties(&sender, kitty_id_1).ok_or(Error::<T>::InvalidaKittyId)?; let kitty2 = Self::kitties(&sender, kitty_id_2).ok_or(Error::<T>::InvalidaKittyId)?; ensure!(kitty_id_1 != kitty_id_2, Error::<T>::RequireDifferentParent); let kitty_id = Self::next_kitty_id()?; let kitty1_dna = kitty1.0; let kitty2_dna = kitty2.0; let selector = Self::random_value(&sender); let mut new_dna = [0u8; 16]; for i in 0..kitty1_dna.len() { new_dna[i] = combine_dna(kitty1_dna[i], kitty2_dna[i], selector[i]); } Self::insert_kitty(sender, kitty_id, Kitty(new_dna))?; Ok(kitty_id) } }
d(), &sender, <frame_system::Module<T>>::extrinsic_index(), ); payload.using_encoded(blake2_128) }
function_block-function_prefixed
[ { "content": "pub fn last_event() -> Event {\n\n System::events().last().unwrap().event.clone()\n\n}", "file_path": "pallets/kitties/src/mock.rs", "rank": 2, "score": 148286.20940153103 }, { "content": "/// Configure the pallet by specifying the parameters and types on which it depends.\n...
Rust
core/src/storage/collections/stash/impls.rs
jsulmont/ink
d1474ae4b7cb3b4119ec39d702358d4e51b0b5bf
#[cfg(feature = "ink-generate-abi")]
use ink_abi::{
    HasLayout,
    LayoutField,
    LayoutStruct,
    StorageLayout,
};
use scale::{
    Decode,
    Encode,
};
#[cfg(feature = "ink-generate-abi")]
use type_metadata::Metadata;

use crate::storage::{
    self,
    alloc::{
        Allocate,
        AllocateUsing,
        Initialize,
    },
    chunk::SyncChunk,
    Flush,
    Key,
};

/// A storage stash: stable-index storage where removed slots are recycled
/// through an intrusive free list, so indices of live values never shift.
#[derive(Debug)]
#[cfg_attr(feature = "ink-generate-abi", derive(Metadata))]
pub struct Stash<T> {
    // Free-list head, live count and high-water mark.
    header: storage::Value<StashHeader>,
    // The slots themselves; each is either Occupied or a Vacant free-list link.
    entries: SyncChunk<Entry<T>>,
}

#[derive(Debug, Encode, Decode)]
#[cfg_attr(feature = "ink-generate-abi", derive(Metadata))]
struct StashHeader {
    // Index of the next slot to reuse; equals `len` when no slot is free.
    next_vacant: u32,
    // Number of occupied slots.
    len: u32,
    // Highest slot index ever used + 1 (occupied + vacant).
    max_len: u32,
}

impl Flush for StashHeader {
    #[inline]
    fn flush(&mut self) {
        self.next_vacant.flush();
        self.len.flush();
        self.max_len.flush();
    }
}

/// Iterator over the values of a stash (indices dropped).
#[derive(Debug)]
pub struct Values<'a, T> {
    iter: Iter<'a, T>,
}

impl<'a, T> Values<'a, T> {
    pub(crate) fn new(stash: &'a Stash<T>) -> Self {
        Self { iter: stash.iter() }
    }
}

impl<T> Flush for Stash<T>
where
    T: Encode + Flush,
{
    #[inline]
    fn flush(&mut self) {
        self.header.flush();
        self.entries.flush();
    }
}

#[cfg(feature = "ink-generate-abi")]
impl<T> HasLayout for Stash<T>
where
    T: Metadata + 'static,
{
    fn layout(&self) -> StorageLayout {
        LayoutStruct::new(
            Self::meta_type(),
            vec![
                LayoutField::of("header", &self.header),
                LayoutField::of("entries", &self.entries),
            ],
        )
        .into()
    }
}

impl<'a, T> Iterator for Values<'a, T>
where
    T: scale::Codec,
{
    type Item = &'a T;

    fn next(&mut self) -> Option<Self::Item> {
        // Delegate to the (index, value) iterator and discard the index.
        self.iter.next().map(|(_index, value)| value)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a, T> ExactSizeIterator for Values<'a, T> where T: scale::Codec {}

impl<'a, T> DoubleEndedIterator for Values<'a, T>
where
    T: scale::Codec,
{
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back().map(|(_index, value)| value)
    }
}

/// Iterator over the occupied (index, value) pairs of a stash.
#[derive(Debug)]
pub struct Iter<'a, T> {
    stash: &'a Stash<T>,
    // Front cursor (inclusive).
    begin: u32,
    // Back cursor (exclusive).
    end: u32,
    // How many occupied entries were yielded so far; lets the iterator stop
    // early once all `len` live values have been produced.
    yielded: u32,
}

impl<'a, T> Iter<'a, T> {
    pub(crate) fn new(stash: &'a Stash<T>) -> Self {
        Self {
            stash,
            begin: 0,
            end: stash.max_len(),
            yielded: 0,
        }
    }
}

impl<'a, T> Iterator for Iter<'a, T>
where
    T: scale::Codec,
{
    type Item = (u32, &'a T);

    fn next(&mut self) -> Option<Self::Item> {
        debug_assert!(self.begin <= self.end);
        if self.yielded == self.stash.len() {
            return None
        }
        // Skip vacant slots until the next occupied one.
        while self.begin < self.end {
            let cur = self.begin;
            self.begin += 1;
            if let Some(elem) = self.stash.get(cur) {
                self.yielded += 1;
                return Some((cur, elem))
            }
        }
        None
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: occupied count minus what was already yielded.
        let remaining = (self.stash.len() - self.yielded) as usize;
        (remaining, Some(remaining))
    }
}

impl<'a, T> ExactSizeIterator for Iter<'a, T> where T: scale::Codec {}

impl<'a, T> DoubleEndedIterator for Iter<'a, T>
where
    T: scale::Codec,
{
    fn next_back(&mut self) -> Option<Self::Item> {
        debug_assert!(self.begin <= self.end);
        if self.yielded == self.stash.len() {
            return None
        }
        while self.begin < self.end {
            self.end -= 1;
            if let Some(elem) = self.stash.get(self.end) {
                self.yielded += 1;
                return Some((self.end, elem))
            }
        }
        None
    }
}

/// One stash slot: either a value, or a link to the next vacant slot.
#[derive(Debug, Encode, Decode)]
#[cfg_attr(feature = "ink-generate-abi", derive(Metadata))]
enum Entry<T> {
    // Vacant slot holding the index of the next vacant slot (free list).
    Vacant(u32),
    // Slot holding a live value.
    Occupied(T),
}

impl<T> Flush for Entry<T>
where
    T: Flush,
{
    #[inline]
    fn flush(&mut self) {
        match self {
            Entry::Vacant(_) => (),
            Entry::Occupied(occupied) => occupied.flush(),
        }
    }
}

impl<T> Encode for Stash<T> {
    fn encode_to<W: scale::Output>(&self, dest: &mut W) {
        self.header.encode_to(dest);
        self.entries.encode_to(dest);
    }
}

impl<T> Decode for Stash<T> {
    fn decode<I: scale::Input>(input: &mut I) -> Result<Self, scale::Error> {
        let header = storage::Value::decode(input)?;
        let entries = SyncChunk::decode(input)?;
        Ok(Self { header, entries })
    }
}

impl<T> AllocateUsing for Stash<T> {
    #[inline]
    unsafe fn allocate_using<A>(alloc: &mut A) -> Self
    where
        A: Allocate,
    {
        Self {
            header: storage::Value::allocate_using(alloc),
            entries: SyncChunk::allocate_using(alloc),
        }
    }
}

impl<T> Initialize for Stash<T> {
    type Args = ();

    #[inline(always)]
    fn default_value() -> Option<Self::Args> {
        Some(())
    }

    #[inline]
    fn initialize(&mut self, _args: Self::Args) {
        // Fresh stash: empty, with the free-list head at slot 0.
        self.header.set(StashHeader {
            next_vacant: 0,
            len: 0,
            max_len: 0,
        });
    }
}

impl<T> Stash<T> {
    /// Iterate over occupied (index, value) pairs in index order.
    pub fn iter(&self) -> Iter<T> {
        Iter::new(self)
    }

    /// Iterate over occupied values in index order.
    pub fn values(&self) -> Values<T> {
        Values::new(self)
    }

    /// Storage key of the underlying entries chunk.
    pub fn entries_key(&self) -> Key {
        self.entries.cells_key()
    }

    /// Number of occupied slots.
    pub fn len(&self) -> u32 {
        self.header.len
    }

    /// Highest slot index ever used + 1 (occupied plus vacant).
    pub fn max_len(&self) -> u32 {
        self.header.max_len
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Head of the vacant free list.
    fn next_vacant(&self) -> u32 {
        self.header.next_vacant
    }
}

impl<T> Stash<T>
where
    T: scale::Codec,
{
    /// Returns the value at slot `n`, or `None` if out of range or vacant.
    pub fn get(&self, n: u32) -> Option<&T> {
        self.entries.get(n).and_then(|entry| {
            match entry {
                Entry::Occupied(val) => Some(val),
                Entry::Vacant(_) => None,
            }
        })
    }

    /// Insert a value, reusing the head of the free list if one exists,
    /// otherwise appending a new slot. Returns the slot index used.
    pub fn put(&mut self, val: T) -> u32 {
        let current_vacant = self.header.next_vacant;
        debug_assert!(current_vacant <= self.len());
        if current_vacant == self.len() {
            // No vacant slot: grow the stash by one slot at the end.
            self.entries.set(current_vacant, Entry::Occupied(val));
            self.header.next_vacant = current_vacant + 1;
            self.header.max_len += 1;
        } else {
            // Reuse a vacant slot; its link becomes the new free-list head.
            let next_vacant = match self
                .entries
                .put(current_vacant, Entry::Occupied(val))
                .expect(
                    "[ink_core::Stash::put] Error: \
                     expected a vacant entry here, but no entry was found",
                )
            {
                Entry::Vacant(next_vacant) => next_vacant,
                Entry::Occupied(_) => {
                    unreachable!(
                        "[ink_core::Stash::put] Error: \
                         a next_vacant index can never point to an occupied entry"
                    )
                }
            };
            self.header.next_vacant = next_vacant;
        }
        self.header.len += 1;
        current_vacant
    }

    /// Remove and return the value at slot `n`, pushing the slot onto the
    /// free list. Returns `None` if the slot is out of range or vacant.
    pub fn take(&mut self, n: u32) -> Option<T> {
        match self.entries.get(n) {
            None | Some(Entry::Vacant(_)) => None,
            Some(Entry::Occupied(_)) => {
                // Replace with a Vacant link to the previous free-list head.
                match self
                    .entries
                    .put(n, Entry::Vacant(self.next_vacant()))
                    .expect(
                        "[ink_core::Stash::take] Error: \
                         we already asserted that the entry at `n` exists",
                    )
                {
                    Entry::Occupied(val) => {
                        // The freed slot becomes the new free-list head.
                        self.header.next_vacant = n;
                        debug_assert!(!self.is_empty());
                        self.header.len -= 1;
                        Some(val)
                    }
                    Entry::Vacant(_) => {
                        unreachable!(
                            "[ink_core::Stash::take] Error: \
                             we already asserted that the entry is occupied"
                        )
                    }
                }
            }
        }
    }
}
#[cfg(feature = "ink-generate-abi")] use ink_abi::{ HasLayout, LayoutField, LayoutStruct, StorageLayout, }; use scale::{ Decode, Encode, }; #[cfg(feature = "ink-generate-abi")] use type_metadata::Metadata; use crate::storage::{ self, alloc::{ Allocate, AllocateUsing, Initialize, }, chunk::SyncChunk, Flush, Key, }; #[derive(Debug)] #[cfg_attr(feature = "ink-generate-abi", derive(Metadata))] pub struct Stash<T> { header: storage::Value<StashHeader>, entries: SyncChunk<Entry<T>>, } #[derive(Debug, Encode, Decode)] #[cfg_attr(feature = "ink-generate-abi", derive(Metadata))] struct StashHeader { next_vacant: u32, len: u32, max_len: u32, } impl Flush for StashHeader { #[inline] fn flush(&mut self) { self.next_vacant.flush(); self.len.flush(); self.max_len.flush(); } } #[derive(Debug)] pub struct Values<'a, T> { iter: Iter<'a, T>, } impl<'a, T> Values<'a, T> { pub(crate) fn new(stash: &'a Stash<T>) -> Self { Self { iter: stash.iter() } } } impl<T> Flush for Stash<T> where T: Encode + Flush, { #[inline] fn flush(&mut self) { self.header.flush(); self.entries.flush(); } } #[cfg(feature = "ink-generate-abi")] impl<T> HasLayout for Stash<T> where T: Metadata + 'static, { fn layout(&self) -> StorageLayout { LayoutStruct::new( Self::meta_type(), vec![ LayoutField::of("header", &self.header), LayoutField::of("entries", &self.entries), ], ) .into() } } impl<'a, T> Iterator for Values<'a, T> where T: scale::Codec, { type Item = &'a T; fn next(&mut self) -> Option<Self::Item> { self.iter.next().map(|(_index, value)| value) } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() } } impl<'a, T> ExactSizeIterator for Values<'a, T> where T: scale::Codec {} impl<'a, T> DoubleEndedIterator for Values<'a, T> where T: scale::Codec, { fn next_back(&mut self) -> Option<Self::Item> { self.iter.next_back().map(|(_index, value)| value) } } #[derive(Debug)] pub struct Iter<'a, T> { stash: &'a Stash<T>, begin: u32, end: u32, yielded: u32, } impl<'a, T> Iter<'a, T> { 
pub(crate) fn new(stash: &'a Stash<T>) -> Self { Self { stash, begin: 0, end: stash.max_len(), yielded: 0, } } } impl<'a, T> Iterator for Iter<'a, T> where T: scale::Codec, { type Item = (u32, &'a T); fn next(&mut self) -> Option<Self::Item> { debug_assert!(self.begin <= self.end); if self.yielded == self.stash.len() { return None
self.header.len } pub fn max_len(&self) -> u32 { self.header.max_len } pub fn is_empty(&self) -> bool { self.len() == 0 } fn next_vacant(&self) -> u32 { self.header.next_vacant } } impl<T> Stash<T> where T: scale::Codec, { pub fn get(&self, n: u32) -> Option<&T> { self.entries.get(n).and_then(|entry| { match entry { Entry::Occupied(val) => Some(val), Entry::Vacant(_) => None, } }) } pub fn put(&mut self, val: T) -> u32 { let current_vacant = self.header.next_vacant; debug_assert!(current_vacant <= self.len()); if current_vacant == self.len() { self.entries.set(current_vacant, Entry::Occupied(val)); self.header.next_vacant = current_vacant + 1; self.header.max_len += 1; } else { let next_vacant = match self .entries .put(current_vacant, Entry::Occupied(val)) .expect( "[ink_core::Stash::put] Error: \ expected a vacant entry here, but no entry was found", ) { Entry::Vacant(next_vacant) => next_vacant, Entry::Occupied(_) => { unreachable!( "[ink_core::Stash::put] Error: \ a next_vacant index can never point to an occupied entry" ) } }; self.header.next_vacant = next_vacant; } self.header.len += 1; current_vacant } pub fn take(&mut self, n: u32) -> Option<T> { match self.entries.get(n) { None | Some(Entry::Vacant(_)) => None, Some(Entry::Occupied(_)) => { match self .entries .put(n, Entry::Vacant(self.next_vacant())) .expect( "[ink_core::Stash::take] Error: \ we already asserted that the entry at `n` exists", ) { Entry::Occupied(val) => { self.header.next_vacant = n; debug_assert!(!self.is_empty()); self.header.len -= 1; Some(val) } Entry::Vacant(_) => { unreachable!( "[ink_core::Stash::take] Error: \ we already asserted that the entry is occupied" ) } } } } } }
} while self.begin < self.end { let cur = self.begin; self.begin += 1; if let Some(elem) = self.stash.get(cur) { self.yielded += 1; return Some((cur, elem)) } } None } fn size_hint(&self) -> (usize, Option<usize>) { let remaining = (self.stash.len() - self.yielded) as usize; (remaining, Some(remaining)) } } impl<'a, T> ExactSizeIterator for Iter<'a, T> where T: scale::Codec {} impl<'a, T> DoubleEndedIterator for Iter<'a, T> where T: scale::Codec, { fn next_back(&mut self) -> Option<Self::Item> { debug_assert!(self.begin <= self.end); if self.yielded == self.stash.len() { return None } while self.begin < self.end { self.end -= 1; if let Some(elem) = self.stash.get(self.end) { self.yielded += 1; return Some((self.end, elem)) } } None } } #[derive(Debug, Encode, Decode)] #[cfg_attr(feature = "ink-generate-abi", derive(Metadata))] enum Entry<T> { Vacant(u32), Occupied(T), } impl<T> Flush for Entry<T> where T: Flush, { #[inline] fn flush(&mut self) { match self { Entry::Vacant(_) => (), Entry::Occupied(occupied) => occupied.flush(), } } } impl<T> Encode for Stash<T> { fn encode_to<W: scale::Output>(&self, dest: &mut W) { self.header.encode_to(dest); self.entries.encode_to(dest); } } impl<T> Decode for Stash<T> { fn decode<I: scale::Input>(input: &mut I) -> Result<Self, scale::Error> { let header = storage::Value::decode(input)?; let entries = SyncChunk::decode(input)?; Ok(Self { header, entries }) } } impl<T> AllocateUsing for Stash<T> { #[inline] unsafe fn allocate_using<A>(alloc: &mut A) -> Self where A: Allocate, { Self { header: storage::Value::allocate_using(alloc), entries: SyncChunk::allocate_using(alloc), } } } impl<T> Initialize for Stash<T> { type Args = (); #[inline(always)] fn default_value() -> Option<Self::Args> { Some(()) } #[inline] fn initialize(&mut self, _args: Self::Args) { self.header.set(StashHeader { next_vacant: 0, len: 0, max_len: 0, }); } } impl<T> Stash<T> { pub fn iter(&self) -> Iter<T> { Iter::new(self) } pub fn values(&self) -> Values<T> { 
Values::new(self) } pub fn entries_key(&self) -> Key { self.entries.cells_key() } pub fn len(&self) -> u32 {
random
[ { "content": "/// Returns an iterator over the uninterpreted bytes of all past emitted events.\n\npub fn emitted_events<T: EnvTypes>() -> impl Iterator<Item = Vec<u8>> {\n\n ContractEnv::<T>::emitted_events()\n\n}\n", "file_path": "core/src/env/test.rs", "rank": 0, "score": 403824.9360293277 },...
Rust
day-8/src/main.rs
jharmer95/advent2020
113fa84933ddf9d3d70fb0620aa27b585a9f3a74
use inputs::get_input; mod cpu_sim { use std::{cmp::Ordering, str::FromStr}; pub struct CPU { accumulator: isize, pc: usize, } pub enum ExecutionErr { Ok, Finished, OutOfBounds, } impl CPU { pub const fn new() -> Self { Self { accumulator: 0, pc: 0, } } pub fn reset(&mut self) { self.accumulator = 0; self.pc = 0; } pub fn run_one(&mut self, opcodes: &[Instruction]) -> ExecutionErr { let num_ins = opcodes.len(); match self.pc.cmp(&num_ins) { Ordering::Equal => return ExecutionErr::Finished, Ordering::Greater => { eprintln!("Invalid address loaded into PC!"); return ExecutionErr::OutOfBounds; } Ordering::Less => (), } match opcodes[self.pc] { Instruction::NOP(_) => self.pc += 1, Instruction::ACC(op) => { self.accumulator += op; self.pc += 1; } Instruction::JMP(op) => { if op < 0 { self.pc -= -op as usize; } else { self.pc += op as usize; } } } ExecutionErr::Ok } pub const fn accumulator(&self) -> isize { self.accumulator } pub const fn pc(&self) -> usize { self.pc } } #[derive(Debug)] pub enum Instruction { NOP(isize), ACC(isize), JMP(isize), } impl FromStr for Instruction { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { let x: Vec<&str> = s.split(' ').collect(); match x.get(0) { Some(&"nop") => Ok(Self::NOP(x[1].parse().unwrap())), Some(&"acc") => Ok(Self::ACC(x[1].parse().unwrap())), Some(&"jmp") => Ok(Self::JMP(x[1].parse().unwrap())), _ => Err(String::from("Invalid opcode passed")), } } } impl Clone for Instruction { fn clone(&self) -> Self { match self { Self::ACC(op) => Self::ACC(*op), Self::JMP(op) => Self::JMP(*op), Self::NOP(op) => Self::NOP(*op), } } } } fn part1(opcodes: &[cpu_sim::Instruction]) -> isize { let mut cpu = cpu_sim::CPU::new(); let mut pc_vals = vec![]; while !pc_vals.contains(&cpu.pc()) { pc_vals.push(cpu.pc()); cpu.run_one(opcodes); } cpu.accumulator() } fn test_sequence(cpu: &mut cpu_sim::CPU, opcodes: &[cpu_sim::Instruction]) -> bool { let mut pc_vals = vec![]; loop { if pc_vals.contains(&cpu.pc()) { return false; } 
pc_vals.push(cpu.pc()); match cpu.run_one(opcodes) { cpu_sim::ExecutionErr::Ok => (), cpu_sim::ExecutionErr::Finished => { return true; } cpu_sim::ExecutionErr::OutOfBounds => { eprintln!("Out of bounds error occurred!"); return false; } } } } fn part2(opcodes: &[cpu_sim::Instruction]) -> isize { let mut cpu = cpu_sim::CPU::new(); let mut opcodes2 = opcodes.to_vec(); for i in 0..opcodes2.len() { match opcodes2[i] { cpu_sim::Instruction::NOP(op) => { opcodes2[i] = cpu_sim::Instruction::JMP(op); if test_sequence(&mut cpu, &opcodes2) { return cpu.accumulator(); } opcodes2[i] = cpu_sim::Instruction::NOP(op); } cpu_sim::Instruction::ACC(_) => continue, cpu_sim::Instruction::JMP(op) => { opcodes2[i] = cpu_sim::Instruction::NOP(op); if test_sequence(&mut cpu, &opcodes2) { return cpu.accumulator(); } opcodes2[i] = cpu_sim::Instruction::JMP(op); } } cpu.reset(); } panic!("All possibilities exhausted!"); } fn main() { let inputs = get_input::<cpu_sim::Instruction>("inputs/day-8.txt").expect("Could not parse path!"); println!("Part 1 solution: {}", part1(&inputs)); println!("Part 2 solution: {}", part2(&inputs)); } #[test] fn check() { let inputs = get_input::<cpu_sim::Instruction>("../inputs/day-8.txt").expect("Could not parse path!"); assert_eq!(part1(&inputs), 1723); assert_eq!(part2(&inputs), 846); }
use inputs::get_input; mod cpu_sim { use std::{cmp::Ordering, str::FromStr}; pub struct CPU { accumulator: isize, pc: usize, } pub enum ExecutionErr { Ok, Finished, OutOfBounds, } impl CPU { pub const fn new() -> Self { Self { accumulator: 0, pc: 0, } } pub fn reset(&mut self) { self.accumulator = 0; self.pc = 0; } pub fn run_one(&mut self, opcodes: &[Instruction]) -> ExecutionErr { let num_ins = opcodes.len(); match self.pc.cmp(&num_ins) { Ordering::Equal => return ExecutionErr::Finished, Ordering::Greater => { eprintln!("Invalid address loaded into PC!"); return ExecutionErr::OutOfBounds; } Ordering::Less => (), } match opcodes[self.pc] { Instruction::NOP(_) => self.pc += 1, Instruction::ACC(op) => { self.accumulator += op; self.pc += 1; } Instruction::JMP(op) => { if op < 0 { self.pc -= -op as usize; } else { self.pc += op as usize; } } } ExecutionErr::Ok } pub const fn accumulator(&self) -> isize { self.accumulator } pub const fn pc(&self) -> usize { self.pc } } #[derive(Debug)] pub enum Instruction { NOP(isize), ACC(isize), JMP(isize), } impl FromStr for Instruction { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { let x: Vec<&str> = s.split(' ').collect(); match x.get(0) { Some(&"nop") => Ok(Self::NOP(x[1].parse().unwrap())), Some(&"acc") => Ok(Self::ACC(x[1].parse().unwrap())), Some(&"jmp") => Ok(Self::JMP(x[1].parse().unwrap())), _ => Err(String::from("Invalid opcode passed")), } } } impl Clone for Instruction {
} } fn part1(opcodes: &[cpu_sim::Instruction]) -> isize { let mut cpu = cpu_sim::CPU::new(); let mut pc_vals = vec![]; while !pc_vals.contains(&cpu.pc()) { pc_vals.push(cpu.pc()); cpu.run_one(opcodes); } cpu.accumulator() } fn test_sequence(cpu: &mut cpu_sim::CPU, opcodes: &[cpu_sim::Instruction]) -> bool { let mut pc_vals = vec![]; loop { if pc_vals.contains(&cpu.pc()) { return false; } pc_vals.push(cpu.pc()); match cpu.run_one(opcodes) { cpu_sim::ExecutionErr::Ok => (), cpu_sim::ExecutionErr::Finished => { return true; } cpu_sim::ExecutionErr::OutOfBounds => { eprintln!("Out of bounds error occurred!"); return false; } } } } fn part2(opcodes: &[cpu_sim::Instruction]) -> isize { let mut cpu = cpu_sim::CPU::new(); let mut opcodes2 = opcodes.to_vec(); for i in 0..opcodes2.len() { match opcodes2[i] { cpu_sim::Instruction::NOP(op) => { opcodes2[i] = cpu_sim::Instruction::JMP(op); if test_sequence(&mut cpu, &opcodes2) { return cpu.accumulator(); } opcodes2[i] = cpu_sim::Instruction::NOP(op); } cpu_sim::Instruction::ACC(_) => continue, cpu_sim::Instruction::JMP(op) => { opcodes2[i] = cpu_sim::Instruction::NOP(op); if test_sequence(&mut cpu, &opcodes2) { return cpu.accumulator(); } opcodes2[i] = cpu_sim::Instruction::JMP(op); } } cpu.reset(); } panic!("All possibilities exhausted!"); } fn main() { let inputs = get_input::<cpu_sim::Instruction>("inputs/day-8.txt").expect("Could not parse path!"); println!("Part 1 solution: {}", part1(&inputs)); println!("Part 2 solution: {}", part2(&inputs)); } #[test] fn check() { let inputs = get_input::<cpu_sim::Instruction>("../inputs/day-8.txt").expect("Could not parse path!"); assert_eq!(part1(&inputs), 1723); assert_eq!(part2(&inputs), 846); }
fn clone(&self) -> Self { match self { Self::ACC(op) => Self::ACC(*op), Self::JMP(op) => Self::JMP(*op), Self::NOP(op) => Self::NOP(*op), } }
function_block-full_function
[ { "content": "fn tokenize(inputs: &[String]) -> HashMap<&str, Vec<(String, usize)>> {\n\n let mut ret = HashMap::new();\n\n\n\n for line in inputs {\n\n let split: Vec<&str> = line.split(\" bags contain \").collect();\n\n let container = split[0];\n\n let contents = split[1];\n\n\n\n ...
Rust
src/game.rs
unixzii/game-of-life
21059c29140883e080ab4c5076a8d0f0a488d42e
use std::rc::Rc; use std::cell::{RefCell, RefMut}; use wasm_bindgen::{JsCast, JsValue}; use wasm_bindgen::closure::Closure; use web_sys::{console, window}; use crate::ui; use crate::engine; struct UiResponder { state: State, } impl ui::Responder for UiResponder { fn on_mouse_down(&self, point: ui::Point) { js_log!("on_mouse_down: {:?}", point); let mut state_inner = self.state.get_inner(); state_inner.is_mouse_down = true; drop(state_inner); self.state.put_cell(point.x, point.y); } fn on_mouse_move(&self, point: ui::Point) { if !self.state.get_inner().is_mouse_down { return; } js_log!("on_mouse_move: {:?}", point); self.state.put_cell(point.x, point.y); } fn on_mouse_up(&self) { js_log!("on_mouse_up"); self.state.get_inner().is_mouse_down = false; } } pub struct Config { pub update_interval: i32, } pub struct State { inner: Rc<RefCell<StateInner>>, } struct StateInner { canvas: ui::Canvas, world: engine::World, config: Config, is_mouse_down: bool, timer_closure: Option<Box<dyn Drop>>, timer_id: i32, } impl State { pub fn new(canvas: ui::Canvas, world: engine::World, config: Config) -> State { let state = State { inner: Rc::new(RefCell::new(StateInner { canvas: canvas, world: world, config: config, is_mouse_down: false, timer_closure: None, timer_id: -1, })), }; let responder = Box::new(UiResponder { state: state.clone() }); state.get_inner().canvas.install_responder(responder); return state; } pub fn clone(&self) -> State { return State { inner: self.inner.clone(), }; } pub fn resume(&self) { let mut inner = self.get_inner(); if inner.timer_closure.is_some() { return; } let state_clone = self.clone(); let closure = Box::new(Closure::wrap(Box::new(move || { state_clone.tick(); }) as Box<dyn FnMut()>)); let timer_id = window().unwrap().set_interval_with_callback_and_timeout_and_arguments_0( closure.as_ref().as_ref().unchecked_ref(), inner.config.update_interval ).unwrap(); inner.timer_closure = Some(closure); inner.timer_id = timer_id; } pub fn pause(&self) { let mut 
inner = self.get_inner(); if inner.timer_closure.is_none() { return; } window().unwrap().clear_interval_with_handle(inner.timer_id); inner.timer_closure = None; inner.timer_id = -1; } fn put_cell(&self, x: i32, y: i32) { self.get_inner().world.set_cell(x, y, engine::Cell::Alive); self.update_canvas(); } fn tick(&self) { self.get_inner().world.next_gen(); self.update_canvas(); } fn update_canvas(&self) { let inner = self.get_inner(); inner.canvas.clear(); for col in 0..(inner.world.height()) { for row in 0..(inner.world.width()) { if inner.world.cell_at(row, col) == engine::Cell::Alive { inner.canvas.draw_cell(row, col); } } } } fn get_inner<'a>(&'a self) -> RefMut<'a, StateInner> { return self.inner.borrow_mut(); } } impl Drop for State { fn drop(&mut self) { self.pause(); } }
use std::rc::Rc; use std::cell::{RefCell, RefMut}; use wasm_bindgen::{JsCast, JsValue}; use wasm_bindgen::closure::Closure; use web_sys::{console, window}; use crate::ui; use crate::engine; struct UiResponder { state: State, } impl ui::Responder for UiResponder { fn on_mouse_down(&self, point: ui::Point) { js_log!("on_mouse_down: {:?}", point); let mut state_inner = self.state.get_inner(); state_inner.is_mouse_down = true; drop(state_inner); self.state.put_cell(point.x, point.y); } fn on_mouse_move(&self, point: ui::Point) { if !self.state.get_inner().is_mouse_down { return; } js_log!("on_mouse_move: {:?}", point); self.state.put_cell(point.x, point.y); } fn on_mouse_up(&self) { js_log!("on_mouse_up"); self.state.get_inner().is_mouse_down = false; } } pub struct Config { pub update_interval: i32, } pub struct State { inner: Rc<RefCell<StateInner>>, } struct StateInner { canvas: ui::Canvas, world: engine::World, config: Config, is_mouse_down: bool, timer_closure: Option<Box<dyn Drop>>, timer_id: i32, } impl State { pub fn new(canvas: ui::Canvas, world: engine::World, config: Config) -> State { let state = State { inner: Rc::new(RefCell::new(StateInner { canvas: canvas, world: world, config: config, is_mouse_down: false, timer_closure: None, timer_id: -1, })), }; let responder = Box::new(UiResponder { state: state.clone() }); state.get_inner().canvas.install_responder(responder); return state; } pub fn clone(&self) -> State { return State { inner: self.inner.clone(), }; } pub fn resume(&self) { let mut inner = self.get_inner();
let timer_id = window().unwrap().set_interval_with_callback_and_timeout_and_arguments_0( closure.as_ref().as_ref().unchecked_ref(), inner.config.update_interval ).unwrap(); inner.timer_closure = Some(closure); inner.timer_id = timer_id; } pub fn pause(&self) { let mut inner = self.get_inner(); if inner.timer_closure.is_none() { return; } window().unwrap().clear_interval_with_handle(inner.timer_id); inner.timer_closure = None; inner.timer_id = -1; } fn put_cell(&self, x: i32, y: i32) { self.get_inner().world.set_cell(x, y, engine::Cell::Alive); self.update_canvas(); } fn tick(&self) { self.get_inner().world.next_gen(); self.update_canvas(); } fn update_canvas(&self) { let inner = self.get_inner(); inner.canvas.clear(); for col in 0..(inner.world.height()) { for row in 0..(inner.world.width()) { if inner.world.cell_at(row, col) == engine::Cell::Alive { inner.canvas.draw_cell(row, col); } } } } fn get_inner<'a>(&'a self) -> RefMut<'a, StateInner> { return self.inner.borrow_mut(); } } impl Drop for State { fn drop(&mut self) { self.pause(); } }
if inner.timer_closure.is_some() { return; } let state_clone = self.clone(); let closure = Box::new(Closure::wrap(Box::new(move || { state_clone.tick(); }) as Box<dyn FnMut()>));
random
[ { "content": "fn generate_initial_world(world: &mut engine::World) {\n\n for col in 0..(world.height()) {\n\n for row in 0..(world.width()) {\n\n if Math::random() < 0.3 {\n\n world.set_cell(row, col, engine::Cell::Alive);\n\n }\n\n }\n\n }\n\n}", "fi...
Rust
src/codec_impl.rs
Stebalien/libipld
92586bc1708fb69463ef865f81f9986b3cf31524
#[cfg(feature = "dag-cbor")] use crate::cbor::DagCborCodec; use crate::cid::Cid; use crate::codec::{Codec, Decode, Encode, References}; use crate::error::{Result, UnsupportedCodec}; use crate::ipld::Ipld; #[cfg(feature = "dag-json")] use crate::json::DagJsonCodec; #[cfg(feature = "dag-pb")] use crate::pb::DagPbCodec; use crate::raw::RawCodec; use core::convert::TryFrom; use std::io::{Read, Seek, Write}; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum IpldCodec { Raw, #[cfg(feature = "dag-cbor")] DagCbor, #[cfg(feature = "dag-json")] DagJson, #[cfg(feature = "dag-pb")] DagPb, } impl TryFrom<u64> for IpldCodec { type Error = UnsupportedCodec; fn try_from(ccode: u64) -> core::result::Result<Self, Self::Error> { Ok(match ccode { 0x55 => Self::Raw, #[cfg(feature = "dag-cbor")] 0x71 => Self::DagCbor, #[cfg(feature = "dag-json")] 0x0129 => Self::DagJson, #[cfg(feature = "dag-pb")] 0x70 => Self::DagPb, _ => return Err(UnsupportedCodec(ccode)), }) } } impl From<IpldCodec> for u64 { fn from(mc: IpldCodec) -> Self { match mc { IpldCodec::Raw => 0x55, #[cfg(feature = "dag-cbor")] IpldCodec::DagCbor => 0x71, #[cfg(feature = "dag-json")] IpldCodec::DagJson => 0x0129, #[cfg(feature = "dag-pb")] IpldCodec::DagPb => 0x70, } } } impl From<RawCodec> for IpldCodec { fn from(_: RawCodec) -> Self { Self::Raw } } #[cfg(feature = "dag-cbor")] impl From<DagCborCodec> for IpldCodec { fn from(_: DagCborCodec) -> Self { Self::DagCbor } } #[cfg(feature = "dag-cbor")] impl From<IpldCodec> for DagCborCodec { fn from(_: IpldCodec) -> Self { Self } } #[cfg(feature = "dag-json")] impl From<DagJsonCodec> for IpldCodec { fn from(_: DagJsonCodec) -> Self { Self::DagJson } } #[cfg(feature = "dag-json")] impl From<IpldCodec> for DagJsonCodec { fn from(_: IpldCodec) -> Self { Self } } #[cfg(feature = "dag-pb")] impl From<DagPbCodec> for IpldCodec { fn from(_: DagPbCodec) -> Self { Self::DagPb } } #[cfg(feature = "dag-pb")] impl From<IpldCodec> for DagPbCodec { fn from(_: IpldCodec) -> Self { Self } 
} impl Codec for IpldCodec {} impl Encode<IpldCodec> for Ipld { fn encode<W: Write>(&self, c: IpldCodec, w: &mut W) -> Result<()> { match c { IpldCodec::Raw => self.encode(RawCodec, w)?, #[cfg(feature = "dag-cbor")] IpldCodec::DagCbor => self.encode(DagCborCodec, w)?, #[cfg(feature = "dag-json")] IpldCodec::DagJson => self.encode(DagJsonCodec, w)?, #[cfg(feature = "dag-pb")] IpldCodec::DagPb => self.encode(DagPbCodec, w)?, }; Ok(()) } } impl Decode<IpldCodec> for Ipld { fn decode<R: Read + Seek>(c: IpldCodec, r: &mut R) -> Result<Self> { Ok(match c { IpldCodec::Raw => Self::decode(RawCodec, r)?, #[cfg(feature = "dag-cbor")] IpldCodec::DagCbor => Self::decode(DagCborCodec, r)?, #[cfg(feature = "dag-json")] IpldCodec::DagJson => Self::decode(DagJsonCodec, r)?, #[cfg(feature = "dag-pb")] IpldCodec::DagPb => Self::decode(DagPbCodec, r)?, }) } } impl References<IpldCodec> for Ipld { fn references<R: Read + Seek, E: Extend<Cid>>( c: IpldCodec, r: &mut R, set: &mut E, ) -> Result<()> { match c { IpldCodec::Raw => <Self as References<RawCodec>>::references(RawCodec, r, set)?, #[cfg(feature = "dag-cbor")] IpldCodec::DagCbor => { <Self as References<DagCborCodec>>::references(DagCborCodec, r, set)? } #[cfg(feature = "dag-json")] IpldCodec::DagJson => { <Self as References<DagJsonCodec>>::references(DagJsonCodec, r, set)? 
} #[cfg(feature = "dag-pb")] IpldCodec::DagPb => <Self as References<DagPbCodec>>::references(DagPbCodec, r, set)?, }; Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn raw_encode() { let data = Ipld::Bytes([0x22, 0x33, 0x44].to_vec()); let result = IpldCodec::Raw.encode(&data).unwrap(); assert_eq!(result, vec![0x22, 0x33, 0x44]); } #[test] fn raw_decode() { let data = [0x22, 0x33, 0x44]; let result: Ipld = IpldCodec::Raw.decode(&data).unwrap(); assert_eq!(result, Ipld::Bytes(data.to_vec())); } #[cfg(feature = "dag-cbor")] #[test] fn dag_cbor_encode() { let data = Ipld::Bytes([0x22, 0x33, 0x44].to_vec()); let result = IpldCodec::DagCbor.encode(&data).unwrap(); assert_eq!(result, vec![0x43, 0x22, 0x33, 0x44]); } #[cfg(feature = "dag-cbor")] #[test] fn dag_cbor_decode() { let data = [0x43, 0x22, 0x33, 0x44]; let result: Ipld = IpldCodec::DagCbor.decode(&data).unwrap(); assert_eq!(result, Ipld::Bytes(vec![0x22, 0x33, 0x44])); } #[cfg(feature = "dag-json")] #[test] fn dag_json_encode() { let data = Ipld::Bool(true); let result = String::from_utf8(IpldCodec::DagJson.encode(&data).unwrap().to_vec()).unwrap(); assert_eq!(result, "true"); } #[cfg(feature = "dag-json")] #[test] fn dag_json_decode() { let data = b"true"; let result: Ipld = IpldCodec::DagJson.decode(data).unwrap(); assert_eq!(result, Ipld::Bool(true)); } #[cfg(feature = "dag-pb")] #[test] fn dag_pb_encode() { let mut data_map = std::collections::BTreeMap::<String, Ipld>::new(); data_map.insert("Data".to_string(), Ipld::Bytes(b"data".to_vec())); data_map.insert("Links".to_string(), Ipld::List(vec![])); let data = Ipld::Map(data_map); let result = IpldCodec::DagPb.encode(&data).unwrap(); assert_eq!(result, vec![0x0a, 0x04, 0x64, 0x61, 0x74, 0x61]); } #[cfg(feature = "dag-pb")] #[test] fn dag_pb_decode() { let mut data_map = std::collections::BTreeMap::<String, Ipld>::new(); data_map.insert("Data".to_string(), Ipld::Bytes(b"data".to_vec())); data_map.insert("Links".to_string(), Ipld::List(vec![])); 
let expected = Ipld::Map(data_map); let data = [0x0a, 0x04, 0x64, 0x61, 0x74, 0x61]; let result: Ipld = IpldCodec::DagPb.decode(&data).unwrap(); assert_eq!(result, expected); } }
#[cfg(feature = "dag-cbor")] use crate::cbor::DagCborCodec; use crate::cid::Cid; use crate::codec::{Codec, Decode, Encode, Refere
es<RawCodec>>::references(RawCodec, r, set)?, #[cfg(feature = "dag-cbor")] IpldCodec::DagCbor => { <Self as References<DagCborCodec>>::references(DagCborCodec, r, set)? } #[cfg(feature = "dag-json")] IpldCodec::DagJson => { <Self as References<DagJsonCodec>>::references(DagJsonCodec, r, set)? } #[cfg(feature = "dag-pb")] IpldCodec::DagPb => <Self as References<DagPbCodec>>::references(DagPbCodec, r, set)?, }; Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn raw_encode() { let data = Ipld::Bytes([0x22, 0x33, 0x44].to_vec()); let result = IpldCodec::Raw.encode(&data).unwrap(); assert_eq!(result, vec![0x22, 0x33, 0x44]); } #[test] fn raw_decode() { let data = [0x22, 0x33, 0x44]; let result: Ipld = IpldCodec::Raw.decode(&data).unwrap(); assert_eq!(result, Ipld::Bytes(data.to_vec())); } #[cfg(feature = "dag-cbor")] #[test] fn dag_cbor_encode() { let data = Ipld::Bytes([0x22, 0x33, 0x44].to_vec()); let result = IpldCodec::DagCbor.encode(&data).unwrap(); assert_eq!(result, vec![0x43, 0x22, 0x33, 0x44]); } #[cfg(feature = "dag-cbor")] #[test] fn dag_cbor_decode() { let data = [0x43, 0x22, 0x33, 0x44]; let result: Ipld = IpldCodec::DagCbor.decode(&data).unwrap(); assert_eq!(result, Ipld::Bytes(vec![0x22, 0x33, 0x44])); } #[cfg(feature = "dag-json")] #[test] fn dag_json_encode() { let data = Ipld::Bool(true); let result = String::from_utf8(IpldCodec::DagJson.encode(&data).unwrap().to_vec()).unwrap(); assert_eq!(result, "true"); } #[cfg(feature = "dag-json")] #[test] fn dag_json_decode() { let data = b"true"; let result: Ipld = IpldCodec::DagJson.decode(data).unwrap(); assert_eq!(result, Ipld::Bool(true)); } #[cfg(feature = "dag-pb")] #[test] fn dag_pb_encode() { let mut data_map = std::collections::BTreeMap::<String, Ipld>::new(); data_map.insert("Data".to_string(), Ipld::Bytes(b"data".to_vec())); data_map.insert("Links".to_string(), Ipld::List(vec![])); let data = Ipld::Map(data_map); let result = IpldCodec::DagPb.encode(&data).unwrap(); assert_eq!(result, 
vec![0x0a, 0x04, 0x64, 0x61, 0x74, 0x61]); } #[cfg(feature = "dag-pb")] #[test] fn dag_pb_decode() { let mut data_map = std::collections::BTreeMap::<String, Ipld>::new(); data_map.insert("Data".to_string(), Ipld::Bytes(b"data".to_vec())); data_map.insert("Links".to_string(), Ipld::List(vec![])); let expected = Ipld::Map(data_map); let data = [0x0a, 0x04, 0x64, 0x61, 0x74, 0x61]; let result: Ipld = IpldCodec::DagPb.decode(&data).unwrap(); assert_eq!(result, expected); } }
nces}; use crate::error::{Result, UnsupportedCodec}; use crate::ipld::Ipld; #[cfg(feature = "dag-json")] use crate::json::DagJsonCodec; #[cfg(feature = "dag-pb")] use crate::pb::DagPbCodec; use crate::raw::RawCodec; use core::convert::TryFrom; use std::io::{Read, Seek, Write}; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum IpldCodec { Raw, #[cfg(feature = "dag-cbor")] DagCbor, #[cfg(feature = "dag-json")] DagJson, #[cfg(feature = "dag-pb")] DagPb, } impl TryFrom<u64> for IpldCodec { type Error = UnsupportedCodec; fn try_from(ccode: u64) -> core::result::Result<Self, Self::Error> { Ok(match ccode { 0x55 => Self::Raw, #[cfg(feature = "dag-cbor")] 0x71 => Self::DagCbor, #[cfg(feature = "dag-json")] 0x0129 => Self::DagJson, #[cfg(feature = "dag-pb")] 0x70 => Self::DagPb, _ => return Err(UnsupportedCodec(ccode)), }) } } impl From<IpldCodec> for u64 { fn from(mc: IpldCodec) -> Self { match mc { IpldCodec::Raw => 0x55, #[cfg(feature = "dag-cbor")] IpldCodec::DagCbor => 0x71, #[cfg(feature = "dag-json")] IpldCodec::DagJson => 0x0129, #[cfg(feature = "dag-pb")] IpldCodec::DagPb => 0x70, } } } impl From<RawCodec> for IpldCodec { fn from(_: RawCodec) -> Self { Self::Raw } } #[cfg(feature = "dag-cbor")] impl From<DagCborCodec> for IpldCodec { fn from(_: DagCborCodec) -> Self { Self::DagCbor } } #[cfg(feature = "dag-cbor")] impl From<IpldCodec> for DagCborCodec { fn from(_: IpldCodec) -> Self { Self } } #[cfg(feature = "dag-json")] impl From<DagJsonCodec> for IpldCodec { fn from(_: DagJsonCodec) -> Self { Self::DagJson } } #[cfg(feature = "dag-json")] impl From<IpldCodec> for DagJsonCodec { fn from(_: IpldCodec) -> Self { Self } } #[cfg(feature = "dag-pb")] impl From<DagPbCodec> for IpldCodec { fn from(_: DagPbCodec) -> Self { Self::DagPb } } #[cfg(feature = "dag-pb")] impl From<IpldCodec> for DagPbCodec { fn from(_: IpldCodec) -> Self { Self } } impl Codec for IpldCodec {} impl Encode<IpldCodec> for Ipld { fn encode<W: Write>(&self, c: IpldCodec, w: &mut W) -> 
Result<()> { match c { IpldCodec::Raw => self.encode(RawCodec, w)?, #[cfg(feature = "dag-cbor")] IpldCodec::DagCbor => self.encode(DagCborCodec, w)?, #[cfg(feature = "dag-json")] IpldCodec::DagJson => self.encode(DagJsonCodec, w)?, #[cfg(feature = "dag-pb")] IpldCodec::DagPb => self.encode(DagPbCodec, w)?, }; Ok(()) } } impl Decode<IpldCodec> for Ipld { fn decode<R: Read + Seek>(c: IpldCodec, r: &mut R) -> Result<Self> { Ok(match c { IpldCodec::Raw => Self::decode(RawCodec, r)?, #[cfg(feature = "dag-cbor")] IpldCodec::DagCbor => Self::decode(DagCborCodec, r)?, #[cfg(feature = "dag-json")] IpldCodec::DagJson => Self::decode(DagJsonCodec, r)?, #[cfg(feature = "dag-pb")] IpldCodec::DagPb => Self::decode(DagPbCodec, r)?, }) } } impl References<IpldCodec> for Ipld { fn references<R: Read + Seek, E: Extend<Cid>>( c: IpldCodec, r: &mut R, set: &mut E, ) -> Result<()> { match c { IpldCodec::Raw => <Self as Referenc
random
[ { "content": "/// Marker trait for types supporting the `DagCborCodec`.\n\npub trait DagCbor: Encode<DagCborCodec> + Decode<DagCborCodec> {}\n\n\n\nimpl<T: Encode<DagCborCodec> + Decode<DagCborCodec>> DagCbor for T {}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use libipld_core::cid::Cid;\n\n...
Rust
crates/sui-config/src/node.rs
MystenLabs/sui
b180b663c0b755c97ea37ad57ff636fb04f2e158
use crate::genesis; use crate::Config; use anyhow::Result; use debug_ignore::DebugIgnore; use multiaddr::Multiaddr; use narwhal_config::Committee as ConsensusCommittee; use narwhal_config::Parameters as ConsensusParameters; use narwhal_crypto::ed25519::Ed25519PublicKey; use serde::{Deserialize, Serialize}; use std::net::SocketAddr; use std::path::{Path, PathBuf}; use sui_types::base_types::SuiAddress; use sui_types::committee::StakeUnit; use sui_types::crypto::{KeyPair, PublicKeyBytes}; #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "kebab-case")] pub struct NodeConfig { #[serde(default = "default_key_pair")] pub key_pair: KeyPair, pub db_path: PathBuf, #[serde(default = "default_grpc_address")] pub network_address: Multiaddr, #[serde(default = "default_metrics_address")] pub metrics_address: SocketAddr, #[serde(default = "default_json_rpc_address")] pub json_rpc_address: SocketAddr, #[serde(skip_serializing_if = "Option::is_none")] pub consensus_config: Option<ConsensusConfig>, pub genesis: Genesis, } fn default_key_pair() -> KeyPair { sui_types::crypto::get_key_pair().1 } fn default_grpc_address() -> Multiaddr { use multiaddr::multiaddr; multiaddr!(Ip4([0, 0, 0, 0]), Tcp(8080u16)) } fn default_metrics_address() -> SocketAddr { use std::net::{IpAddr, Ipv4Addr}; SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 9184) } pub fn default_json_rpc_address() -> SocketAddr { use std::net::{IpAddr, Ipv4Addr}; SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 9000) } impl Config for NodeConfig {} impl NodeConfig { pub fn key_pair(&self) -> &KeyPair { &self.key_pair } pub fn public_key(&self) -> PublicKeyBytes { *self.key_pair.public_key_bytes() } pub fn sui_address(&self) -> SuiAddress { SuiAddress::from(self.public_key()) } pub fn db_path(&self) -> &Path { &self.db_path } pub fn network_address(&self) -> &Multiaddr { &self.network_address } pub fn consensus_config(&self) -> Option<&ConsensusConfig> { self.consensus_config.as_ref() } pub fn 
genesis(&self) -> Result<&genesis::Genesis> { self.genesis.genesis() } } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(rename_all = "kebab-case")] pub struct ConsensusConfig { pub consensus_address: Multiaddr, pub consensus_db_path: PathBuf, #[serde(skip_serializing)] #[serde(default)] pub narwhal_config: DebugIgnore<ConsensusParameters>, pub narwhal_committee: DebugIgnore<ConsensusCommittee<Ed25519PublicKey>>, } impl ConsensusConfig { pub fn address(&self) -> &Multiaddr { &self.consensus_address } pub fn db_path(&self) -> &Path { &self.consensus_db_path } pub fn narwhal_config(&self) -> &ConsensusParameters { &self.narwhal_config } pub fn narwhal_committee(&self) -> &ConsensusCommittee<Ed25519PublicKey> { &self.narwhal_committee } } #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] #[serde(rename_all = "kebab-case")] pub struct ValidatorInfo { pub public_key: PublicKeyBytes, pub stake: StakeUnit, pub network_address: Multiaddr, } impl ValidatorInfo { pub fn sui_address(&self) -> SuiAddress { SuiAddress::from(self.public_key()) } pub fn public_key(&self) -> PublicKeyBytes { self.public_key } pub fn stake(&self) -> StakeUnit { self.stake } pub fn network_address(&self) -> &Multiaddr { &self.network_address } } #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] pub struct Genesis { #[serde(flatten)] location: GenesisLocation, #[serde(skip)] genesis: once_cell::sync::OnceCell<genesis::Genesis>, } impl Genesis { pub fn new(genesis: genesis::Genesis) -> Self { Self { location: GenesisLocation::InPlace { genesis }, genesis: Default::default(), } } pub fn new_from_file<P: Into<PathBuf>>(path: P) -> Self { Self { location: GenesisLocation::File { genesis_file_location: path.into(), }, genesis: Default::default(), } } fn genesis(&self) -> Result<&genesis::Genesis> { match &self.location { GenesisLocation::InPlace { genesis } => Ok(genesis), GenesisLocation::File { genesis_file_location, } => self .genesis .get_or_try_init(|| 
genesis::Genesis::load(&genesis_file_location)), } } } #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] #[serde(untagged)] enum GenesisLocation { InPlace { genesis: genesis::Genesis, }, File { #[serde(rename = "genesis-file-location")] genesis_file_location: PathBuf, }, } #[cfg(test)] mod tests { use super::Genesis; use crate::genesis; #[test] fn serialize_genesis_config_from_file() { let g = Genesis::new_from_file("path/to/file"); let s = serde_yaml::to_string(&g).unwrap(); assert_eq!("---\ngenesis-file-location: path/to/file\n", s); let loaded_genesis: Genesis = serde_yaml::from_str(&s).unwrap(); assert_eq!(g, loaded_genesis); } #[test] fn serialize_genesis_config_in_place() { let g = Genesis::new(genesis::Genesis::get_default_genesis()); let mut s = serde_yaml::to_string(&g).unwrap(); let loaded_genesis: Genesis = serde_yaml::from_str(&s).unwrap(); assert_eq!(g, loaded_genesis); s.push_str("\ngenesis-file-location: path/to/file"); let loaded_genesis: Genesis = serde_yaml::from_str(&s).unwrap(); assert_eq!(g, loaded_genesis); } #[test] fn load_genesis_config_from_file() { let file = tempfile::NamedTempFile::new().unwrap(); let genesis_config = Genesis::new_from_file(file.path()); let genesis = genesis::Genesis::get_default_genesis(); genesis.save(file.path()).unwrap(); let loaded_genesis = genesis_config.genesis().unwrap(); assert_eq!(&genesis, loaded_genesis); } }
use crate::genesis; use crate::Config; use anyhow::Result; use debug_ignore::DebugIgnore; use multiaddr::Multiaddr; use narwhal_config::Committee as ConsensusCommittee; use narwhal_config::Parameters as ConsensusParameters; use narwhal_crypto::ed25519::Ed25519PublicKey; use serde::{Deserialize, Serialize}; use std::net::SocketAddr; use std::path::{Path, PathBuf}; use sui_types::base_types::SuiAddress; use sui_types::committee::StakeUnit; use sui_types::crypto::{KeyPair, PublicKeyBytes}; #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "kebab-case")] pub struct NodeConfig { #[serde(default = "default_key_pair")] pub key_pair: KeyPair, pub db_path: PathBuf, #[serde(default = "default_grpc_address")] pub network_ad
#[serde(skip)] genesis: once_cell::sync::OnceCell<genesis::Genesis>, } impl Genesis { pub fn new(genesis: genesis::Genesis) -> Self { Self { location: GenesisLocation::InPlace { genesis }, genesis: Default::default(), } } pub fn new_from_file<P: Into<PathBuf>>(path: P) -> Self { Self { location: GenesisLocation::File { genesis_file_location: path.into(), }, genesis: Default::default(), } } fn genesis(&self) -> Result<&genesis::Genesis> { match &self.location { GenesisLocation::InPlace { genesis } => Ok(genesis), GenesisLocation::File { genesis_file_location, } => self .genesis .get_or_try_init(|| genesis::Genesis::load(&genesis_file_location)), } } } #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] #[serde(untagged)] enum GenesisLocation { InPlace { genesis: genesis::Genesis, }, File { #[serde(rename = "genesis-file-location")] genesis_file_location: PathBuf, }, } #[cfg(test)] mod tests { use super::Genesis; use crate::genesis; #[test] fn serialize_genesis_config_from_file() { let g = Genesis::new_from_file("path/to/file"); let s = serde_yaml::to_string(&g).unwrap(); assert_eq!("---\ngenesis-file-location: path/to/file\n", s); let loaded_genesis: Genesis = serde_yaml::from_str(&s).unwrap(); assert_eq!(g, loaded_genesis); } #[test] fn serialize_genesis_config_in_place() { let g = Genesis::new(genesis::Genesis::get_default_genesis()); let mut s = serde_yaml::to_string(&g).unwrap(); let loaded_genesis: Genesis = serde_yaml::from_str(&s).unwrap(); assert_eq!(g, loaded_genesis); s.push_str("\ngenesis-file-location: path/to/file"); let loaded_genesis: Genesis = serde_yaml::from_str(&s).unwrap(); assert_eq!(g, loaded_genesis); } #[test] fn load_genesis_config_from_file() { let file = tempfile::NamedTempFile::new().unwrap(); let genesis_config = Genesis::new_from_file(file.path()); let genesis = genesis::Genesis::get_default_genesis(); genesis.save(file.path()).unwrap(); let loaded_genesis = genesis_config.genesis().unwrap(); assert_eq!(&genesis, loaded_genesis); 
} }
dress: Multiaddr, #[serde(default = "default_metrics_address")] pub metrics_address: SocketAddr, #[serde(default = "default_json_rpc_address")] pub json_rpc_address: SocketAddr, #[serde(skip_serializing_if = "Option::is_none")] pub consensus_config: Option<ConsensusConfig>, pub genesis: Genesis, } fn default_key_pair() -> KeyPair { sui_types::crypto::get_key_pair().1 } fn default_grpc_address() -> Multiaddr { use multiaddr::multiaddr; multiaddr!(Ip4([0, 0, 0, 0]), Tcp(8080u16)) } fn default_metrics_address() -> SocketAddr { use std::net::{IpAddr, Ipv4Addr}; SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 9184) } pub fn default_json_rpc_address() -> SocketAddr { use std::net::{IpAddr, Ipv4Addr}; SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 9000) } impl Config for NodeConfig {} impl NodeConfig { pub fn key_pair(&self) -> &KeyPair { &self.key_pair } pub fn public_key(&self) -> PublicKeyBytes { *self.key_pair.public_key_bytes() } pub fn sui_address(&self) -> SuiAddress { SuiAddress::from(self.public_key()) } pub fn db_path(&self) -> &Path { &self.db_path } pub fn network_address(&self) -> &Multiaddr { &self.network_address } pub fn consensus_config(&self) -> Option<&ConsensusConfig> { self.consensus_config.as_ref() } pub fn genesis(&self) -> Result<&genesis::Genesis> { self.genesis.genesis() } } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(rename_all = "kebab-case")] pub struct ConsensusConfig { pub consensus_address: Multiaddr, pub consensus_db_path: PathBuf, #[serde(skip_serializing)] #[serde(default)] pub narwhal_config: DebugIgnore<ConsensusParameters>, pub narwhal_committee: DebugIgnore<ConsensusCommittee<Ed25519PublicKey>>, } impl ConsensusConfig { pub fn address(&self) -> &Multiaddr { &self.consensus_address } pub fn db_path(&self) -> &Path { &self.consensus_db_path } pub fn narwhal_config(&self) -> &ConsensusParameters { &self.narwhal_config } pub fn narwhal_committee(&self) -> &ConsensusCommittee<Ed25519PublicKey> { 
&self.narwhal_committee } } #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] #[serde(rename_all = "kebab-case")] pub struct ValidatorInfo { pub public_key: PublicKeyBytes, pub stake: StakeUnit, pub network_address: Multiaddr, } impl ValidatorInfo { pub fn sui_address(&self) -> SuiAddress { SuiAddress::from(self.public_key()) } pub fn public_key(&self) -> PublicKeyBytes { self.public_key } pub fn stake(&self) -> StakeUnit { self.stake } pub fn network_address(&self) -> &Multiaddr { &self.network_address } } #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] pub struct Genesis { #[serde(flatten)] location: GenesisLocation,
random
[ { "content": "#[serde_as]\n\n#[derive(Eq, PartialEq, Clone, Copy, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\nstruct ObjectKey(pub ObjectID, pub VersionNumber);\n\n\n\nimpl ObjectKey {\n\n pub const ZERO: ObjectKey = ObjectKey(ObjectID::ZERO, VersionNumber::MIN);\n\n\n\n pub fn max_for_id(id: &Obje...
Rust
examples/responders/src/main.rs
rotoclone/Rocket
3a7559edcec7c443e68e22e038aaa2d90ef27c23
#[macro_use] extern crate rocket; #[cfg(test)] mod tests; /****************** `Result`, `Option` `NameFile` Responder *******************/ use std::{io, env}; use rocket::tokio::fs; use rocket::data::{Capped, TempFile}; use rocket::response::NamedFile; const FILENAME: &str = "big_file.dat"; #[post("/file", data = "<file>")] async fn upload(mut file: Capped<TempFile<'_>>) -> io::Result<String> { file.persist_to(env::temp_dir().join(FILENAME)).await?; Ok(format!("{} bytes at {}", file.n.written, file.path().unwrap().display())) } #[get("/file")] async fn file() -> Option<NamedFile> { NamedFile::open(env::temp_dir().join(FILENAME)).await.ok() } #[delete("/file")] async fn delete() -> Option<()> { fs::remove_file(env::temp_dir().join(FILENAME)).await.ok() } /***************************** `Stream` Responder *****************************/ use rocket::tokio::select; use rocket::tokio::time::{self, Duration}; use rocket::futures::stream::{repeat, StreamExt}; use rocket::Shutdown; use rocket::response::stream::TextStream; #[get("/stream/hi")] fn many_his() -> TextStream![&'static str] { TextStream(repeat("hi").take(100)) } #[get("/stream/hi/<n>")] fn one_hi_per_ms(mut shutdown: Shutdown, n: u8) -> TextStream![&'static str] { TextStream! { let mut interval = time::interval(Duration::from_millis(n as u64)); loop { select! { _ = interval.tick() => yield "hi", _ = &mut shutdown => { yield "goodbye"; break; } }; } } } /***************************** `Redirect` Responder ***************************/ use rocket::response::Redirect; #[get("/redir")] fn redir_root() -> Redirect { Redirect::to(uri!(redir_login)) } #[get("/redir/login")] fn redir_login() -> &'static str { "Hi! Please log in before continuing." 
} #[get("/redir/<name>")] fn maybe_redir(name: &str) -> Result<&'static str, Redirect> { match name { "Sergio" => Ok("Hello, Sergio!"), _ => Err(Redirect::to(uri!(redir_login))), } } /***************************** `content` Responders ***************************/ use rocket::Request; use rocket::response::content; #[get("/content", format = "xml", rank = 1)] fn xml() -> content::Xml<&'static str> { content::Xml("<payload>I'm here</payload>") } #[get("/content", format = "json", rank = 2)] fn json() -> content::Json<&'static str> { content::Json(r#"{ "payload": "I'm here" }"#) } #[catch(404)] fn not_found(request: &Request<'_>) -> content::Html<String> { let html = match request.format() { Some(ref mt) if !(mt.is_xml() || mt.is_html()) => { format!("<p>'{}' requests are not supported.</p>", mt) } _ => format!("<p>Sorry, '{}' is an invalid path! Try \ /hello/&lt;name&gt;/&lt;age&gt; instead.</p>", request.uri()) }; content::Html(html) } /******************************* `Either` Responder ***************************/ use rocket::Either; use rocket::response::content::{Json, MsgPack}; use rocket::http::uncased::AsUncased; #[get("/content/<kind>")] fn json_or_msgpack(kind: &str) -> Either<Json<&'static str>, MsgPack<&'static [u8]>> { if kind.as_uncased() == "msgpack" { Either::Right(MsgPack(&[162, 104, 105])) } else { Either::Left(Json("\"hi\"")) } } /******************************* Custom Responder *****************************/ use std::borrow::Cow; use rocket::response::content::Html; #[derive(Responder)] enum StoredData { File(Option<NamedFile>), String(Cow<'static, str>), Bytes(Vec<u8>), #[response(status = 401)] NotAuthorized(Html<&'static str>), } #[derive(FromFormField, UriDisplayQuery)] enum Kind { File, String, Bytes } #[get("/custom?<kind>")] async fn custom(kind: Option<Kind>) -> StoredData { match kind { Some(Kind::File) => { let path = env::temp_dir().join(FILENAME); StoredData::File(NamedFile::open(path).await.ok()) }, Some(Kind::String) => 
StoredData::String("Hey, I'm some data.".into()), Some(Kind::Bytes) => StoredData::Bytes(vec![72, 105]), None => StoredData::NotAuthorized(Html("No no no!")) } } #[launch] fn rocket() -> _ { rocket::build() .mount("/", routes![many_his, one_hi_per_ms, file, upload, delete]) .mount("/", routes![redir_root, redir_login, maybe_redir]) .mount("/", routes![xml, json, json_or_msgpack]) .mount("/", routes![custom]) .register("/", catchers![not_found]) }
#[macro_use] extern crate rocket; #[cfg(test)] mod tests; /****************** `Result`, `Option` `NameFile` Responder *******************/ use std::{io, env}; use rocket::tokio::fs; use rocket::data::{Capped, TempFile}; use rocket::response::NamedFile; const FILENAME: &str = "big_file.dat"; #[post("/file", data = "<file>")] async fn upload(mut file: Capped<TempFile<'_>>) -> io::Result<String> { file.persist_to(env::temp_dir().join(FILENAME)).await?; Ok(format!("{} bytes at {}", file.n.written, file.path().unwrap().display())) } #[get("/file")] async fn file() -> Option<NamedFile> { NamedFile::open(env::temp_dir().join(FILENAME)).await.ok() } #[delete("/file")] async fn delete() -> Option<()> { fs::remove_file(env::temp_dir().join(FILENAME)).await.ok() } /***************************** `Stream` Responder *****************************/ use rocket::tokio::select; use rocket::tokio::time::{self, Duration}; use rocket::futures::stream::{repeat, StreamExt}; use rocket::Shutdown; use rocket::response::stream::TextStream; #[get("/stream/hi")] fn many_his() -> TextStream![&'static str] { TextStream(repeat("hi").take(100)) } #[get("/stream/hi/<n>")] fn one_hi_per_ms(mut shutdown: Shutdown, n: u8) -> TextStream![&'static str] { TextStream! { let mut interval = time::interval(Duration::from_millis(n as u64)); loop { select! { _ = interval.tick() => yield "hi", _ = &mut shutdown => { yield "goodbye"; break; } }; } } } /***************************** `Redirect` Responder ***************************/ use rocket::response::Redirect; #[get("/redir")] fn redir_root() -> Redirect { Redirect::to(uri!(redir_login)) } #[get("/redir/login")] fn redir_login() -> &'static str { "Hi! Please log in before continuing." 
} #[get("/redir/<name>")] fn maybe_redir(name: &str) -> Result<&'static str, Redirect> { match name { "Sergio" => Ok("Hello, Sergio!"), _ => Err(Redirect::to(uri!(redir_login))), } } /***************************** `content` Responders ***************************/ use rocket::Request; use rocket::response::content; #[get("/content", format = "xml", rank = 1)] fn xml() -> content::Xml<&'static str> { content::Xml("<payload>I'm here</payload>") } #[get("/content", format = "json", rank = 2)] fn json() -> content::Json<&'static str> { content::Json(r#"{ "payload": "I'm here" }"#) } #[catch(404)] fn not_found(request: &Request<'_>) -> content::Html<String> { let html = match request.format() { Some(ref mt) if !(mt.is_xml() || mt.is_html()) => { format!("<p>'{}' requests are not supported.</p>", mt) } _ => format!("<p>Sorry, '{}' is an invalid path! Try \ /hello/&lt;name&gt;/&lt;age&gt; instead.</p>", request.uri()) }; content::Html(html) } /******************************* `Either` Responder ***************************/ use rocket::Either; use rocket::response::content::{Json, MsgPack}; use rocket::http::uncased::AsUncased; #[get("/content/<kind>")] fn json_or_msgpack(kind: &str) -> Either<Json<&'static str>, MsgPack<&'static [u8]>> { if kind.as_uncased() == "msgpack" { Either::Right(MsgPack(&[162, 104, 105])) } else { Either::Left(Json("\"hi\"")) } } /******************************* Custom Responder *****************************/ use std::borrow::Cow; use rocket::response::content::Html; #[derive(Responder)] enum StoredData { File(Option<NamedFile>), String(Cow<'static, str>), Bytes(Vec<u8>), #[response(status = 401)] NotAuthorized(Html<&'static str>), } #[derive(FromFormField, UriDisplayQuery)] enum Kind { File, String, Bytes } #[get("/custom?<kind>")] async fn custom(kind: Option<Kind>) -> StoredData { match kind { Some(Kind::File) => { let path = env::temp_dir().join(FILENAME); StoredData::File(NamedFile::open(path).await.ok()) }, Some(Kind::String) => 
StoredData::String("Hey, I'm some data.".into()), Some(Kind::Bytes) => StoredData::Bytes(vec![72, 105]), None => StoredData::NotAuthorized(Html("No no no!")) } } #[launch] fn rocket() -> _ { rocket::build() .mount("/", routes![many_his, one_hi_per_ms, file, upload, delete]) .mount("/", routes![redir_root, redir_login, maybe_redir]) .
mount("/", routes![xml, json, json_or_msgpack]) .mount("/", routes![custom]) .register("/", catchers![not_found]) }
function_block-function_prefix_line
[ { "content": "#[rocket::post(\"/\", data = \"<_data>\", format = \"json\")]\n\nfn index(_data: rocket::Data) -> &'static str { \"json\" }\n\n\n", "file_path": "core/lib/tests/replace-content-type-518.rs", "rank": 3, "score": 552084.1880964399 }, { "content": "fn read_file_content(path: &str)...
Rust
crates/shell/src/minifb/window.rs
Dmitry-Borodin/orbtk
235e0d84f7914605e28b8c313e4f21d00e6208b0
use std::{cell::RefCell, rc::Rc, sync::mpsc}; use derive_more::Constructor; use super::{KeyState, MouseState, WindowState, CONSOLE}; use crate::{ event::{ButtonState, KeyEvent, MouseButton, MouseEvent}, render::RenderContext2D, window_adapter::WindowAdapter, WindowRequest, }; #[derive(Constructor)] pub struct Window<A> where A: WindowAdapter, { window: minifb::Window, adapter: A, render_context: RenderContext2D, request_receiver: Option<mpsc::Receiver<WindowRequest>>, window_state: WindowState, mouse: MouseState, update: bool, redraw: bool, close: bool, key_states: Vec<KeyState>, key_events: Rc<RefCell<Vec<KeyEvent>>>, } impl<A> Window<A> where A: WindowAdapter, { fn push_mouse_event(&mut self, pressed: bool, button: MouseButton) { let state = if pressed { ButtonState::Down } else { ButtonState::Up }; self.adapter.mouse_event(MouseEvent { x: self.mouse.mouse_pos.0 as f64, y: self.mouse.mouse_pos.1 as f64, button, state, }); } fn push_key_down_event(&mut self, index: usize) { let key_repeat = match self.key_states.get(index).unwrap().minifb_key { minifb::Key::Left | minifb::Key::Right | minifb::Key::Up | minifb::Key::Down | minifb::Key::Backspace | minifb::Key::Delete => minifb::KeyRepeat::Yes, _ => minifb::KeyRepeat::No, }; if self .window .is_key_pressed(self.key_states.get(index).unwrap().minifb_key, key_repeat) { self.adapter.key_event(KeyEvent { key: self.key_states.get(index).unwrap().key, state: ButtonState::Down, text: String::default(), }); self.update = true; } } fn push_key_up_event(&mut self, index: usize) { if self .window .is_key_released(self.key_states.get(index).unwrap().minifb_key) { self.adapter.key_event(KeyEvent { key: self.key_states.get(index).unwrap().key, state: ButtonState::Up, text: String::default(), }); self.update = true; } } pub fn is_open(&self) -> bool { self.window.is_open() && !self.close } pub fn drain_events(&mut self) { self.window.update(); if let Some(pos) = self.window.get_mouse_pos(minifb::MouseMode::Discard) { if 
(pos.0.floor(), pos.1.floor()) != self.mouse.mouse_pos { self.adapter.mouse(pos.0 as f64, pos.1 as f64); self.mouse.mouse_pos = (pos.0.floor(), pos.1.floor()); self.update = true; } } let left_button_down = self.window.get_mouse_down(minifb::MouseButton::Left); let middle_button_down = self.window.get_mouse_down(minifb::MouseButton::Middle); let right_button_down = self.window.get_mouse_down(minifb::MouseButton::Right); if left_button_down != self.mouse.button_left { if left_button_down { self.push_mouse_event(true, MouseButton::Left); } else { self.push_mouse_event(false, MouseButton::Left); } self.mouse.button_left = left_button_down; self.update = true; } if middle_button_down != self.mouse.button_middle { if middle_button_down { self.push_mouse_event(true, MouseButton::Middle); } else { self.push_mouse_event(false, MouseButton::Middle); } self.mouse.button_middle = middle_button_down; self.update = true; } if right_button_down != self.mouse.button_right { if right_button_down { self.push_mouse_event(true, MouseButton::Right); } else { self.push_mouse_event(false, MouseButton::Right); } self.mouse.button_right = right_button_down; self.update = true; } if let Some(delta) = self.window.get_scroll_wheel() { self.adapter.scroll(delta.0 as f64, delta.1 as f64); self.update = true; } if self.window_state.size != self.window.get_size() { self.window_state.size = self.window.get_size(); self.render_context.resize( self.window_state.size.0 as f64, self.window_state.size.1 as f64, ); self.adapter.resize( self.window_state.size.0 as f64, self.window_state.size.1 as f64, ); self.update = true; } if self.window_state.active != self.window.is_active() { self.adapter.active(self.window.is_active()); self.window_state.active = self.window.is_active(); } while let Some(event) = self.key_events.borrow_mut().pop() { self.adapter.key_event(event); self.update = true; } for i in 0..self.key_states.len() { self.push_key_down_event(i); self.push_key_up_event(i); } } pub fn 
receive_requests(&mut self) { if let Some(request_receiver) = &self.request_receiver { for request in request_receiver.try_iter() { match request { WindowRequest::Redraw => { self.update = true; self.redraw = true; } WindowRequest::ChangeTitle(title) => { self.window.set_title(&title); self.update = true; self.redraw = true; } WindowRequest::Close => { self.close = true; } } } } } pub fn update(&mut self) { if !self.update { return; } self.adapter.run(&mut self.render_context); self.update = false; self.redraw = true; } pub fn render(&mut self) { if self.redraw { if let Some(data) = self.render_context.data() { let _ = self.window.update_with_buffer( data, self.window_state.size.0 as usize, self.window_state.size.1 as usize, ); self.redraw = false; } } } }
use std::{cell::RefCell, rc::Rc, sync::mpsc}; use derive_more::Constructor; use super::{KeyState, MouseState, WindowState, CONSOLE}; use crate::{ event::{ButtonState, KeyEvent, MouseButton, MouseEvent}, render::RenderContext2D, window_adapter::WindowAdapter, WindowRequest, }; #[derive(Constructor)] pub struct Window<A> where A: WindowAdapter, { window: minifb::Window, adapter: A, render_context: RenderContext2D, request_receiver: Option<mpsc::Receiver<WindowRequest>>, window_state: WindowState, mouse: MouseState, update: bool, redraw: bool, close: bool, key_states: Vec<KeyState>, key_events: Rc<RefCell<Vec<KeyEvent>>>, } impl<A> Window<A> where A: WindowAdapter, { fn push_mouse_event(&mut self, pressed: bool, button: MouseButton) { let state = if pressed { ButtonState::Down } else { ButtonState::Up }; self.adapter.mouse_event(MouseEvent { x: self.mouse.mouse_pos.0 as f64, y: self.mouse.mouse_pos.1 as f64, button, state, }); } fn push_key_down_event(&mut self, index: usize) { let key_repeat = match self.key_states.get(index).unwrap().minifb_key { minifb::Key::Left | minifb::Key::Right |
fn push_key_up_event(&mut self, index: usize) { if self .window .is_key_released(self.key_states.get(index).unwrap().minifb_key) { self.adapter.key_event(KeyEvent { key: self.key_states.get(index).unwrap().key, state: ButtonState::Up, text: String::default(), }); self.update = true; } } pub fn is_open(&self) -> bool { self.window.is_open() && !self.close } pub fn drain_events(&mut self) { self.window.update(); if let Some(pos) = self.window.get_mouse_pos(minifb::MouseMode::Discard) { if (pos.0.floor(), pos.1.floor()) != self.mouse.mouse_pos { self.adapter.mouse(pos.0 as f64, pos.1 as f64); self.mouse.mouse_pos = (pos.0.floor(), pos.1.floor()); self.update = true; } } let left_button_down = self.window.get_mouse_down(minifb::MouseButton::Left); let middle_button_down = self.window.get_mouse_down(minifb::MouseButton::Middle); let right_button_down = self.window.get_mouse_down(minifb::MouseButton::Right); if left_button_down != self.mouse.button_left { if left_button_down { self.push_mouse_event(true, MouseButton::Left); } else { self.push_mouse_event(false, MouseButton::Left); } self.mouse.button_left = left_button_down; self.update = true; } if middle_button_down != self.mouse.button_middle { if middle_button_down { self.push_mouse_event(true, MouseButton::Middle); } else { self.push_mouse_event(false, MouseButton::Middle); } self.mouse.button_middle = middle_button_down; self.update = true; } if right_button_down != self.mouse.button_right { if right_button_down { self.push_mouse_event(true, MouseButton::Right); } else { self.push_mouse_event(false, MouseButton::Right); } self.mouse.button_right = right_button_down; self.update = true; } if let Some(delta) = self.window.get_scroll_wheel() { self.adapter.scroll(delta.0 as f64, delta.1 as f64); self.update = true; } if self.window_state.size != self.window.get_size() { self.window_state.size = self.window.get_size(); self.render_context.resize( self.window_state.size.0 as f64, self.window_state.size.1 as f64, ); 
self.adapter.resize( self.window_state.size.0 as f64, self.window_state.size.1 as f64, ); self.update = true; } if self.window_state.active != self.window.is_active() { self.adapter.active(self.window.is_active()); self.window_state.active = self.window.is_active(); } while let Some(event) = self.key_events.borrow_mut().pop() { self.adapter.key_event(event); self.update = true; } for i in 0..self.key_states.len() { self.push_key_down_event(i); self.push_key_up_event(i); } } pub fn receive_requests(&mut self) { if let Some(request_receiver) = &self.request_receiver { for request in request_receiver.try_iter() { match request { WindowRequest::Redraw => { self.update = true; self.redraw = true; } WindowRequest::ChangeTitle(title) => { self.window.set_title(&title); self.update = true; self.redraw = true; } WindowRequest::Close => { self.close = true; } } } } } pub fn update(&mut self) { if !self.update { return; } self.adapter.run(&mut self.render_context); self.update = false; self.redraw = true; } pub fn render(&mut self) { if self.redraw { if let Some(data) = self.render_context.data() { let _ = self.window.update_with_buffer( data, self.window_state.size.0 as usize, self.window_state.size.1 as usize, ); self.redraw = false; } } } }
minifb::Key::Up | minifb::Key::Down | minifb::Key::Backspace | minifb::Key::Delete => minifb::KeyRepeat::Yes, _ => minifb::KeyRepeat::No, }; if self .window .is_key_pressed(self.key_states.get(index).unwrap().minifb_key, key_repeat) { self.adapter.key_event(KeyEvent { key: self.key_states.get(index).unwrap().key, state: ButtonState::Down, text: String::default(), }); self.update = true; } }
function_block-function_prefix_line
[ { "content": "fn get_mouse_button(button: event::MouseButton) -> MouseButton {\n\n match button {\n\n event::MouseButton::Wheel => MouseButton::Middle,\n\n event::MouseButton::Right => MouseButton::Right,\n\n _ => MouseButton::Left,\n\n }\n\n}\n\n\n", "file_path": "crates/shell/sr...
Rust
crates/ra_syntax/src/ast/edit.rs
ztlpn/rust-analyzer
6b9bd7bdd2712a7e85d6bfc70c231dbe36c2e585
use std::{iter, ops::RangeInclusive}; use arrayvec::ArrayVec; use rustc_hash::FxHashMap; use crate::{ algo, ast::{ self, make::{self, tokens}, AstNode, TypeBoundsOwner, }, AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind::{ATTR, COMMENT, WHITESPACE}, SyntaxNode, SyntaxToken, T, }; impl ast::FnDef { #[must_use] pub fn with_body(&self, body: ast::Block) -> ast::FnDef { let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new(); let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() { old_body.syntax().clone().into() } else if let Some(semi) = self.semicolon_token() { to_insert.push(make::tokens::single_space().into()); semi.into() } else { to_insert.push(make::tokens::single_space().into()); to_insert.push(body.syntax().clone().into()); return insert_children(self, InsertPosition::Last, to_insert.into_iter()); }; to_insert.push(body.syntax().clone().into()); let replace_range = RangeInclusive::new(old_body_or_semi.clone(), old_body_or_semi); replace_children(self, replace_range, to_insert.into_iter()) } } impl ast::ItemList { #[must_use] pub fn append_items(&self, items: impl Iterator<Item = ast::ImplItem>) -> ast::ItemList { let mut res = self.clone(); if !self.syntax().text().contains_char('\n') { res = res.make_multiline(); } items.for_each(|it| res = res.append_item(it)); res } #[must_use] pub fn append_item(&self, item: ast::ImplItem) -> ast::ItemList { let (indent, position) = match self.impl_items().last() { Some(it) => ( leading_indent(it.syntax()).unwrap_or_default().to_string(), InsertPosition::After(it.syntax().clone().into()), ), None => match self.l_curly() { Some(it) => ( " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(), InsertPosition::After(it), ), None => return self.clone(), }, }; let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); let to_insert: ArrayVec<[SyntaxElement; 2]> = [ws.ws().into(), item.syntax().clone().into()].into(); insert_children(self, position, 
to_insert.into_iter()) } fn l_curly(&self) -> Option<SyntaxElement> { self.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) } fn make_multiline(&self) -> ast::ItemList { let l_curly = match self.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) { Some(it) => it, None => return self.clone(), }; let sibling = match l_curly.next_sibling_or_token() { Some(it) => it, None => return self.clone(), }; let existing_ws = match sibling.as_token() { None => None, Some(tok) if tok.kind() != WHITESPACE => None, Some(ws) => { if ws.text().contains('\n') { return self.clone(); } Some(ws.clone()) } }; let indent = leading_indent(self.syntax()).unwrap_or("".into()); let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); let to_insert = iter::once(ws.ws().into()); match existing_ws { None => insert_children(self, InsertPosition::After(l_curly), to_insert), Some(ws) => { replace_children(self, RangeInclusive::new(ws.clone().into(), ws.into()), to_insert) } } } } impl ast::RecordFieldList { #[must_use] pub fn append_field(&self, field: &ast::RecordField) -> ast::RecordFieldList { self.insert_field(InsertPosition::Last, field) } #[must_use] pub fn insert_field( &self, position: InsertPosition<&'_ ast::RecordField>, field: &ast::RecordField, ) -> ast::RecordFieldList { let is_multiline = self.syntax().text().contains_char('\n'); let ws; let space = if is_multiline { ws = tokens::WsBuilder::new(&format!( "\n{} ", leading_indent(self.syntax()).unwrap_or("".into()) )); ws.ws() } else { tokens::single_space() }; let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new(); to_insert.push(space.into()); to_insert.push(field.syntax().clone().into()); to_insert.push(tokens::comma().into()); macro_rules! after_l_curly { () => {{ let anchor = match self.l_curly() { Some(it) => it, None => return self.clone(), }; InsertPosition::After(anchor) }}; } macro_rules! 
after_field { ($anchor:expr) => { if let Some(comma) = $anchor .syntax() .siblings_with_tokens(Direction::Next) .find(|it| it.kind() == T![,]) { InsertPosition::After(comma) } else { to_insert.insert(0, tokens::comma().into()); InsertPosition::After($anchor.syntax().clone().into()) } }; }; let position = match position { InsertPosition::First => after_l_curly!(), InsertPosition::Last => { if !is_multiline { to_insert.pop(); } match self.fields().last() { Some(it) => after_field!(it), None => after_l_curly!(), } } InsertPosition::Before(anchor) => { InsertPosition::Before(anchor.syntax().clone().into()) } InsertPosition::After(anchor) => after_field!(anchor), }; insert_children(self, position, to_insert.iter().cloned()) } fn l_curly(&self) -> Option<SyntaxElement> { self.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) } } impl ast::TypeParam { #[must_use] pub fn remove_bounds(&self) -> ast::TypeParam { let colon = match self.colon_token() { Some(it) => it, None => return self.clone(), }; let end = match self.type_bound_list() { Some(it) => it.syntax().clone().into(), None => colon.clone().into(), }; replace_children(self, RangeInclusive::new(colon.into(), end), iter::empty()) } } #[must_use] pub fn strip_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N { N::cast(strip_attrs_and_docs_inner(node.syntax().clone())).unwrap() } fn strip_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode { while let Some(start) = node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT) { let end = match &start.next_sibling_or_token() { Some(el) if el.kind() == WHITESPACE => el.clone(), Some(_) | None => start.clone(), }; node = algo::replace_children(&node, RangeInclusive::new(start, end), &mut iter::empty()); } node } #[must_use] pub fn replace_descendants<N: AstNode, D: AstNode>( parent: &N, replacement_map: impl Iterator<Item = (D, D)>, ) -> N { let map = replacement_map .map(|(from, to)| (from.syntax().clone().into(), 
to.syntax().clone().into())) .collect::<FxHashMap<_, _>>(); let new_syntax = algo::replace_descendants(parent.syntax(), &map); N::cast(new_syntax).unwrap() } #[derive(Debug, Clone, Copy)] pub struct IndentLevel(pub u8); impl From<u8> for IndentLevel { fn from(level: u8) -> IndentLevel { IndentLevel(level) } } impl IndentLevel { pub fn from_node(node: &SyntaxNode) -> IndentLevel { let first_token = match node.first_token() { Some(it) => it, None => return IndentLevel(0), }; for ws in prev_tokens(first_token).filter_map(ast::Whitespace::cast) { let text = ws.syntax().text(); if let Some(pos) = text.rfind('\n') { let level = text[pos + 1..].chars().count() / 4; return IndentLevel(level as u8); } } IndentLevel(0) } pub fn increase_indent<N: AstNode>(self, node: N) -> N { N::cast(self._increase_indent(node.syntax().clone())).unwrap() } fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode { let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node .descendants_with_tokens() .filter_map(|el| el.into_token()) .filter_map(ast::Whitespace::cast) .filter(|ws| { let text = ws.syntax().text(); text.contains('\n') }) .map(|ws| { ( ws.syntax().clone().into(), make::tokens::whitespace(&format!( "{}{:width$}", ws.syntax().text(), "", width = self.0 as usize * 4 )) .into(), ) }) .collect(); algo::replace_descendants(&node, &replacements) } } fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> { for token in prev_tokens(node.first_token()?) 
{ if let Some(ws) = ast::Whitespace::cast(token.clone()) { let ws_text = ws.text(); if let Some(pos) = ws_text.rfind('\n') { return Some(ws_text[pos + 1..].into()); } } if token.text().contains('\n') { break; } } None } fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { iter::successors(Some(token), |token| token.prev_token()) } #[must_use] fn insert_children<N: AstNode>( parent: &N, position: InsertPosition<SyntaxElement>, mut to_insert: impl Iterator<Item = SyntaxElement>, ) -> N { let new_syntax = algo::insert_children(parent.syntax(), position, &mut to_insert); N::cast(new_syntax).unwrap() } #[must_use] fn replace_children<N: AstNode>( parent: &N, to_replace: RangeInclusive<SyntaxElement>, mut to_insert: impl Iterator<Item = SyntaxElement>, ) -> N { let new_syntax = algo::replace_children(parent.syntax(), to_replace, &mut to_insert); N::cast(new_syntax).unwrap() } #[test] fn test_increase_indent() { let arm_list = { let arm = make::match_arm(iter::once(make::placeholder_pat().into()), make::expr_unit()); make::match_arm_list(vec![arm.clone(), arm].into_iter()) }; assert_eq!( arm_list.syntax().to_string(), "{ _ => (), _ => (), }" ); let indented = IndentLevel(2).increase_indent(arm_list); assert_eq!( indented.syntax().to_string(), "{ _ => (), _ => (), }" ); }
use std::{iter, ops::RangeInclusive}; use arrayvec::ArrayVec; use rustc_hash::FxHashMap; use crate::{ algo, ast::{ self, make::{self, tokens}, AstNode, TypeBoundsOwner, }, AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind::{ATTR, COMMENT, WHITESPACE}, SyntaxNode, SyntaxToken, T, }; impl ast::FnDef { #[must_use] pub fn with_body(&self, body: ast::Block) -> ast::FnDef { let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new(); let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() { old_body.syntax().clone().into() } else if let Some(semi) = self.semicolon_token() { to_insert.push(make::tokens::single_space().into()); semi.into() } else { to_insert.push(make::tokens::single_space().into()); to_insert.push(body.syntax().clone().into()); return insert_children(self, InsertPosition::Last, to_insert.int
} impl ast::ItemList { #[must_use] pub fn append_items(&self, items: impl Iterator<Item = ast::ImplItem>) -> ast::ItemList { let mut res = self.clone(); if !self.syntax().text().contains_char('\n') { res = res.make_multiline(); } items.for_each(|it| res = res.append_item(it)); res } #[must_use] pub fn append_item(&self, item: ast::ImplItem) -> ast::ItemList { let (indent, position) = match self.impl_items().last() { Some(it) => ( leading_indent(it.syntax()).unwrap_or_default().to_string(), InsertPosition::After(it.syntax().clone().into()), ), None => match self.l_curly() { Some(it) => ( " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(), InsertPosition::After(it), ), None => return self.clone(), }, }; let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); let to_insert: ArrayVec<[SyntaxElement; 2]> = [ws.ws().into(), item.syntax().clone().into()].into(); insert_children(self, position, to_insert.into_iter()) } fn l_curly(&self) -> Option<SyntaxElement> { self.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) } fn make_multiline(&self) -> ast::ItemList { let l_curly = match self.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) { Some(it) => it, None => return self.clone(), }; let sibling = match l_curly.next_sibling_or_token() { Some(it) => it, None => return self.clone(), }; let existing_ws = match sibling.as_token() { None => None, Some(tok) if tok.kind() != WHITESPACE => None, Some(ws) => { if ws.text().contains('\n') { return self.clone(); } Some(ws.clone()) } }; let indent = leading_indent(self.syntax()).unwrap_or("".into()); let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); let to_insert = iter::once(ws.ws().into()); match existing_ws { None => insert_children(self, InsertPosition::After(l_curly), to_insert), Some(ws) => { replace_children(self, RangeInclusive::new(ws.clone().into(), ws.into()), to_insert) } } } } impl ast::RecordFieldList { #[must_use] pub fn append_field(&self, field: 
&ast::RecordField) -> ast::RecordFieldList { self.insert_field(InsertPosition::Last, field) } #[must_use] pub fn insert_field( &self, position: InsertPosition<&'_ ast::RecordField>, field: &ast::RecordField, ) -> ast::RecordFieldList { let is_multiline = self.syntax().text().contains_char('\n'); let ws; let space = if is_multiline { ws = tokens::WsBuilder::new(&format!( "\n{} ", leading_indent(self.syntax()).unwrap_or("".into()) )); ws.ws() } else { tokens::single_space() }; let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new(); to_insert.push(space.into()); to_insert.push(field.syntax().clone().into()); to_insert.push(tokens::comma().into()); macro_rules! after_l_curly { () => {{ let anchor = match self.l_curly() { Some(it) => it, None => return self.clone(), }; InsertPosition::After(anchor) }}; } macro_rules! after_field { ($anchor:expr) => { if let Some(comma) = $anchor .syntax() .siblings_with_tokens(Direction::Next) .find(|it| it.kind() == T![,]) { InsertPosition::After(comma) } else { to_insert.insert(0, tokens::comma().into()); InsertPosition::After($anchor.syntax().clone().into()) } }; }; let position = match position { InsertPosition::First => after_l_curly!(), InsertPosition::Last => { if !is_multiline { to_insert.pop(); } match self.fields().last() { Some(it) => after_field!(it), None => after_l_curly!(), } } InsertPosition::Before(anchor) => { InsertPosition::Before(anchor.syntax().clone().into()) } InsertPosition::After(anchor) => after_field!(anchor), }; insert_children(self, position, to_insert.iter().cloned()) } fn l_curly(&self) -> Option<SyntaxElement> { self.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) } } impl ast::TypeParam { #[must_use] pub fn remove_bounds(&self) -> ast::TypeParam { let colon = match self.colon_token() { Some(it) => it, None => return self.clone(), }; let end = match self.type_bound_list() { Some(it) => it.syntax().clone().into(), None => colon.clone().into(), }; replace_children(self, 
RangeInclusive::new(colon.into(), end), iter::empty()) } } #[must_use] pub fn strip_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N { N::cast(strip_attrs_and_docs_inner(node.syntax().clone())).unwrap() } fn strip_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode { while let Some(start) = node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT) { let end = match &start.next_sibling_or_token() { Some(el) if el.kind() == WHITESPACE => el.clone(), Some(_) | None => start.clone(), }; node = algo::replace_children(&node, RangeInclusive::new(start, end), &mut iter::empty()); } node } #[must_use] pub fn replace_descendants<N: AstNode, D: AstNode>( parent: &N, replacement_map: impl Iterator<Item = (D, D)>, ) -> N { let map = replacement_map .map(|(from, to)| (from.syntax().clone().into(), to.syntax().clone().into())) .collect::<FxHashMap<_, _>>(); let new_syntax = algo::replace_descendants(parent.syntax(), &map); N::cast(new_syntax).unwrap() } #[derive(Debug, Clone, Copy)] pub struct IndentLevel(pub u8); impl From<u8> for IndentLevel { fn from(level: u8) -> IndentLevel { IndentLevel(level) } } impl IndentLevel { pub fn from_node(node: &SyntaxNode) -> IndentLevel { let first_token = match node.first_token() { Some(it) => it, None => return IndentLevel(0), }; for ws in prev_tokens(first_token).filter_map(ast::Whitespace::cast) { let text = ws.syntax().text(); if let Some(pos) = text.rfind('\n') { let level = text[pos + 1..].chars().count() / 4; return IndentLevel(level as u8); } } IndentLevel(0) } pub fn increase_indent<N: AstNode>(self, node: N) -> N { N::cast(self._increase_indent(node.syntax().clone())).unwrap() } fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode { let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node .descendants_with_tokens() .filter_map(|el| el.into_token()) .filter_map(ast::Whitespace::cast) .filter(|ws| { let text = ws.syntax().text(); text.contains('\n') }) .map(|ws| { ( ws.syntax().clone().into(), 
make::tokens::whitespace(&format!( "{}{:width$}", ws.syntax().text(), "", width = self.0 as usize * 4 )) .into(), ) }) .collect(); algo::replace_descendants(&node, &replacements) } } fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> { for token in prev_tokens(node.first_token()?) { if let Some(ws) = ast::Whitespace::cast(token.clone()) { let ws_text = ws.text(); if let Some(pos) = ws_text.rfind('\n') { return Some(ws_text[pos + 1..].into()); } } if token.text().contains('\n') { break; } } None } fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { iter::successors(Some(token), |token| token.prev_token()) } #[must_use] fn insert_children<N: AstNode>( parent: &N, position: InsertPosition<SyntaxElement>, mut to_insert: impl Iterator<Item = SyntaxElement>, ) -> N { let new_syntax = algo::insert_children(parent.syntax(), position, &mut to_insert); N::cast(new_syntax).unwrap() } #[must_use] fn replace_children<N: AstNode>( parent: &N, to_replace: RangeInclusive<SyntaxElement>, mut to_insert: impl Iterator<Item = SyntaxElement>, ) -> N { let new_syntax = algo::replace_children(parent.syntax(), to_replace, &mut to_insert); N::cast(new_syntax).unwrap() } #[test] fn test_increase_indent() { let arm_list = { let arm = make::match_arm(iter::once(make::placeholder_pat().into()), make::expr_unit()); make::match_arm_list(vec![arm.clone(), arm].into_iter()) }; assert_eq!( arm_list.syntax().to_string(), "{ _ => (), _ => (), }" ); let indented = IndentLevel(2).increase_indent(arm_list); assert_eq!( indented.syntax().to_string(), "{ _ => (), _ => (), }" ); }
o_iter()); }; to_insert.push(body.syntax().clone().into()); let replace_range = RangeInclusive::new(old_body_or_semi.clone(), old_body_or_semi); replace_children(self, replace_range, to_insert.into_iter()) }
function_block-function_prefixed
[ { "content": "fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {\n\n let mut res = comment.clone();\n\n for element in comment.syntax().siblings_with_tokens(dir) {\n\n let token = match element.as_token() {\n\n None => break,\n\n Some(token) => token,\n\...
Rust
alacritty/src/macos/proc.rs
djpohly/alacritty
1df7dc5171abfe1eab3e95be964f61c5876198f1
use std::ffi::{CStr, CString, IntoStringError}; use std::fmt::{self, Display, Formatter}; use std::io; use std::mem::{self, MaybeUninit}; use std::os::raw::{c_int, c_void}; use std::path::PathBuf; #[derive(Debug)] pub enum Error { Io(io::Error), IntoString(IntoStringError), InvalidSize, } impl std::error::Error for Error { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { Error::InvalidSize => None, Error::Io(err) => err.source(), Error::IntoString(err) => err.source(), } } } impl Display for Error { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Error::InvalidSize => write!(f, "Invalid proc_pidinfo return size"), Error::Io(err) => write!(f, "Error getting current working directory: {}", err), Error::IntoString(err) => { write!(f, "Error when parsing current working directory: {}", err) }, } } } impl From<io::Error> for Error { fn from(val: io::Error) -> Self { Error::Io(val) } } impl From<IntoStringError> for Error { fn from(val: IntoStringError) -> Self { Error::IntoString(val) } } pub fn cwd(pid: c_int) -> Result<PathBuf, Error> { let mut info = MaybeUninit::<sys::proc_vnodepathinfo>::uninit(); let info_ptr = info.as_mut_ptr() as *mut c_void; let size = mem::size_of::<sys::proc_vnodepathinfo>() as c_int; let c_str = unsafe { let pidinfo_size = sys::proc_pidinfo(pid, sys::PROC_PIDVNODEPATHINFO, 0, info_ptr, size); match pidinfo_size { c if c < 0 => return Err(io::Error::last_os_error().into()), s if s != size => return Err(Error::InvalidSize), _ => CStr::from_ptr(info.assume_init().pvi_cdir.vip_path.as_ptr()), } }; Ok(CString::from(c_str).into_string().map(PathBuf::from)?) 
} #[allow(non_camel_case_types)] mod sys { use std::os::raw::{c_char, c_int, c_longlong, c_void}; pub const PROC_PIDVNODEPATHINFO: c_int = 9; type gid_t = c_int; type off_t = c_longlong; type uid_t = c_int; type fsid_t = fsid; #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct fsid { pub val: [i32; 2usize], } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct vinfo_stat { pub vst_dev: u32, pub vst_mode: u16, pub vst_nlink: u16, pub vst_ino: u64, pub vst_uid: uid_t, pub vst_gid: gid_t, pub vst_atime: i64, pub vst_atimensec: i64, pub vst_mtime: i64, pub vst_mtimensec: i64, pub vst_ctime: i64, pub vst_ctimensec: i64, pub vst_birthtime: i64, pub vst_birthtimensec: i64, pub vst_size: off_t, pub vst_blocks: i64, pub vst_blksize: i32, pub vst_flags: u32, pub vst_gen: u32, pub vst_rdev: u32, pub vst_qspare: [i64; 2usize], } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct vnode_info { pub vi_stat: vinfo_stat, pub vi_type: c_int, pub vi_pad: c_int, pub vi_fsid: fsid_t, } #[repr(C)] #[derive(Copy, Clone)] pub struct vnode_info_path { pub vip_vi: vnode_info, pub vip_path: [c_char; 1024usize], } #[repr(C)] #[derive(Copy, Clone)] pub struct proc_vnodepathinfo { pub pvi_cdir: vnode_info_path, pub pvi_rdir: vnode_info_path, } extern "C" { pub fn proc_pidinfo( pid: c_int, flavor: c_int, arg: u64, buffer: *mut c_void, buffersize: c_int, ) -> c_int; } } #[cfg(test)] mod tests { use super::*; use std::{env, process}; #[test] fn cwd_matches_current_dir() { assert_eq!(cwd(process::id() as i32).ok(), env::current_dir().ok()); } }
use std::ffi::{CStr, CString, IntoStringError}; use std::fmt::{self, Display, Formatter}; use std::io; use std::mem::{self, MaybeUninit}; use std::os::raw::{c_int, c_void}; use std::path::PathBuf; #[derive(Debug)] pub enum Error { Io(io::Error), IntoString(IntoStringError), InvalidSize, } impl std::error::Error for Error { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { Error::InvalidSize => None, Error::Io(err) => err.source(), Error::IntoString(err) => err.source(), } } } impl Display for Error {
} impl From<io::Error> for Error { fn from(val: io::Error) -> Self { Error::Io(val) } } impl From<IntoStringError> for Error { fn from(val: IntoStringError) -> Self { Error::IntoString(val) } } pub fn cwd(pid: c_int) -> Result<PathBuf, Error> { let mut info = MaybeUninit::<sys::proc_vnodepathinfo>::uninit(); let info_ptr = info.as_mut_ptr() as *mut c_void; let size = mem::size_of::<sys::proc_vnodepathinfo>() as c_int; let c_str = unsafe { let pidinfo_size = sys::proc_pidinfo(pid, sys::PROC_PIDVNODEPATHINFO, 0, info_ptr, size); match pidinfo_size { c if c < 0 => return Err(io::Error::last_os_error().into()), s if s != size => return Err(Error::InvalidSize), _ => CStr::from_ptr(info.assume_init().pvi_cdir.vip_path.as_ptr()), } }; Ok(CString::from(c_str).into_string().map(PathBuf::from)?) } #[allow(non_camel_case_types)] mod sys { use std::os::raw::{c_char, c_int, c_longlong, c_void}; pub const PROC_PIDVNODEPATHINFO: c_int = 9; type gid_t = c_int; type off_t = c_longlong; type uid_t = c_int; type fsid_t = fsid; #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct fsid { pub val: [i32; 2usize], } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct vinfo_stat { pub vst_dev: u32, pub vst_mode: u16, pub vst_nlink: u16, pub vst_ino: u64, pub vst_uid: uid_t, pub vst_gid: gid_t, pub vst_atime: i64, pub vst_atimensec: i64, pub vst_mtime: i64, pub vst_mtimensec: i64, pub vst_ctime: i64, pub vst_ctimensec: i64, pub vst_birthtime: i64, pub vst_birthtimensec: i64, pub vst_size: off_t, pub vst_blocks: i64, pub vst_blksize: i32, pub vst_flags: u32, pub vst_gen: u32, pub vst_rdev: u32, pub vst_qspare: [i64; 2usize], } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct vnode_info { pub vi_stat: vinfo_stat, pub vi_type: c_int, pub vi_pad: c_int, pub vi_fsid: fsid_t, } #[repr(C)] #[derive(Copy, Clone)] pub struct vnode_info_path { pub vip_vi: vnode_info, pub vip_path: [c_char; 1024usize], } #[repr(C)] #[derive(Copy, Clone)] pub struct proc_vnodepathinfo { pub pvi_cdir: vnode_info_path, 
pub pvi_rdir: vnode_info_path, } extern "C" { pub fn proc_pidinfo( pid: c_int, flavor: c_int, arg: u64, buffer: *mut c_void, buffersize: c_int, ) -> c_int; } } #[cfg(test)] mod tests { use super::*; use std::{env, process}; #[test] fn cwd_matches_current_dir() { assert_eq!(cwd(process::id() as i32).ok(), env::current_dir().ok()); } }
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Error::InvalidSize => write!(f, "Invalid proc_pidinfo return size"), Error::Io(err) => write!(f, "Error getting current working directory: {}", err), Error::IntoString(err) => { write!(f, "Error when parsing current working directory: {}", err) }, } }
function_block-full_function
[ { "content": "pub fn create_shader(kind: GLenum, source: &'static str) -> Result<GLuint, ShaderCreationError> {\n\n let len: [GLint; 1] = [source.len() as GLint];\n\n\n\n let shader = unsafe {\n\n let shader = gl::CreateShader(kind);\n\n gl::ShaderSource(shader, 1, &(source.as_ptr() as *cons...
Rust
libfat/src/directory/raw_dir_entry.rs
Orycterope/kfs_libfs
538c0156492db7fdbfa69d49f46609861febbd4a
use byteorder::{ByteOrder, LittleEndian}; use structview::{u16_le, u32_le, View}; use crate::attribute::Attributes; use crate::cluster::Cluster; use crate::datetime::FatDateTime; use crate::filesystem::FatFileSystem; use crate::name::{LongFileName, ShortFileName}; use libfs::block::{Block, BlockDevice, BlockIndex}; use libfs::FileSystemError; use libfs::FileSystemResult; #[derive(Clone, Copy, View)] #[repr(C)] pub struct LongFileNameDirEntry { pub order_entry: u8, pub char_part_0: [u16_le; 5], pub attribute: u8, pub lfn_entry_type: u8, pub lfn_checksum: u8, pub char_part_1: [u16_le; 6], pub reserved: u16_le, pub char_part_2: [u16_le; 2], } #[derive(Clone, Copy, View)] #[repr(C)] pub struct ShortFileNameDirEntry { pub name: [u8; ShortFileName::MAX_LEN], pub attribute: u8, pub reserved: u8, pub creation_tenths: u8, pub creation_time: u16_le, pub creation_date: u16_le, pub last_access_date: u16_le, pub high_cluster: u16_le, pub modification_time: u16_le, pub modification_date: u16_le, pub low_cluster: u16_le, pub file_size: u32_le, } #[derive(Clone, Copy)] pub struct FatDirEntry { pub entry_cluster: Cluster, pub entry_index: u32, pub entry_offset: u32, pub data: [u8; Self::LEN], } impl FatDirEntry { pub const LEN: usize = 32; pub fn from_raw( data: &[u8], entry_cluster: Cluster, entry_index: u32, entry_offset: u32, ) -> FatDirEntry { let mut data_copied = [0x0u8; Self::LEN]; data_copied[..data.len()].clone_from_slice(&data[..]); FatDirEntry { entry_cluster, entry_index, entry_offset, data: data_copied, } } pub fn get_first_byte(&self) -> u8 { self.data[0] } pub fn is_free(&self) -> bool { self.get_first_byte() == 0 } pub fn is_deleted(&self) -> bool { self.get_first_byte() == 0xE5 } pub fn set_deleted(&mut self) { self.data[0] = 0xE5; } pub fn clear(&mut self) { self.data = [0x0u8; Self::LEN]; } pub fn flush<T>(&self, fs: &FatFileSystem<T>) -> FileSystemResult<()> where T: BlockDevice, { let mut blocks = [Block::new()]; fs.block_device .read( &mut blocks, 
fs.partition_start, BlockIndex(self.entry_cluster.to_data_block_index(fs).0 + self.entry_index), ) .or(Err(FileSystemError::ReadFailed))?; let block = &mut blocks[0]; let entry_start = self.entry_offset as usize; let entry_end = entry_start + Self::LEN; for (i, val) in block[entry_start..entry_end].iter_mut().enumerate() { *val = self.data[i]; } fs.block_device .write( &blocks, fs.partition_start, BlockIndex(self.entry_cluster.to_data_block_index(fs).0 + self.entry_index), ) .or(Err(FileSystemError::WriteFailed)) } pub fn attribute(&self) -> Attributes { Attributes::new(self.data[11]) } pub fn set_attribute(&mut self, attribute: Attributes) { self.data[11] = attribute.get_value(); } pub fn is_long_file_name(&self) -> bool { self.attribute().is_lfn() } pub fn long_file_name_raw(&self) -> Option<LongFileName> { if self.is_long_file_name() { Some(LongFileName::from_lfn_dir_entry(self.as_lfn_entry())) } else { None } } pub fn short_name(&self) -> Option<ShortFileName> { if !self.is_long_file_name() { Some(ShortFileName::from_data(&self.as_sfn_entry().name)) } else { None } } pub fn set_lfn_index(&mut self, index: u8) { self.data[0] = index; } pub fn set_short_name(&mut self, short_name: &ShortFileName) { (&mut self.data[0..11]).copy_from_slice(&short_name.as_bytes()); } pub fn set_lfn_entry(&mut self, lfn: &str) { let lfn = LongFileName::from_utf8(lfn); let lfn = lfn.as_contents(); for (i, entry) in lfn.iter().enumerate().take(5) { let index = 1 + i * 2; LittleEndian::write_u16(&mut self.data[index..index + 2], *entry); } for i in 0..6 { let index = 0xE + i * 2; let i = i + 5; LittleEndian::write_u16(&mut self.data[index..index + 2], lfn[i]); } for i in 0..2 { let index = 0x1C + i * 2; let i = i + 11; LittleEndian::write_u16(&mut self.data[index..index + 2], lfn[i]); } } pub fn as_lfn_entry(&self) -> &LongFileNameDirEntry { LongFileNameDirEntry::view(&self.data).unwrap() } pub fn as_sfn_entry(&self) -> &ShortFileNameDirEntry { 
ShortFileNameDirEntry::view(&self.data).unwrap() } pub fn set_lfn_checksum(&mut self, checksum: u8) { self.data[13] = checksum; } pub fn get_cluster(&self) -> Cluster { let entry = self.as_sfn_entry(); let high_cluster = u32::from(entry.high_cluster.to_int()); let low_cluster = u32::from(entry.low_cluster.to_int()); Cluster(low_cluster | (high_cluster << 16)) } pub fn set_cluster(&mut self, cluster: Cluster) { let value = cluster.0; let high_cluster = ((value >> 16) & 0xFFFF) as u16; let low_cluster = (value & 0xFFFF) as u16; LittleEndian::write_u16(&mut self.data[20..22], high_cluster); LittleEndian::write_u16(&mut self.data[26..28], low_cluster); } pub fn get_file_size(&self) -> u32 { self.as_sfn_entry().file_size.to_int() } pub fn set_file_size(&mut self, new_size: u32) { LittleEndian::write_u32(&mut self.data[28..32], new_size); if new_size == 0 { self.set_cluster(Cluster(0)) } } pub fn get_creation_datetime(&self) -> FatDateTime { let entry = self.as_sfn_entry(); let raw_time = entry.creation_time.to_int(); let seconds = ((raw_time & 0x1f) << 1) as u8; let minutes = ((raw_time >> 5) & 0x3f) as u8; let hour = ((raw_time >> 11) & 0x1f) as u8; let raw_date = entry.creation_date.to_int(); let day = (raw_date & 0x1f) as u8; let month = ((raw_date >> 5) & 0xf) as u8; let year = (raw_date >> 9) & 0x7f; FatDateTime::new( 1980 + year, month, day, hour, minutes, seconds, self.data[13], ) } pub fn get_last_access_date(&self) -> FatDateTime { let entry = self.as_sfn_entry(); let raw_date = entry.last_access_date.to_int(); let day = (raw_date & 0x1f) as u8; let month = ((raw_date >> 5) & 0xf) as u8; let year = (raw_date >> 9) & 0x7f; FatDateTime::new(1980 + year, month, day, 0, 0, 0, 0) } pub fn get_modification_datetime(&self) -> FatDateTime { let entry = self.as_sfn_entry(); let raw_time = entry.modification_time.to_int(); let seconds = ((raw_time & 0x1f) << 1) as u8; let minutes = ((raw_time >> 5) & 0x3f) as u8; let hour = ((raw_time >> 11) & 0x1f) as u8; let raw_date = 
entry.modification_date.to_int(); let day = (raw_date & 0x1f) as u8; let month = ((raw_date >> 5) & 0xf) as u8; let year = (raw_date >> 9) & 0x7f; FatDateTime::new(1980 + year, month, day, hour, minutes, seconds, 0) } } impl<'a> core::fmt::Debug for FatDirEntry { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "FatDirEntry {{ ")?; write!(f, "{:?} ", self.attribute())?; if self.is_long_file_name() { if let Some(long_file_name) = self.long_file_name_raw() { if let Some(data) = long_file_name.chars() { write!(f, "LongFileName {{{:?}}}", data)?; } else { write!(f, "BROKEN LongFileName")?; } } else { write!(f, "LongFileName {{ \"not a long file name?????\" }}")?; } } else if let Some(short_file_name) = self.short_name() { write!(f, "ShortFileName {{{:?}}}", short_file_name.chars())?; } else { write!(f, "ShortFileName {{ \"not a short file name?????\" }}")?; } write!(f, " }}") } }
use byteorder::{ByteOrder, LittleEndian}; use structview::{u16_le, u32_le, View}; use crate::attribute::Attributes; use crate::cluster::Cluster; use crate::datetime::FatDateTime; use crate::filesystem::FatFileSystem; use crate::name::{LongFileName, ShortFileName}; use libfs::block::{Block, BlockDevice, BlockIndex}; use libfs::FileSystemError; use libfs::FileSystemResult; #[derive(Clone, Copy, View)] #[repr(C)] pub struct LongFileNameDirEntry { pub order_entry: u8, pub char_part_0: [u16_le; 5], pub attribute: u8, pub lfn_entry_type: u8, pub lfn_checksum: u8, pub char_part_1: [u16_le; 6], pub reserved: u16_le, pub char_part_2: [u16_le; 2], } #[derive(Clone, Copy, View)] #[repr(C)] pub struct ShortFileNameDirEntry { pub name: [u8; ShortFileName::MAX_LEN], pub attribute: u8, pub reserved: u8, pub creation_tenths: u8, pub creation_time: u16_le, pub creation_date: u16_le, pub last_access_date: u16_le, pub high_cluster: u16_le, pub modification_time: u16_le, pub modification_date: u16_le, pub low_cluster: u16_le, pub file_size: u32_le, } #[derive(Clone, Copy)] pub struct FatDirEntry { pub entry_cluster: Cluster, pub entry_index: u32, pub entry_offset: u32, pub data: [u8; Self::LEN], } impl FatDirEntry { pub const LEN: usize = 32; pub fn from_raw( data: &[u8], entry_cluster: Cluster, entry_index: u32, entry_offset: u32, ) -> FatDirEntry { let mut data_copied = [0x0u8; Self::LEN]; data_copied[..data.len()].clone_from_slice(&data[..]); FatDirEntry { entry_cluster, entry_index, entry_offset, data: data_copied, } } pub fn get_first_byte(&self) -> u8 { self.data[0] } pub fn is_free(&self) -> bool { self.get_first_byte() == 0 } pub fn is_deleted(&self) -> bool { self.get_first_byte() == 0xE5 } pub fn set_deleted(&mut self) { self.data[0] = 0xE5; } pub fn clear(&mut self) { self.data = [0x0u8; Self::LEN];
file_name() { Some(LongFileName::from_lfn_dir_entry(self.as_lfn_entry())) } else { None } } pub fn short_name(&self) -> Option<ShortFileName> { if !self.is_long_file_name() { Some(ShortFileName::from_data(&self.as_sfn_entry().name)) } else { None } } pub fn set_lfn_index(&mut self, index: u8) { self.data[0] = index; } pub fn set_short_name(&mut self, short_name: &ShortFileName) { (&mut self.data[0..11]).copy_from_slice(&short_name.as_bytes()); } pub fn set_lfn_entry(&mut self, lfn: &str) { let lfn = LongFileName::from_utf8(lfn); let lfn = lfn.as_contents(); for (i, entry) in lfn.iter().enumerate().take(5) { let index = 1 + i * 2; LittleEndian::write_u16(&mut self.data[index..index + 2], *entry); } for i in 0..6 { let index = 0xE + i * 2; let i = i + 5; LittleEndian::write_u16(&mut self.data[index..index + 2], lfn[i]); } for i in 0..2 { let index = 0x1C + i * 2; let i = i + 11; LittleEndian::write_u16(&mut self.data[index..index + 2], lfn[i]); } } pub fn as_lfn_entry(&self) -> &LongFileNameDirEntry { LongFileNameDirEntry::view(&self.data).unwrap() } pub fn as_sfn_entry(&self) -> &ShortFileNameDirEntry { ShortFileNameDirEntry::view(&self.data).unwrap() } pub fn set_lfn_checksum(&mut self, checksum: u8) { self.data[13] = checksum; } pub fn get_cluster(&self) -> Cluster { let entry = self.as_sfn_entry(); let high_cluster = u32::from(entry.high_cluster.to_int()); let low_cluster = u32::from(entry.low_cluster.to_int()); Cluster(low_cluster | (high_cluster << 16)) } pub fn set_cluster(&mut self, cluster: Cluster) { let value = cluster.0; let high_cluster = ((value >> 16) & 0xFFFF) as u16; let low_cluster = (value & 0xFFFF) as u16; LittleEndian::write_u16(&mut self.data[20..22], high_cluster); LittleEndian::write_u16(&mut self.data[26..28], low_cluster); } pub fn get_file_size(&self) -> u32 { self.as_sfn_entry().file_size.to_int() } pub fn set_file_size(&mut self, new_size: u32) { LittleEndian::write_u32(&mut self.data[28..32], new_size); if new_size == 0 { 
self.set_cluster(Cluster(0)) } } pub fn get_creation_datetime(&self) -> FatDateTime { let entry = self.as_sfn_entry(); let raw_time = entry.creation_time.to_int(); let seconds = ((raw_time & 0x1f) << 1) as u8; let minutes = ((raw_time >> 5) & 0x3f) as u8; let hour = ((raw_time >> 11) & 0x1f) as u8; let raw_date = entry.creation_date.to_int(); let day = (raw_date & 0x1f) as u8; let month = ((raw_date >> 5) & 0xf) as u8; let year = (raw_date >> 9) & 0x7f; FatDateTime::new( 1980 + year, month, day, hour, minutes, seconds, self.data[13], ) } pub fn get_last_access_date(&self) -> FatDateTime { let entry = self.as_sfn_entry(); let raw_date = entry.last_access_date.to_int(); let day = (raw_date & 0x1f) as u8; let month = ((raw_date >> 5) & 0xf) as u8; let year = (raw_date >> 9) & 0x7f; FatDateTime::new(1980 + year, month, day, 0, 0, 0, 0) } pub fn get_modification_datetime(&self) -> FatDateTime { let entry = self.as_sfn_entry(); let raw_time = entry.modification_time.to_int(); let seconds = ((raw_time & 0x1f) << 1) as u8; let minutes = ((raw_time >> 5) & 0x3f) as u8; let hour = ((raw_time >> 11) & 0x1f) as u8; let raw_date = entry.modification_date.to_int(); let day = (raw_date & 0x1f) as u8; let month = ((raw_date >> 5) & 0xf) as u8; let year = (raw_date >> 9) & 0x7f; FatDateTime::new(1980 + year, month, day, hour, minutes, seconds, 0) } } impl<'a> core::fmt::Debug for FatDirEntry { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "FatDirEntry {{ ")?; write!(f, "{:?} ", self.attribute())?; if self.is_long_file_name() { if let Some(long_file_name) = self.long_file_name_raw() { if let Some(data) = long_file_name.chars() { write!(f, "LongFileName {{{:?}}}", data)?; } else { write!(f, "BROKEN LongFileName")?; } } else { write!(f, "LongFileName {{ \"not a long file name?????\" }}")?; } } else if let Some(short_file_name) = self.short_name() { write!(f, "ShortFileName {{{:?}}}", short_file_name.chars())?; } else { write!(f, "ShortFileName {{ 
\"not a short file name?????\" }}")?; } write!(f, " }}") } }
} pub fn flush<T>(&self, fs: &FatFileSystem<T>) -> FileSystemResult<()> where T: BlockDevice, { let mut blocks = [Block::new()]; fs.block_device .read( &mut blocks, fs.partition_start, BlockIndex(self.entry_cluster.to_data_block_index(fs).0 + self.entry_index), ) .or(Err(FileSystemError::ReadFailed))?; let block = &mut blocks[0]; let entry_start = self.entry_offset as usize; let entry_end = entry_start + Self::LEN; for (i, val) in block[entry_start..entry_end].iter_mut().enumerate() { *val = self.data[i]; } fs.block_device .write( &blocks, fs.partition_start, BlockIndex(self.entry_cluster.to_data_block_index(fs).0 + self.entry_index), ) .or(Err(FileSystemError::WriteFailed)) } pub fn attribute(&self) -> Attributes { Attributes::new(self.data[11]) } pub fn set_attribute(&mut self, attribute: Attributes) { self.data[11] = attribute.get_value(); } pub fn is_long_file_name(&self) -> bool { self.attribute().is_lfn() } pub fn long_file_name_raw(&self) -> Option<LongFileName> { if self.is_long_
random
[ { "content": "/// Get the last cluster of a cluster chain.\n\npub fn get_last_cluster<T>(\n\n fs: &FatFileSystem<T>,\n\n cluster: Cluster,\n\n) -> Result<Cluster, FileSystemError>\n\nwhere\n\n T: BlockDevice,\n\n{\n\n Ok(get_last_and_previous_cluster(fs, cluster)?.0)\n\n}\n\n\n", "file_path": "l...
Rust
src/rust/bitbox02-rust/src/hww/api/ethereum/amount.rs
thisconnect/bitbox02-firmware
e081ac18dc28c2bdffdc58a94afa36a1bb5bade2
use alloc::string::String; use num_bigint::BigUint; pub struct Amount<'a> { pub unit: &'a str, pub decimals: usize, pub value: BigUint, } impl<'a> Amount<'a> { pub fn format(&self) -> String { const TRUNCATE_SIZE: usize = 13; let v = util::decimal::format(&self.value, self.decimals); if v.len() > TRUNCATE_SIZE { format!("{}... {}", &v[..TRUNCATE_SIZE], self.unit) } else { format!("{} {}", v, self.unit) } } } #[cfg(test)] mod tests { use super::*; #[test] pub fn test_format() { struct Test<'a> { bigendian: &'a [u8], decimals: usize, unit: &'a str, expected_result: &'a str, }; let tests = vec![ Test { bigendian: b"", decimals: 6, unit: "LOL", expected_result: "0 LOL", }, Test { bigendian: b"\x0f\x42\x40", decimals: 6, unit: "LOL", expected_result: "1 LOL", }, Test { bigendian: b"\x10\xc8\xe0", decimals: 6, unit: "LOL", expected_result: "1.1 LOL", }, Test { bigendian: b"\x20\x08\x1f\x97\x9a\x5c\x8d\x47\x29\x0e\x3e", decimals: 18, unit: "LOL", expected_result: "38723987.9327... LOL", }, Test { bigendian: b"\x01\xe2\x40", decimals: 8, unit: "LOL", expected_result: "0.00123456 LOL", }, Test { bigendian: b"\x01\xe2\x40", decimals: 8, unit: "LOL", expected_result: "0.00123456 LOL", }, Test { bigendian: b"\x1d\x00\xd3\x28\xcb", decimals: 10, unit: "LOL", expected_result: "12.4567890123 LOL", }, Test { bigendian: b"\x01\x22\x08\x3f\x97\xf2", decimals: 11, unit: "LOL", expected_result: "12.4567890123... LOL", }, ]; for test in tests.iter() { assert_eq!( Amount { unit: test.unit, decimals: test.decimals, value: BigUint::from_bytes_be(test.bigendian), } .format(), test.expected_result ); } } }
use alloc::string::String; use num_bigint::BigUint; pub struct Amount<'a> { pub unit: &'a str, pub decimals: usize, pub value: BigUint, } impl<'a> Amount<'a> { pub fn format(&self) -> String { const TRUNCATE_SIZE: usize = 13; let v = util::decimal::format(&self.value, self.decimals); if v.len() > TRUNCATE_SIZE { format!("{}... {}", &v[..TRUNCATE_SIZE], self.unit) } else { format!("{} {}", v, self.unit) } } } #[cfg(test)] mod tests { use super::*; #[test] pub fn test_format() { struct Test<'a> { bigendian: &'a [u8], decimals: usize, unit: &'a str, expected_result: &'a str, }; let tests = vec![ Test { bigendian: b"", decimals: 6, unit: "LOL", expected_result: "0 LOL", }, Test { bigendian: b"\x0f\x42\x40", decimals: 6, unit: "LOL", expected_result: "1 LOL", }, Test { bigendian: b"\x10\xc8\xe0", decimals: 6, unit: "LOL", expected_result: "1.1 LOL", }, Test { bigendian: b"\x20\x08\x1f\x97\x9a\x5c\x8d\x47\x29\x0e\x3e", decimals: 18, unit: "LOL", expected_result: "38723987.9327... LOL", }, Test { bigendian: b"\x01\xe2\x40", decimals: 8, unit: "LOL", expected_result: "0.00123456 LOL", }, Test { bigendian: b"\x01\xe2\x40", decimals: 8, unit: "LOL", expected_result: "0.00123456 LOL", }, Tes
}
t { bigendian: b"\x1d\x00\xd3\x28\xcb", decimals: 10, unit: "LOL", expected_result: "12.4567890123 LOL", }, Test { bigendian: b"\x01\x22\x08\x3f\x97\xf2", decimals: 11, unit: "LOL", expected_result: "12.4567890123... LOL", }, ]; for test in tests.iter() { assert_eq!( Amount { unit: test.unit, decimals: test.decimals, value: BigUint::from_bytes_be(test.bigendian), } .format(), test.expected_result ); } }
function_block-function_prefixed
[ { "content": "/// Formats integer `value` as `value / 10^decimals`, with up to `decimals` decimal places.\n\n/// E.g. \"123450\" with decimals=3: \"123.45\".\n\n/// Value must consists only of '0'-'9' digits.\n\npub fn format<F: Format>(value: F, decimals: usize) -> String {\n\n let mut v: String = format!(\...
Rust
debug/src/lib.rs
Simon-Bin/proc-macro-workshop
c8654295d4a10ab10827464267c5f653e4005c91
use std::collections::HashMap; use quote::quote; use syn::parse_quote; use syn::visit::{self, Visit}; #[proc_macro_derive(CustomDebug, attributes(debug))] pub fn derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let st = syn::parse_macro_input!(input as syn::DeriveInput); match do_expand(&st) { Ok(token_stream) => token_stream, Err(e) => e.to_compile_error(), } .into() } fn do_expand(st: &syn::DeriveInput) -> syn::Result<proc_macro2::TokenStream> { let ret = generate_debug_trait(st)?; return Ok(ret); } type StructFields = syn::punctuated::Punctuated<syn::Field, syn::Token!(,)>; fn get_fields_from_derive_input(d: &syn::DeriveInput) -> syn::Result<&StructFields> { if let syn::Data::Struct(syn::DataStruct { fields: syn::Fields::Named(syn::FieldsNamed { ref named, .. }), .. }) = d.data { return Ok(named); } Err(syn::Error::new_spanned( d, "Must define on a Struct,not Enum".to_string(), )) } fn generate_debug_trait_core(st: &syn::DeriveInput) -> syn::Result<proc_macro2::TokenStream> { let fields = get_fields_from_derive_input(st)?; let struct_name_ident = &st.ident; let struct_name_literal = struct_name_ident.to_string(); let mut fmt_body_stream = proc_macro2::TokenStream::new(); fmt_body_stream.extend(quote!( fmt.debug_struct(#struct_name_literal) )); for field in fields.iter() { let field_name_ident = field.ident.as_ref().unwrap(); let field_name_literal = field_name_ident.to_string(); let mut format_str = "{:?}".to_string(); if let Some(format) = get_custom_format_of_field(field)? 
{ format_str = format; } fmt_body_stream.extend(quote!( .field(#field_name_literal, &format_args!(#format_str,self.#field_name_ident)) )); } fmt_body_stream.extend(quote!( .finish() )); Ok(fmt_body_stream) } fn generate_debug_trait(st: &syn::DeriveInput) -> syn::Result<proc_macro2::TokenStream> { let struct_name_ident = &st.ident; let fmt_body_stream = generate_debug_trait_core(st)?; let mut generics_param_to_modify = st.generics.clone(); if let Some(hatch) = get_struct_escape_hatch(st) { generics_param_to_modify.make_where_clause(); generics_param_to_modify .where_clause .as_mut() .unwrap() .predicates .push(syn::parse_str(hatch.as_str()).unwrap()); } else { let fields = get_fields_from_derive_input(st)?; let mut fields_type_names = Vec::new(); let mut phantomdata_type_param_names = Vec::new(); for field in fields { if let Some(s) = get_field_type_name(field)? { fields_type_names.push(s); } if let Some(s) = get_phantomdata_generic_type_name(field)? { phantomdata_type_param_names.push(s); } } let associated_types_map = get_generic_association_types(st); for g in generics_param_to_modify.params.iter_mut() { if let syn::GenericParam::Type(t) = g { let type_param_name = t.ident.to_string(); if phantomdata_type_param_names.contains(&type_param_name) && !fields_type_names.contains(&type_param_name) { continue; } if associated_types_map.contains_key(&type_param_name) && !fields_type_names.contains(&type_param_name) { continue; } t.bounds.push(parse_quote!(std::fmt::Debug)) } } generics_param_to_modify.make_where_clause(); for (_, associated_types) in associated_types_map { for associated_type in associated_types { generics_param_to_modify .where_clause .as_mut() .unwrap() .predicates .push(parse_quote!(#associated_type:std::fmt::Debug)); } } } let (impl_generics, type_generics, where_clause) = generics_param_to_modify.split_for_impl(); let ret_stream = quote!( impl #impl_generics std::fmt::Debug for #struct_name_ident #type_generics #where_clause{ fn fmt(&self, fmt: &mut 
std::fmt::Formatter) -> std::fmt::Result { #fmt_body_stream } } ); Ok(ret_stream) } fn get_custom_format_of_field(field: &syn::Field) -> syn::Result<Option<String>> { for attr in &field.attrs { if let Ok(syn::Meta::NameValue(syn::MetaNameValue { ref path, ref lit, .. })) = attr.parse_meta() { if path.is_ident("debug") { if let syn::Lit::Str(ref ident_str) = lit { return Ok(Some(ident_str.value())); } } } } Ok(None) } fn get_phantomdata_generic_type_name(field: &syn::Field) -> syn::Result<Option<String>> { if let syn::Type::Path(syn::TypePath { path: syn::Path { ref segments, .. }, .. }) = field.ty { if let Some(syn::PathSegment { ref ident, ref arguments, }) = segments.last() { if ident == "PhantomData" { if let syn::PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { args, .. }) = arguments { if let Some(syn::GenericArgument::Type(syn::Type::Path(ref gp))) = args.first() { if let Some(generic_ident) = gp.path.segments.first() { return Ok(Some(generic_ident.ident.to_string())); } } } } } } Ok(None) } fn get_field_type_name(field: &syn::Field) -> syn::Result<Option<String>> { if let syn::Type::Path(syn::TypePath { path: syn::Path { ref segments, .. }, .. }) = field.ty { if let Some(syn::PathSegment { ref ident, .. }) = segments.last() { return Ok(Some(ident.to_string())); } } return Ok(None); } fn get_generic_association_types(st: &syn::DeriveInput) -> HashMap<String, Vec<syn::TypePath>> { let origin_generic_param_names: Vec<String> = st .generics .params .iter() .filter_map(|f| { if let syn::GenericParam::Type(ty) = f { return Some(ty.ident.to_string()); } return None; }) .collect(); let mut visitor = TypePathVisitor { generic_type_names: origin_generic_param_names, associated_types: HashMap::new(), }; visitor.visit_derive_input(st); return visitor.associated_types; } fn get_struct_escape_hatch(st: &syn::DeriveInput) -> Option<String> { if let Some(inert_arr) = st.attrs.last() { if let Ok(syn::Meta::List(syn::MetaList { nested, .. 
})) = inert_arr.parse_meta() { if let Some(syn::NestedMeta::Meta(syn::Meta::NameValue(path_value))) = nested.first() { if path_value.path.is_ident("bound") { if let syn::Lit::Str(ref lit) = path_value.lit { return Some(lit.value()); } } } } } None } struct TypePathVisitor { generic_type_names: Vec<String>, associated_types: HashMap<String, Vec<syn::TypePath>>, } impl<'ast> Visit<'ast> for TypePathVisitor { fn visit_type_path(&mut self, node: &'ast syn::TypePath) { if node.path.segments.len() >= 2 { let generic_type_name = node.path.segments[0].ident.to_string(); if self.generic_type_names.contains(&generic_type_name) { self.associated_types .entry(generic_type_name) .or_insert(Vec::new()) .push(node.clone()); } } visit::visit_type_path(self, node); } }
use std::collections::HashMap; use quote::quote; use syn::parse_quote; use syn::visit::{self, Visit}; #[proc_macro_derive(CustomDebug, attributes(debug))] pub fn derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let st = syn::parse_macro_input!(input as syn::DeriveInput); match do_expand(&st) { Ok(token_stream) => token_stream, Err(e) => e.to_compile_error(), } .into() } fn do_expand(st: &syn::DeriveInput) -> syn::Result<proc_macro2::TokenStream> { let ret = generate_debug_trait(st)?; return Ok(ret); } type StructFields = syn::punctuated::Punctuated<syn::Field, syn::Token!(,)>; fn get_fields_from_derive_input(d: &syn::DeriveInput) -> syn::Result<&StructFields> { if let syn::Data::Struct(syn::DataStruct { fields: syn::Fields::Named(syn::FieldsNamed { ref named, .. }), .. }) = d.data { return Ok(named); } Err(syn::Error::new_spanned( d, "Must define on a Struct,not Enum".to_string(), )) } fn generate_debug_trait_core(st: &syn::DeriveInput) -> syn::Result<proc_macro2::TokenStream> { let fields = get_fields_from_derive_input(st)?; let struct_name_ident = &st.ident; let struct_name_literal = struct_name_ident.to_string(); let mut fmt_body_stream = proc_macro2::TokenStream::new(); fmt_body_stream.extend(quote!( fmt.debug_struct(#struct_name_literal) )); for field in fields.iter() { let field_name_ident = field.ident.as_ref().unwrap(); let field_name_literal = field_name_ident.to_string(); let mut format_str = "{:?}".to_string(); if let Some(format) = get_custom_format_of_field(field)? 
{ format_str = format; } fmt_body_stream.extend(quote!( .field(#field_name_literal, &format_args!(#format_str,self.#field_name_ident)) )); } fmt_body_stream.extend(quote!( .finish() )); Ok(fmt_body_stream) } fn generate_debug_trait(st: &syn::DeriveInput) -> syn::Result<proc_macro2::TokenStream> { let struct_name_ident = &st.ident; let fmt_body_stream = generate_debug_trait_core(st)?; let mut generics_param_to_modify = st.generics.clone(); if let Some(hatch) = get_struct_escape_hatch(st) { generics_param_to_modify.make_where_clause(); generics_param_to_modify .where_clause .as_mut() .unwrap() .predicates .push(syn::parse_str(hatch.as_str()).unwrap()); } else { let fields = get_fields_from_derive_input(st)?; let mut fields_type_names = Vec::new(); let mut phantomdata_type_param_names = Vec::new(); for field in fields { if let Some(s) = get_field_type_name(field)? { fields_type_names.push(s); } if let Some(s) = get_phantomdata_generic_type_name(field)? { phantomdata_type_param_names.push(s); } } let associated_types_map = get_generic_association_types(st); for g in generics_param_to_modify.params.iter_mut() { if let syn::GenericParam::Type(t) = g { let type_param_name = t.ident.to_str
tr in &field.attrs { if let Ok(syn::Meta::NameValue(syn::MetaNameValue { ref path, ref lit, .. })) = attr.parse_meta() { if path.is_ident("debug") { if let syn::Lit::Str(ref ident_str) = lit { return Ok(Some(ident_str.value())); } } } } Ok(None) } fn get_phantomdata_generic_type_name(field: &syn::Field) -> syn::Result<Option<String>> { if let syn::Type::Path(syn::TypePath { path: syn::Path { ref segments, .. }, .. }) = field.ty { if let Some(syn::PathSegment { ref ident, ref arguments, }) = segments.last() { if ident == "PhantomData" { if let syn::PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { args, .. }) = arguments { if let Some(syn::GenericArgument::Type(syn::Type::Path(ref gp))) = args.first() { if let Some(generic_ident) = gp.path.segments.first() { return Ok(Some(generic_ident.ident.to_string())); } } } } } } Ok(None) } fn get_field_type_name(field: &syn::Field) -> syn::Result<Option<String>> { if let syn::Type::Path(syn::TypePath { path: syn::Path { ref segments, .. }, .. }) = field.ty { if let Some(syn::PathSegment { ref ident, .. }) = segments.last() { return Ok(Some(ident.to_string())); } } return Ok(None); } fn get_generic_association_types(st: &syn::DeriveInput) -> HashMap<String, Vec<syn::TypePath>> { let origin_generic_param_names: Vec<String> = st .generics .params .iter() .filter_map(|f| { if let syn::GenericParam::Type(ty) = f { return Some(ty.ident.to_string()); } return None; }) .collect(); let mut visitor = TypePathVisitor { generic_type_names: origin_generic_param_names, associated_types: HashMap::new(), }; visitor.visit_derive_input(st); return visitor.associated_types; } fn get_struct_escape_hatch(st: &syn::DeriveInput) -> Option<String> { if let Some(inert_arr) = st.attrs.last() { if let Ok(syn::Meta::List(syn::MetaList { nested, .. 
})) = inert_arr.parse_meta() { if let Some(syn::NestedMeta::Meta(syn::Meta::NameValue(path_value))) = nested.first() { if path_value.path.is_ident("bound") { if let syn::Lit::Str(ref lit) = path_value.lit { return Some(lit.value()); } } } } } None } struct TypePathVisitor { generic_type_names: Vec<String>, associated_types: HashMap<String, Vec<syn::TypePath>>, } impl<'ast> Visit<'ast> for TypePathVisitor { fn visit_type_path(&mut self, node: &'ast syn::TypePath) { if node.path.segments.len() >= 2 { let generic_type_name = node.path.segments[0].ident.to_string(); if self.generic_type_names.contains(&generic_type_name) { self.associated_types .entry(generic_type_name) .or_insert(Vec::new()) .push(node.clone()); } } visit::visit_type_path(self, node); } }
ing(); if phantomdata_type_param_names.contains(&type_param_name) && !fields_type_names.contains(&type_param_name) { continue; } if associated_types_map.contains_key(&type_param_name) && !fields_type_names.contains(&type_param_name) { continue; } t.bounds.push(parse_quote!(std::fmt::Debug)) } } generics_param_to_modify.make_where_clause(); for (_, associated_types) in associated_types_map { for associated_type in associated_types { generics_param_to_modify .where_clause .as_mut() .unwrap() .predicates .push(parse_quote!(#associated_type:std::fmt::Debug)); } } } let (impl_generics, type_generics, where_clause) = generics_param_to_modify.split_for_impl(); let ret_stream = quote!( impl #impl_generics std::fmt::Debug for #struct_name_ident #type_generics #where_clause{ fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { #fmt_body_stream } } ); Ok(ret_stream) } fn get_custom_format_of_field(field: &syn::Field) -> syn::Result<Option<String>> { for at
random
[ { "content": "fn get_fields_from_derive_input(st: &syn::DeriveInput) -> syn::Result<&StructField> {\n\n if let syn::Data::Struct(syn::DataStruct {\n\n fields: syn::Fields::Named(syn::FieldsNamed { ref named, .. }),\n\n ..\n\n }) = st.data\n\n {\n\n return Ok(named);\n\n }\n\n ...
Rust
src/grpc/node.rs
searsaw/sensei
ee3d45d690c8a2c8a1c91c3b2cf0f27baf844fd4
use std::sync::Arc; pub use super::sensei::node_server::{Node, NodeServer}; use super::{ sensei::{ CloseChannelRequest, CloseChannelResponse, ConnectPeerRequest, ConnectPeerResponse, CreateInvoiceRequest, CreateInvoiceResponse, DecodeInvoiceRequest, DecodeInvoiceResponse, DeletePaymentRequest, DeletePaymentResponse, GetBalanceRequest, GetBalanceResponse, GetUnusedAddressRequest, GetUnusedAddressResponse, InfoRequest, InfoResponse, KeysendRequest, KeysendResponse, LabelPaymentRequest, LabelPaymentResponse, ListChannelsRequest, ListChannelsResponse, ListPaymentsRequest, ListPaymentsResponse, ListPeersRequest, ListPeersResponse, OpenChannelRequest, OpenChannelResponse, PayInvoiceRequest, PayInvoiceResponse, SignMessageRequest, SignMessageResponse, StartNodeRequest, StartNodeResponse, StopNodeRequest, StopNodeResponse, VerifyMessageRequest, VerifyMessageResponse, }, utils::raw_macaroon_from_metadata, }; use crate::{ services::{ admin::AdminRequest, node::{NodeRequest, NodeResponse}, }, utils, }; use tonic::{metadata::MetadataMap, Response, Status}; pub struct NodeService { pub request_context: Arc<crate::RequestContext>, } impl NodeService { async fn authenticated_request( &self, metadata: MetadataMap, request: NodeRequest, ) -> Result<NodeResponse, tonic::Status> { let macaroon_hex_string = raw_macaroon_from_metadata(metadata)?; let (macaroon, session) = utils::macaroon_with_session_from_hex_str(&macaroon_hex_string) .map_err(|_e| tonic::Status::unauthenticated("invalid macaroon"))?; let pubkey = session.pubkey.clone(); let node_directory = self.request_context.node_directory.lock().await; match node_directory.get(&session.pubkey) { Some(handle) => { handle .node .verify_macaroon(macaroon, session) .await .map_err(|_e| Status::unauthenticated("invalid macaroon: failed to verify"))?; match request { NodeRequest::StopNode {} => { drop(node_directory); let admin_request = AdminRequest::StopNode { pubkey }; let _ = self .request_context .admin_service .call(admin_request) 
.await .map_err(|_e| Status::unknown("failed to stop node"))?; Ok(NodeResponse::StopNode {}) } _ => handle .node .call(request) .await .map_err(|_e| Status::unknown("error")), } } None => match request { NodeRequest::StartNode { passphrase } => { drop(node_directory); let admin_request = AdminRequest::StartNode { passphrase, pubkey: session.pubkey, }; let _ = self .request_context .admin_service .call(admin_request) .await .map_err(|_e| { Status::unauthenticated( "failed to start node, likely invalid passphrase", ) })?; Ok(NodeResponse::StartNode {}) } _ => Err(Status::not_found("node with that pubkey not found")), }, } } } #[tonic::async_trait] impl Node for NodeService { async fn start_node( &self, request: tonic::Request<StartNodeRequest>, ) -> Result<tonic::Response<StartNodeResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn stop_node( &self, request: tonic::Request<StopNodeRequest>, ) -> Result<tonic::Response<StopNodeResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn get_unused_address( &self, request: tonic::Request<GetUnusedAddressRequest>, ) -> Result<tonic::Response<GetUnusedAddressResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn get_balance( &self, request: tonic::Request<GetBalanceRequest>, ) -> Result<tonic::Response<GetBalanceResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? 
.try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn open_channel( &self, request: tonic::Request<OpenChannelRequest>, ) -> Result<tonic::Response<OpenChannelResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn pay_invoice( &self, request: tonic::Request<PayInvoiceRequest>, ) -> Result<tonic::Response<PayInvoiceResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn decode_invoice( &self, request: tonic::Request<DecodeInvoiceRequest>, ) -> Result<tonic::Response<DecodeInvoiceResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn keysend( &self, request: tonic::Request<KeysendRequest>, ) -> Result<tonic::Response<KeysendResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn create_invoice( &self, request: tonic::Request<CreateInvoiceRequest>, ) -> Result<tonic::Response<CreateInvoiceResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn label_payment( &self, request: tonic::Request<LabelPaymentRequest>, ) -> Result<tonic::Response<LabelPaymentResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? 
.try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn delete_payment( &self, request: tonic::Request<DeletePaymentRequest>, ) -> Result<tonic::Response<DeletePaymentResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn connect_peer( &self, request: tonic::Request<ConnectPeerRequest>, ) -> Result<tonic::Response<ConnectPeerResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn list_channels( &self, request: tonic::Request<ListChannelsRequest>, ) -> Result<tonic::Response<ListChannelsResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn list_payments( &self, request: tonic::Request<ListPaymentsRequest>, ) -> Result<tonic::Response<ListPaymentsResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn close_channel( &self, request: tonic::Request<CloseChannelRequest>, ) -> Result<tonic::Response<CloseChannelResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn info( &self, request: tonic::Request<InfoRequest>, ) -> Result<tonic::Response<InfoResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? 
.try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn list_peers( &self, request: tonic::Request<ListPeersRequest>, ) -> Result<tonic::Response<ListPeersResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn sign_message( &self, request: tonic::Request<SignMessageRequest>, ) -> Result<tonic::Response<SignMessageResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn verify_message( &self, request: tonic::Request<VerifyMessageRequest>, ) -> Result<tonic::Response<VerifyMessageResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } }
use std::sync::Arc; pub use super::sensei::node_server::{Node, NodeServer}; use super::{ sensei::{ CloseChannelRequest, CloseChannelResponse, ConnectPeerRequest, ConnectPeerResponse, CreateInvoiceRequest, CreateInvoiceResponse, DecodeInvoiceRequest, DecodeInvoiceResponse, DeletePaymentRequest, DeletePaymentResponse, GetBalanceRequest, GetBalanceResponse, GetUnusedAddressRequest, GetUnusedAddressResponse, InfoRequest, InfoResponse, KeysendRequest, KeysendResponse, LabelPaymentRequest, LabelPaymentResponse, ListChannelsRequest, ListChannelsResponse, ListPaymentsRequest, ListPaymentsResponse, ListPeersRequest, ListPeersResponse, OpenChannelRequest, OpenChannelResponse, PayInvoiceRequest, PayInvoiceResponse, SignMessageRequest, SignMessageResponse, StartNodeRequest, StartNodeResponse, StopNodeRequest, StopNodeResponse, VerifyMessageRequest, VerifyMessageResponse, }, utils::raw_macaroon_from_metadata, }; use crate::{ services::{ admin::AdminRequest, node::{NodeRequest, NodeResponse}, }, utils, }; use tonic::{metadata::MetadataMap, Response, Status}; pub struct NodeService { pub request_context: Arc<crate::RequestContext>, } impl NodeService { async fn authenticated_request( &self, metadata: MetadataMap, request: NodeRequest, ) -> Result<NodeResponse, tonic::Status> { let macaroon_hex_string = raw_macaroon_from_metadata(metadata)?; let (macaroon, session) = utils::macaroon_with_session_from_hex_str(&macaroon_hex_string) .map_err(|_e| tonic::Status::unauthenticated("invalid macaroon"))?; let pubkey = session.pubkey.clone(); let node_directory = self.request_context.node_directory.lock().await; match node_directory.get(&session.pubkey) { Some(handle) => { handle .node .verify_macaroon(macaroon, session) .await .map_err(|_e| Status::unauthenticated("invalid macaroon: failed to verify"))?;
} None => match request { NodeRequest::StartNode { passphrase } => { drop(node_directory); let admin_request = AdminRequest::StartNode { passphrase, pubkey: session.pubkey, }; let _ = self .request_context .admin_service .call(admin_request) .await .map_err(|_e| { Status::unauthenticated( "failed to start node, likely invalid passphrase", ) })?; Ok(NodeResponse::StartNode {}) } _ => Err(Status::not_found("node with that pubkey not found")), }, } } } #[tonic::async_trait] impl Node for NodeService { async fn start_node( &self, request: tonic::Request<StartNodeRequest>, ) -> Result<tonic::Response<StartNodeResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn stop_node( &self, request: tonic::Request<StopNodeRequest>, ) -> Result<tonic::Response<StopNodeResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn get_unused_address( &self, request: tonic::Request<GetUnusedAddressRequest>, ) -> Result<tonic::Response<GetUnusedAddressResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn get_balance( &self, request: tonic::Request<GetBalanceRequest>, ) -> Result<tonic::Response<GetBalanceResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn open_channel( &self, request: tonic::Request<OpenChannelRequest>, ) -> Result<tonic::Response<OpenChannelResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? 
.try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn pay_invoice( &self, request: tonic::Request<PayInvoiceRequest>, ) -> Result<tonic::Response<PayInvoiceResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn decode_invoice( &self, request: tonic::Request<DecodeInvoiceRequest>, ) -> Result<tonic::Response<DecodeInvoiceResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn keysend( &self, request: tonic::Request<KeysendRequest>, ) -> Result<tonic::Response<KeysendResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn create_invoice( &self, request: tonic::Request<CreateInvoiceRequest>, ) -> Result<tonic::Response<CreateInvoiceResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn label_payment( &self, request: tonic::Request<LabelPaymentRequest>, ) -> Result<tonic::Response<LabelPaymentResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn delete_payment( &self, request: tonic::Request<DeletePaymentRequest>, ) -> Result<tonic::Response<DeletePaymentResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? 
.try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn connect_peer( &self, request: tonic::Request<ConnectPeerRequest>, ) -> Result<tonic::Response<ConnectPeerResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn list_channels( &self, request: tonic::Request<ListChannelsRequest>, ) -> Result<tonic::Response<ListChannelsResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn list_payments( &self, request: tonic::Request<ListPaymentsRequest>, ) -> Result<tonic::Response<ListPaymentsResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn close_channel( &self, request: tonic::Request<CloseChannelRequest>, ) -> Result<tonic::Response<CloseChannelResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn info( &self, request: tonic::Request<InfoRequest>, ) -> Result<tonic::Response<InfoResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn list_peers( &self, request: tonic::Request<ListPeersRequest>, ) -> Result<tonic::Response<ListPeersResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? 
.try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn sign_message( &self, request: tonic::Request<SignMessageRequest>, ) -> Result<tonic::Response<SignMessageResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } async fn verify_message( &self, request: tonic::Request<VerifyMessageRequest>, ) -> Result<tonic::Response<VerifyMessageResponse>, tonic::Status> { self.authenticated_request(request.metadata().clone(), request.into_inner().into()) .await? .try_into() .map(Response::new) .map_err(|_e| Status::unknown("unknown error")) } }
match request { NodeRequest::StopNode {} => { drop(node_directory); let admin_request = AdminRequest::StopNode { pubkey }; let _ = self .request_context .admin_service .call(admin_request) .await .map_err(|_e| Status::unknown("failed to stop node"))?; Ok(NodeResponse::StopNode {}) } _ => handle .node .call(request) .await .map_err(|_e| Status::unknown("error")), }
if_condition
[]
Rust
mesatee_services/fns/sgx_trusted_lib/src/trusted_worker/private_join_and_compute.rs
hshshjzsami/incubator-teaclave
1a671e6e9fdb1f1bc2e1b4804ac2e516409bae63
#[cfg(feature = "mesalock_sgx")] use std::prelude::v1::*; use std::collections::HashMap; use std::fmt::Write; use crate::worker::{FunctionType, Worker, WorkerContext}; use mesatee_core::{Error, ErrorKind, Result}; pub struct PrivateJoinAndComputeWorker { worker_id: u32, func_name: String, func_type: FunctionType, input: Option<PrivateJoinAndComputeWorkerInput>, } struct PrivateJoinAndComputeWorkerInput { file_list: Vec<String>, } impl PrivateJoinAndComputeWorker { pub fn new() -> Self { PrivateJoinAndComputeWorker { worker_id: 0, func_name: "private_join_and_compute".to_string(), func_type: FunctionType::Multiparty, input: None, } } } impl Worker for PrivateJoinAndComputeWorker { fn function_name(&self) -> &str { self.func_name.as_str() } fn function_type(&self) -> FunctionType { self.func_type } fn set_id(&mut self, worker_id: u32) { self.worker_id = worker_id; } fn id(&self) -> u32 { self.worker_id } fn prepare_input( &mut self, _dynamic_input: Option<String>, file_ids: Vec<String>, ) -> Result<()> { if file_ids.len() < 2 { return Err(Error::from(ErrorKind::InvalidInputError)); } self.input = Some(PrivateJoinAndComputeWorkerInput { file_list: file_ids, }); Ok(()) } fn execute(&mut self, context: WorkerContext) -> Result<String> { let input = self .input .take() .ok_or_else(|| Error::from(ErrorKind::InvalidInputError))?; let mut counter_map: HashMap<String, usize> = HashMap::new(); let mut add_map: HashMap<String, u32> = HashMap::new(); let number = input.file_list.len(); for file_id in input.file_list.iter() { let plaintext = context.read_file(file_id)?; let records = parse_input(plaintext)?; for (identity, amount) in records.into_iter() { let value = counter_map.get(&identity).cloned().unwrap_or(0); counter_map.insert(identity.to_owned(), value + 1); let value = add_map.get(&identity).cloned().unwrap_or(0); add_map.insert(identity, value + amount); } } counter_map.retain(|_, &mut v| v == number); let mut output = String::new(); for (identity, amount) in 
add_map.into_iter() { if counter_map.contains_key(&identity) { writeln!(&mut output, "{} : {}", identity, amount) .map_err(|_| Error::from(ErrorKind::OutputGenerationError))?; } } let output_bytes = output.as_bytes().to_vec(); for file_id in input.file_list.iter() { let _result_file = context.save_file_for_file_owner(&output_bytes, file_id)?; } Ok("Finished".to_string()) } } fn parse_input(data: Vec<u8>) -> Result<HashMap<String, u32>> { let data_list = String::from_utf8(data).map_err(|_| Error::from(ErrorKind::InvalidInputError))?; let mut ret: HashMap<String, u32> = HashMap::new(); for data_item in data_list.split('\n') { let pair = data_item.trim(); if pair.len() < 3 { continue; } let kv_pair: Vec<&str> = pair.split(':').collect(); if kv_pair.len() != 2 { continue; } let identity = kv_pair[0].trim().to_string(); let amount = match kv_pair[1].trim().parse::<u32>() { Ok(amount) => amount, Err(_) => continue, }; ret.insert(identity, amount); } Ok(ret) }
#[cfg(feature = "mesalock_sgx")] use std::prelude::v1::*; use std::collections::HashMap; use std::fmt::Write; use crate::worker::{FunctionType, Worker, WorkerContext}; use mesatee_core::{Error, ErrorKind, Result}; pub struct PrivateJoinAndComputeWorker { worker_id: u32, func_name: String, func_type: FunctionType, input: Option<PrivateJoinAndComputeWorkerInput>, } struct PrivateJoinAndComputeWorkerInput { file_list: Vec<String>, } impl PrivateJoinAndComputeWorker { pub fn new() -> Self { PrivateJoinAndComputeWorker { worker_id: 0, func_name: "private_join_and_compute".to_string(), func_type: FunctionType::Multiparty, input: None, } } } impl Worker for PrivateJoinAndComputeWorker { fn function_name(&self) -> &str { self.func_name.as_str() } fn function_type(&self) -> FunctionType { self.func_type } fn set_id(&mut self, worker_id: u32) { self.worker_id = worker_id; } fn id(&self) -> u32 { self.worker_id } fn prepare_input( &mut self, _dynamic_input: Option<String>, file_ids: Vec<String>, ) -> Result<()> { if file_ids.len() < 2 { return Err(Error::from(ErrorKind::InvalidInputError)); } self.input = Some(PrivateJoinAndComputeWorkerInput { file_list: file_ids, }); Ok(()) } fn execute(&mut self, context: WorkerContext) -> Result<String> { let input = self .input .take() .ok_or_else(|| Error::from(ErrorKind::InvalidInputError))?; let mut counter_map: HashMap<String, usize> = HashMap::new(); let mut add_map: HashMap<String, u32> = HashMap::new(); let number = input.file_list.len(); for file_id in input.file_list.iter() { let plaintext = context.read_file(file_id)?; let records = parse_input(plaintext)?; for (identity, amount) in records.into_iter() { let value = counter_map.get(&identity).cloned().unwrap_or(0); counter_map.insert(identity.to_owned(), value + 1); let value = add_map.get(&identity).cloned().unwrap_or(0); add_map.insert(identity, value + amount); } } counter_map.retain(|_, &mut v| v == number); let mut output = String::new(); for (identity, amount) in 
add_map.into_iter() { if counter_map.contains_key(&identity) { writeln!(&mut output, "{} : {}", identity, amount) .map_err(|_| Error::from(ErrorKind::OutputGenerationError))?; } } let output_bytes = output.as_bytes().to_vec(); for file_id in input.file_list.iter() { let _result_file = context.save_file_for_file_owner(&output_bytes, file_id)?; } Ok("Finished".to_string()) } } fn parse_input(data: Vec<u8>) -> Result<HashMap<String, u32>> { let data_list = String::from_utf8(data).map_err(|_| Error::from(ErrorKind::InvalidInputError))?; let mut ret: HashMap<String, u32> = HashMap::new(); for data_item in data_list.split('\n') { let pair = data_item.trim(); if pair.len() < 3 { continue; } let kv_pair: Vec<&str> = pair.sp
lit(':').collect(); if kv_pair.len() != 2 { continue; } let identity = kv_pair[0].trim().to_string(); let amount = match kv_pair[1].trim().parse::<u32>() { Ok(amount) => amount, Err(_) => continue, }; ret.insert(identity, amount); } Ok(ret) }
function_block-function_prefixed
[ { "content": "pub fn percent_decode(orig: &str) -> Result<String> {\n\n let orig = orig.replace(\"%0A\", \"\");\n\n let v: Vec<&str> = orig.split('%').collect();\n\n let mut ret = String::new();\n\n ret.push_str(v[0]);\n\n if v.len() > 1 {\n\n for s in v[1..].iter() {\n\n let di...
Rust
src/main.rs
mentaljam/qgsrepo
d19c60ac2e755d0d4bdce8af37908d5d54678301
extern crate zip; extern crate ini; extern crate xml; mod config; mod qgsmeta; use std::path::PathBuf; use std::fs; use zip::ZipArchive; use ini::Ini; use std::io::Read; use xml::writer; use qgsmeta::{ MetaEntries, metakey, xmlkey }; #[derive(Debug)] enum ExitCodes { Success = 0, NoRootDir, NoOutDir, FileExists, NoIconsDir } macro_rules! exit_with_code { ($code:path) => (std::process::exit($code as i32)) } macro_rules! write_url { ($writer:ident, $cfg:ident, $zip_name:ident) => { let url_tag = writer::XmlEvent::start_element(qgsmeta::xmlkey(&MetaEntries::DownloadUrl)); let mut url = $cfg.repourl.clone(); url.push('/'); url.push_str($zip_name.to_str().unwrap()); let url_text = writer::XmlEvent::characters(url.as_str()); $writer.write(url_tag).unwrap(); $writer.write(url_text).unwrap(); $writer.write(writer::XmlEvent::end_element()).unwrap(); } } macro_rules! write_file { ($writer:ident, $file_name:ident) => { let file_tag = writer::XmlEvent::start_element(qgsmeta::xmlkey(&MetaEntries::FileName)); let file_name = writer::XmlEvent::characters($file_name.as_str()); $writer.write(file_tag).unwrap(); $writer.write(file_name).unwrap(); $writer.write(writer::XmlEvent::end_element()).unwrap(); } } macro_rules! write_icon { ($writer:ident, $cfg:ident, $icon_tag_name:ident, $icon_name:ident) => { let icon_tag = writer::XmlEvent::start_element($icon_tag_name); let mut icon_path = $cfg.iconsdir.clone(); icon_path.push('/'); icon_path.push_str($icon_name.to_str().unwrap()); let icon_text = writer::XmlEvent::characters(icon_path.as_str()); $writer.write(icon_tag).unwrap(); $writer.write(icon_text).unwrap(); $writer.write(writer::XmlEvent::end_element()).unwrap(); } } macro_rules! 
write_entries { ($writer:ident, $section:ident, $entries:ident, $func:path) => { for entry in &$entries { match $section.get(metakey(&entry)) { Some(value) => if !value.is_empty() { let tag = writer::XmlEvent::start_element(qgsmeta::xmlkey(&entry)); $writer.write(tag).unwrap(); $writer.write($func(value)).unwrap(); $writer.write(writer::XmlEvent::end_element()).unwrap(); }, None => () } } } } fn main() { let mut cfg = config::Config::new(); cfg.parse_args(); let root = PathBuf::from(&cfg.reporoot); if !root.is_dir() { println!("Error: the root directory does not exist: \"{:?}\"", root); exit_with_code!(ExitCodes::NoRootDir); } let outpath = { if cfg.outname == "plugins.xml" { root.join(&cfg.outname) } else { let file = PathBuf::from(&cfg.outname); { let dir = file.parent().unwrap(); if !dir.is_dir() { println!("Error: the output file directory does not exist: \"{:?}\"", dir); exit_with_code!(ExitCodes::NoOutDir); } } file } }; if !cfg.force && outpath.is_file() { println!("Error: the output file already exists. 
Run with the -f option to overwrite: {:?}", outpath); exit_with_code!(ExitCodes::FileExists); } let iconsdir = root.join(&cfg.iconsdir); if cfg.withicons && !iconsdir.is_dir() { println!("Error: the icon directory does not exist: {:?}", iconsdir); exit_with_code!(ExitCodes::NoIconsDir); } let mut outfile = fs::File::create(outpath).unwrap(); let mut xmlwriter = writer::EmitterConfig::new().perform_indent(true).create_writer(&mut outfile); { let plugins = writer::XmlEvent::start_element("plugins"); xmlwriter.write(plugins).unwrap(); } let attr_entries = attr_entries!(); let text_entries = text_entries!(); let cdata_entries = cdata_entries!(); let entries = fs::read_dir(root).unwrap(); let mut icons = Vec::new(); for entry in entries { let path = entry.unwrap().path(); if !path.is_file() || path.extension().unwrap() != "zip" { continue } let zipfile = fs::File::open(&path).unwrap(); let zipname = path.file_name().unwrap(); println!("Processing: {:?}", zipname); let mut zipreader = ZipArchive::new(&zipfile).unwrap(); let plugin_dir = match zipreader.by_index(0) { Result::Ok(zipentry) => { let entry_path = PathBuf::from(zipentry.name()); let mut path_comps = entry_path.iter(); path_comps.next().unwrap().to_string_lossy().into_owned() }, Result::Err(err) => { println!("Warning: could not read zip, skipping: {}", err); continue }, }; let metadata_text = { let metadata_path = format!("{}/metadata.txt", plugin_dir); match zipreader.by_name(metadata_path.as_str()) { Result::Ok(mut metadata) => { let mut md = String::new(); metadata.read_to_string(&mut md).unwrap(); md.push_str("\ndummy=dummy"); md }, Result::Err(err) => { println!("Warning: could not read the \"metadata.txt\", skipping: {}", err); continue } } }; let metadata = match Ini::load_from_str(metadata_text.as_str()) { Result::Ok(metadata) => { metadata }, Result::Err(err) => { println!("Warning: could not parse plugin metadata, skipping: {:?}", err); continue } }; let general = match 
metadata.section(Some("general".to_owned())) { Some(section) => section, None => { println!("Warning: metadata file does not contain the \"general\" section, skipping"); continue } }; if cfg.strict { let mut ok = true; for entry in required_entries!() { let key = metakey(&entry); if !general.contains_key(key) { println!("Warning: strict check - metadata file does not contain the \"{}\" entry", key); ok = false; break } } if !ok { println!("Warning: strict check - skipping plugin due to bad metadata"); continue } } { let mut pyqgis_plugin = writer::XmlEvent::start_element("pyqgis_plugin"); let mut ok = true; for attr in &attr_entries { let key = metakey(&attr); match general.get(key) { Some(value) => pyqgis_plugin = pyqgis_plugin.attr(xmlkey(&attr), value), None => { println!("Warning: metadata file does not contain the required \"{}\" entry", key); ok = false; } } } if ok { xmlwriter.write(pyqgis_plugin).unwrap(); } else { println!("Warning: skipping plugin due to bad metadata"); continue } } write_url!(xmlwriter, cfg, zipname); write_file!(xmlwriter, plugin_dir); write_entries!(xmlwriter, general, text_entries, writer::XmlEvent::characters); write_entries!(xmlwriter, general, cdata_entries, writer::XmlEvent::cdata); if cfg.withicons { let icon_tag_name = metakey(&MetaEntries::Icon); let zipicon = match general.get(icon_tag_name) { Some(zipicon) => zipicon, None => { xmlwriter.write(writer::XmlEvent::end_element()).unwrap(); continue } }; let zipicon_path = PathBuf::from(format!("{}/{}", plugin_dir, zipicon)); let ext = zipicon_path.extension().unwrap(); let icon_name = { let mut icon_name = PathBuf::from(&zipname); icon_name.set_extension(&ext); icon_name.to_owned() }; icons.push(icon_name.as_os_str().to_owned()); write_icon!(xmlwriter, cfg, icon_tag_name, icon_name); let icon_path = iconsdir.join(&icon_name); if icon_path.exists() { xmlwriter.write(writer::XmlEvent::end_element()).unwrap(); continue } match zipreader.by_name(zipicon_path.to_str().unwrap()) { 
Result::Ok(icon) => { let mut icon_reader = icon; let mut icon_writer = fs::File::create(&icon_path).unwrap(); match std::io::copy(&mut icon_reader, &mut icon_writer) { Result::Err(err) => println!("Warning: could not extract plugin icon: {:?} - {}", zipicon_path, err), _ => () } }, Result::Err(err) => println!("Warning: could not read plugin icon: {:?} - {}", zipicon_path, err) } } xmlwriter.write(writer::XmlEvent::end_element()).unwrap(); } xmlwriter.write(writer::XmlEvent::end_element()).unwrap(); if cfg.withicons { println!("Removing obsolete icons"); let allicons = fs::read_dir(iconsdir).unwrap(); for entry in allicons { let entry_reader = entry.unwrap(); let entryname = &entry_reader.file_name(); let mut remove = true; for icon in &icons { if entryname == icon { remove = false; break } } if remove { match fs::remove_file(entry_reader.path()) { Result::Ok(_) => println!("{:?}", entryname), Result::Err(err) => println!("Warning: could not remove obsolete icon: {}", err), } } } } exit_with_code!(ExitCodes::Success); }
extern crate zip; extern crate ini; extern crate xml; mod config; mod qgsmeta; use std::path::PathBuf; use std::fs; use zip::ZipArchive; use ini::Ini; use std::io::Read; use xml::writer; use qgsmeta::{ MetaEntries, metakey, xmlkey }; #[derive(Debug)] enum ExitCodes { Success = 0, NoRootDir, NoOutDir, FileExists, NoIconsDir } macro_rules! exit_with_code { ($code:path) => (std::process::exit($code as i32)) } macro_rules! write_url { ($writer:ident, $cfg:ident, $zip_name:ident) => { let url_tag = writer::XmlEvent::start_element(qgsmeta::xmlkey(&MetaEntries::DownloadUrl)); let mut url = $cfg.repourl.clone(); url.push('/'); url.push_str($zip_name.to_str().unwrap()); let url_text = writer::XmlEvent::characters(url.as_str()); $writer.write(url_tag).unwrap(); $writer.write(url_text).unwrap(); $writer.write(writer::XmlEvent::end_element()).unwrap(); } } macro_rules! write_file { ($writer:ident, $file_name:ident) => { let file_tag = writer::XmlEvent::start_element(qgsmeta::xmlkey(&MetaEntries::FileName)); let file_name = writer::XmlEvent::characters($file_name.as_str()); $writer.write(file_tag).unwrap(); $writer.write(file_name).unwrap(); $writer.write(writer::XmlEvent::end_element()).unwrap(); } } macro_rules! write_icon { ($writer:ident, $cfg:ident, $icon_tag_name:ident, $icon_name:ident) => { let icon_tag = writer::XmlEvent::start_element($icon_tag_name); let mut icon_path = $cfg.iconsdir.clone(); icon_path.push('/'); icon_path.push_str($icon_name.to_str().unwrap()); let icon_text = writer::XmlEvent::characters(icon_path.as_str()); $writer.write(icon_tag).unwrap(); $writer.write(icon_text).unwrap(); $writer.write(writer::XmlEvent::end_element()).unwrap(); } } macro_rules! 
write_entries { ($writer:ident, $section:ident, $entries:ident, $func:path) => { for entry in &$entries { match $section.get(metakey(&entry)) { Some(value) => if !value.is_empty() { let tag = writer::XmlEvent::start_element(qgsmeta::xmlkey(&entry)); $writer.write(tag).unwrap(); $writer.write($func(value)).unwrap(); $writer.write(writer::XmlEvent::end_element()).unwrap(); }, None => () } } } } fn main() { let mut cfg = config::Config::new(); cfg.parse_args(); let root = PathBuf::from(&cfg.reporoot); if !root.is_dir() { println!("Error: the root directory does not exist: \"{:?}\"", root); exit_with_code!(ExitCodes::NoRootDir); } let outpath = { if cfg.outname == "plugins.xml" { root.join(&cfg.outname) } else { let file = PathBuf::from(&cfg.outname); { let dir = file.parent().unwrap(); if !dir.is_dir() { println!("Error: the output file directory does not exist: \"{:?}\"", dir); exit_with_code!(ExitCodes::NoOutDir); } } file } }; if !cfg.force && outpath.is_file() { println!("Error: the output file already exists. 
Run with the -f option to overwrite: {:?}", outpath); exit_with_code!(ExitCodes::FileExists); } let iconsdir = root.join(&cfg.iconsdir); if cfg.withicons && !iconsdir.is_dir() { println!("Error: the icon directory does not exist: {:?}", iconsdir); exit_with_code!(ExitCodes::NoIconsDir); } let mut outfile = fs::File::create(outpath).unwrap(); let mut xmlwriter = writer::EmitterConfig::new().perform_indent(true).create_writer(&mut outfile); { let plugins = writer::XmlEvent::start_element("plugins"); xmlwriter.write(plugins).unwrap(); } let attr_entries = attr_entries!(); let text_entries = text_entries!(); let cdata_entries = cdata_entries!(); let entries = fs::read_dir(root).unwrap(); let mut icons = Vec::new(); for entry in entries { let path = entry.unwrap().path(); if !path.is_file() || path.extension().unwrap() != "zip" { continue } let zipfile = fs::File::open(&path).unwrap(); let zipname = path.file_name().unwrap(); println!("Processing: {:?}", zipname); let mut zipreader = ZipArchive::new(&zipfile).unwrap(); let plugin_dir = match zipreader.by_index(0) { Result::Ok(zipentry) => { let entry_path = PathBuf::from(zipentry.name()); let mut path_comps = entry_path.iter(); path_comps.next().unwrap().to_string_lossy().into_owned() }, Result::Err(err) => { println!("Warning: could not read zip, skipping: {}", err); continue }, }; let metadata_text = { let metadata_path = format!("{}/metadata.txt", plugin_dir); match zipreader.by_name(metadata_path.as_str()) { Result::Ok(mut metadata) => { let mut md = String::new(); metadata.read_to_string(&mut md).unwrap();
md.push_str("\ndummy=dummy"); md }, Result::Err(err) => { println!("Warning: could not read the \"metadata.txt\", skipping: {}", err); continue } } }; let metadata = match Ini::load_from_str(metadata_text.as_str()) { Result::Ok(metadata) => { metadata }, Result::Err(err) => { println!("Warning: could not parse plugin metadata, skipping: {:?}", err); continue } }; let general = match metadata.section(Some("general".to_owned())) { Some(section) => section, None => { println!("Warning: metadata file does not contain the \"general\" section, skipping"); continue } }; if cfg.strict { let mut ok = true; for entry in required_entries!() { let key = metakey(&entry); if !general.contains_key(key) { println!("Warning: strict check - metadata file does not contain the \"{}\" entry", key); ok = false; break } } if !ok { println!("Warning: strict check - skipping plugin due to bad metadata"); continue } } { let mut pyqgis_plugin = writer::XmlEvent::start_element("pyqgis_plugin"); let mut ok = true; for attr in &attr_entries { let key = metakey(&attr); match general.get(key) { Some(value) => pyqgis_plugin = pyqgis_plugin.attr(xmlkey(&attr), value), None => { println!("Warning: metadata file does not contain the required \"{}\" entry", key); ok = false; } } } if ok { xmlwriter.write(pyqgis_plugin).unwrap(); } else { println!("Warning: skipping plugin due to bad metadata"); continue } } write_url!(xmlwriter, cfg, zipname); write_file!(xmlwriter, plugin_dir); write_entries!(xmlwriter, general, text_entries, writer::XmlEvent::characters); write_entries!(xmlwriter, general, cdata_entries, writer::XmlEvent::cdata); if cfg.withicons { let icon_tag_name = metakey(&MetaEntries::Icon); let zipicon = match general.get(icon_tag_name) { Some(zipicon) => zipicon, None => { xmlwriter.write(writer::XmlEvent::end_element()).unwrap(); continue } }; let zipicon_path = PathBuf::from(format!("{}/{}", plugin_dir, zipicon)); let ext = zipicon_path.extension().unwrap(); let icon_name = { let mut 
icon_name = PathBuf::from(&zipname); icon_name.set_extension(&ext); icon_name.to_owned() }; icons.push(icon_name.as_os_str().to_owned()); write_icon!(xmlwriter, cfg, icon_tag_name, icon_name); let icon_path = iconsdir.join(&icon_name); if icon_path.exists() { xmlwriter.write(writer::XmlEvent::end_element()).unwrap(); continue } match zipreader.by_name(zipicon_path.to_str().unwrap()) { Result::Ok(icon) => { let mut icon_reader = icon; let mut icon_writer = fs::File::create(&icon_path).unwrap(); match std::io::copy(&mut icon_reader, &mut icon_writer) { Result::Err(err) => println!("Warning: could not extract plugin icon: {:?} - {}", zipicon_path, err), _ => () } }, Result::Err(err) => println!("Warning: could not read plugin icon: {:?} - {}", zipicon_path, err) } } xmlwriter.write(writer::XmlEvent::end_element()).unwrap(); } xmlwriter.write(writer::XmlEvent::end_element()).unwrap(); if cfg.withicons { println!("Removing obsolete icons"); let allicons = fs::read_dir(iconsdir).unwrap(); for entry in allicons { let entry_reader = entry.unwrap(); let entryname = &entry_reader.file_name(); let mut remove = true; for icon in &icons { if entryname == icon { remove = false; break } } if remove { match fs::remove_file(entry_reader.path()) { Result::Ok(_) => println!("{:?}", entryname), Result::Err(err) => println!("Warning: could not remove obsolete icon: {}", err), } } } } exit_with_code!(ExitCodes::Success); }
function_block-function_prefix_line
[ { "content": "pub fn xmlkey(entry: &MetaEntries) -> &'static str {\n\n match entry {\n\n &MetaEntries::QgisMinimumVersion => \"qgis_minimum_version\",\n\n &MetaEntries::QgisMaximumVersion => \"qgis_maximum_version\",\n\n &MetaEntries::Author => \"author_name\",\n\n &Me...
Rust
src/net/raw/arp.rs
Zrus/arrow-client
61ead64b10cf8bba451d6798abacd58ce89e3233
use std::io; use std::mem; use std::io::Write; use std::net::Ipv4Addr; use crate::utils; use crate::net::raw::ether::packet::{EtherPacketBody, PacketParseError, Result}; use crate::net::raw::ether::MacAddr; use crate::net::raw::utils::Serialize; #[derive(Debug, Clone)] pub struct ArpPacket { pub htype: u16, pub ptype: u16, pub hlen: u8, pub plen: u8, pub oper: ArpOperation, pub sha: Box<[u8]>, pub spa: Box<[u8]>, pub tha: Box<[u8]>, pub tpa: Box<[u8]>, } #[allow(clippy::upper_case_acronyms)] #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub enum ArpOperation { REQUEST, REPLY, UNKNOWN(u16), } impl ArpOperation { pub fn code(self) -> u16 { match self { Self::REQUEST => 1, Self::REPLY => 2, Self::UNKNOWN(op) => op, } } } impl From<u16> for ArpOperation { fn from(v: u16) -> Self { match v { 1 => Self::REQUEST, 2 => Self::REPLY, op => Self::UNKNOWN(op), } } } const ARP_HTYPE_EHER: u16 = 0x0001; const ARP_PTYPE_IPV4: u16 = 0x0800; impl ArpPacket { pub fn ipv4_over_ethernet( oper: ArpOperation, sha: MacAddr, spa: Ipv4Addr, tha: MacAddr, tpa: Ipv4Addr, ) -> Self { Self { htype: ARP_HTYPE_EHER, ptype: ARP_PTYPE_IPV4, hlen: 6, plen: 4, oper, sha: sha.octets().to_vec().into_boxed_slice(), spa: spa.octets().to_vec().into_boxed_slice(), tha: tha.octets().to_vec().into_boxed_slice(), tpa: tpa.octets().to_vec().into_boxed_slice(), } } pub fn parse(data: &[u8]) -> Result<Self> { let size = mem::size_of::<RawArpPacketHeader>(); if data.len() < size { Err(PacketParseError::new( "unable to parse ARP packet, not enough data", )) } else { let ptr = data.as_ptr(); let ptr = ptr as *const RawArpPacketHeader; let rh = unsafe { &*ptr }; let hlen = rh.hlen as usize; let plen = rh.plen as usize; let required = size + (hlen << 1) + (plen << 1); if data.len() < required { Err(PacketParseError::new( "unable to parse ARP packet, not enough data", )) } else { let offset_1 = size; let offset_2 = offset_1 + hlen; let offset_3 = offset_2 + plen; let offset_4 = offset_3 + hlen; let sha = 
&data[offset_1..offset_1 + hlen]; let spa = &data[offset_2..offset_2 + plen]; let tha = &data[offset_3..offset_3 + hlen]; let tpa = &data[offset_4..offset_4 + plen]; let res = Self { htype: u16::from_be(rh.htype), ptype: u16::from_be(rh.ptype), hlen: rh.hlen, plen: rh.plen, oper: ArpOperation::from(u16::from_be(rh.oper)), sha: sha.to_vec().into_boxed_slice(), spa: spa.to_vec().into_boxed_slice(), tha: tha.to_vec().into_boxed_slice(), tpa: tpa.to_vec().into_boxed_slice(), }; Ok(res) } } } } impl Serialize for ArpPacket { fn serialize(&self, w: &mut dyn Write) -> io::Result<()> { let rh = RawArpPacketHeader::new(self); w.write_all(utils::as_bytes(&rh))?; w.write_all(&self.sha)?; w.write_all(&self.spa)?; w.write_all(&self.tha)?; w.write_all(&self.tpa)?; Ok(()) } } impl EtherPacketBody for ArpPacket {} #[repr(packed)] struct RawArpPacketHeader { htype: u16, ptype: u16, hlen: u8, plen: u8, oper: u16, } impl RawArpPacketHeader { fn new(arp: &ArpPacket) -> Self { let operation = arp.oper.code(); Self { htype: arp.htype.to_be(), ptype: arp.ptype.to_be(), hlen: arp.hlen, plen: arp.plen, oper: operation.to_be(), } } } pub mod scanner { use super::*; use std::net::Ipv4Addr; use std::time::Duration; use bytes::Bytes; use crate::net::raw::pcap; use crate::net::raw::devices::EthernetDevice; use crate::net::raw::ether::packet::EtherPacket; use crate::net::raw::ether::MacAddr; use crate::net::raw::pcap::Scanner; use crate::net::raw::utils::Serialize; use crate::net::utils::Ipv4AddrEx; pub struct Ipv4ArpScanner { device: EthernetDevice, scanner: Scanner, } impl Ipv4ArpScanner { pub fn scan_device(device: &EthernetDevice) -> pcap::Result<Vec<(MacAddr, Ipv4Addr)>> { Self::new(device).scan() } fn new(device: &EthernetDevice) -> Self { Self { device: device.clone(), scanner: Scanner::new(&device.name), } } fn scan(&mut self) -> pcap::Result<Vec<(MacAddr, Ipv4Addr)>> { let bcast = MacAddr::new(0xff, 0xff, 0xff, 0xff, 0xff, 0xff); let hdst = MacAddr::new(0x00, 0x00, 0x00, 0x00, 0x00, 
0x00); let hsrc = self.device.mac_addr; let psrc = self.device.ip_addr; let mask = self.device.netmask.as_u32(); let addr = self.device.ip_addr.as_u32(); let end = addr | !mask; let mut current = (addr & mask) + 1; let mut buffer = Vec::new(); let mut generator = move || { if current < end { let pdst = Ipv4Addr::from(current); let arpp = ArpPacket::ipv4_over_ethernet( ArpOperation::REQUEST, hsrc, psrc, hdst, pdst, ); let pkt = EtherPacket::arp(hsrc, bcast, arpp); buffer.clear(); pkt.serialize(&mut buffer).unwrap(); current += 1; let pkt = Bytes::copy_from_slice(&buffer); Some(pkt) } else { None } }; let filter = format!("arp and ether dst {}", self.device.mac_addr); let packets = self.scanner.sr( &filter, &mut generator, Duration::from_secs(2), Some(Duration::from_secs(20)), )?; let mut hosts = Vec::new(); for ep in packets { if let Some(arp) = ep.body::<ArpPacket>() { let sha = MacAddr::from_slice(arp.sha.as_ref()); let spa = Ipv4Addr::from_slice(arp.spa.as_ref()); hosts.push((sha, spa)); } } Ok(hosts) } } } #[cfg(test)] mod tests { use super::*; use std::net::Ipv4Addr; use crate::net::raw::ether::packet::EtherPacket; use crate::net::raw::ether::MacAddr; use crate::net::raw::utils::Serialize; #[test] fn test_arp_packet() { let sip = Ipv4Addr::new(192, 168, 3, 7); let smac = MacAddr::new(1, 2, 3, 4, 5, 6); let dip = Ipv4Addr::new(192, 168, 8, 1); let dmac = MacAddr::new(6, 5, 4, 3, 2, 1); let arp = ArpPacket::ipv4_over_ethernet(ArpOperation::REQUEST, smac, sip, dmac, dip); let pkt = EtherPacket::arp(smac, dmac, arp); let mut buf = Vec::new(); pkt.serialize(&mut buf).unwrap(); let ep2 = EtherPacket::parse(buf.as_ref()).unwrap(); let arpp1 = pkt.body::<ArpPacket>().unwrap(); let arpp2 = ep2.body::<ArpPacket>().unwrap(); assert_eq!(arpp1.htype, arpp2.htype); assert_eq!(arpp1.ptype, arpp2.ptype); assert_eq!(arpp1.hlen, arpp2.hlen); assert_eq!(arpp1.plen, arpp2.plen); assert_eq!(arpp1.oper, arpp2.oper); assert_eq!(arpp1.sha, arpp2.sha); assert_eq!(arpp1.spa, arpp2.spa); 
assert_eq!(arpp1.tha, arpp2.tha); assert_eq!(arpp1.tpa, arpp2.tpa); } }
use std::io; use std::mem; use std::io::Write; use std::net::Ipv4Addr; use crate::utils; use crate::net::raw::ether::packet::{EtherPacketBody, PacketParseError, Result}; use crate::net::raw::ether::MacAddr; use crate::net::raw::utils::Serialize; #[derive(Debug, Clone)] pub struct ArpPacket { pub htype: u16, pub ptype: u16, pub hlen: u8, pub plen: u8, pub oper: ArpOperation, pub sha: Box<[u8]>, pub spa: Box<[u8]>, pub tha: Box<[u8]>, pub tpa: Box<[u8]>, } #[allow(clippy::upper_case_acronyms)] #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub enum ArpOperation { REQUEST, REPLY, UNKNOWN(u16), } impl ArpOperation { pub fn code(self) -> u16 { match self { Self::REQUEST => 1, Self::REPLY => 2, Self::UNKNOWN(op) => op, } } } impl From<u16> for ArpOperation { fn from(v: u16) -> Self { match v { 1 => Self::REQUEST, 2 => Self::REPLY, op => Self::UNKNOWN(op), } } } const ARP_HTYPE_EHER: u16 = 0x0001; const ARP_PTYPE_IPV4: u16 = 0x0800; impl ArpPacket { pub fn ipv4_over_ethernet( oper: ArpOperation, sha: MacAddr, spa: Ipv4Addr, tha: MacAddr, tpa: Ipv4Addr, ) -> Self { Self { htype: ARP_HTYPE_EHER, ptype: ARP_PTYPE_IPV4,
pub fn parse(data: &[u8]) -> Result<Self> { let size = mem::size_of::<RawArpPacketHeader>(); if data.len() < size { Err(PacketParseError::new( "unable to parse ARP packet, not enough data", )) } else { let ptr = data.as_ptr(); let ptr = ptr as *const RawArpPacketHeader; let rh = unsafe { &*ptr }; let hlen = rh.hlen as usize; let plen = rh.plen as usize; let required = size + (hlen << 1) + (plen << 1); if data.len() < required { Err(PacketParseError::new( "unable to parse ARP packet, not enough data", )) } else { let offset_1 = size; let offset_2 = offset_1 + hlen; let offset_3 = offset_2 + plen; let offset_4 = offset_3 + hlen; let sha = &data[offset_1..offset_1 + hlen]; let spa = &data[offset_2..offset_2 + plen]; let tha = &data[offset_3..offset_3 + hlen]; let tpa = &data[offset_4..offset_4 + plen]; let res = Self { htype: u16::from_be(rh.htype), ptype: u16::from_be(rh.ptype), hlen: rh.hlen, plen: rh.plen, oper: ArpOperation::from(u16::from_be(rh.oper)), sha: sha.to_vec().into_boxed_slice(), spa: spa.to_vec().into_boxed_slice(), tha: tha.to_vec().into_boxed_slice(), tpa: tpa.to_vec().into_boxed_slice(), }; Ok(res) } } } } impl Serialize for ArpPacket { fn serialize(&self, w: &mut dyn Write) -> io::Result<()> { let rh = RawArpPacketHeader::new(self); w.write_all(utils::as_bytes(&rh))?; w.write_all(&self.sha)?; w.write_all(&self.spa)?; w.write_all(&self.tha)?; w.write_all(&self.tpa)?; Ok(()) } } impl EtherPacketBody for ArpPacket {} #[repr(packed)] struct RawArpPacketHeader { htype: u16, ptype: u16, hlen: u8, plen: u8, oper: u16, } impl RawArpPacketHeader { fn new(arp: &ArpPacket) -> Self { let operation = arp.oper.code(); Self { htype: arp.htype.to_be(), ptype: arp.ptype.to_be(), hlen: arp.hlen, plen: arp.plen, oper: operation.to_be(), } } } pub mod scanner { use super::*; use std::net::Ipv4Addr; use std::time::Duration; use bytes::Bytes; use crate::net::raw::pcap; use crate::net::raw::devices::EthernetDevice; use crate::net::raw::ether::packet::EtherPacket; use 
crate::net::raw::ether::MacAddr; use crate::net::raw::pcap::Scanner; use crate::net::raw::utils::Serialize; use crate::net::utils::Ipv4AddrEx; pub struct Ipv4ArpScanner { device: EthernetDevice, scanner: Scanner, } impl Ipv4ArpScanner { pub fn scan_device(device: &EthernetDevice) -> pcap::Result<Vec<(MacAddr, Ipv4Addr)>> { Self::new(device).scan() } fn new(device: &EthernetDevice) -> Self { Self { device: device.clone(), scanner: Scanner::new(&device.name), } } fn scan(&mut self) -> pcap::Result<Vec<(MacAddr, Ipv4Addr)>> { let bcast = MacAddr::new(0xff, 0xff, 0xff, 0xff, 0xff, 0xff); let hdst = MacAddr::new(0x00, 0x00, 0x00, 0x00, 0x00, 0x00); let hsrc = self.device.mac_addr; let psrc = self.device.ip_addr; let mask = self.device.netmask.as_u32(); let addr = self.device.ip_addr.as_u32(); let end = addr | !mask; let mut current = (addr & mask) + 1; let mut buffer = Vec::new(); let mut generator = move || { if current < end { let pdst = Ipv4Addr::from(current); let arpp = ArpPacket::ipv4_over_ethernet( ArpOperation::REQUEST, hsrc, psrc, hdst, pdst, ); let pkt = EtherPacket::arp(hsrc, bcast, arpp); buffer.clear(); pkt.serialize(&mut buffer).unwrap(); current += 1; let pkt = Bytes::copy_from_slice(&buffer); Some(pkt) } else { None } }; let filter = format!("arp and ether dst {}", self.device.mac_addr); let packets = self.scanner.sr( &filter, &mut generator, Duration::from_secs(2), Some(Duration::from_secs(20)), )?; let mut hosts = Vec::new(); for ep in packets { if let Some(arp) = ep.body::<ArpPacket>() { let sha = MacAddr::from_slice(arp.sha.as_ref()); let spa = Ipv4Addr::from_slice(arp.spa.as_ref()); hosts.push((sha, spa)); } } Ok(hosts) } } } #[cfg(test)] mod tests { use super::*; use std::net::Ipv4Addr; use crate::net::raw::ether::packet::EtherPacket; use crate::net::raw::ether::MacAddr; use crate::net::raw::utils::Serialize; #[test] fn test_arp_packet() { let sip = Ipv4Addr::new(192, 168, 3, 7); let smac = MacAddr::new(1, 2, 3, 4, 5, 6); let dip = 
Ipv4Addr::new(192, 168, 8, 1); let dmac = MacAddr::new(6, 5, 4, 3, 2, 1); let arp = ArpPacket::ipv4_over_ethernet(ArpOperation::REQUEST, smac, sip, dmac, dip); let pkt = EtherPacket::arp(smac, dmac, arp); let mut buf = Vec::new(); pkt.serialize(&mut buf).unwrap(); let ep2 = EtherPacket::parse(buf.as_ref()).unwrap(); let arpp1 = pkt.body::<ArpPacket>().unwrap(); let arpp2 = ep2.body::<ArpPacket>().unwrap(); assert_eq!(arpp1.htype, arpp2.htype); assert_eq!(arpp1.ptype, arpp2.ptype); assert_eq!(arpp1.hlen, arpp2.hlen); assert_eq!(arpp1.plen, arpp2.plen); assert_eq!(arpp1.oper, arpp2.oper); assert_eq!(arpp1.sha, arpp2.sha); assert_eq!(arpp1.spa, arpp2.spa); assert_eq!(arpp1.tha, arpp2.tha); assert_eq!(arpp1.tpa, arpp2.tpa); } }
hlen: 6, plen: 4, oper, sha: sha.octets().to_vec().into_boxed_slice(), spa: spa.octets().to_vec().into_boxed_slice(), tha: tha.octets().to_vec().into_boxed_slice(), tpa: tpa.octets().to_vec().into_boxed_slice(), } }
function_block-function_prefix_line
[ { "content": "/// Convert given 32-bit unsigned sum into 16-bit unsigned checksum.\n\npub fn sum_to_checksum(sum: u32) -> u16 {\n\n let mut checksum = sum;\n\n while (checksum & 0xffff_0000) != 0 {\n\n let hw = checksum >> 16;\n\n let lw = checksum & 0xffff;\n\n checksum = lw + hw;\n\...
Rust
crates/eosio_token/src/lib.rs
datudou/rust-eos
636073c21d2ab21af3f853fd09c907a3564a5a4e
use eosio::*; #[eosio_action] fn create(issuer: AccountName, max_supply: Asset) { let receiver = AccountName::receiver(); require_auth(receiver); let symbol = max_supply.symbol; eosio_assert(max_supply.amount > 0, "max-supply must be positive"); let symbol_name = symbol.name(); let table = CurrencyStats::table(receiver, symbol_name); eosio_assert( !table.exists(symbol_name), "token with symbol already existss", ); let stats = CurrencyStats { supply: Asset { amount: 0, symbol }, max_supply, issuer, }; table.emplace(receiver, &stats).assert("write"); } #[eosio_action] fn issue(to: AccountName, quantity: Asset, memo: String) { let receiver = AccountName::receiver(); let symbol = quantity.symbol; eosio_assert(memo.len() <= 256, "memo has more than 256 bytes"); let table = CurrencyStats::table(receiver, symbol.name()); let cursor = table .find(symbol.name()) .assert("token with symbol does not exist, create token before issue"); let mut st = cursor.get().assert("read"); require_auth(st.issuer); eosio_assert(quantity.amount > 0, "must issue positive quantity"); eosio_assert( quantity.symbol == st.supply.symbol, "symbol precision mismatch", ); eosio_assert( quantity.amount <= st.max_supply.amount - st.supply.amount, "quantity exceeds available supply", ); st.supply += quantity; cursor.modify(None, &st).assert("write"); add_balance(st.issuer, quantity, st.issuer); if to != st.issuer { let action = TransferAction { from: st.issuer, to, quantity, memo, }; action .send_inline(vec![Authorization { actor: st.issuer, permission: n!(active).into(), }]) .assert("failed to send inline action"); } } #[eosio_action] fn open(owner: AccountName, symbol: Symbol, ram_payer: AccountName) { require_auth(ram_payer); let receiver = AccountName::receiver(); let accounts_table = Account::table(receiver, symbol.name()); let cursor = accounts_table.find(symbol.name()); if cursor.is_none() { let account = Account { balance: Asset { amount: 0, symbol }, }; accounts_table.emplace(ram_payer, 
&account).assert("write"); } } #[eosio_action] fn close(owner: AccountName, symbol: Symbol) { require_auth(owner); let receiver = AccountName::receiver(); let accounts_table = Account::table(receiver, symbol.name()); let cursor = accounts_table .find(symbol.name()) .assert("Balance row already deleted or never existed. Action won't have any effect."); let account = cursor.get().assert("read"); eosio_assert( account.balance.amount == 0, "Cannot close because the balance is not zero.", ); cursor.erase().assert("read"); } #[eosio_action] fn retire(quantity: Asset, memo: String) { eosio_assert(memo.len() <= 256, "memo has more than 256 bytes"); let receiver = AccountName::receiver(); let symbol = quantity.symbol; let stats_table = CurrencyStats::table(receiver, symbol.name()); let cursor = stats_table .find(symbol.name()) .assert("token with symbol does not exist"); let mut st = cursor.get().assert("error reading stats table"); require_auth(st.issuer); eosio_assert(quantity.amount > 0, "must retire positive quantity"); eosio_assert( quantity.symbol == st.supply.symbol, "symbol precision mismatch", ); st.supply -= quantity; cursor.modify(None, &st).assert("write"); } #[eosio_action] fn transfer(from: AccountName, to: AccountName, quantity: Asset, memo: String) { eosio_assert(from != to, "cannot transfer to self"); require_auth(from); to.is_account().assert("to account does not exist"); let receiver = AccountName::receiver(); let symbol_name = quantity.symbol.name(); let stats_table = CurrencyStats::table(receiver, symbol_name); let cursor = stats_table .find(symbol_name) .assert("token with symbol does not exist"); let st = cursor.get().assert("read"); require_recipient(from); require_recipient(to); eosio_assert(quantity.amount > 0, "must transfer positive quantity"); eosio_assert( quantity.symbol == st.supply.symbol, "symbol precision mismatch", ); eosio_assert(memo.len() <= 256, "memo has more than 256 bytes"); let payer = if to.has_auth() { to } else { from }; 
sub_balance(from, quantity); add_balance(to, quantity, payer); } eosio_abi!(create, issue, transfer, open, close, retire); #[cfg(feature = "contract")] fn sub_balance(owner: AccountName, value: Asset) { let receiver = AccountName::receiver(); let table = Account::table(receiver, owner); let cursor = table .find(value.symbol.name()) .assert("no balance object found"); let mut account = cursor.get().assert("read"); account.balance -= value; cursor.modify(Some(owner), &account).assert("write"); } #[cfg(feature = "contract")] fn add_balance(owner: AccountName, value: Asset, ram_payer: AccountName) { let receiver = AccountName::receiver(); let accounts_table = Account::table(receiver, owner); let cursor = accounts_table.find(value.symbol.name()); match cursor { Some(cursor) => { let mut account = cursor.get().assert("read"); account.balance += value; cursor.modify(Some(ram_payer), &account).assert("write"); } None => { let account = Account { balance: value }; accounts_table.emplace(ram_payer, &account).assert("write"); } } } #[derive(Read, Write, NumBytes, Copy, Clone)] pub struct Account { balance: Asset, } #[cfg(feature = "contract")] impl TableRow for Account { const TABLE_NAME: u64 = n!(accounts); fn primary_key(&self) -> u64 { self.balance.symbol.name().into() } } #[derive(Read, Write, NumBytes, Copy, Clone)] pub struct CurrencyStats { supply: Asset, max_supply: Asset, issuer: AccountName, } #[cfg(feature = "contract")] impl TableRow for CurrencyStats { const TABLE_NAME: u64 = n!(stat); fn primary_key(&self) -> u64 { self.supply.symbol.name().into() } }
use eosio::*; #[eosio_action] fn create(issuer: AccountName, max_supply: Asset) { let receiver = AccountName::receiver(); require_auth(receiver); let symbol = max_supply.symbol; eosio_assert(max_supply.amount > 0, "max-supply must be positive"); let symbol_name = symbol.name(); let table = CurrencyStats::table(receiver, symbol_name); eosio_assert( !table.exists(symbol_name), "token with symbol already existss", ); let stats = CurrencyStats { supply: Asset { amount: 0, symbol }, max_supply, issuer, }; table.emplace(receiver, &stats).assert("write"); } #[eosio_action] fn issue(to: AccountName, quantity: Asset, memo: String) { let receiver = AccountName::receiver(); let symbol = quantity.symbol; eosio_assert(memo.len() <= 256, "memo has more than 256 bytes"); let table = CurrencyStats::table(receiver, symbol.name()); let cursor = table .find(symbol.name()) .assert("token with symbol does not exist, create token before issue"); let mut st = cursor.get().assert("read"); require_auth(st.issuer); eosio_assert(quantity.amount > 0, "must issue positive quantity"); eosio_assert( quantity.symbol == st.supply.symbol, "symbol precision mismatch", ); eosio_assert( quantity.amount <= st.max_supply.amount - st.supply.amount, "quantity exceeds available supply", ); st.supply += quantity; cursor.modify(None, &st).assert("write"); add_balance(st.issuer, quantity, st.issuer); if to != st.issuer { let action = TransferAction {
m_payer); let receiver = AccountName::receiver(); let accounts_table = Account::table(receiver, symbol.name()); let cursor = accounts_table.find(symbol.name()); if cursor.is_none() { let account = Account { balance: Asset { amount: 0, symbol }, }; accounts_table.emplace(ram_payer, &account).assert("write"); } } #[eosio_action] fn close(owner: AccountName, symbol: Symbol) { require_auth(owner); let receiver = AccountName::receiver(); let accounts_table = Account::table(receiver, symbol.name()); let cursor = accounts_table .find(symbol.name()) .assert("Balance row already deleted or never existed. Action won't have any effect."); let account = cursor.get().assert("read"); eosio_assert( account.balance.amount == 0, "Cannot close because the balance is not zero.", ); cursor.erase().assert("read"); } #[eosio_action] fn retire(quantity: Asset, memo: String) { eosio_assert(memo.len() <= 256, "memo has more than 256 bytes"); let receiver = AccountName::receiver(); let symbol = quantity.symbol; let stats_table = CurrencyStats::table(receiver, symbol.name()); let cursor = stats_table .find(symbol.name()) .assert("token with symbol does not exist"); let mut st = cursor.get().assert("error reading stats table"); require_auth(st.issuer); eosio_assert(quantity.amount > 0, "must retire positive quantity"); eosio_assert( quantity.symbol == st.supply.symbol, "symbol precision mismatch", ); st.supply -= quantity; cursor.modify(None, &st).assert("write"); } #[eosio_action] fn transfer(from: AccountName, to: AccountName, quantity: Asset, memo: String) { eosio_assert(from != to, "cannot transfer to self"); require_auth(from); to.is_account().assert("to account does not exist"); let receiver = AccountName::receiver(); let symbol_name = quantity.symbol.name(); let stats_table = CurrencyStats::table(receiver, symbol_name); let cursor = stats_table .find(symbol_name) .assert("token with symbol does not exist"); let st = cursor.get().assert("read"); require_recipient(from); 
require_recipient(to); eosio_assert(quantity.amount > 0, "must transfer positive quantity"); eosio_assert( quantity.symbol == st.supply.symbol, "symbol precision mismatch", ); eosio_assert(memo.len() <= 256, "memo has more than 256 bytes"); let payer = if to.has_auth() { to } else { from }; sub_balance(from, quantity); add_balance(to, quantity, payer); } eosio_abi!(create, issue, transfer, open, close, retire); #[cfg(feature = "contract")] fn sub_balance(owner: AccountName, value: Asset) { let receiver = AccountName::receiver(); let table = Account::table(receiver, owner); let cursor = table .find(value.symbol.name()) .assert("no balance object found"); let mut account = cursor.get().assert("read"); account.balance -= value; cursor.modify(Some(owner), &account).assert("write"); } #[cfg(feature = "contract")] fn add_balance(owner: AccountName, value: Asset, ram_payer: AccountName) { let receiver = AccountName::receiver(); let accounts_table = Account::table(receiver, owner); let cursor = accounts_table.find(value.symbol.name()); match cursor { Some(cursor) => { let mut account = cursor.get().assert("read"); account.balance += value; cursor.modify(Some(ram_payer), &account).assert("write"); } None => { let account = Account { balance: value }; accounts_table.emplace(ram_payer, &account).assert("write"); } } } #[derive(Read, Write, NumBytes, Copy, Clone)] pub struct Account { balance: Asset, } #[cfg(feature = "contract")] impl TableRow for Account { const TABLE_NAME: u64 = n!(accounts); fn primary_key(&self) -> u64 { self.balance.symbol.name().into() } } #[derive(Read, Write, NumBytes, Copy, Clone)] pub struct CurrencyStats { supply: Asset, max_supply: Asset, issuer: AccountName, } #[cfg(feature = "contract")] impl TableRow for CurrencyStats { const TABLE_NAME: u64 = n!(stat); fn primary_key(&self) -> u64 { self.supply.symbol.name().into() } }
from: st.issuer, to, quantity, memo, }; action .send_inline(vec![Authorization { actor: st.issuer, permission: n!(active).into(), }]) .assert("failed to send inline action"); } } #[eosio_action] fn open(owner: AccountName, symbol: Symbol, ram_payer: AccountName) { require_auth(ra
random
[ { "content": "fn titlecase(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),\n\n }\n\n}\n", "file_path": "crates/eosio_macros_impl/src/eosio_action.rs", "rank": 5, "...
Rust
src/lib.rs
Rufflewind/tokio-file-unix
3937ab35a53a34bb78e20e0e9f3483043fb5b231
use std::cell::RefCell; use std::os::unix::io::{AsRawFd, FromRawFd, RawFd}; use std::{fs, io}; use tokio::io::PollEvented; unsafe fn dupe_file_from_fd(old_fd: RawFd) -> io::Result<fs::File> { let fd = libc::fcntl(old_fd, libc::F_DUPFD_CLOEXEC, 0); if fd < 0 { return Err(io::Error::last_os_error()); } Ok(fs::File::from_raw_fd(fd)) } pub fn raw_stdin() -> io::Result<fs::File> { unsafe { dupe_file_from_fd(libc::STDIN_FILENO) } } pub fn raw_stdout() -> io::Result<fs::File> { unsafe { dupe_file_from_fd(libc::STDOUT_FILENO) } } pub fn raw_stderr() -> io::Result<fs::File> { unsafe { dupe_file_from_fd(libc::STDERR_FILENO) } } pub fn get_nonblocking<F: AsRawFd>(file: &F) -> io::Result<bool> { unsafe { let flags = libc::fcntl(file.as_raw_fd(), libc::F_GETFL); if flags < 0 { return Err(io::Error::last_os_error()); } Ok(flags & libc::O_NONBLOCK != 0) } } pub fn set_nonblocking<F: AsRawFd>(file: &mut F, nonblocking: bool) -> io::Result<()> { unsafe { let fd = file.as_raw_fd(); let previous = libc::fcntl(fd, libc::F_GETFL); if previous < 0 { return Err(io::Error::last_os_error()); } let new = if nonblocking { previous | libc::O_NONBLOCK } else { previous & !libc::O_NONBLOCK }; if libc::fcntl(fd, libc::F_SETFL, new) < 0 { return Err(io::Error::last_os_error()); } Ok(()) } } #[derive(Debug)] pub struct File<F> { file: F, evented: RefCell<Option<mio::Registration>>, } impl<F: AsRawFd> File<F> { pub fn new_nb(mut file: F) -> io::Result<PollEvented<Self>> { set_nonblocking(&mut file, true)?; File::raw_new(file) } pub fn raw_new(file: F) -> io::Result<PollEvented<Self>> { PollEvented::new(File { file: file, evented: Default::default(), }) } } impl<F: AsRawFd> AsRawFd for File<F> { fn as_raw_fd(&self) -> RawFd { self.file.as_raw_fd() } } impl<F: AsRawFd> mio::Evented for File<F> { fn register( &self, poll: &mio::Poll, token: mio::Token, interest: mio::Ready, opts: mio::PollOpt, ) -> io::Result<()> { match mio::unix::EventedFd(&self.as_raw_fd()).register(poll, token, interest, opts) { 
Err(ref e) if e.raw_os_error() == Some(libc::EPERM) => { set_nonblocking(&mut self.as_raw_fd(), false)?; let (r, s) = mio::Registration::new2(); r.register(poll, token, interest, opts)?; s.set_readiness(mio::Ready::readable() | mio::Ready::writable())?; *self.evented.borrow_mut() = Some(r); Ok(()) } e => e, } } fn reregister( &self, poll: &mio::Poll, token: mio::Token, interest: mio::Ready, opts: mio::PollOpt, ) -> io::Result<()> { match *self.evented.borrow() { None => mio::unix::EventedFd(&self.as_raw_fd()).reregister(poll, token, interest, opts), Some(ref r) => r.reregister(poll, token, interest, opts), } } fn deregister(&self, poll: &mio::Poll) -> io::Result<()> { match *self.evented.borrow() { None => mio::unix::EventedFd(&self.as_raw_fd()).deregister(poll), Some(ref r) => mio::Evented::deregister(r, poll), } } } impl<F: io::Read> io::Read for File<F> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.file.read(buf) } } impl<F: io::Write> io::Write for File<F> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.file.write(buf) } fn flush(&mut self) -> io::Result<()> { self.file.flush() } } impl<F: io::Seek> io::Seek for File<F> { fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> { self.file.seek(pos) } } #[cfg(test)] mod tests { use super::*; use std::os::unix::net::UnixStream; #[test] fn test_nonblocking() -> io::Result<()> { let (sock, _) = UnixStream::pair()?; let mut fd = sock.as_raw_fd(); set_nonblocking(&mut fd, false)?; assert!(!get_nonblocking(&fd)?); set_nonblocking(&mut fd, true)?; assert!(get_nonblocking(&fd)?); set_nonblocking(&mut fd, false)?; assert!(!get_nonblocking(&fd)?); Ok(()) } }
use std::cell::RefCell; use std::os::unix::io::{AsRawFd, FromRawFd, RawFd}; use std::{fs, io}; use tokio::io::PollEvented; unsafe fn dupe_file_from_fd(old_fd: RawFd) -> io::Result<fs::File> { let fd = libc::fcntl(old_fd, libc::F_DUPFD_CLOEXEC, 0); if fd < 0 { return Err(io::Error::last_os_error()); } Ok(fs::File::from_raw_fd(fd)) } pub fn raw_stdin() -> io::Result<fs::File> { unsafe { dupe_file_from_fd(libc::STDIN_FILENO) } } pub fn raw_stdout() -> io::Result<fs::File> { unsafe { dupe_file_from_fd(libc::STDOUT_FILENO) } } pub fn raw_stderr() -> io::Result<fs::File> { unsafe { dupe_file_from_fd(libc::STDERR_FILENO) } } pub fn get_nonblocking<F: AsRawFd>(file: &F) -> io::Result<bool> { unsafe { let flags = libc::fcntl(file.as_raw_fd(), libc::F_GETFL); if flags < 0 { return Err(io::Error::last_os_error()); } Ok(flags & libc::O_NONBLOCK != 0) } } pub fn set_nonblocking<F: AsRawFd>(file: &mut F, nonblocking: bool) -> io::Result<()> { unsafe { let fd = file.as_raw_fd(); let previous = libc::fcntl(fd, libc::F_GETFL); if previous < 0 { return Err(io::Error::last_os_error()); } let new = if nonblocking { previous | libc::O_NONBLOCK } else { previous & !libc::O_NONBLOCK }; if libc::fcntl(fd, libc::F_SETFL, new) < 0 { return Err(io::Error::last_os_error()); } Ok(()) } } #[derive(Debug)] pub struct File<F> { file: F, evented: RefCell<Option<mio::Registration>>, } impl<F: AsRawFd> File<F> { pub fn new_nb(mut file: F) -> io::Result<PollEvented<Self>> { set_nonblocking(&mut file, true)?; File::raw_new(file) } pub fn raw_new(file: F) -> io::Result<PollEvented<Self>> { PollEvented::new(File { file: file, evented: Default::default(), }) } } impl<F: AsRawFd> AsRawFd for File<F> { fn as_raw_fd(&self) -> RawFd { self.file.as_raw_fd() } } impl<F: AsRawFd> mio::Evented for File<F> { fn register( &self, poll: &mio::Poll, token: mio::Token, interest: mio::Ready, opts: mio::PollOpt, ) -> io::Result<()> { match mio::unix::EventedFd(&self.as_raw_fd()).register(poll, token, interest, opts) { 
Err(ref e) if e.raw_os_error() == Some(libc::EPERM) => { set_nonblocking(&mut self.as_raw_fd(), false)?; let (r, s) = mio::Registration::new2(); r.register(poll, token, interest, opts)?; s.set_readiness(mio::Ready::readable() | mio::Ready::writable())?; *self.evented.borrow_mut() = Some(r); Ok(()) } e => e, } } fn reregister( &self, poll: &mio::Poll, token: mio::Token, interest: mio::Ready, opts: mio::PollOpt, ) -> io::Result<()> { match *self.evented.borrow() { None => mio::unix::EventedFd(&self.as_raw_fd()).reregister(poll, token, interest, opts), Some(ref r) => r.reregister(poll, token, interest, opts), } }
} impl<F: io::Read> io::Read for File<F> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.file.read(buf) } } impl<F: io::Write> io::Write for File<F> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.file.write(buf) } fn flush(&mut self) -> io::Result<()> { self.file.flush() } } impl<F: io::Seek> io::Seek for File<F> { fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> { self.file.seek(pos) } } #[cfg(test)] mod tests { use super::*; use std::os::unix::net::UnixStream; #[test] fn test_nonblocking() -> io::Result<()> { let (sock, _) = UnixStream::pair()?; let mut fd = sock.as_raw_fd(); set_nonblocking(&mut fd, false)?; assert!(!get_nonblocking(&fd)?); set_nonblocking(&mut fd, true)?; assert!(get_nonblocking(&fd)?); set_nonblocking(&mut fd, false)?; assert!(!get_nonblocking(&fd)?); Ok(()) } }
fn deregister(&self, poll: &mio::Poll) -> io::Result<()> { match *self.evented.borrow() { None => mio::unix::EventedFd(&self.as_raw_fd()).deregister(poll), Some(ref r) => mio::Evented::deregister(r, poll), } }
function_block-full_function
[ { "content": "fn stringify_error<E: error::Error>(e: E) -> io::Error {\n\n io::Error::new(io::ErrorKind::Other, e.to_string())\n\n}\n\n\n\n#[get(\"/{something}\")]\n\nasync fn index(info: web::Path<String>) -> impl Responder {\n\n format!(\"Hello Got this: {}\", info)\n\n}\n\n\n\n#[actix_rt::main]\n\nasyn...
Rust
wasm/src/client.rs
warycat/leetcode_rs
3cb8a1aa569d372443675fab9a63043291316b51
use crate::desktop::Desktop; use crate::media::MediaClient; use crate::pc::PeerConnection; use crate::searchbar::*; use crate::utils::*; use js_sys::JsString; use js_sys::Reflect; use log::info; use rustgym_msg::*; use std::cell::RefCell; use std::collections::HashMap; use uuid::Uuid; use wasm_bindgen::prelude::*; use wasm_bindgen::*; use wasm_bindgen_futures::*; use web_sys::{ MediaStream, MediaStreamTrack, MessageEvent, RtcIceCandidate, RtcIceCandidateInit, RtcSdpType, RtcSessionDescriptionInit, WebSocket, }; #[derive(Debug, Clone)] pub struct Client { client_info: Option<ClientInfo>, ws: WebSocket, _desktop: Desktop, searchbar: Option<SearchBar>, media_client: Option<MediaClient>, media_stream: Option<MediaStream>, pcs: HashMap<Uuid, PeerConnection>, } impl Client { pub fn new() -> Self { let client_info = None; let media_client = None; let searchbar = None; let media_stream = None; let pcs = HashMap::new(); let _desktop = Desktop::new(start_menu(), start_button()); let ws = WebSocket::new(&wsurl()).expect("WebSocket"); let onmessage_cb = Closure::wrap(Box::new(move |e: MessageEvent| { let js_json: JsString = e.data().dyn_into().unwrap(); let rust_json: String = js_json.into(); match serde_json::from_str::<MsgOut>(&rust_json) { Ok(msg) => { process(msg); } Err(err) => { info!("{}", err); } } }) as Box<dyn FnMut(_)>); ws.set_onmessage(Some(onmessage_cb.as_ref().unchecked_ref())); onmessage_cb.forget(); Client { ws, client_info, media_client, media_stream, _desktop, searchbar, pcs, } } fn on_stream_start(&self, client_uuid: Uuid) -> Result<(), JsValue> { self.send_json(MsgIn::StreamStart { client_uuid }) } fn on_offer(&self, caller: Uuid, callee: Uuid, offer_sdp: String) -> Result<(), JsValue> { self.send_json(MsgIn::Offer { caller, callee, offer_sdp, }) } fn on_answer(&self, caller: Uuid, callee: Uuid, answer_sdp: String) -> Result<(), JsValue> { self.send_json(MsgIn::Answer { caller, callee, answer_sdp, }) } pub fn on_ice_candidate( &self, local: Uuid, remote: 
Uuid, candidate: String, sdp_mid: String, sdp_m_line_index: u16, ) -> Result<(), JsValue> { self.send_json(MsgIn::IceCandidate { local, remote, candidate, sdp_mid, sdp_m_line_index, }) } pub fn on_search_text_change(&self, search_text: String) -> Result<(), JsValue> { self.send_json(MsgIn::SearchText(search_text)) } pub fn on_query_text(&self, query_text: String) -> Result<(), JsValue> { let searchbar = self.searchbar.as_ref().expect("searchbar"); searchbar.update_search_input(&query_text); searchbar.close_search_suggestions()?; self.send_json(MsgIn::QueryText(query_text))?; Ok(()) } fn send_json(&self, msg: MsgIn) -> Result<(), JsValue> { let str = serde_json::to_string(&msg).expect("to_string"); self.ws.send_with_str(&str) } } pub fn process(message: MsgOut) { spawn_local(async move { use MsgOut::*; let res: Result<(), JsValue> = match message { Ping => Ok(()), Pong => Ok(()), SearchSuggestions(suggestions) => render_search_suggestions(suggestions), QueryResults(results) => render_query_results(results), RegistorClient(client_info) => process_register_client(client_info).await, UnRegistorClient(_) => Ok(()), StreamStart { client_uuid } => start_call(client_uuid).await, Offer { caller, callee, offer_sdp, } => process_offer(caller, callee, offer_sdp).await, Answer { caller, callee, answer_sdp, } => process_answer(caller, callee, answer_sdp).await, IceCandidate { local, remote, candidate, sdp_mid, sdp_m_line_index, } => process_ice_candidate(local, remote, candidate, sdp_mid, sdp_m_line_index).await, SessionClients(_) => Ok(()), AllClients(_) => Ok(()), }; match res { Ok(_) => {} Err(err) => { info!("{:?}", err); } } }); } async fn process_ice_candidate( _local: Uuid, remote: Uuid, candidate: String, sdp_mid: String, sdp_m_line_index: u16, ) -> Result<(), JsValue> { if let Some(pc) = get_client().pcs.get(&remote) { info!("ice {} {}", remote, candidate); let mut candidate_init = RtcIceCandidateInit::new(&candidate); candidate_init.sdp_mid(Some(&sdp_mid)); 
candidate_init.sdp_m_line_index(Some(sdp_m_line_index)); let candidate_obj = RtcIceCandidate::new(&candidate_init)?; let promise = pc.add_ice_candidate_with_opt_rtc_ice_candidate(Some(&candidate_obj)); JsFuture::from(promise).await?; } Ok(()) } async fn process_offer(caller: Uuid, callee: Uuid, offer_sdp: String) -> Result<(), JsValue> { info!("process_offer"); let media_stream = get_client().media_stream.expect("media_stream"); let (pc, answer_sdp) = create_answer(caller, callee, &media_stream, offer_sdp).await?; set_peerconnection(caller, pc); get_client().on_answer(caller, callee, answer_sdp)?; Ok(()) } async fn process_answer(_caller: Uuid, callee: Uuid, answer_sdp: String) -> Result<(), JsValue> { info!("process_answer"); let mut answer_obj = RtcSessionDescriptionInit::new(RtcSdpType::Answer); answer_obj.sdp(&answer_sdp); let srd_promise = get_client() .pcs .get(&callee) .expect("pc") .set_remote_description(&answer_obj); JsFuture::from(srd_promise).await?; Ok(()) } async fn process_register_client(client_info: ClientInfo) -> Result<(), JsValue> { if get_client().client_info.is_none() { info!("set local {}", client_info.client_uuid); info!( "{:?}", client_info.user_agent.as_ref().expect("useragent").family ); let client_uuid = client_info.client_uuid; if client_info.is_media_supported() { let media_stream = get_media_stream().await?; let media_client = get_client().media_client.expect("media_client"); media_client.init_local_video(&media_stream)?; set_media_stream(media_stream); get_client().on_stream_start(client_uuid)?; } set_client_info(client_info); } Ok(()) } async fn start_call(client_uuid: Uuid) -> Result<(), JsValue> { let local_client_info = get_client().client_info.expect("local_client_info"); let caller = local_client_info.client_uuid; let callee = client_uuid; if callee != caller { info!("start_call"); let (pc, offer_sdp) = create_offer( caller, callee, &get_client().media_stream.expect("media_stream"), ) .await?; set_peerconnection(callee, pc); 
get_client().on_offer(caller, callee, offer_sdp) } else { Ok(()) } } async fn create_offer( caller: Uuid, callee: Uuid, media_stream: &MediaStream, ) -> Result<(PeerConnection, String), JsValue> { let local_client_info = get_client().client_info.expect("local_client_info"); let pc = PeerConnection::new(caller, callee, local_client_info.ice_servers)?; let tracks = media_stream.get_tracks().to_vec(); for item in tracks { let media_stream_track: MediaStreamTrack = item.dyn_into().unwrap(); pc.add_track_0(&media_stream_track, media_stream); } let offer = JsFuture::from(pc.create_offer()).await?; let offer_sdp = Reflect::get(&offer, &JsValue::from_str("sdp"))? .as_string() .unwrap(); let mut offer_obj = RtcSessionDescriptionInit::new(RtcSdpType::Offer); offer_obj.sdp(&offer_sdp); let sld_promise = pc.set_local_description(&offer_obj); JsFuture::from(sld_promise).await?; Ok((pc, offer_sdp)) } async fn create_answer( caller: Uuid, callee: Uuid, media_stream: &MediaStream, offer_sdp: String, ) -> Result<(PeerConnection, String), JsValue> { info!("create_answer"); let local_client_info = get_client().client_info.expect("local_client_info"); let pc = PeerConnection::new(callee, caller, local_client_info.ice_servers)?; let mut offer_obj = RtcSessionDescriptionInit::new(RtcSdpType::Offer); offer_obj.sdp(&offer_sdp); let srd_promise = pc.set_remote_description(&offer_obj); JsFuture::from(srd_promise).await?; let tracks = media_stream.get_tracks().to_vec(); for item in tracks { let media_stream_track: MediaStreamTrack = item.dyn_into().unwrap(); pc.add_track_0(&media_stream_track, media_stream); } let answer = JsFuture::from(pc.create_answer()).await?; let mut answer_obj = RtcSessionDescriptionInit::new(RtcSdpType::Answer); let answer_sdp = Reflect::get(&answer, &JsValue::from_str("sdp"))? 
.as_string() .unwrap(); answer_obj.sdp(&answer_sdp); let sld_promise = pc.set_local_description(&answer_obj); JsFuture::from(sld_promise).await?; Ok((pc, answer_sdp)) } fn render_search_suggestions(search_suggestions: Vec<String>) -> Result<(), JsValue> { info!("{:?}", search_suggestions); get_client() .searchbar .expect("searchbar") .update_search_suggestions(search_suggestions) } fn render_query_results(query_results: Vec<QueryResult>) -> Result<(), JsValue> { info!("{:?}", query_results); get_client() .searchbar .expect("searchbar") .update_query_results(query_results) } thread_local! { pub static CLIENT: RefCell<Client> = RefCell::new(Client::new()); } pub fn get_client() -> Client { CLIENT.with(|client| client.borrow_mut().clone()) } pub fn set_client_info(client_info: ClientInfo) { CLIENT.with(|client| client.borrow_mut().client_info = Some(client_info)); } pub fn set_searchbar(searchbar: SearchBar) { CLIENT.with(|client| client.borrow_mut().searchbar = Some(searchbar)); } pub fn set_media_client(media_client: MediaClient) { CLIENT.with(|client| client.borrow_mut().media_client = Some(media_client)); } pub fn add_remote_video(remote: Uuid) { CLIENT.with(|client| { client .borrow_mut() .media_client .as_mut() .expect("media_client") .add_remote_video(remote) .expect("add_remote_video"); }); } pub fn remove_remote_video(remote: Uuid) { CLIENT.with(|client| { client .borrow_mut() .media_client .as_mut() .expect("media_client") .remove_remote_video(remote) .expect("remove_remote_video"); }); } pub fn add_remote_track(remote: Uuid, track: MediaStreamTrack) { CLIENT.with(|client| match track.kind().as_ref() { "video" => { client .borrow_mut() .media_client .as_mut() .expect("media_client") .add_remote_video_track(remote, track) .expect("add_remote_video_track"); } "audio" => { client .borrow_mut() .media_client .as_mut() .expect("media_client") .add_remote_audio_track(remote, track) .expect("add_remote_audio_track"); } _ => { info!("{:?} {}", track, track.kind()); 
} }); } fn set_media_stream(media_stream: MediaStream) { CLIENT.with(|client| client.borrow_mut().media_stream = Some(media_stream)); } fn set_peerconnection(remote_uuid: Uuid, pc: PeerConnection) { CLIENT.with(|client| client.borrow_mut().pcs.insert(remote_uuid, pc)); }
use crate::desktop::Desktop; use crate::media::MediaClient; use crate::pc::PeerConnection; use crate::searchbar::*; use crate::utils::*; use js_sys::JsString; use js_sys::Reflect; use log::info; use rustgym_msg::*; use std::cell::RefCell; use std::collections::HashMap; use uuid::Uuid; use wasm_bindgen::prelude::*; use wasm_bindgen::*; use wasm_bindgen_futures::*; use web_sys::{ MediaStream, MediaStreamTrack, MessageEvent, RtcIceCandidate, RtcIceCandidateInit, RtcSdpType, RtcSessionDescriptionInit, WebSocket, }; #[derive(Debug, Clone)] pub struct Client { client_info: Option<ClientInfo>, ws: WebSocket, _desktop: Desktop, searchbar: Option<SearchBar>, media_client: Option<MediaClient>, media_stream: Option<MediaStream>, pcs: HashMap<Uuid, PeerConnection>, } impl Client { pub fn new() -> Self { let client_info = None; let media_client = None; let searchbar = None; let media_stream = None; let pcs = HashMap::new(); let _desktop = Desktop::new(start_menu(), start_button()); let ws = WebSocket::new(&wsurl()).expect("WebSocket"); let onmessage_cb = Closure::wrap(Box::new(move |e: MessageEvent| { let js_json: JsString = e.data().dyn_into().unwrap(); let rust_json: String = js_json.into(); match serde_json::from_str::<MsgOut>(&rust_json) { Ok(msg) => { process(msg); } Err(err) => { info!("{}", err); } } }) as Box<dyn FnMut(_)>); ws.set_onmessage(Some(onmessage_cb.as_ref().unchecked_ref())); onmessage_cb.forget(); Client { ws, client_info, media_client, media_stream, _desktop, searchbar, pcs, } } fn on_stream_start(&self, client_uuid: Uuid) -> Result<(), JsValue> { self.send_json(MsgIn::StreamStart { client_uuid }) } fn on_offer(&self, caller: Uuid, callee: Uuid, offer_sdp: String) -> Result<(), JsValue> { self.send_json(MsgIn::Offer { caller, callee, offer_sdp, }) } fn on_answer(&self, caller: Uuid, callee: Uuid, answer_sdp: String) -> Result<(), JsValue> { self.send_json(MsgIn::Answer { caller, callee, answer_sdp, }) } pub fn on_ice_candidate( &self, local: Uuid, remote: 
Uuid, candidate: String, sdp_mid: String, sdp_m_line_index: u16, ) -> Result<(), JsValue> { self.send_json(MsgIn::IceCandidate { local, remote, candidate, sdp_mid, sdp_m_line_index, }) } pub fn on_search_text_change(&self, search_text: String) -> Result<(), JsValue> { self.send_json(MsgIn::SearchText(search_text)) } pub fn on_query_text(&self, query_text: String) -> Result<(), JsValue> { let searchbar = self.searchbar.as_ref().expect("searchbar"); searchbar.update_search_input(&query_text); searchbar.close_search_suggestions()?; self.send_json(MsgIn::QueryText(query_text))?; Ok(()) } fn send_json(&self, msg: MsgIn) -> Result<(), JsValue> { let str = serde_json::to_string(&msg).expect("to_string"); self.ws.send_with_str(&str) } } pub fn process(message: MsgOut) { spawn_local(async move { use MsgOut::*; let res: Result<(), JsValue> = match message { Ping => Ok(()), Pong => Ok(()), SearchSuggestions(suggestions) => render_search_suggestions(suggestions), QueryResults(results) => render_query_results(results), RegistorClient(client_info) => process_register_client(client_info).await, UnRegistorClient(_) => Ok(()), StreamStart { client_uuid } => start_call(client_uuid).await, Offer { caller, callee, offer_sdp, } => process_offer(caller, callee, offer_sdp).await, Answer { caller, callee, answer_sdp, } => process_answer(caller, callee, answer_sdp).await, IceCandidate { local, remote, candidate, sdp_mid, sdp_m_line_index, } => process_ice_candidate(local, remote, candidate, sdp_mid, sdp_m_line_index).await, SessionClients(_) => Ok(()), AllClients(_) => Ok(()), }; match res { Ok(_) => {} Err(err) => { info!("{:?}", err); } } }); } async fn process_ice_candidate( _local: Uuid, remote: Uuid, candidate: String, sdp_mid: String, sdp_m_line_index: u16, ) -> Result<(), JsValue> { if let Some(pc) = get_client().pcs.get(&remote) { info!("ice {} {}", remote, candidate); let mut candidate_init = RtcIceCandidateInit::new(&candidate); candidate_init.sdp_mid(Some(&sdp_mid)); 
candidate_init.sdp_m_line_index(Some(sdp_m_line_index)); let candidate_obj = RtcIceCandidate::new(&candidate_init)?; let promise = pc.add_ice_candidate_with_opt_rtc_ice_candidate(Some(&candidate_obj)); JsFuture::from(promise).await?; } Ok(()) } async fn process_offer(caller: Uuid, callee: Uuid, offer_sdp: String) -> Result<(), JsValue> { info!("process_offer"); let media_stream = get_client().media_stream.expect("media_stream"); let (pc, answer_sdp) = create_answer(caller, callee, &media_stream, offer_sdp).await?; set_peerconnection(caller, pc); get_client().on_answer(caller, callee, answer_sdp)?; Ok(()) } async fn process_answer(_caller: Uuid, callee: Uuid, answer_sdp: String) -> Result<(), JsValue> { info!("process_answer"); let mut answer_obj = RtcSessionDescriptionInit::new(RtcSdpType::Answer); answer_obj.sdp(&answer_sdp); let srd_promise = get_client() .pcs .get(&callee) .expect("pc") .set_remote_description(&answer_obj); JsFuture::from(srd_promise).await?; Ok(()) } async fn process_register_client(client_info: ClientInfo) -> Result<(), JsValue> { if get_client().client_info.is_none() { info!("set local {}", client_info.client_uuid); info!( "{:?}", client_info.user_agent.as_ref().expect("useragent").family ); let client_uuid = client_info.client_uuid; if client_info.is_media_supported() { let media_stream = get_media_stream().await?; let media_client = get_client().media_client.expect("media_client"); media_client.init_local_video(&media_stream)?; set_media_stream(media_stream); get_client().on_stream_start(client_uuid)?; } set_client_info(client_info); } Ok(()) } async fn start_call(client_uuid: Uuid) -> Result<(), JsValue> { let local_client_info = get_client().client_info.expect("local_client_info"); let caller = local_client_info.client_uuid; let callee = client_uuid; if callee != caller { info!("start_call"); let (pc, offer_sdp) = create_offer( caller, callee, &get_client().media_stream.expect("media_stream"), ) .await?; set_peerconnection(callee, pc); 
get_client().on_offer(caller, callee, offer_sdp) } else { Ok(()) } } async fn create_offer( caller: Uuid, callee: Uuid, media_stream: &MediaStream, ) -> Result<(PeerConnection, String), JsValue> { let local_client_info = get_client().client_info.expect("local_client_info"); let pc = PeerConnection::new(caller, callee, local_client_info.ice_servers)?; let tracks = media_stream.get_tracks().to_vec(); for item in tracks { let media_stream_track: MediaStreamTrack = item.dyn_into().unwrap(); pc.add_track_0(&media_stream_track, media_stream); } let offer = JsFuture::from(pc.create_offer()).await?; let offer_sdp = Reflect::get(&offer, &JsValue::from_str("sdp"))? .as_string() .unwrap(); let mut offer_obj = RtcSessionDescriptionInit::new(RtcSdpType::Offer); offer_obj.sdp(&offer_sdp); let sld_promise = pc.set_local_description(&offer_obj); JsFuture::from(sld_promise).await?; Ok((pc, offer_sdp)) } async fn create_answer( caller: Uuid, callee: Uuid, media_stream: &MediaStream, offer_sdp: String, ) -> Result<(PeerConnection, String), JsValue> { info!("create_answer"); let local_client_info = get_client().client_info.expect("local_client_info"); let pc = PeerConnection::new(callee, caller, local_client_info.ice_servers)?; let mut offer_obj = RtcSessionDescriptionInit::new(RtcSdpType::Offer); offer_obj.sdp(&offer_sdp); let srd_promise = pc.set_remote_description(&offer_obj); JsFuture::from(srd_promise).await?; let tracks = media_stream.get_tracks().to_vec(); for item in tracks { let media_stream_track: MediaStreamTrack = item.dyn_into().unwrap(); pc.add_track_0(&media_stream_track, media_stream); } let answer = JsFuture::from(pc.create_answer()).await?; let mut answer_obj = RtcSessionDescriptionInit::new(RtcSdpType::Answer); let answer_sdp = Reflect::get(&answer, &JsValue::from_str("sdp"))? 
.as_string() .unwrap(); answer_obj.sdp(&answer_sdp); let sld_promise = pc.set_local_description(&answer_obj); JsFuture::from(sld_promise).await?; Ok((pc, answer_sdp)) } fn render_search_suggestions(search_suggestions: Vec<String>) -> Result<(), JsValue> { info!("{:?}", search_suggestions); get_client() .searchbar .expect("searchbar") .update_search_suggestions(search_suggestions) } fn render_query_results(query_results: Vec<QueryResult>) -> Result<(), JsValue> { info!("{:?}", query_results); get_client() .searchbar .expect("searchbar") .update_query_results(query_results) } thread_local! { pub static CLIENT: RefCell<Client> = RefCell::new(Client::new()); } pub fn get_client() -> Client { CLIENT.with(|client| client.borrow_mut().clone()) } pub fn set_client_info(client_info: ClientInfo) { CLIENT.with(|client| client.borrow_mut().client_info = Some(client_info)); } pub fn set_searchbar(searchbar: SearchBar) { CLIENT.with(|client| client.borrow_mut().searchbar = Some(searchbar)); } pub fn set_media_client(media_client: MediaClient) { CLIENT.with(|client| client.borrow_mut().media_client = Some(media_client)); } pub fn add_remote_video(remote: Uuid) { CLIENT.with(|client| { client .borrow_mut() .media_client .as_mut() .expect("media_client") .add_remote_video(remote) .expect("add_remote_video"); }); } pub fn remove_remote_video(remote: Uuid) { CLIENT.with(|client| { client .borrow_mut() .media_client .as_mut() .expect("media_client") .remove_remote_video(remote) .expect("remove_remote_video"); }); } pub fn add_remote_track(remote: Uuid, track: MediaStreamTrack) {
fn set_media_stream(media_stream: MediaStream) { CLIENT.with(|client| client.borrow_mut().media_stream = Some(media_stream)); } fn set_peerconnection(remote_uuid: Uuid, pc: PeerConnection) { CLIENT.with(|client| client.borrow_mut().pcs.insert(remote_uuid, pc)); }
CLIENT.with(|client| match track.kind().as_ref() { "video" => { client .borrow_mut() .media_client .as_mut() .expect("media_client") .add_remote_video_track(remote, track) .expect("add_remote_video_track"); } "audio" => { client .borrow_mut() .media_client .as_mut() .expect("media_client") .add_remote_audio_track(remote, track) .expect("add_remote_audio_track"); } _ => { info!("{:?} {}", track, track.kind()); } }); }
function_block-function_prefix_line
[ { "content": "pub fn nsstring_from_str(string: &str) -> NSString {\n\n const UTF8_ENCODING: usize = 4;\n\n\n\n let cls = class!(NSString);\n\n let bytes = string.as_ptr() as *const c_void;\n\n unsafe {\n\n let obj: *mut objc::runtime::Object = msg_send![cls, alloc];\n\n let obj: *mut o...
Rust
src/logging.rs
Cerber-Ursi/batch_run
61a24e0ccc1fdd3f1e67ebb5b12735ba007a0747
use termcolor::{ Color::{self, *}, WriteColor, }; use termcolor_output::colored; use crate::entry::{Entry, Expected}; use crate::normalize; use std::io; use std::path::Path; pub(crate) fn no_entries(log: &mut impl WriteColor) -> io::Result<()> { colored!( log, "{}{}No entries were provided to runner. Maybe the files are not created yet, or the glob path is wrong.\n{}", reset!(), fg!(Some(Yellow)), reset!() )?; Ok(()) } pub(crate) fn ok(log: &mut impl WriteColor) -> io::Result<()> { colored!(log, "{}ok{}\n", fg!(Some(Green)), reset!()) } pub(crate) fn log_entry_start(entry: &Entry, log: &mut impl WriteColor) -> io::Result<()> { let display_name = entry .path() .file_name() .unwrap_or_else(|| entry.path().as_os_str()) .to_string_lossy(); let expected = match entry.expected() { Expected::RunMatch => " [should run and generate output]", Expected::CompileFail => " [should fail to compile]", }; write_entry_header(log, &display_name, expected) } pub(crate) fn log_entry_fail_to_start(entry: &Entry, buf: &mut impl WriteColor) -> io::Result<()> { write_entry_header(buf, &entry.path().as_os_str().to_string_lossy(), "") } fn write_entry_header(buf: &mut impl WriteColor, name: &str, expected: &str) -> io::Result<()> { colored!( buf, "{}batch entry {}{}{}{} ... 
", reset!(), bold!(true), name, bold!(false), expected ) } pub(crate) fn log_wip_write( buf: &mut impl WriteColor, wip_path: &Path, path: &Path, string: &str, ) -> io::Result<()> { let wip_path = wip_path.to_string_lossy(); let path = path.to_string_lossy(); colored!( buf, "{}{}wip\n\nNOTE{}: writing the following output to `{}`.\nMove this file to {} to accept it as correct.\n", reset!(), fg!(Some(Yellow)), reset!(), wip_path, path, )?; snippet(buf, Yellow, string) } pub(crate) fn log_overwrite( buf: &mut impl WriteColor, path: &Path, string: &str, ) -> io::Result<()> { let path = path.to_string_lossy(); colored!( buf, "{}{}wip\n\nNOTE{}: writing the following output to {}.", reset!(), fg!(Some(Yellow)), reset!(), path )?; snippet(buf, Yellow, string) } pub(crate) fn mismatch(log: &mut impl WriteColor, expected: &str, actual: &str) -> io::Result<()> { colored!( log, "{}{}mismatch{}\n\n", bold!(true), fg!(Some(Red)), reset!() )?; log_snapshot(log, Blue, "EXPECTED", expected.as_bytes())?; log_snapshot(log, Red, "ACTUAL", actual.as_bytes())?; Ok(()) } pub(crate) fn build_status_mismatch(log: &mut impl WriteColor) -> io::Result<()> { colored!( log, "{}{}{}error: {}", reset!(), bold!(true), fg!(Some(Red)), bold!(false) ) } pub(crate) fn unexpected_build_success(log: &mut impl WriteColor) -> io::Result<()> { build_status_mismatch(log)?; colored!( log, "Expected test case to fail to compile, but it succeeded.{}\n", reset!() ) } pub(crate) fn unexpected_build_error(log: &mut impl WriteColor, error: &[u8]) -> io::Result<()> { build_status_mismatch(log)?; colored!(log, "Entry failed to build; compiler output:{}\n", reset!())?; snippet(log, Red, &normalize::trim(error)) } pub(crate) fn log_snapshot( log: &mut impl WriteColor, color: Color, header: &str, snapshot: &[u8], ) -> io::Result<()> { if !snapshot.is_empty() { colored!(log, "{}{}{}:", bold!(true), fg!(Some(color)), header)?; snippet(log, color, &normalize::trim(snapshot))?; } Ok(()) } fn snippet(log: &mut impl 
WriteColor, color: Color, content: &str) -> io::Result<()> { let dotted_line = "┈".repeat(60); colored!(log, "\n{}{}{}\n", reset!(), fg!(Some(color)), dotted_line)?; for line in content.lines() { colored!(log, "{}{}\n", fg!(Some(color)), line)?; } colored!(log, "{}{}{}\n", fg!(Some(color)), dotted_line, reset!()) }
use termcolor::{ Color::{self, *}, WriteColor, }; use termcolor_output::colored; use crate::entry::{Entry, Expected}; use crate::normalize; use std::io; use std::path::Path; pub(crate) fn no_entries(log: &mut impl WriteColor) -> io::Result<()> { colored!( log, "{}{}No entries were provided to runner. Maybe the files are not created yet, or the glob path is wrong.\n{}", reset!(), fg!(Some(Yellow)), reset!() )?; Ok(()) } pub(crate) fn ok(log: &mut impl WriteColor) -> io::Result<()> { colored!(log, "{}ok{}\n", fg!(Some(Green)), reset!()) } pub(crate) fn log_entry_start(entry: &Entry, log: &mut impl WriteColor) -> io::Result<()> { let display_name = entry .path() .file_name() .unwrap_or_else(|| entry.path().as_os_str()) .to_string_lossy(); let expected = match entry.expected() { Expected::RunMatch => " [should run and generate output]", Expected::CompileFail => " [should fail to compile]", }; write_entry_header(log, &display_name, expected) } pub(crate) fn log_entry_fail_to_start(entry: &Entry, buf: &mut impl WriteColor) -> io::Result<()> { write_entry_header(buf, &entry.path().as_os_str().to_string_lossy(), "") } fn write_entry_header(buf: &mut impl WriteColor, name: &str, expected: &str) -> io::Result<()> { colored!( buf, "{}batch entry {}{}{}{} ... ", reset!(), bold!(true), name, bold!(false), expected ) } pub(crate) fn log_wip_write( buf: &mut impl WriteColor, wip_path: &Path, path: &Path, string: &str, ) -> io::Result<()> { let wip_path = wip_path.to_string_lossy(); let path = path.to_string_lossy(); colored!( buf, "{}{}wip\n\nNOTE{}: writing the following output to `{}`.\nMove this file to {} to accept it as correct.\n", reset!(), fg!(Some(Yellow)), reset!(), wip_path, path, )?; snippet(buf, Yellow, string) } pub(crate) fn log_overwrite( buf: &mut impl
mut impl WriteColor, color: Color, content: &str) -> io::Result<()> { let dotted_line = "┈".repeat(60); colored!(log, "\n{}{}{}\n", reset!(), fg!(Some(color)), dotted_line)?; for line in content.lines() { colored!(log, "{}{}\n", fg!(Some(color)), line)?; } colored!(log, "{}{}{}\n", fg!(Some(color)), dotted_line, reset!()) }
WriteColor, path: &Path, string: &str, ) -> io::Result<()> { let path = path.to_string_lossy(); colored!( buf, "{}{}wip\n\nNOTE{}: writing the following output to {}.", reset!(), fg!(Some(Yellow)), reset!(), path )?; snippet(buf, Yellow, string) } pub(crate) fn mismatch(log: &mut impl WriteColor, expected: &str, actual: &str) -> io::Result<()> { colored!( log, "{}{}mismatch{}\n\n", bold!(true), fg!(Some(Red)), reset!() )?; log_snapshot(log, Blue, "EXPECTED", expected.as_bytes())?; log_snapshot(log, Red, "ACTUAL", actual.as_bytes())?; Ok(()) } pub(crate) fn build_status_mismatch(log: &mut impl WriteColor) -> io::Result<()> { colored!( log, "{}{}{}error: {}", reset!(), bold!(true), fg!(Some(Red)), bold!(false) ) } pub(crate) fn unexpected_build_success(log: &mut impl WriteColor) -> io::Result<()> { build_status_mismatch(log)?; colored!( log, "Expected test case to fail to compile, but it succeeded.{}\n", reset!() ) } pub(crate) fn unexpected_build_error(log: &mut impl WriteColor, error: &[u8]) -> io::Result<()> { build_status_mismatch(log)?; colored!(log, "Entry failed to build; compiler output:{}\n", reset!())?; snippet(log, Red, &normalize::trim(error)) } pub(crate) fn log_snapshot( log: &mut impl WriteColor, color: Color, header: &str, snapshot: &[u8], ) -> io::Result<()> { if !snapshot.is_empty() { colored!(log, "{}{}{}:", bold!(true), fg!(Some(color)), header)?; snippet(log, color, &normalize::trim(snapshot))?; } Ok(()) } fn snippet(log: &
random
[ { "content": "fn write_wip(path: &Path, content: &str, log: &mut impl WriteColor) -> EntryResult<Infallible> {\n\n let wip_dir = Path::new(\"wip\");\n\n create_dir_all(wip_dir)?;\n\n\n\n let gitignore_path = wip_dir.join(\".gitignore\");\n\n write(gitignore_path, \"*\\n\")?;\n\n\n\n let stderr_na...
Rust
src/compiler/compiler.rs
mthom26/monkey-lang
92289eca2a3216ee6042403ecaf1d63e05e29dc0
use crate::{ compiler::{make_op, OpCode, SymbolTable}, evaluator::Object, lexer::lexer, parser::{parse, Expression, Operator, Prefix, Statement}, }; const GLOBAL: &str = "GLOBAL"; #[derive(Debug, PartialEq)] pub struct ByteCode { pub instructions: Vec<u8>, pub constants: Vec<Object>, } impl ByteCode { fn new() -> Self { ByteCode { instructions: vec![], constants: vec![], } } } pub struct Compiler { byte_code: ByteCode, symbol_table: SymbolTable, } impl Compiler { pub fn from_source(input: &str) -> ByteCode { let mut compiler = Compiler { byte_code: ByteCode::new(), symbol_table: SymbolTable::new(), }; let mut tokens = lexer(input.as_bytes()); let ast = parse(&mut tokens); compiler.compile_statements(ast); compiler.byte_code } fn compile_statements(&mut self, ast: Vec<Statement>) { for statement in ast { match statement { Statement::ExpressionStatement(expr) => { self.compile_expression(expr); self.add_instruction(OpCode::OpPop); } Statement::Let { name, value } => { self.compile_expression(value); let symbol_index = self.symbol_table.define(name, GLOBAL.to_owned()); self.add_instruction(OpCode::OpSetGlobal(symbol_index)); } _ => unimplemented!(), } } } fn compile_expression(&mut self, expr: Expression) { match expr { Expression::Int(val) => { let index = self.add_constant(Object::Int(val)); self.add_instruction(OpCode::OpConstant(index)); } Expression::Boolean(val) => { match val { true => self.add_instruction(OpCode::OpTrue), false => self.add_instruction(OpCode::OpFalse), }; } Expression::Ident(val) => { match self.symbol_table.resolve(val) { Some(index) => self.add_instruction(OpCode::OpGetGlobal(index)), None => panic!("Undefined variable"), }; } Expression::Infix { left, op, right } => { self.compile_expression(*left); self.compile_expression(*right); match op { Operator::PLUS => self.add_instruction(OpCode::OpAdd), Operator::MINUS => self.add_instruction(OpCode::OpSub), Operator::MULTIPLY => self.add_instruction(OpCode::OpMul), Operator::DIVIDE => 
self.add_instruction(OpCode::OpDiv), Operator::GREATER => self.add_instruction(OpCode::OpGreater), Operator::LESS => self.add_instruction(OpCode::OpLess), Operator::EQUAL => self.add_instruction(OpCode::OpEqual), Operator::NEQUAL => self.add_instruction(OpCode::OpNotEqual), }; } Expression::Prefix { prefix, value } => { self.compile_expression(*value); match prefix { Prefix::MINUS => self.add_instruction(OpCode::OpMinus), Prefix::BANG => self.add_instruction(OpCode::OpBang), }; } Expression::If { condition, consequence, alternative, } => { self.compile_expression(*condition); let jmp_false = self.byte_code.instructions.len(); self.add_instruction(OpCode::OpJmpIfFalse(9999)); self.compile_statements(consequence); if self.is_last_instruction_pop() { self.remove_last_pop(); } if alternative.len() == 0 { let new_jmp_pos = self.byte_code.instructions.len() as u16; self.replace_op(jmp_false, OpCode::OpJmpIfFalse(new_jmp_pos)); } else { let jmp = self.byte_code.instructions.len(); self.add_instruction(OpCode::OpJmp(9999)); let jmp_false_pos = self.byte_code.instructions.len() as u16; self.replace_op(jmp_false, OpCode::OpJmpIfFalse(jmp_false_pos)); self.compile_statements(alternative); if self.is_last_instruction_pop() { self.remove_last_pop(); } let jmp_pos = self.byte_code.instructions.len() as u16; self.replace_op(jmp, OpCode::OpJmp(jmp_pos)); } } _ => unimplemented!(), } } fn add_constant(&mut self, object: Object) -> u16 { self.byte_code.constants.push(object); (self.byte_code.constants.len() - 1) as u16 } fn add_instruction(&mut self, op_code: OpCode) -> u16 { let new_instruction_position = self.byte_code.instructions.len(); let op_bytes = make_op(op_code); self.byte_code.instructions.extend(op_bytes); new_instruction_position as u16 } fn is_last_instruction_pop(&self) -> bool { self.byte_code.instructions.last() == Some(&make_op(OpCode::OpPop)[0]) } fn remove_last_pop(&mut self) { self.byte_code.instructions.pop(); } fn replace_op(&mut self, pos: usize, opcode: 
OpCode) { let bytes = make_op(opcode); for (i, byte) in bytes.iter().enumerate() { self.byte_code.instructions[pos + i] = *byte; } } } #[cfg(test)] mod tests { use crate::{ compiler::{ByteCode, Compiler}, evaluator::Object, }; fn compiled(input: &str) -> ByteCode { Compiler::from_source(input) } #[test] fn test_basic_expressions() { let input = "3"; let expected = ByteCode { instructions: vec![1, 0, 0, 6], constants: vec![Object::Int(3)], }; assert_eq!(expected, compiled(input)); let input = "1 + 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 2, 6], constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "1 + 2 + 3"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 1, 0, 0, 1, 0, 1, 2, 1, 0, 2, 2, 6, ], constants: vec![Object::Int(1), Object::Int(2), Object::Int(3)], }; assert_eq!(expected, compiled(input)); let input = "1 - 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 3, 6], constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "1 * 2 - 3 / 3 + 4"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 1, 0, 0, 1, 0, 1, 4, 1, 0, 2, 1, 0, 3, 5, 3, 1, 0, 4, 2, 6, ], constants: vec![ Object::Int(1), Object::Int(2), Object::Int(3), Object::Int(3), Object::Int(4), ], }; assert_eq!(expected, compiled(input)); } #[test] fn test_booleans() { let input = "true"; let expected = ByteCode { instructions: vec![7, 6], constants: vec![], }; assert_eq!(expected, compiled(input)); let input = "false;"; let expected = ByteCode { instructions: vec![8, 6], constants: vec![], }; assert_eq!(expected, compiled(input)); } #[test] fn test_comparison_operators() { let input = "1 > 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 9, 6], constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "1 < 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 10, 6], 
constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "1 == 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 11, 6], constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "1 != 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 12, 6], constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); } #[test] fn test_prefixes() { let input = "-1"; let expected = ByteCode { instructions: vec![1, 0, 0, 14, 6], constants: vec![Object::Int(1)], }; assert_eq!(expected, compiled(input)); let input = "!false"; let expected = ByteCode { instructions: vec![8, 13, 6], constants: vec![], }; assert_eq!(expected, compiled(input)); } #[test] fn test_if() { let input = "if(true) { 10 }"; let expected = ByteCode { instructions: vec![7, 16, 0, 7, 1, 0, 0, 6], constants: vec![Object::Int(10)], }; assert_eq!(expected, compiled(input)); let input = "if(true) { 10 } else { 20 }"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 7, 16, 0, 10, 1, 0, 0, 15, 0, 13, 1, 0, 1, 6, ], constants: vec![Object::Int(10), Object::Int(20)], }; assert_eq!(expected, compiled(input)); } #[test] fn test_globals() { let input = "let one = 1; let two = 2;"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 1, 0, 0, 17, 0, 0, 1, 0, 1, 17, 0, 1, ], constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "let one = 1; let two = one; let three = two;"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 1, 0, 0, 17, 0, 0, 18, 0, 0, 17, 0, 1, 18, 0, 1, 17, 0, 2, ], constants: vec![Object::Int(1)], }; assert_eq!(expected, compiled(input)); let input = "let x = 1; x;"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 1, 0, 0, 17, 0, 0, 18, 0, 0, 6, ], constants: vec![Object::Int(1)], }; assert_eq!(expected, compiled(input)); } }
use crate::{ compiler::{make_op, OpCode, SymbolTable}, evaluator::Object, lexer::lexer, parser::{parse, Expression, Operator, Prefix, Statement}, }; const GLOBAL: &str = "GLOBAL"; #[derive(Debug, PartialEq)] pub struct ByteCode { pub instructions: Vec<u8>, pub constants: Vec<Object>, } impl ByteCode { fn new() -> Self { ByteCode { instructions: vec![], constants: vec![], } } } pub struct Compiler { byte_code: ByteCode, symbol_table: SymbolTable, } impl Compiler { pub fn from_source(input: &str) -> ByteCode { let mut compiler = Compiler { byte_code: ByteCode::new(), symbol_table: SymbolTable::new(), }; let mut tokens = lexer(input.as_bytes()); let ast = parse(&mut tokens); compiler.compile_statements(ast); compiler.byte_code } fn compile_statements(&mut self, ast: Vec<Statement>) { for statement in ast { match statement { Statement::ExpressionStatement(expr) => { self.compile_expression(expr); self.add_instruction(OpCode::OpPop); } Statement::Let { name, value } => { self.compile_expression(value); let symbol_index = self.symbol_table.define(name, GLOBAL.to_owned()); self.add_instruction(OpCode::OpSetGlobal(symbol_index)); } _ => unimplemented!(), } } } fn compile_expression(&mut self, expr: Expression) { match expr { Expression::Int(val) => { let index = self.add_constant(Object::Int(val)); self.add_instruction(OpCode::OpConstant(index)); } Expression::Boolean(val) => { match val { true => self.add_instruction(OpCode::OpTrue), false => self.add_instruction(OpCode::OpFalse), }; } Expression::Ident(val) => { match self.symbol_table.resolve(val) { Some(index) => self.add_instruction(OpCode::OpGetGlobal(index)), None => panic!("Undefined variable"), }; } Expression::Infix { left, op, right } => { self.compile_expression(*left); self.compile_expression(*right); match op { Operator::PLUS => self.add_instruction(OpCode::OpAdd), Operator::MINUS => self.add_instruction(OpCode::OpSub), Operator::MULTIPLY => self.add_instruction(OpCode::OpMul), Operator::DIVIDE => 
self.add_instruction(OpCode::OpDiv), Operator::GREATER => self.add_instruction(OpCode::OpGreater), Operator::LESS => self.add_instruction(OpCode::OpLess), Operator::EQUAL => self.add_instruction(OpCode::OpEqual), Operator::NEQUAL => self.add_instruction(OpCode::OpNotEqual), }; } Expression::Prefix { prefix, value } => { self.compile_expression(*value); match prefix { Prefix::MINUS => self.add_instruction(OpCode::OpMinus), Prefix::BANG => self.add_instruction(OpCode::OpBang), }; } Expression::If { condition, consequence, alternative, } => { self.compile_expression(*condition); let jmp_false = self.byte_code.instructions.len(); self.add_instruction(OpCode::OpJmpIfFalse(9999)); self.compile_statements(consequence); if self.is_last_instruction_pop() { self.remove_last_pop(); } if alternative.len() == 0 { let new_jmp_pos = self.byte_code.instructions.len() as u16; self.replace_op(jmp_false, OpCode::OpJmpIfFalse(new_jmp_pos)); } else { let jmp = self.byte_code.instructions.len(); self.add_instruction(OpCode::OpJmp(9999)); let jmp_false_pos = self.byte_code.instructions.len() as u16; self.replace_op(jmp_false, OpCode::OpJmpIfFalse(jmp_false_pos)); self.compile_statements(alternative); if self.is_last_instruction_pop() { self.remove_last_pop(); } let jmp_pos = self.byte_code.instructions.len() as u16; self.replace_op(jmp, OpCode::OpJmp(jmp_pos)); } } _ => unimplemented!(), } } fn add_constant(&mut self, object: Object) -> u16 { self.byte_code.constants.push(object); (self.byte_code.constants.len() - 1) as u16 } fn add_instruction(&mut self, op_code: OpCode) -> u16 { let new_instruction_position = self.byte_code.instructions.len(); let op_bytes = make_op(op_code); self.byte_code.instructions.extend(op_bytes); new_instruction_position as u16 } fn is_last_instruction_pop(&self) -> bool { self.byte_code.instructions.last() == Some(&make_op(OpCode::OpPop)[0]) } fn remove_last_pop(&mut self) { self.byte_code.instructions.pop(); } fn replace_op(&mut self, pos: usize, opcode: 
OpCode) { let bytes = make_op(opcode); for (i, byte) in bytes.iter().enumerate() { self.byte_code.instructions[pos + i] = *byte; } } } #[cfg(test)] mod tests { use crate::{ compiler::{ByteCode, Compiler}, evaluator::Object, }; fn compiled(input: &str) -> ByteCode { Compiler::from_source(input) } #[test] fn test_basic_expressions() { let input = "3"; let expected = ByteCode { instructions: vec![1, 0, 0, 6], constants: vec![Object::Int(3)], }; assert_eq!(expected, compiled(input)); let input = "1 + 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 2, 6], constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "1 + 2 + 3"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 1, 0, 0, 1, 0, 1, 2, 1, 0, 2, 2, 6, ], constants: vec![Object::Int(1), Object::Int(2), Object::Int(3)], }; assert_eq!(expected, compiled(input)); let input = "1 - 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 3, 6], constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "1 * 2 - 3 / 3 + 4"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 1, 0, 0, 1, 0, 1, 4, 1, 0, 2, 1, 0, 3, 5, 3, 1, 0, 4, 2, 6, ], constants: vec![ Object::Int(1), Object::Int(2), Object::Int(3), Object::Int(3), Object::Int(4), ], }; assert_eq!(expected, compiled(input)); } #[test] fn test_booleans() { let input = "true"; let expected = ByteCode { instructions: vec![7, 6], constants: vec![], }; assert_eq!(expected, compiled(input)); let input = "false;"; let expected = ByteCode { instructions: vec![8, 6], constants: vec![], }; assert_eq!(expected, compiled(input)); } #[test] fn test_comparison_operators() { let input = "1 > 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 9, 6], constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "1 < 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 10, 6], 
constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "1 == 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 11, 6], constants: vec![Object::I
input = "let x = 1; x;"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 1, 0, 0, 17, 0, 0, 18, 0, 0, 6, ], constants: vec![Object::Int(1)], }; assert_eq!(expected, compiled(input)); } }
nt(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "1 != 2"; let expected = ByteCode { instructions: vec![1, 0, 0, 1, 0, 1, 12, 6], constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); } #[test] fn test_prefixes() { let input = "-1"; let expected = ByteCode { instructions: vec![1, 0, 0, 14, 6], constants: vec![Object::Int(1)], }; assert_eq!(expected, compiled(input)); let input = "!false"; let expected = ByteCode { instructions: vec![8, 13, 6], constants: vec![], }; assert_eq!(expected, compiled(input)); } #[test] fn test_if() { let input = "if(true) { 10 }"; let expected = ByteCode { instructions: vec![7, 16, 0, 7, 1, 0, 0, 6], constants: vec![Object::Int(10)], }; assert_eq!(expected, compiled(input)); let input = "if(true) { 10 } else { 20 }"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 7, 16, 0, 10, 1, 0, 0, 15, 0, 13, 1, 0, 1, 6, ], constants: vec![Object::Int(10), Object::Int(20)], }; assert_eq!(expected, compiled(input)); } #[test] fn test_globals() { let input = "let one = 1; let two = 2;"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 1, 0, 0, 17, 0, 0, 1, 0, 1, 17, 0, 1, ], constants: vec![Object::Int(1), Object::Int(2)], }; assert_eq!(expected, compiled(input)); let input = "let one = 1; let two = one; let three = two;"; #[rustfmt::skip] let expected = ByteCode { instructions: vec![ 1, 0, 0, 17, 0, 0, 18, 0, 0, 17, 0, 1, 18, 0, 1, 17, 0, 2, ], constants: vec![Object::Int(1)], }; assert_eq!(expected, compiled(input)); let
random
[ { "content": "pub fn eval(ast: Vec<Statement>, env: &mut Environment) -> Object {\n\n let result = eval_block(ast, env);\n\n\n\n // If final result is a Return unwrap it...\n\n match result {\n\n Object::Return(val) => *val,\n\n _ => result,\n\n }\n\n}\n\n\n", "file_path": "src/eva...
Rust
macros/component-definition-derive/src/lib.rs
Max-Meldrum/kompact
8b77733d8b4da2c74e7d97058ea5ce774d488547
#![recursion_limit = "128"] extern crate proc_macro; use proc_macro::TokenStream; use proc_macro2::TokenStream as TokenStream2; use quote::quote; use syn::{parse_macro_input, DeriveInput}; use std::iter::Iterator; #[proc_macro_derive(ComponentDefinition)] pub fn component_definition(input: TokenStream) -> TokenStream { let ast = parse_macro_input!(input as DeriveInput); let gen = impl_component_definition(&ast); gen.into() } fn impl_component_definition(ast: &syn::DeriveInput) -> TokenStream2 { let name = &ast.ident; let name_str = format!("{}", name); if let syn::Data::Struct(ref vdata) = ast.data { let generics = &ast.generics; let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let fields = &vdata.fields; let mut ports: Vec<(&syn::Field, PortField)> = Vec::new(); let mut ctx_field: Option<&syn::Field> = None; for field in fields.iter() { let cf = identify_field(field); match cf { ComponentField::Ctx => { ctx_field = Some(field); } ComponentField::Port(pf) => ports.push((field, pf)), ComponentField::Other => (), } } let (ctx_setup, ctx_access) = match ctx_field { Some(f) => { let id = &f.ident; let setup = quote! { self.#id.initialise(self_component.clone()); }; let access = quote! { self.#id }; (setup, access) } None => panic!("No ComponentContext found for {:?}!", name), }; let port_setup = ports .iter() .map(|&(f, _)| { let id = &f.ident; quote! { self.#id.set_parent(self_component.clone()); } }) .collect::<Vec<_>>(); let port_handles_skip = ports .iter() .enumerate() .map(|(i, &(f, ref t))| { let id = &f.ident; let handle = t.as_handle(); quote! 
{ if skip <= #i { if count >= max_events { return ExecuteResult::new(false, count, #i); } #[allow(unreachable_code)] { if let Some(event) = self.#id.dequeue() { let res = #handle count += 1; done_work = true; if let Handled::BlockOn(blocking_future) = res { self.ctx_mut().set_blocking(blocking_future); return ExecuteResult::new(true, count, #i); } } } } } }) .collect::<Vec<_>>(); let port_handles = ports .iter() .enumerate() .map(|(i, &(f, ref t))| { let id = &f.ident; let handle = t.as_handle(); quote! { if count >= max_events { return ExecuteResult::new(false, count, #i); } #[allow(unreachable_code)] { if let Some(event) = self.#id.dequeue() { let res = #handle count += 1; done_work = true; if let Handled::BlockOn(blocking_future) = res { self.ctx_mut().set_blocking(blocking_future); return ExecuteResult::new(true, count, #i); } } } } }) .collect::<Vec<_>>(); let exec = if port_handles.is_empty() { quote! { fn execute(&mut self, _max_events: usize, _skip: usize) -> ExecuteResult { ExecuteResult::new(false, 0, 0) } } } else { quote! { fn execute(&mut self, max_events: usize, skip: usize) -> ExecuteResult { let mut count: usize = 0; let mut done_work = true; #(#port_handles_skip)* while done_work { done_work = false; #(#port_handles)* } ExecuteResult::new(false, count, 0) } } }; let port_ref_impls = ports .iter() .map(|p| { let (field, port_field) = p; let id = &field.ident; match port_field { PortField::Required(ty) => quote! { impl #impl_generics RequireRef< #ty > for #name #ty_generics #where_clause { fn required_ref(&mut self) -> RequiredRef< #ty > { self.#id.share() } fn connect_to_provided(&mut self, prov: ProvidedRef< #ty >) -> () { self.#id.connect(prov) } } }, PortField::Provided(ty) => quote! 
{ impl #impl_generics ProvideRef< #ty > for #name #ty_generics #where_clause { fn provided_ref(&mut self) -> ProvidedRef< #ty > { self.#id.share() } fn connect_to_required(&mut self, req: RequiredRef< #ty >) -> () { self.#id.connect(req) } } }, } }) .collect::<Vec<_>>(); fn make_match(f: &syn::Field, t: &syn::Type) -> TokenStream2 { let f = &f.ident; quote! { id if id == ::std::any::TypeId::of::<#t>() => Some(&mut self.#f as &mut dyn ::std::any::Any), } } let provided_matches: Vec<_> = ports .iter() .filter_map(|(f, p)| match p { PortField::Provided(t) => Some((*f, t)), _ => None, }) .map(|(f, t)| make_match(f, t)) .collect(); let required_matches: Vec<_> = ports .iter() .filter_map(|(f, p)| match p { PortField::Required(t) => Some((*f, t)), _ => None, }) .map(|(f, t)| make_match(f, t)) .collect(); quote! { impl #impl_generics ComponentDefinition for #name #ty_generics #where_clause { fn setup(&mut self, self_component: ::std::sync::Arc<Component<Self>>) -> () { #ctx_setup #(#port_setup)* } #exec fn ctx_mut(&mut self) -> &mut ComponentContext<Self> { &mut #ctx_access } fn ctx(&self) -> &ComponentContext<Self> { &#ctx_access } fn type_name() -> &'static str { #name_str } } impl #impl_generics DynamicPortAccess for #name #ty_generics #where_clause { fn get_provided_port_as_any(&mut self, port_id: ::std::any::TypeId) -> Option<&mut dyn ::std::any::Any> { match port_id { #(#provided_matches)* _ => None, } } fn get_required_port_as_any(&mut self, port_id: ::std::any::TypeId) -> Option<&mut dyn ::std::any::Any> { match port_id { #(#required_matches)* _ => None, } } } #(#port_ref_impls)* } } else { panic!("#[derive(ComponentDefinition)] is only defined for structs, not for enums!"); } } #[allow(clippy::large_enum_variant)] #[derive(Debug)] enum ComponentField { Ctx, Port(PortField), Other, } #[derive(Debug)] enum PortField { Required(syn::Type), Provided(syn::Type), } impl PortField { fn as_handle(&self) -> TokenStream2 { match *self { PortField::Provided(ref ty) => 
quote! { Provide::<#ty>::handle(self, event); }, PortField::Required(ref ty) => quote! { Require::<#ty>::handle(self, event); }, } } } const REQP: &str = "RequiredPort"; const PROVP: &str = "ProvidedPort"; const CTX: &str = "ComponentContext"; const KOMPICS: &str = "kompact"; fn identify_field(f: &syn::Field) -> ComponentField { if let syn::Type::Path(ref patht) = f.ty { let path = &patht.path; let port_seg_opt = if path.segments.len() == 1 { Some(&path.segments[0]) } else if path.segments.len() == 2 { if path.segments[0].ident == KOMPICS { Some(&path.segments[1]) } else { None } } else { None }; if let Some(seg) = port_seg_opt { if seg.ident == REQP { ComponentField::Port(PortField::Required(extract_port_type(seg))) } else if seg.ident == PROVP { ComponentField::Port(PortField::Provided(extract_port_type(seg))) } else if seg.ident == CTX { ComponentField::Ctx } else { ComponentField::Other } } else { ComponentField::Other } } else { ComponentField::Other } } fn extract_port_type(seg: &syn::PathSegment) -> syn::Type { match seg.arguments { syn::PathArguments::AngleBracketed(ref abppd) => { match abppd.args.first().expect("Invalid type argument!") { syn::GenericArgument::Type(ty) => ty.clone(), _ => panic!("Wrong generic argument type in {:?}", seg), } } _ => panic!("Wrong path parameter type! {:?}", seg), } }
#![recursion_limit = "128"] extern crate proc_macro; use proc_macro::TokenStream; use proc_macro2::TokenStream as TokenStream2; use quote::quote; use syn::{parse_macro_input, DeriveInput}; use std::iter::Iterator; #[proc_macro_derive(ComponentDefinition)] pub fn component_definition(input: TokenStream) -> TokenStream { let ast = parse_macro_input!(input as DeriveInput); let gen = impl_component_definition(&ast); gen.into() } fn impl_component_definition(ast: &syn::DeriveInput) -> TokenStream2 { let name = &ast.ident; let name_str = format!("{}", name); if let syn::Data::Struct(ref vdata) = ast.data { let generics = &ast.generics; let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let fields = &vdata.fields; let mut ports: Vec<(&syn::Field, PortField)> = Vec::new(); let mut ctx_field: Option<&syn::Field> = None; for field in fields.iter() { let cf = identify_field(field); match cf { ComponentField::Ctx => { ctx_field = Some(field); } ComponentField::Port(pf) => ports.push((field, pf)), ComponentField::Other => (), } } let (ctx_setup, ctx_access) = match ctx_field { Some(f) => { let id = &f.ident; let setup = quote! { self.#id.initialise(self_component.clone()); }; let access = quote! { self.#id }; (setup, access) } None => panic!("No ComponentContext found for {:?}!", name), }; let port_setup = ports .iter() .map(|&(f, _)| { let id = &f.ident; quote! { self.#id.set_parent(self_component.clone()); } }) .collect::<Vec<_>>(); let port_handles_skip = ports .iter() .enumerate() .map(|(i, &(f, ref t))| { let id = &f.ident; let handle = t.as_handle(); quote! 
{ if skip <= #i { if count >= max_events { return ExecuteResult::new(false, count, #i); } #[allow(unreachable_code)] { if let Some(event) = self.#id.dequeue() { let res = #handle count += 1; done_work = true; if let Handled::BlockOn(blocking_future) = res { self.ctx_mut().set_blocking(blocking_future); return ExecuteResult::new(true, count, #i); } } } } } }) .collect::<Vec<_>>(); let port_handles = ports .iter() .enumerate() .map(|(i, &(f, ref t))| { let id = &f.ident; let handle = t.as_handle(); quote! { if count >= max_events { return ExecuteResult::new(false, count, #i); } #[allow(unreachable_code)] { if let Some(event) = self.#id.dequeue() { let res = #handle count += 1; done_work = true; if let Handled::BlockOn(blocking_future) = res { self.ctx_mut().set_blocking(blocking_future); return ExecuteResult::new(true, count, #i); } } } } }) .collect::<Vec<_>>(); let exec = if port_handles.is_empty() { quote! { fn execute(&mut self, _max_events: usize, _skip: usize) -> ExecuteResult { ExecuteResult::new(false, 0, 0) } } } else { quote! { fn execute(&mut self, max_events: usize, skip: usize) -> ExecuteResult { let mut count: usize = 0; let mut done_work = true; #(#port_handles_skip)* while done_work { done_work = false; #(#port_handles)* } ExecuteResult::new(false, count, 0) } } }; let port_ref_impls = ports .iter() .map(|p| { let (field, port_field) = p; let id = &field.ident; match port_field { PortField::Required(ty) => quote! { impl #impl_generics RequireRef< #ty > for #name #ty_generics #where_clause { fn required_ref(&mut self) -> RequiredRef< #ty > { self.#id.share() } fn connect_to_provided(&mut self, prov: ProvidedRef< #ty >) -> () { self.#id.connect(prov) } } }, PortField::Provided(t
#[allow(clippy::large_enum_variant)] #[derive(Debug)] enum ComponentField { Ctx, Port(PortField), Other, } #[derive(Debug)] enum PortField { Required(syn::Type), Provided(syn::Type), } impl PortField { fn as_handle(&self) -> TokenStream2 { match *self { PortField::Provided(ref ty) => quote! { Provide::<#ty>::handle(self, event); }, PortField::Required(ref ty) => quote! { Require::<#ty>::handle(self, event); }, } } } const REQP: &str = "RequiredPort"; const PROVP: &str = "ProvidedPort"; const CTX: &str = "ComponentContext"; const KOMPICS: &str = "kompact"; fn identify_field(f: &syn::Field) -> ComponentField { if let syn::Type::Path(ref patht) = f.ty { let path = &patht.path; let port_seg_opt = if path.segments.len() == 1 { Some(&path.segments[0]) } else if path.segments.len() == 2 { if path.segments[0].ident == KOMPICS { Some(&path.segments[1]) } else { None } } else { None }; if let Some(seg) = port_seg_opt { if seg.ident == REQP { ComponentField::Port(PortField::Required(extract_port_type(seg))) } else if seg.ident == PROVP { ComponentField::Port(PortField::Provided(extract_port_type(seg))) } else if seg.ident == CTX { ComponentField::Ctx } else { ComponentField::Other } } else { ComponentField::Other } } else { ComponentField::Other } } fn extract_port_type(seg: &syn::PathSegment) -> syn::Type { match seg.arguments { syn::PathArguments::AngleBracketed(ref abppd) => { match abppd.args.first().expect("Invalid type argument!") { syn::GenericArgument::Type(ty) => ty.clone(), _ => panic!("Wrong generic argument type in {:?}", seg), } } _ => panic!("Wrong path parameter type! {:?}", seg), } }
y) => quote! { impl #impl_generics ProvideRef< #ty > for #name #ty_generics #where_clause { fn provided_ref(&mut self) -> ProvidedRef< #ty > { self.#id.share() } fn connect_to_required(&mut self, req: RequiredRef< #ty >) -> () { self.#id.connect(req) } } }, } }) .collect::<Vec<_>>(); fn make_match(f: &syn::Field, t: &syn::Type) -> TokenStream2 { let f = &f.ident; quote! { id if id == ::std::any::TypeId::of::<#t>() => Some(&mut self.#f as &mut dyn ::std::any::Any), } } let provided_matches: Vec<_> = ports .iter() .filter_map(|(f, p)| match p { PortField::Provided(t) => Some((*f, t)), _ => None, }) .map(|(f, t)| make_match(f, t)) .collect(); let required_matches: Vec<_> = ports .iter() .filter_map(|(f, p)| match p { PortField::Required(t) => Some((*f, t)), _ => None, }) .map(|(f, t)| make_match(f, t)) .collect(); quote! { impl #impl_generics ComponentDefinition for #name #ty_generics #where_clause { fn setup(&mut self, self_component: ::std::sync::Arc<Component<Self>>) -> () { #ctx_setup #(#port_setup)* } #exec fn ctx_mut(&mut self) -> &mut ComponentContext<Self> { &mut #ctx_access } fn ctx(&self) -> &ComponentContext<Self> { &#ctx_access } fn type_name() -> &'static str { #name_str } } impl #impl_generics DynamicPortAccess for #name #ty_generics #where_clause { fn get_provided_port_as_any(&mut self, port_id: ::std::any::TypeId) -> Option<&mut dyn ::std::any::Any> { match port_id { #(#provided_matches)* _ => None, } } fn get_required_port_as_any(&mut self, port_id: ::std::any::TypeId) -> Option<&mut dyn ::std::any::Any> { match port_id { #(#required_matches)* _ => None, } } } #(#port_ref_impls)* } } else { panic!("#[derive(ComponentDefinition)] is only defined for structs, not for enums!"); } }
function_block-function_prefixed
[ { "content": "/// Connect two port instances.\n\n///\n\n/// The providing port instance must be given as first argument, and the requiring instance second.\n\npub fn biconnect_ports<P: Port>(prov: &mut ProvidedPort<P>, req: &mut RequiredPort<P>) -> () {\n\n let prov_share = prov.share();\n\n let req_share...
Rust
src/log.rs
kornelski/rustracing
117cbd127e5467c4f8303c4dcab9a874d99fdfb2
#[cfg(feature = "stacktrace")] use backtrace::Backtrace; use std::borrow::Cow; use std::time::SystemTime; #[derive(Debug)] pub struct LogBuilder { fields: Vec<LogField>, time: Option<SystemTime>, } impl LogBuilder { pub fn field<T: Into<LogField>>(&mut self, field: T) -> &mut Self { self.fields.push(field.into()); self } pub fn time(&mut self, time: SystemTime) -> &mut Self { self.time = Some(time); self } pub fn std(&mut self) -> StdLogFieldsBuilder { StdLogFieldsBuilder(self) } pub fn error(&mut self) -> StdErrorLogFieldsBuilder { self.field(LogField::new("event", "error")); StdErrorLogFieldsBuilder(self) } pub(crate) fn new() -> Self { LogBuilder { fields: Vec::new(), time: None, } } pub(crate) fn finish(mut self) -> Option<Log> { if self.fields.is_empty() { None } else { self.fields.reverse(); self.fields.sort_by(|a, b| a.name.cmp(&b.name)); self.fields.dedup_by(|a, b| a.name == b.name); Some(Log { fields: self.fields, time: self.time.unwrap_or_else(SystemTime::now), }) } } } #[derive(Debug, Clone)] pub struct Log { fields: Vec<LogField>, time: SystemTime, } impl Log { pub fn fields(&self) -> &[LogField] { &self.fields } pub fn time(&self) -> SystemTime { self.time } } #[derive(Debug, Clone)] pub struct LogField { name: Cow<'static, str>, value: Cow<'static, str>, } impl LogField { pub fn new<N, V>(name: N, value: V) -> Self where N: Into<Cow<'static, str>>, V: Into<Cow<'static, str>>, { LogField { name: name.into(), value: value.into(), } } pub fn name(&self) -> &str { self.name.as_ref() } pub fn value(&self) -> &str { self.value.as_ref() } } impl<N, V> From<(N, V)> for LogField where N: Into<Cow<'static, str>>, V: Into<Cow<'static, str>>, { fn from((n, v): (N, V)) -> Self { LogField::new(n, v) } } #[derive(Debug)] pub struct StdLogFieldsBuilder<'a>(&'a mut LogBuilder); impl<'a> StdLogFieldsBuilder<'a> { pub fn event<T>(&mut self, event: T) -> &mut Self where T: Into<Cow<'static, str>>, { self.0.field(LogField::new("event", event)); self } pub fn 
message<T>(&mut self, message: T) -> &mut Self where T: Into<Cow<'static, str>>, { self.0.field(LogField::new("message", message)); self } #[cfg(feature = "stacktrace")] pub fn stack(&mut self) -> &mut Self { self.0 .field(LogField::new("stack", format!("{:?}", Backtrace::new()))); self } } #[derive(Debug)] pub struct StdErrorLogFieldsBuilder<'a>(&'a mut LogBuilder); impl<'a> StdErrorLogFieldsBuilder<'a> { pub fn kind<T>(&mut self, kind: T) -> &mut Self where T: Into<Cow<'static, str>>, { self.0.field(LogField::new("error.kind", kind)); self } pub fn message<T>(&mut self, message: T) -> &mut Self where T: Into<Cow<'static, str>>, { self.0.field(LogField::new("message", message)); self } #[cfg(feature = "stacktrace")] pub fn stack(&mut self) -> &mut Self { self.0 .field(LogField::new("stack", format!("{:?}", Backtrace::new()))); self } }
#[cfg(feature = "stacktrace")] use backtrace::Backtrace; use std::borrow::Cow; use std::time::SystemTime; #[derive(Debug)] pub struct LogBuilder { fields: Vec<LogField>,
lue: V) -> Self where N: Into<Cow<'static, str>>, V: Into<Cow<'static, str>>, { LogField { name: name.into(), value: value.into(), } } pub fn name(&self) -> &str { self.name.as_ref() } pub fn value(&self) -> &str { self.value.as_ref() } } impl<N, V> From<(N, V)> for LogField where N: Into<Cow<'static, str>>, V: Into<Cow<'static, str>>, { fn from((n, v): (N, V)) -> Self { LogField::new(n, v) } } #[derive(Debug)] pub struct StdLogFieldsBuilder<'a>(&'a mut LogBuilder); impl<'a> StdLogFieldsBuilder<'a> { pub fn event<T>(&mut self, event: T) -> &mut Self where T: Into<Cow<'static, str>>, { self.0.field(LogField::new("event", event)); self } pub fn message<T>(&mut self, message: T) -> &mut Self where T: Into<Cow<'static, str>>, { self.0.field(LogField::new("message", message)); self } #[cfg(feature = "stacktrace")] pub fn stack(&mut self) -> &mut Self { self.0 .field(LogField::new("stack", format!("{:?}", Backtrace::new()))); self } } #[derive(Debug)] pub struct StdErrorLogFieldsBuilder<'a>(&'a mut LogBuilder); impl<'a> StdErrorLogFieldsBuilder<'a> { pub fn kind<T>(&mut self, kind: T) -> &mut Self where T: Into<Cow<'static, str>>, { self.0.field(LogField::new("error.kind", kind)); self } pub fn message<T>(&mut self, message: T) -> &mut Self where T: Into<Cow<'static, str>>, { self.0.field(LogField::new("message", message)); self } #[cfg(feature = "stacktrace")] pub fn stack(&mut self) -> &mut Self { self.0 .field(LogField::new("stack", format!("{:?}", Backtrace::new()))); self } }
time: Option<SystemTime>, } impl LogBuilder { pub fn field<T: Into<LogField>>(&mut self, field: T) -> &mut Self { self.fields.push(field.into()); self } pub fn time(&mut self, time: SystemTime) -> &mut Self { self.time = Some(time); self } pub fn std(&mut self) -> StdLogFieldsBuilder { StdLogFieldsBuilder(self) } pub fn error(&mut self) -> StdErrorLogFieldsBuilder { self.field(LogField::new("event", "error")); StdErrorLogFieldsBuilder(self) } pub(crate) fn new() -> Self { LogBuilder { fields: Vec::new(), time: None, } } pub(crate) fn finish(mut self) -> Option<Log> { if self.fields.is_empty() { None } else { self.fields.reverse(); self.fields.sort_by(|a, b| a.name.cmp(&b.name)); self.fields.dedup_by(|a, b| a.name == b.name); Some(Log { fields: self.fields, time: self.time.unwrap_or_else(SystemTime::now), }) } } } #[derive(Debug, Clone)] pub struct Log { fields: Vec<LogField>, time: SystemTime, } impl Log { pub fn fields(&self) -> &[LogField] { &self.fields } pub fn time(&self) -> SystemTime { self.time } } #[derive(Debug, Clone)] pub struct LogField { name: Cow<'static, str>, value: Cow<'static, str>, } impl LogField { pub fn new<N, V>(name: N, va
random
[ { "content": "/// This trait allows to insert fields in a HTTP header.\n\npub trait SetHttpHeaderField {\n\n /// Sets the value of the field named `name` in the HTTP header to `value`.\n\n fn set_http_header_field(&mut self, name: &str, value: &str) -> Result<()>;\n\n}\n\nimpl<S: BuildHasher> SetHttpHeade...
Rust
santa/src/before_level.rs
balbok0/abstreet
3af15fefdb2772c83864c08724318418da8190a9
use std::collections::{BTreeSet, HashSet}; use rand::seq::SliceRandom; use rand::SeedableRng; use rand_xorshift::XorShiftRng; use abstutil::prettyprint_usize; use geom::Time; use map_gui::load::MapLoader; use map_gui::tools::PopupMsg; use map_gui::ID; use map_model::BuildingID; use widgetry::{ ButtonBuilder, Color, ControlState, Drawable, EventCtx, GeomBatch, GfxCtx, HorizontalAlignment, Image, Key, Line, Outcome, Panel, RewriteColor, State, Text, TextExt, VerticalAlignment, Widget, }; use crate::buildings::{BldgState, Buildings}; use crate::game::Game; use crate::levels::Level; use crate::meters::{custom_bar, make_bar}; use crate::vehicles::Vehicle; use crate::{App, Transition}; const ZOOM: f64 = 2.0; pub struct Picker { vehicle_panel: Panel, instructions_panel: Panel, upzone_panel: Panel, level: Level, bldgs: Buildings, current_picks: BTreeSet<BuildingID>, draw_start: Drawable, } impl Picker { pub fn new(ctx: &mut EventCtx, app: &App, level: Level) -> Box<dyn State<App>> { MapLoader::new( ctx, app, level.map.clone(), Box::new(move |ctx, app| { app.session.music.change_song(&level.music); ctx.canvas.cam_zoom = ZOOM; let start = app .map .get_i(app.map.find_i_by_osm_id(level.start).unwrap()) .polygon .center(); ctx.canvas.center_on_map_pt(start); let bldgs = Buildings::new(ctx, app, HashSet::new()); let mut txt = Text::new(); txt.add_line(Line(format!("Ready for {}?", level.title)).small_heading()); txt.add_line(format!( "Goal: deliver {} presents", prettyprint_usize(level.goal) )); txt.add_line(format!("Time limit: {}", level.time_limit)); txt.add_appended(vec![ Line("Deliver presents to "), Line("single-family homes").fg(app.cs.residential_building), Line(" and "), Line("apartments").fg(app.session.colors.apartment), ]); txt.add_appended(vec![ Line("Raise your blood sugar by visiting "), Line("stores").fg(app.session.colors.store), ]); let instructions_panel = Panel::new(Widget::col(vec![ txt.into_widget(ctx), Widget::row(vec![ GeomBatch::load_svg_bytes( 
&ctx.prerender, widgetry::include_labeled_bytes!("../../widgetry/icons/arrow_keys.svg"), ) .into_widget(ctx), Text::from_all(vec![ Line("arrow keys").fg(ctx.style().text_hotkey_color), Line(" to move (or "), Line("WASD").fg(ctx.style().text_hotkey_color), Line(")"), ]) .into_widget(ctx), ]), Widget::row(vec![ Image::from_path("system/assets/tools/mouse.svg").into_widget(ctx), Text::from_all(vec![ Line("mouse scroll wheel or touchpad") .fg(ctx.style().text_hotkey_color), Line(" to zoom in or out"), ]) .into_widget(ctx), ]), Text::from_all(vec![ Line("Escape key").fg(ctx.style().text_hotkey_color), Line(" to pause"), ]) .into_widget(ctx), ])) .aligned(HorizontalAlignment::LeftInset, VerticalAlignment::TopInset) .build(ctx); let draw_start = map_gui::tools::start_marker(ctx, start, 3.0); let current_picks = app .session .upzones_per_level .get(level.title.clone()) .clone(); let upzone_panel = make_upzone_panel(ctx, app, current_picks.len()); Transition::Replace(Box::new(Picker { vehicle_panel: make_vehicle_panel(ctx, app), upzone_panel, instructions_panel, level, bldgs, current_picks, draw_start: ctx.upload(draw_start), })) }), ) } fn randomly_pick_upzones(&mut self, app: &App) { let mut choices = Vec::new(); for (b, state) in &self.bldgs.buildings { if let BldgState::Undelivered(_) = state { if !self.current_picks.contains(b) { choices.push(*b); } } } let mut rng = XorShiftRng::seed_from_u64(42); choices.shuffle(&mut rng); let n = app.session.upzones_unlocked - self.current_picks.len(); assert!(choices.len() >= n); self.current_picks.extend(choices.into_iter().take(n)); } } impl State<App> for Picker { fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition { if app.session.upzones_unlocked > 0 && !app.session.upzones_explained { app.session.upzones_explained = true; return explain_upzoning(ctx); } ctx.canvas_movement(); if ctx.redo_mouseover() { app.current_selection = app.mouseover_unzoomed_buildings(ctx).filter(|id| { match 
self.bldgs.buildings[&id.as_building()] { BldgState::Undelivered(_) => true, _ => false, } }); } if let Some(ID::Building(b)) = app.current_selection { if ctx.normal_left_click() { if self.current_picks.contains(&b) { self.current_picks.remove(&b); } else if self.current_picks.len() < app.session.upzones_unlocked { self.current_picks.insert(b); } self.upzone_panel = make_upzone_panel(ctx, app, self.current_picks.len()); } } match self.upzone_panel.event(ctx) { Outcome::Clicked(x) => match x.as_ref() { "Start game" => { app.current_selection = None; app.session .upzones_per_level .set(self.level.title.clone(), self.current_picks.clone()); app.session.save(); return Transition::Replace(Game::new( ctx, app, self.level.clone(), Vehicle::get(&app.session.current_vehicle), self.current_picks.clone().into_iter().collect(), )); } "Randomly choose upzones" => { self.randomly_pick_upzones(app); self.upzone_panel = make_upzone_panel(ctx, app, self.current_picks.len()); } "Clear upzones" => { self.current_picks.clear(); self.upzone_panel = make_upzone_panel(ctx, app, self.current_picks.len()); } "help" => { return explain_upzoning(ctx); } _ => unreachable!(), }, _ => {} } match self.vehicle_panel.event(ctx) { Outcome::Clicked(x) => { app.session.current_vehicle = x; self.vehicle_panel = make_vehicle_panel(ctx, app); } _ => {} } app.session.update_music(ctx); Transition::Keep } fn draw(&self, g: &mut GfxCtx, app: &App) { self.vehicle_panel.draw(g); self.upzone_panel.draw(g); self.instructions_panel.draw(g); app.session.music.draw(g); g.redraw(&self.bldgs.draw_all); for b in &self.current_picks { g.draw_polygon(Color::PINK, app.map.get_b(*b).polygon.clone()); } if let Some(ID::Building(b)) = app.current_selection { g.draw_polygon(app.cs.selected, app.map.get_b(b).polygon.clone()); } g.redraw(&self.draw_start); } } fn make_vehicle_panel(ctx: &mut EventCtx, app: &App) -> Panel { let mut buttons = Vec::new(); for name in &app.session.vehicles_unlocked { let vehicle = 
Vehicle::get(name); let batch = vehicle .animate(ctx.prerender, Time::START_OF_DAY) .scale(10.0); buttons.push( if name == &app.session.current_vehicle { batch .into_widget(ctx) .container() .padding(5) .outline((2.0, Color::WHITE)) } else { let normal = batch.clone().color(RewriteColor::MakeGrayscale); let hovered = batch; ButtonBuilder::new() .custom_batch(normal, ControlState::Default) .custom_batch(hovered, ControlState::Hovered) .build_widget(ctx, name) } .centered_vert(), ); buttons.push(Widget::vert_separator(ctx, 150.0)); } buttons.pop(); let vehicle = Vehicle::get(&app.session.current_vehicle); let (max_speed, max_energy) = Vehicle::max_stats(); Panel::new(Widget::col(vec![ Line("Pick Santa's vehicle") .small_heading() .into_widget(ctx), Widget::row(buttons), Line(&vehicle.name).small_heading().into_widget(ctx), Widget::row(vec![ "Speed:".text_widget(ctx), custom_bar( ctx, app.session.colors.boost, vehicle.speed / max_speed, Text::new(), ) .align_right(), ]), Widget::row(vec![ "Energy:".text_widget(ctx), custom_bar( ctx, app.session.colors.energy, (vehicle.max_energy as f64) / (max_energy as f64), Text::new(), ) .align_right(), ]), ])) .aligned(HorizontalAlignment::RightInset, VerticalAlignment::TopInset) .build(ctx) } fn make_upzone_panel(ctx: &mut EventCtx, app: &App, num_picked: usize) -> Panel { if app.session.upzones_unlocked == 0 { return Panel::new( ctx.style() .btn_solid_primary .text("Start game") .hotkey(Key::Enter) .build_def(ctx) .container(), ) .aligned( HorizontalAlignment::RightInset, VerticalAlignment::BottomInset, ) .build(ctx); } Panel::new(Widget::col(vec![ Widget::row(vec![ Line("Upzoning").small_heading().into_widget(ctx), ctx.style() .btn_plain .icon("system/assets/tools/info.svg") .build_widget(ctx, "help") .align_right(), ]), Widget::row(vec![ Image::from_path("system/assets/tools/mouse.svg").into_widget(ctx), Line("Select the houses you want to turn into stores") .fg(ctx.style().text_hotkey_color) .into_widget(ctx), ]), 
Widget::row(vec![ "Upzones chosen:".text_widget(ctx), make_bar(ctx, Color::PINK, num_picked, app.session.upzones_unlocked), ]), Widget::row(vec![ ctx.style() .btn_outline .text("Randomly choose upzones") .disabled(num_picked == app.session.upzones_unlocked) .build_def(ctx), ctx.style() .btn_outline .text("Clear upzones") .disabled(num_picked == 0) .build_def(ctx) .align_right(), ]), if num_picked == app.session.upzones_unlocked { ctx.style() .btn_solid_primary .text("Start game") .hotkey(Key::Enter) .build_def(ctx) } else { ctx.style() .btn_solid_primary .text("Finish upzoning before playing") .disabled(true) .build_def(ctx) }, ])) .aligned( HorizontalAlignment::RightInset, VerticalAlignment::BottomInset, ) .build(ctx) } fn explain_upzoning(ctx: &mut EventCtx) -> Transition { Transition::Push(PopupMsg::new( ctx, "Upzoning power unlocked", vec![ "It's hard to deliver to houses far away from shops, isn't it?", "You've gained the power to change the zoning code for a residential building.", "You can now transform a single-family house into a multi-use building,", "with shops on the ground floor, and people living above.", "", "Where should you place the new store?", ], )) }
use std::collections::{BTreeSet, HashSet}; use rand::seq::SliceRandom; use rand::SeedableRng; use rand_xorshift::XorShiftRng; use abstutil::prettyprint_usize; use geom::Time; use map_gui::load::MapLoader; use map_gui::tools::PopupMsg; use map_gui::ID; use map_model::BuildingID; use widgetry::{ ButtonBuilder, Color, ControlState, Drawable, EventCtx, GeomBatch, GfxCtx, HorizontalAlignment, Image, Key, Line, Outcome, Panel, RewriteColor, State, Text, TextExt, VerticalAlignment, Widget, }; use crate::buildings::{BldgState, Buildings}; use crate::game::Game; use crate::levels::Level; use crate::meters::{custom_bar, make_bar}; use crate::vehicles::Vehicle; use crate::{App, Transition}; const ZOOM: f64 = 2.0; pub struct Picker { vehicle_panel: Panel, instructions_panel: Panel, upzone_panel: Panel, level: Level, bldgs: Buildings, current_picks: BTreeSet<BuildingID>, draw_start: Drawable, } impl Picker { pub fn new(ctx: &mut EventCtx, app: &App, level: Level) -> Box<dyn State<App>> { MapLoader::new( ctx, app, level.map.clone(), Box::new(move |ctx, app| { app.session.music.change_song(&level.music); ctx.canvas.cam_zoom = ZOOM; let start = app .map .get_i(app.map.find_i_by_osm_id(level.start).unwrap()) .polygon .center(); ctx.canvas.center_on_map_pt(start); let bldgs = Buildings::new(ctx, app, HashSet::new()); let mut txt = Text::new(); txt.add_line(Line(format!("Ready for {}?", level.title)).small_heading()); txt.add_line(format!( "Goal: deliver {} presents", prettyprint_usize(level.goal) )); txt.add_line(format!("Time limit: {}", level.time_limit)); txt.add_appended(vec![ Line("Deliver presents to "), Line("single-family homes").fg(app.cs.residential_building), Line(" and "), Line("apartments").fg(app.session.colors.apartment), ]); txt.add_appended(vec![ Line("Raise your blood sugar by visiting "), Line("stores").fg(app.session.colors.store), ]); let instructions_panel = Panel::new(Widget::col(vec![ txt.into_widget(ctx), Widget::row(vec![ GeomBatch::load_svg_bytes( 
&ctx.prerender, widgetry::include_labeled_bytes!("../../widgetry/icons/arrow_keys.svg"), ) .into_widget(ctx), Text::from_all(vec![ Line("arrow keys").fg(ctx.style().text_hotkey_color), Line(" to move (or "), Line("WASD").fg(ctx.style().text_hotkey_color), Line(")"), ]) .into_widget(ctx), ]), Widget::row(vec![ Image::from_path("system/assets/tools/mouse.svg").into_widget(ctx), Text::from_all(vec![ Line("mouse scroll wheel or touchpad") .fg(ctx.style().text_hotkey_color), Line(" to zoom in or out"), ]) .into_widget(ctx), ]), Text::from_all(vec![ Line("Escape key").fg(ctx.style().text_hotkey_color), Line(" to pause"), ]) .into_widget(ctx), ])) .aligned(HorizontalAlignment::LeftInset, VerticalAlignment::TopInset) .build(ctx); let draw_start = map_gui::tools::start_marker(ctx, start, 3.0); let current_picks = app .session .upzones_per_level .get(level.title.clone()) .clone(); let upzone_panel = make_upzone_panel(ctx, app, current_picks.len()); Transition::Replace(Box::new(Picker { vehicle_panel: make_vehicle_panel(ctx, ap
fn randomly_pick_upzones(&mut self, app: &App) { let mut choices = Vec::new(); for (b, state) in &self.bldgs.buildings { if let BldgState::Undelivered(_) = state { if !self.current_picks.contains(b) { choices.push(*b); } } } let mut rng = XorShiftRng::seed_from_u64(42); choices.shuffle(&mut rng); let n = app.session.upzones_unlocked - self.current_picks.len(); assert!(choices.len() >= n); self.current_picks.extend(choices.into_iter().take(n)); } } impl State<App> for Picker { fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition { if app.session.upzones_unlocked > 0 && !app.session.upzones_explained { app.session.upzones_explained = true; return explain_upzoning(ctx); } ctx.canvas_movement(); if ctx.redo_mouseover() { app.current_selection = app.mouseover_unzoomed_buildings(ctx).filter(|id| { match self.bldgs.buildings[&id.as_building()] { BldgState::Undelivered(_) => true, _ => false, } }); } if let Some(ID::Building(b)) = app.current_selection { if ctx.normal_left_click() { if self.current_picks.contains(&b) { self.current_picks.remove(&b); } else if self.current_picks.len() < app.session.upzones_unlocked { self.current_picks.insert(b); } self.upzone_panel = make_upzone_panel(ctx, app, self.current_picks.len()); } } match self.upzone_panel.event(ctx) { Outcome::Clicked(x) => match x.as_ref() { "Start game" => { app.current_selection = None; app.session .upzones_per_level .set(self.level.title.clone(), self.current_picks.clone()); app.session.save(); return Transition::Replace(Game::new( ctx, app, self.level.clone(), Vehicle::get(&app.session.current_vehicle), self.current_picks.clone().into_iter().collect(), )); } "Randomly choose upzones" => { self.randomly_pick_upzones(app); self.upzone_panel = make_upzone_panel(ctx, app, self.current_picks.len()); } "Clear upzones" => { self.current_picks.clear(); self.upzone_panel = make_upzone_panel(ctx, app, self.current_picks.len()); } "help" => { return explain_upzoning(ctx); } _ => unreachable!(), }, _ => 
{} } match self.vehicle_panel.event(ctx) { Outcome::Clicked(x) => { app.session.current_vehicle = x; self.vehicle_panel = make_vehicle_panel(ctx, app); } _ => {} } app.session.update_music(ctx); Transition::Keep } fn draw(&self, g: &mut GfxCtx, app: &App) { self.vehicle_panel.draw(g); self.upzone_panel.draw(g); self.instructions_panel.draw(g); app.session.music.draw(g); g.redraw(&self.bldgs.draw_all); for b in &self.current_picks { g.draw_polygon(Color::PINK, app.map.get_b(*b).polygon.clone()); } if let Some(ID::Building(b)) = app.current_selection { g.draw_polygon(app.cs.selected, app.map.get_b(b).polygon.clone()); } g.redraw(&self.draw_start); } } fn make_vehicle_panel(ctx: &mut EventCtx, app: &App) -> Panel { let mut buttons = Vec::new(); for name in &app.session.vehicles_unlocked { let vehicle = Vehicle::get(name); let batch = vehicle .animate(ctx.prerender, Time::START_OF_DAY) .scale(10.0); buttons.push( if name == &app.session.current_vehicle { batch .into_widget(ctx) .container() .padding(5) .outline((2.0, Color::WHITE)) } else { let normal = batch.clone().color(RewriteColor::MakeGrayscale); let hovered = batch; ButtonBuilder::new() .custom_batch(normal, ControlState::Default) .custom_batch(hovered, ControlState::Hovered) .build_widget(ctx, name) } .centered_vert(), ); buttons.push(Widget::vert_separator(ctx, 150.0)); } buttons.pop(); let vehicle = Vehicle::get(&app.session.current_vehicle); let (max_speed, max_energy) = Vehicle::max_stats(); Panel::new(Widget::col(vec![ Line("Pick Santa's vehicle") .small_heading() .into_widget(ctx), Widget::row(buttons), Line(&vehicle.name).small_heading().into_widget(ctx), Widget::row(vec![ "Speed:".text_widget(ctx), custom_bar( ctx, app.session.colors.boost, vehicle.speed / max_speed, Text::new(), ) .align_right(), ]), Widget::row(vec![ "Energy:".text_widget(ctx), custom_bar( ctx, app.session.colors.energy, (vehicle.max_energy as f64) / (max_energy as f64), Text::new(), ) .align_right(), ]), ])) 
.aligned(HorizontalAlignment::RightInset, VerticalAlignment::TopInset) .build(ctx) } fn make_upzone_panel(ctx: &mut EventCtx, app: &App, num_picked: usize) -> Panel { if app.session.upzones_unlocked == 0 { return Panel::new( ctx.style() .btn_solid_primary .text("Start game") .hotkey(Key::Enter) .build_def(ctx) .container(), ) .aligned( HorizontalAlignment::RightInset, VerticalAlignment::BottomInset, ) .build(ctx); } Panel::new(Widget::col(vec![ Widget::row(vec![ Line("Upzoning").small_heading().into_widget(ctx), ctx.style() .btn_plain .icon("system/assets/tools/info.svg") .build_widget(ctx, "help") .align_right(), ]), Widget::row(vec![ Image::from_path("system/assets/tools/mouse.svg").into_widget(ctx), Line("Select the houses you want to turn into stores") .fg(ctx.style().text_hotkey_color) .into_widget(ctx), ]), Widget::row(vec![ "Upzones chosen:".text_widget(ctx), make_bar(ctx, Color::PINK, num_picked, app.session.upzones_unlocked), ]), Widget::row(vec![ ctx.style() .btn_outline .text("Randomly choose upzones") .disabled(num_picked == app.session.upzones_unlocked) .build_def(ctx), ctx.style() .btn_outline .text("Clear upzones") .disabled(num_picked == 0) .build_def(ctx) .align_right(), ]), if num_picked == app.session.upzones_unlocked { ctx.style() .btn_solid_primary .text("Start game") .hotkey(Key::Enter) .build_def(ctx) } else { ctx.style() .btn_solid_primary .text("Finish upzoning before playing") .disabled(true) .build_def(ctx) }, ])) .aligned( HorizontalAlignment::RightInset, VerticalAlignment::BottomInset, ) .build(ctx) } fn explain_upzoning(ctx: &mut EventCtx) -> Transition { Transition::Push(PopupMsg::new( ctx, "Upzoning power unlocked", vec![ "It's hard to deliver to houses far away from shops, isn't it?", "You've gained the power to change the zoning code for a residential building.", "You can now transform a single-family house into a multi-use building,", "with shops on the ground floor, and people living above.", "", "Where should you place the new 
store?", ], )) }
p), upzone_panel, instructions_panel, level, bldgs, current_picks, draw_start: ctx.upload(draw_start), })) }), ) }
function_block-function_prefixed
[ { "content": "pub fn custom_bar(ctx: &mut EventCtx, filled_color: Color, pct_full: f64, txt: Text) -> Widget {\n\n let total_width = 300.0;\n\n let height = 32.0;\n\n let radius = 4.0;\n\n\n\n let mut batch = GeomBatch::new();\n\n // Background\n\n batch.push(\n\n Color::hex(\"#666666\"...
Rust
src/ast/expr.rs
1tgr/simplejit-demo
750a1f628452d42836d0da5fc415fcef7750c045
use crate::ast::{ArithmeticKind, ComparisonKind, EnvId, IdentId}; use derive_more::{Display, TryInto}; #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Display)] #[display(fmt = "{}", "_0")] pub struct ExprId(salsa::InternId); impl salsa::InternKey for ExprId { fn from_intern_id(v: salsa::InternId) -> Self { Self(v) } fn as_intern_id(&self) -> salsa::InternId { self.0 } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Arithmetic { pub lhs: ExprId, pub op: ArithmeticKind, pub rhs: ExprId, } impl Arithmetic { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { lhs, op: _, rhs } = self; visitor.visit_expr(lhs)?; visitor.visit_expr(rhs)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.lhs = transform.transform_expr(self.lhs)?; self.rhs = transform.transform_expr(self.rhs)?; Ok(Expr::Arithmetic(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Assign { pub lvalue: ExprId, pub expr: ExprId, } impl Assign { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { lvalue, expr } = self; visitor.visit_expr(lvalue)?; visitor.visit_expr(expr)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.lvalue = transform.transform_expr(self.lvalue)?; self.expr = transform.transform_expr(self.expr)?; Ok(Expr::Assign(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Block { pub stmts: Vec<ExprId>, } impl Block { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { stmts } = self; for expr in stmts { visitor.visit_expr(expr)?; } Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { for stmt in self.stmts.iter_mut() { *stmt = transform.transform_expr(*stmt)?; } Ok(Expr::Block(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] 
pub struct Call { pub env: Option<EnvId>, pub name: IdentId, pub args: Vec<ExprId>, } impl Call { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { env: _, name: _, args } = self; for expr in args { visitor.visit_expr(expr)?; } Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { for expr in self.args.iter_mut() { *expr = transform.transform_expr(*expr)?; } Ok(Expr::Call(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Comparison { pub lhs: ExprId, pub op: ComparisonKind, pub rhs: ExprId, } impl Comparison { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { lhs, op: _, rhs } = self; visitor.visit_expr(lhs)?; visitor.visit_expr(rhs)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.lhs = transform.transform_expr(self.lhs)?; self.rhs = transform.transform_expr(self.rhs)?; Ok(Expr::Comparison(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Deref { pub expr: ExprId, } impl Deref { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { expr } = self; visitor.visit_expr(expr)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.expr = transform.transform_expr(self.expr)?; Ok(Expr::Deref(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Dot { pub expr: ExprId, pub field_name: IdentId, } impl Dot { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { expr, field_name: _ } = self; visitor.visit_expr(expr)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.expr = transform.transform_expr(self.expr)?; Ok(Expr::Dot(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct GlobalDataAddr { 
pub name: IdentId, } impl GlobalDataAddr { pub fn walk<V: ExprVisitor + ?Sized>(self, _visitor: &mut V) -> Result<(), V::Error> { let Self { name: _name } = self; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(self, _transform: &mut T) -> Result<Expr, T::Error> { Ok(Expr::GlobalDataAddr(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Identifier { pub env: Option<EnvId>, pub name: IdentId, } impl Identifier { pub fn walk<V: ExprVisitor + ?Sized>(self, _visitor: &mut V) -> Result<(), V::Error> { Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(self, _transform: &mut T) -> Result<Expr, T::Error> { Ok(Expr::Identifier(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct IfElse { pub condition: ExprId, pub then_body: ExprId, pub else_body: ExprId, } impl IfElse { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { condition, then_body, else_body } = self; visitor.visit_expr(condition)?; visitor.visit_expr(then_body)?; visitor.visit_expr(else_body)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.condition = transform.transform_expr(self.condition)?; self.then_body = transform.transform_expr(self.then_body)?; self.else_body = transform.transform_expr(self.else_body)?; Ok(Expr::IfElse(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Index { pub base: ExprId, pub offset: ExprId, } impl Index { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { base, offset } = self; visitor.visit_expr(base)?; visitor.visit_expr(offset)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.base = transform.transform_expr(self.base)?; self.offset = transform.transform_expr(self.offset)?; Ok(Expr::Index(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Literal { pub value: i32, } impl Literal { pub 
fn walk<V: ExprVisitor + ?Sized>(self, _visitor: &mut V) -> Result<(), V::Error> { let Self { value: _value } = self; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(self, _transform: &mut T) -> Result<Expr, T::Error> { Ok(Expr::Literal(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Scope { pub scope_env: EnvId, pub decl_name: IdentId, pub decl_expr: ExprId, pub body: ExprId, } impl Scope { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { scope_env: _, decl_name: _, decl_expr, body, } = self; visitor.visit_expr(decl_expr)?; visitor.visit_expr(body)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.decl_expr = transform.transform_expr(self.decl_expr)?; self.body = transform.transform_expr(self.body)?; Ok(Expr::Scope(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct StructInit { pub name: IdentId, pub fields: im_rc::HashMap<IdentId, ExprId>, } impl StructInit { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { name: _, fields } = self; for &expr in fields.values() { visitor.visit_expr(expr)?; } Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { for (_, expr_mut) in self.fields.iter_mut() { *expr_mut = transform.transform_expr(*expr_mut)?; } Ok(Expr::StructInit(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct WhileLoop { pub condition: ExprId, pub body: ExprId, } impl WhileLoop { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { condition, body } = self; visitor.visit_expr(condition)?; visitor.visit_expr(body)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.condition = transform.transform_expr(self.condition)?; self.body = transform.transform_expr(self.body)?; 
Ok(Expr::WhileLoop(self)) } } macro_rules! expr_enum { ( $( [ $ty:ident, $visit:ident, $transform:ident, $map:ident ] ),* ) => { #[derive(Clone, Debug, Hash, PartialEq, Eq, TryInto)] #[try_into(owned, ref, ref_mut)] pub enum Expr { $( $ty($ty), )* } impl Expr { pub fn walk<V: ExprVisitor + ?Sized>(self, expr_id: ExprId, visitor: &mut V) -> Result<(), V::Error> { match self { $( Self::$ty(expr) => visitor.$visit(expr_id, expr), )* } } pub fn transform<T: ExprTransform + ?Sized>(self, expr_id: ExprId, transform: &mut T) -> Result<Self, T::Error> { match self { $( Self::$ty(expr) => transform.$transform(expr_id, expr), )* } } pub fn map<M: ExprMap + ?Sized>(self, map: &mut M, expr_id: ExprId) -> M::Value { match self { $( Self::$ty(expr) => map.$map(expr_id, expr), )* } } } pub trait ExprVisitor { type Error; fn lookup_expr(&self, expr: ExprId) -> Expr; $( #[allow(unused_variables)] fn $visit(&mut self, expr_id: ExprId, expr: $ty) -> Result<(), Self::Error> { expr.walk(self) } )* fn visit_expr(&mut self, expr: ExprId) -> Result<(), Self::Error> { self.lookup_expr(expr).walk(expr, self) } } pub trait ExprTransform { type Error; fn lookup_expr(&self, expr: ExprId) -> Expr; fn intern_expr(&self, expr: Expr) -> ExprId; $( #[allow(unused_variables)] fn $transform(&mut self, expr_id: ExprId, expr: $ty) -> Result<Expr, Self::Error> { expr.transform(self) } )* fn transform_expr(&mut self, expr: ExprId) -> Result<ExprId, Self::Error> { self.lookup_expr(expr).transform(expr, self).map(|expr| self.intern_expr(expr)) } } pub trait ExprMap { type Value; fn lookup_expr(&self, expr: ExprId) -> Expr; $( fn $map(&mut self, expr_id: ExprId, expr: $ty) -> Self::Value; )* fn map_expr(&mut self, expr: ExprId) -> Self::Value { self.lookup_expr(expr).map(self, expr) } } }; } expr_enum! 
{ [ Arithmetic, visit_arithmetic, transform_arithmetic, map_arithmetic ], [ Assign, visit_assign, transform_assign, map_assign ], [ Block, visit_block, transform_block, map_block ], [ Call, visit_call, transform_call, map_call ], [ Comparison, visit_comparison, transform_comparison, map_comparison ], [ Deref, visit_deref, transform_deref, map_deref ], [ Dot, visit_dot, transform_dot, map_dot ], [ GlobalDataAddr, visit_global_data_addr, transform_global_data_addr, map_global_data_addr ], [ Identifier, visit_identifier, transform_identifier, map_identifier ], [ IfElse, visit_if_else, transform_if_else, map_if_else ], [ Index, visit_index, transform_index, map_index ], [ Literal, visit_literal, transform_literal, map_literal ], [ Scope, visit_scope, transform_scope, map_scope ], [ StructInit, visit_struct_init, transform_struct_init, map_struct_init ], [ WhileLoop, visit_while_loop, transform_while_loop, map_while_loop ] }
use crate::ast::{ArithmeticKind, ComparisonKind, EnvId, IdentId}; use derive_more::{Display, TryInto}; #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Display)] #[display(fmt = "{}", "_0")] pub struct ExprId(salsa::InternId); impl salsa::InternKey for ExprId { fn from_intern_id(v: salsa::InternId) -> Self { Self(v) } fn as_intern_id(&self) -> salsa::InternId { self.0 } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Arithmetic { pub lhs: ExprId, pub op: ArithmeticKind, pub rhs: ExprId, } impl Arithmetic { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { lhs, op: _, rhs } = self; visitor.visit_expr(lhs)?; visitor.visit_expr(rhs)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.lhs = transform.transform_expr(self.lhs)?; self.rhs = transform.transform_expr(self.rhs)?; Ok(Expr::Arithmetic(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Assign { pub lvalue: ExprId, pub expr: ExprId, } impl Assign { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { lvalue, expr } = self; visitor.visit_expr(lvalue)?; visitor.visit_expr(expr)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.lvalue = transform.transform_expr(self.lvalue)?; self.expr = transform.transform_expr(self.expr)?; Ok(Expr::Assign(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Block { pub stmts: Vec<ExprId>, } impl Block { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { stmts } = self; for expr in stmts { visitor.visit_expr(expr)?; } Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { for stmt in self.stmts.iter_mut() { *stmt = transform.transform_expr(*stmt)?; } Ok(Expr::Block(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] 
pub struct Call { pub env: Option<EnvId>, pub name: IdentId, pub args: Vec<ExprId>, } impl Call { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { env: _, name: _, args } = self; for expr in args { visitor.visit_expr(expr)?; } Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { for expr in self.args.iter_mut() { *expr = transform.transform_expr(*expr)?; } Ok(Expr::Call(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Comparison { pub lhs: ExprId, pub op: ComparisonKind, pub rhs: ExprId, } impl Comparison { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { lhs, op: _, rhs } = self; visitor.visit_expr(lhs)?; visitor.visit_expr(rhs)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.lhs = transform.transform_expr(self.lhs)?; self.rhs = transform.transform_expr(self.rhs)?; Ok(Expr::Comparison(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Deref { pub expr: ExprId, } impl Deref { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { expr } = self; visitor.visit_expr(expr)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.expr = transform.transform_expr(self.expr)?; Ok(Expr::Deref(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Dot { pub expr: ExprId, pub field_name: IdentId, } impl Dot { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { expr, field_name: _ } = self; visitor.visit_expr(expr)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.expr = transform.transform_expr(self.expr)?; Ok(Expr::Dot(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct GlobalDataAddr { 
pub name: IdentId, } impl GlobalDataAddr { pub fn walk<V: ExprVisitor + ?Sized>(self, _visitor: &mut V) -> Result<(), V::Error> { let Self { name: _name } = self; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(self, _transform: &mut T) -> Result<Expr, T::Error> { Ok(Expr::GlobalDataAddr(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Identifier { pub env: Option<EnvId>, pub name: IdentId, } impl Identifier { pub fn walk<V: ExprVisitor + ?Sized>(self, _visitor: &mut V) -> Result<(
pub enum Expr { $( $ty($ty), )* } impl Expr { pub fn walk<V: ExprVisitor + ?Sized>(self, expr_id: ExprId, visitor: &mut V) -> Result<(), V::Error> { match self { $( Self::$ty(expr) => visitor.$visit(expr_id, expr), )* } } pub fn transform<T: ExprTransform + ?Sized>(self, expr_id: ExprId, transform: &mut T) -> Result<Self, T::Error> { match self { $( Self::$ty(expr) => transform.$transform(expr_id, expr), )* } } pub fn map<M: ExprMap + ?Sized>(self, map: &mut M, expr_id: ExprId) -> M::Value { match self { $( Self::$ty(expr) => map.$map(expr_id, expr), )* } } } pub trait ExprVisitor { type Error; fn lookup_expr(&self, expr: ExprId) -> Expr; $( #[allow(unused_variables)] fn $visit(&mut self, expr_id: ExprId, expr: $ty) -> Result<(), Self::Error> { expr.walk(self) } )* fn visit_expr(&mut self, expr: ExprId) -> Result<(), Self::Error> { self.lookup_expr(expr).walk(expr, self) } } pub trait ExprTransform { type Error; fn lookup_expr(&self, expr: ExprId) -> Expr; fn intern_expr(&self, expr: Expr) -> ExprId; $( #[allow(unused_variables)] fn $transform(&mut self, expr_id: ExprId, expr: $ty) -> Result<Expr, Self::Error> { expr.transform(self) } )* fn transform_expr(&mut self, expr: ExprId) -> Result<ExprId, Self::Error> { self.lookup_expr(expr).transform(expr, self).map(|expr| self.intern_expr(expr)) } } pub trait ExprMap { type Value; fn lookup_expr(&self, expr: ExprId) -> Expr; $( fn $map(&mut self, expr_id: ExprId, expr: $ty) -> Self::Value; )* fn map_expr(&mut self, expr: ExprId) -> Self::Value { self.lookup_expr(expr).map(self, expr) } } }; } expr_enum! 
{ [ Arithmetic, visit_arithmetic, transform_arithmetic, map_arithmetic ], [ Assign, visit_assign, transform_assign, map_assign ], [ Block, visit_block, transform_block, map_block ], [ Call, visit_call, transform_call, map_call ], [ Comparison, visit_comparison, transform_comparison, map_comparison ], [ Deref, visit_deref, transform_deref, map_deref ], [ Dot, visit_dot, transform_dot, map_dot ], [ GlobalDataAddr, visit_global_data_addr, transform_global_data_addr, map_global_data_addr ], [ Identifier, visit_identifier, transform_identifier, map_identifier ], [ IfElse, visit_if_else, transform_if_else, map_if_else ], [ Index, visit_index, transform_index, map_index ], [ Literal, visit_literal, transform_literal, map_literal ], [ Scope, visit_scope, transform_scope, map_scope ], [ StructInit, visit_struct_init, transform_struct_init, map_struct_init ], [ WhileLoop, visit_while_loop, transform_while_loop, map_while_loop ] }
), V::Error> { Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(self, _transform: &mut T) -> Result<Expr, T::Error> { Ok(Expr::Identifier(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct IfElse { pub condition: ExprId, pub then_body: ExprId, pub else_body: ExprId, } impl IfElse { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { condition, then_body, else_body } = self; visitor.visit_expr(condition)?; visitor.visit_expr(then_body)?; visitor.visit_expr(else_body)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.condition = transform.transform_expr(self.condition)?; self.then_body = transform.transform_expr(self.then_body)?; self.else_body = transform.transform_expr(self.else_body)?; Ok(Expr::IfElse(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Index { pub base: ExprId, pub offset: ExprId, } impl Index { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { base, offset } = self; visitor.visit_expr(base)?; visitor.visit_expr(offset)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.base = transform.transform_expr(self.base)?; self.offset = transform.transform_expr(self.offset)?; Ok(Expr::Index(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Literal { pub value: i32, } impl Literal { pub fn walk<V: ExprVisitor + ?Sized>(self, _visitor: &mut V) -> Result<(), V::Error> { let Self { value: _value } = self; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(self, _transform: &mut T) -> Result<Expr, T::Error> { Ok(Expr::Literal(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Scope { pub scope_env: EnvId, pub decl_name: IdentId, pub decl_expr: ExprId, pub body: ExprId, } impl Scope { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let 
Self { scope_env: _, decl_name: _, decl_expr, body, } = self; visitor.visit_expr(decl_expr)?; visitor.visit_expr(body)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.decl_expr = transform.transform_expr(self.decl_expr)?; self.body = transform.transform_expr(self.body)?; Ok(Expr::Scope(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct StructInit { pub name: IdentId, pub fields: im_rc::HashMap<IdentId, ExprId>, } impl StructInit { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { name: _, fields } = self; for &expr in fields.values() { visitor.visit_expr(expr)?; } Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { for (_, expr_mut) in self.fields.iter_mut() { *expr_mut = transform.transform_expr(*expr_mut)?; } Ok(Expr::StructInit(self)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct WhileLoop { pub condition: ExprId, pub body: ExprId, } impl WhileLoop { pub fn walk<V: ExprVisitor + ?Sized>(self, visitor: &mut V) -> Result<(), V::Error> { let Self { condition, body } = self; visitor.visit_expr(condition)?; visitor.visit_expr(body)?; Ok(()) } pub fn transform<T: ExprTransform + ?Sized>(mut self, transform: &mut T) -> Result<Expr, T::Error> { self.condition = transform.transform_expr(self.condition)?; self.body = transform.transform_expr(self.body)?; Ok(Expr::WhileLoop(self)) } } macro_rules! expr_enum { ( $( [ $ty:ident, $visit:ident, $transform:ident, $map:ident ] ),* ) => { #[derive(Clone, Debug, Hash, PartialEq, Eq, TryInto)] #[try_into(owned, ref, ref_mut)]
random
[ { "content": "fn lower_function(db: &dyn Lower, name: IdentId) -> Result<(Rc<HashMap<EnvId, Env>>, ExprId)> {\n\n let mut envs = HashMap::new();\n\n let global_env = db.global_env()?;\n\n envs.insert(EnvId::GLOBAL, global_env.clone());\n\n\n\n let mut index = 2;\n\n let env = EnvId::from(NonZeroU...
Rust
07-rust/stm32f446/stm32f446_pac/src/otg_hs_global/otg_hs_gahbcfg.rs
aaronhktan/stm32-exploration
dcd7674424cd17b02b85c6b3ce533456d5037d65
#[doc = "Reader of register OTG_HS_GAHBCFG"] pub type R = crate::R<u32, super::OTG_HS_GAHBCFG>; #[doc = "Writer for register OTG_HS_GAHBCFG"] pub type W = crate::W<u32, super::OTG_HS_GAHBCFG>; #[doc = "Register OTG_HS_GAHBCFG `reset()`'s with value 0"] impl crate::ResetValue for super::OTG_HS_GAHBCFG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `GINT`"] pub type GINT_R = crate::R<bool, bool>; #[doc = "Write proxy for field `GINT`"] pub struct GINT_W<'a> { w: &'a mut W, } impl<'a> GINT_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `HBSTLEN`"] pub type HBSTLEN_R = crate::R<u8, u8>; #[doc = "Write proxy for field `HBSTLEN`"] pub struct HBSTLEN_W<'a> { w: &'a mut W, } impl<'a> HBSTLEN_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 1)) | (((value as u32) & 0x0f) << 1); self.w } } #[doc = "Reader of field `DMAEN`"] pub type DMAEN_R = crate::R<bool, bool>; #[doc = "Write proxy for field `DMAEN`"] pub struct DMAEN_W<'a> { w: &'a mut W, } impl<'a> DMAEN_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Reader of field `TXFELVL`"] pub type TXFELVL_R = crate::R<bool, bool>; #[doc = 
"Write proxy for field `TXFELVL`"] pub struct TXFELVL_W<'a> { w: &'a mut W, } impl<'a> TXFELVL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Reader of field `PTXFELVL`"] pub type PTXFELVL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PTXFELVL`"] pub struct PTXFELVL_W<'a> { w: &'a mut W, } impl<'a> PTXFELVL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } impl R { #[doc = "Bit 0 - Global interrupt mask"] #[inline(always)] pub fn gint(&self) -> GINT_R { GINT_R::new((self.bits & 0x01) != 0) } #[doc = "Bits 1:4 - Burst length/type"] #[inline(always)] pub fn hbstlen(&self) -> HBSTLEN_R { HBSTLEN_R::new(((self.bits >> 1) & 0x0f) as u8) } #[doc = "Bit 5 - DMA enable"] #[inline(always)] pub fn dmaen(&self) -> DMAEN_R { DMAEN_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bit 7 - TxFIFO empty level"] #[inline(always)] pub fn txfelvl(&self) -> TXFELVL_R { TXFELVL_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 8 - Periodic TxFIFO empty level"] #[inline(always)] pub fn ptxfelvl(&self) -> PTXFELVL_R { PTXFELVL_R::new(((self.bits >> 8) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Global interrupt mask"] #[inline(always)] pub fn gint(&mut self) -> GINT_W { GINT_W { w: self } } #[doc = "Bits 1:4 - Burst length/type"] #[inline(always)] pub fn 
hbstlen(&mut self) -> HBSTLEN_W { HBSTLEN_W { w: self } } #[doc = "Bit 5 - DMA enable"] #[inline(always)] pub fn dmaen(&mut self) -> DMAEN_W { DMAEN_W { w: self } } #[doc = "Bit 7 - TxFIFO empty level"] #[inline(always)] pub fn txfelvl(&mut self) -> TXFELVL_W { TXFELVL_W { w: self } } #[doc = "Bit 8 - Periodic TxFIFO empty level"] #[inline(always)] pub fn ptxfelvl(&mut self) -> PTXFELVL_W { PTXFELVL_W { w: self } } }
#[doc = "Reader of register OTG_HS_GAHBCFG"] pub type R = crate::R<u32, super::OTG_HS_GAHBCFG>; #[doc = "Writer for register OTG_HS_GAHBCFG"] pub type W = crate::W<u32, super::OTG_HS_GAHBCFG>; #[doc = "Register OTG_HS_GAHBCFG `reset()`'s with value 0"] impl crate::ResetValue for super::OTG_HS_GAHBCFG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `GINT`"] pub type GINT_R = crate::R<bool, bool>; #[doc = "Write proxy for field `GINT`"] pub struct GINT_W<'a> { w: &'a mut W, } impl<'a> GINT_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r
HBSTLEN_R = crate::R<u8, u8>; #[doc = "Write proxy for field `HBSTLEN`"] pub struct HBSTLEN_W<'a> { w: &'a mut W, } impl<'a> HBSTLEN_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 1)) | (((value as u32) & 0x0f) << 1); self.w } } #[doc = "Reader of field `DMAEN`"] pub type DMAEN_R = crate::R<bool, bool>; #[doc = "Write proxy for field `DMAEN`"] pub struct DMAEN_W<'a> { w: &'a mut W, } impl<'a> DMAEN_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Reader of field `TXFELVL`"] pub type TXFELVL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TXFELVL`"] pub struct TXFELVL_W<'a> { w: &'a mut W, } impl<'a> TXFELVL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Reader of field `PTXFELVL`"] pub type PTXFELVL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PTXFELVL`"] pub struct PTXFELVL_W<'a> { w: &'a mut W, } impl<'a> PTXFELVL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, 
value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } impl R { #[doc = "Bit 0 - Global interrupt mask"] #[inline(always)] pub fn gint(&self) -> GINT_R { GINT_R::new((self.bits & 0x01) != 0) } #[doc = "Bits 1:4 - Burst length/type"] #[inline(always)] pub fn hbstlen(&self) -> HBSTLEN_R { HBSTLEN_R::new(((self.bits >> 1) & 0x0f) as u8) } #[doc = "Bit 5 - DMA enable"] #[inline(always)] pub fn dmaen(&self) -> DMAEN_R { DMAEN_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bit 7 - TxFIFO empty level"] #[inline(always)] pub fn txfelvl(&self) -> TXFELVL_R { TXFELVL_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 8 - Periodic TxFIFO empty level"] #[inline(always)] pub fn ptxfelvl(&self) -> PTXFELVL_R { PTXFELVL_R::new(((self.bits >> 8) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Global interrupt mask"] #[inline(always)] pub fn gint(&mut self) -> GINT_W { GINT_W { w: self } } #[doc = "Bits 1:4 - Burst length/type"] #[inline(always)] pub fn hbstlen(&mut self) -> HBSTLEN_W { HBSTLEN_W { w: self } } #[doc = "Bit 5 - DMA enable"] #[inline(always)] pub fn dmaen(&mut self) -> DMAEN_W { DMAEN_W { w: self } } #[doc = "Bit 7 - TxFIFO empty level"] #[inline(always)] pub fn txfelvl(&mut self) -> TXFELVL_W { TXFELVL_W { w: self } } #[doc = "Bit 8 - Periodic TxFIFO empty level"] #[inline(always)] pub fn ptxfelvl(&mut self) -> PTXFELVL_W { PTXFELVL_W { w: self } } }
"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `HBSTLEN`"] pub type
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Res...
Rust
pkg-core/rate-core/src/actors/app_bind/actor/assets.rs
transparencies/rillrate
a1a6f76e84211224a85bb9fd92602d33f095229e
use super::AppBind; use crate::assets::Assets; use anyhow::Error; use async_trait::async_trait; use meio::{Context, IdOf, LiteTask, Scheduled, TaskEliminated, TaskError}; use reqwest::Url; use std::path::{Path, PathBuf}; use std::time::{Duration, Instant}; use tokio::fs::File; use tokio::io::AsyncReadExt; impl AppBind { pub(super) async fn init_assets(&mut self, ctx: &mut Context<Self>) -> Result<(), Error> { let path = self .options .env_var .and_then(|env_var| std::env::var(env_var).ok()); if let Some(path) = path { if path.starts_with("http") { log::info!("Assets: env-url."); let url: Url = path.parse()?; ctx.spawn_task(FetchUiPack(url), (), ()); } else { log::info!("Assets: env-path."); self.assets = self.read_assets(&path).await?; log::warn!("Assets overriden to: {}", path); } } else if let Some(data) = self.options.embedded.as_ref() { log::info!("Assets: embedded."); let assets = Assets::parse(data)?; self.assets = AssetsMode::Packed(assets); log::info!("Embedded assets used."); } else if let Some(url) = self.options.url.clone() { log::info!("Assets: url."); ctx.spawn_task(FetchUiPack(url), (), ()); } Ok(()) } async fn read_assets(&mut self, path: &str) -> Result<AssetsMode, Error> { let asset_path = Path::new(path).to_path_buf(); if asset_path.exists() { let metadata = tokio::fs::metadata(&asset_path).await?; if metadata.is_dir() { Ok(AssetsMode::Local(asset_path)) } else { let data = read_file(&asset_path).await?; let assets = Assets::parse(&data)?; Ok(AssetsMode::Packed(assets)) } } else { Err(Error::msg(format!("Can't load assets from {}", path))) } } } pub async fn read_file(path: &Path) -> Result<Vec<u8>, Error> { let mut file = File::open(path).await?; let mut content = Vec::new(); file.read_to_end(&mut content).await?; Ok(content) } pub enum AssetsMode { Loading, Local(PathBuf), Packed(Assets), Failed(String), } pub struct FetchUiPack(Url); #[async_trait] impl LiteTask for FetchUiPack { type Output = Assets; async fn interruptable_routine(mut self) -> 
Result<Self::Output, Error> { log::info!("Fetching UI assets..."); let bytes = reqwest::get(self.0) .await? .error_for_status()? .bytes() .await?; let assets = Assets::parse(&bytes)?; Ok(assets) } } #[async_trait] impl TaskEliminated<FetchUiPack, ()> for AppBind { async fn handle( &mut self, _id: IdOf<FetchUiPack>, _tag: (), result: Result<Assets, TaskError>, ctx: &mut Context<Self>, ) -> Result<(), Error> { match result { Ok(assets) => { self.assets = AssetsMode::Packed(assets); log::info!("Assets pack attached."); Ok(()) } Err(err) => { self.assets = AssetsMode::Failed(err.to_string()); ctx.address() .schedule(ReInitAssets, Instant::now() + Duration::from_secs(5))?; log::error!("Can't load UI pack: {}", err); Err(err.into()) } } } } struct ReInitAssets; #[async_trait] impl Scheduled<ReInitAssets> for AppBind { async fn handle( &mut self, _: Instant, _: ReInitAssets, ctx: &mut Context<Self>, ) -> Result<(), Error> { self.init_assets(ctx).await?; Ok(()) } }
use super::AppBind; use crate::assets::Assets; use anyhow::Error; use async_trait::async_trait; use meio::{Context, IdOf, LiteTask, Scheduled, TaskEliminated, TaskError}; use reqwest::Url; use std::path::{Path, PathBuf}; use std::time::{Duration, Instant}; use tokio::fs::File; use tokio::io::AsyncReadExt; impl AppBind { pub(super) async fn init_assets(&mut self, ctx: &mut Context<Self>) -> Result<(), Error> { let path = self .options .env_var .and_then(|env_var| std::env::var(env_var).ok()); if let Some(path) = path {
} else if let Some(data) = self.options.embedded.as_ref() { log::info!("Assets: embedded."); let assets = Assets::parse(data)?; self.assets = AssetsMode::Packed(assets); log::info!("Embedded assets used."); } else if let Some(url) = self.options.url.clone() { log::info!("Assets: url."); ctx.spawn_task(FetchUiPack(url), (), ()); } Ok(()) } async fn read_assets(&mut self, path: &str) -> Result<AssetsMode, Error> { let asset_path = Path::new(path).to_path_buf(); if asset_path.exists() { let metadata = tokio::fs::metadata(&asset_path).await?; if metadata.is_dir() { Ok(AssetsMode::Local(asset_path)) } else { let data = read_file(&asset_path).await?; let assets = Assets::parse(&data)?; Ok(AssetsMode::Packed(assets)) } } else { Err(Error::msg(format!("Can't load assets from {}", path))) } } } pub async fn read_file(path: &Path) -> Result<Vec<u8>, Error> { let mut file = File::open(path).await?; let mut content = Vec::new(); file.read_to_end(&mut content).await?; Ok(content) } pub enum AssetsMode { Loading, Local(PathBuf), Packed(Assets), Failed(String), } pub struct FetchUiPack(Url); #[async_trait] impl LiteTask for FetchUiPack { type Output = Assets; async fn interruptable_routine(mut self) -> Result<Self::Output, Error> { log::info!("Fetching UI assets..."); let bytes = reqwest::get(self.0) .await? .error_for_status()? 
.bytes() .await?; let assets = Assets::parse(&bytes)?; Ok(assets) } } #[async_trait] impl TaskEliminated<FetchUiPack, ()> for AppBind { async fn handle( &mut self, _id: IdOf<FetchUiPack>, _tag: (), result: Result<Assets, TaskError>, ctx: &mut Context<Self>, ) -> Result<(), Error> { match result { Ok(assets) => { self.assets = AssetsMode::Packed(assets); log::info!("Assets pack attached."); Ok(()) } Err(err) => { self.assets = AssetsMode::Failed(err.to_string()); ctx.address() .schedule(ReInitAssets, Instant::now() + Duration::from_secs(5))?; log::error!("Can't load UI pack: {}", err); Err(err.into()) } } } } struct ReInitAssets; #[async_trait] impl Scheduled<ReInitAssets> for AppBind { async fn handle( &mut self, _: Instant, _: ReInitAssets, ctx: &mut Context<Self>, ) -> Result<(), Error> { self.init_assets(ctx).await?; Ok(()) } }
if path.starts_with("http") { log::info!("Assets: env-url."); let url: Url = path.parse()?; ctx.spawn_task(FetchUiPack(url), (), ()); } else { log::info!("Assets: env-path."); self.assets = self.read_assets(&path).await?; log::warn!("Assets overriden to: {}", path); }
if_condition
[ { "content": "/// Install the engine.\n\npub fn install(name: impl ToString) -> Result<(), Error> {\n\n RillRate::install(name)\n\n}\n\n\n", "file_path": "rillrate/src/lib.rs", "rank": 0, "score": 234472.19064612628 }, { "content": "pub fn typed_var<T>(name: &'static str) -> Result<Option...
Rust
tests/parse/valid/control_flow.rs
JSAbrahams/mamba
66ae435a4abf496aae945a78e4fdfa8e4785d854
use mamba::lex::tokenize; use mamba::parse::ast::Node; use mamba::parse::ast::AST; use mamba::parse::parse; use mamba::parse::parse_direct; use crate::common::*; #[test] fn for_statements() { let source = resource_content(true, &["control_flow"], "for_statements.mamba"); parse(&tokenize(&source).unwrap()).unwrap(); } #[test] fn for_statement_verify() { let source = String::from("for a in c do d"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (expr, collection, body) = match ast.node { Node::Script { statements, .. } => match &statements.first().expect("script empty.").node { Node::For { expr, col, body } => (expr.clone(), col.clone(), body.clone()), _ => panic!("first element script was not for.") }, _ => panic!("ast was not script.") }; assert_eq!(expr.node, Node::Id { lit: String::from("a") }); assert_eq!(collection.node, Node::Id { lit: String::from("c") }); assert_eq!(body.node, Node::Id { lit: String::from("d") }); } #[test] fn for_range_step_verify() { let source = String::from("for a in c .. d step e do f"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (expr, col, body) = match ast.node { Node::Script { statements, .. 
} => match &statements.first().expect("script empty.").node { Node::For { expr, col, body } => (expr.clone(), col.clone(), body.clone()), _ => panic!("first element script was not foreach.") }, _ => panic!("ast was not script.") }; match col.node { Node::Range { from, to, inclusive, step } => { assert_eq!(from.node, Node::Id { lit: String::from("c") }); assert_eq!(to.node, Node::Id { lit: String::from("d") }); assert!(!inclusive); assert_eq!(step.clone().unwrap().node, Node::Id { lit: String::from("e") }); } _ => panic!("Expected range") } assert_eq!(expr.node, Node::Id { lit: String::from("a") }); assert_eq!(body.node, Node::Id { lit: String::from("f") }); } #[test] fn for_range_incl_verify() { let source = String::from("for a in c ..= d do f"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (expr, col, body) = match ast.node { Node::Script { statements, .. } => match &statements.first().expect("script empty.").node { Node::For { expr, col, body } => (expr.clone(), col.clone(), body.clone()), _ => panic!("first element script was not foreach.") }, _ => panic!("ast was not script.") }; match col.node { Node::Range { from, to, inclusive, step } => { assert_eq!(from.node, Node::Id { lit: String::from("c") }); assert_eq!(to.node, Node::Id { lit: String::from("d") }); assert!(inclusive); assert_eq!(step, None); } _ => panic!("Expected range") } assert_eq!(expr.node, Node::Id { lit: String::from("a") }); assert_eq!(body.node, Node::Id { lit: String::from("f") }); } #[test] fn if_stmt() { let source = resource_content(true, &["control_flow"], "if.mamba"); assert!(parse(&tokenize(&source).unwrap()).is_ok()); } #[test] fn if_verify() { let source = String::from("if a then c"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let _statements; let (cond, then, el) = match ast.node { Node::Script { statements, .. 
} => { _statements = statements; match &_statements.first().expect("script empty.").node { Node::IfElse { cond, then, el } => (cond, then, el), _ => panic!("first element script was not if.") } } _ => panic!("ast was not script.") }; assert_eq!(cond.node, Node::Id { lit: String::from("a") }); assert_eq!(then.node, Node::Id { lit: String::from("c") }); assert_eq!(el.is_none(), true); } #[test] fn if_with_block_verify() { let source = String::from("if a then\n c\n d"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (cond, then, el) = match ast.node { Node::Script { statements, .. } => match &statements.first().expect("script empty.").node { Node::IfElse { cond, then, el } => (cond.clone(), then.clone(), el.clone()), _ => panic!("first element script was not if.") }, _ => panic!("ast was not script.") }; assert_eq!(cond.node, Node::Id { lit: String::from("a") }); assert_eq!(el.is_none(), true); let block = match then.node { Node::Block { statements } => statements, other => panic!("then of if was not block, was: {:?}", other) }; assert_eq!(block.len(), 2); assert_eq!(block[0].node, Node::Id { lit: String::from("c") }); assert_eq!(block[1].node, Node::Id { lit: String::from("d") }); } #[test] fn if_else_verify() { let source = String::from("if a then c else d"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let _statements; let (cond, then, el) = match ast.node { Node::Script { statements, .. 
} => { _statements = statements; match &_statements.first().expect("script empty.").node { Node::IfElse { cond, then, el } => (cond, then, el), _ => panic!("first element script was not if.") } } _ => panic!("ast was not script.") }; assert_eq!(cond.node, Node::Id { lit: String::from("a") }); assert_eq!(then.node, Node::Id { lit: String::from("c") }); assert_eq!(el.as_ref().unwrap().node, Node::Id { lit: String::from("d") }); } #[test] fn match_statements() { let source = resource_content(true, &["control_flow"], "match.mamba"); parse(&tokenize(&source).unwrap()).unwrap(); } #[test] fn match_verify() { let source = String::from("match a\n a => b\n c => d"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (cond, cases) = match ast.node { Node::Script { statements, .. } => match &statements.first().expect("script empty.").node { Node::Match { cond, cases } => (cond.clone(), cases.clone()), _ => panic!("first element script was not match.") }, _ => panic!("ast was not script.") }; assert_eq!(cond.node, Node::Id { lit: String::from("a") }); assert_eq!(cases.len(), 2); let (cond1, expr1, cond2, expr2) = match (&cases[0], &cases[1]) { ( AST { node: Node::Case { cond: cond1, body: expr1 }, .. }, AST { node: Node::Case { cond: cond2, body: expr2 }, .. } ) => match (&cond1.node, &cond2.node) { ( Node::ExpressionType { expr: cond1, .. }, Node::ExpressionType { expr: cond2, .. 
} ) => (cond1, expr1, cond2, expr2), other => panic!("expected expression type: {:?}", other) }, _ => panic!("Cases incorrect.") }; assert_eq!(cond1.node, Node::Id { lit: String::from("a") }); assert_eq!(expr1.node, Node::Id { lit: String::from("b") }); assert_eq!(cond2.node, Node::Id { lit: String::from("c") }); assert_eq!(expr2.node, Node::Id { lit: String::from("d") }); } #[test] fn while_statements() { let source = resource_content(true, &["control_flow"], "while.mamba"); parse(&tokenize(&source).unwrap()).unwrap(); } #[test] fn while_verify() { let source = String::from("while a do d"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (cond, body) = match ast.node { Node::Script { statements, .. } => match &statements.first().expect("script empty.").node { Node::While { cond, body } => (cond.clone(), body.clone()), _ => panic!("first element script was not while.") }, _ => panic!("ast was not script.") }; assert_eq!(cond.node, Node::Id { lit: String::from("a") }); assert_eq!(body.node, Node::Id { lit: String::from("d") }); }
use mamba::lex::tokenize; use mamba::parse::ast::Node; use mamba::parse::ast::AST; use mamba::parse::parse; use mamba::parse::parse_direct; use crate::common::*; #[test] fn for_statements() { let source = resource_content(true, &["control_flow"], "for_statements.mamba"); parse(&tokenize(&source).unwrap()).unwrap(); } #[test] fn for_statement_verify() { let source = String::from("for a in c do d"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (expr, collection, body) = match ast.node { Node::Script { statements, .. } => match &statements.first().expect("script empty.").node { Node::For { expr, col, body } => (expr.clone(), col.clone(), body.clone()), _ => panic!("first element script was not for.") }, _ => panic!("ast was not script.") }; assert_eq!(expr.node, Node::Id { lit: String::from("a") }); assert_eq!(collection.node, Node::Id { lit: String::from("c") }); assert_eq!(body.node, Node::Id { lit: String::from("d") }); } #[test] fn for_range_step_verify() { let source = String::from("for a in c .. d step e do f"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (expr, col, body) = match ast.node { Node::Script { statements, .. } => match &statements.first().expect("script empty.").node { Node::For { expr, col, body } => (expr.clone(), col.clone(), body.clone()), _ => panic!("first element script was not foreach.") }, _ => panic!("ast was not script.") };
assert_eq!(expr.node, Node::Id { lit: String::from("a") }); assert_eq!(body.node, Node::Id { lit: String::from("f") }); } #[test] fn for_range_incl_verify() { let source = String::from("for a in c ..= d do f"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (expr, col, body) = match ast.node { Node::Script { statements, .. } => match &statements.first().expect("script empty.").node { Node::For { expr, col, body } => (expr.clone(), col.clone(), body.clone()), _ => panic!("first element script was not foreach.") }, _ => panic!("ast was not script.") }; match col.node { Node::Range { from, to, inclusive, step } => { assert_eq!(from.node, Node::Id { lit: String::from("c") }); assert_eq!(to.node, Node::Id { lit: String::from("d") }); assert!(inclusive); assert_eq!(step, None); } _ => panic!("Expected range") } assert_eq!(expr.node, Node::Id { lit: String::from("a") }); assert_eq!(body.node, Node::Id { lit: String::from("f") }); } #[test] fn if_stmt() { let source = resource_content(true, &["control_flow"], "if.mamba"); assert!(parse(&tokenize(&source).unwrap()).is_ok()); } #[test] fn if_verify() { let source = String::from("if a then c"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let _statements; let (cond, then, el) = match ast.node { Node::Script { statements, .. } => { _statements = statements; match &_statements.first().expect("script empty.").node { Node::IfElse { cond, then, el } => (cond, then, el), _ => panic!("first element script was not if.") } } _ => panic!("ast was not script.") }; assert_eq!(cond.node, Node::Id { lit: String::from("a") }); assert_eq!(then.node, Node::Id { lit: String::from("c") }); assert_eq!(el.is_none(), true); } #[test] fn if_with_block_verify() { let source = String::from("if a then\n c\n d"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (cond, then, el) = match ast.node { Node::Script { statements, .. 
} => match &statements.first().expect("script empty.").node { Node::IfElse { cond, then, el } => (cond.clone(), then.clone(), el.clone()), _ => panic!("first element script was not if.") }, _ => panic!("ast was not script.") }; assert_eq!(cond.node, Node::Id { lit: String::from("a") }); assert_eq!(el.is_none(), true); let block = match then.node { Node::Block { statements } => statements, other => panic!("then of if was not block, was: {:?}", other) }; assert_eq!(block.len(), 2); assert_eq!(block[0].node, Node::Id { lit: String::from("c") }); assert_eq!(block[1].node, Node::Id { lit: String::from("d") }); } #[test] fn if_else_verify() { let source = String::from("if a then c else d"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let _statements; let (cond, then, el) = match ast.node { Node::Script { statements, .. } => { _statements = statements; match &_statements.first().expect("script empty.").node { Node::IfElse { cond, then, el } => (cond, then, el), _ => panic!("first element script was not if.") } } _ => panic!("ast was not script.") }; assert_eq!(cond.node, Node::Id { lit: String::from("a") }); assert_eq!(then.node, Node::Id { lit: String::from("c") }); assert_eq!(el.as_ref().unwrap().node, Node::Id { lit: String::from("d") }); } #[test] fn match_statements() { let source = resource_content(true, &["control_flow"], "match.mamba"); parse(&tokenize(&source).unwrap()).unwrap(); } #[test] fn match_verify() { let source = String::from("match a\n a => b\n c => d"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (cond, cases) = match ast.node { Node::Script { statements, .. 
} => match &statements.first().expect("script empty.").node { Node::Match { cond, cases } => (cond.clone(), cases.clone()), _ => panic!("first element script was not match.") }, _ => panic!("ast was not script.") }; assert_eq!(cond.node, Node::Id { lit: String::from("a") }); assert_eq!(cases.len(), 2); let (cond1, expr1, cond2, expr2) = match (&cases[0], &cases[1]) { ( AST { node: Node::Case { cond: cond1, body: expr1 }, .. }, AST { node: Node::Case { cond: cond2, body: expr2 }, .. } ) => match (&cond1.node, &cond2.node) { ( Node::ExpressionType { expr: cond1, .. }, Node::ExpressionType { expr: cond2, .. } ) => (cond1, expr1, cond2, expr2), other => panic!("expected expression type: {:?}", other) }, _ => panic!("Cases incorrect.") }; assert_eq!(cond1.node, Node::Id { lit: String::from("a") }); assert_eq!(expr1.node, Node::Id { lit: String::from("b") }); assert_eq!(cond2.node, Node::Id { lit: String::from("c") }); assert_eq!(expr2.node, Node::Id { lit: String::from("d") }); } #[test] fn while_statements() { let source = resource_content(true, &["control_flow"], "while.mamba"); parse(&tokenize(&source).unwrap()).unwrap(); } #[test] fn while_verify() { let source = String::from("while a do d"); let ast = parse_direct(&tokenize(&source).unwrap()).unwrap(); let (cond, body) = match ast.node { Node::Script { statements, .. } => match &statements.first().expect("script empty.").node { Node::While { cond, body } => (cond.clone(), body.clone()), _ => panic!("first element script was not while.") }, _ => panic!("ast was not script.") }; assert_eq!(cond.node, Node::Id { lit: String::from("a") }); assert_eq!(body.node, Node::Id { lit: String::from("d") }); }
match col.node { Node::Range { from, to, inclusive, step } => { assert_eq!(from.node, Node::Id { lit: String::from("c") }); assert_eq!(to.node, Node::Id { lit: String::from("d") }); assert!(!inclusive); assert_eq!(step.clone().unwrap().node, Node::Id { lit: String::from("e") }); } _ => panic!("Expected range") }
if_condition
[ { "content": "#[test]\n\n#[ignore]\n\nfn core_match_statements() {\n\n let source = resource_content(true, &[\"control_flow\"], \"match.mamba\");\n\n to_py!(source);\n\n}\n\n\n", "file_path": "tests/core/control_flow.rs", "rank": 1, "score": 198184.49970102464 }, { "content": "#[test]\...
Rust
src/lib/ui/carnelian/src/render/generic/spinel/composition.rs
re995/fuchsia
02cb86f760af2aac974ba654186b73af8c16638f
use std::{ops::RangeBounds, ptr, slice}; use euclid::default::{Rect, Size2D}; use spinel_rs_sys::*; use crate::{ color::Color, drawing::DisplayRotation, render::generic::{ spinel::{init, InnerContext, Spinel}, BlendMode, Composition, Fill, FillRule, Layer, Style, }, }; fn group_layers( spn_styling: SpnStyling, top_group: SpnGroupId, layers: &[Layer<Spinel>], layer_id_start: u32, ) { fn cmds_len(style: &Style) -> usize { let fill_rule_len = match style.fill_rule { FillRule::NonZero => 1, FillRule::EvenOdd => 1, }; let fill_len = match &style.fill { Fill::Solid(..) => 3, Fill::Gradient(..) => 3, }; let blend_mode_len = match style.blend_mode { BlendMode::Over => 1, _ => 1, }; 1 + fill_rule_len + fill_len + blend_mode_len } for (i, Layer { style, .. }) in layers.iter().enumerate() { let cmds = unsafe { let len = cmds_len(style); let data = init(|ptr| { spn!(spn_styling_group_layer( spn_styling, top_group, layer_id_start + i as u32, len as u32, ptr )) }); slice::from_raw_parts_mut(data, len) }; cmds[0] = SpnCommand::SpnStylingOpcodeCoverWipZero; let mut cursor = 1; match style.fill_rule { FillRule::NonZero => { cmds[cursor] = SpnCommand::SpnStylingOpcodeCoverNonzero; cursor += 1; } FillRule::EvenOdd => { cmds[cursor] = SpnCommand::SpnStylingOpcodeCoverEvenodd; cursor += 1; } } match &style.fill { Fill::Solid(color) => { let color = color.to_linear_premult_rgba(); unsafe { spn_styling_layer_fill_rgba_encoder(&mut cmds[cursor], color.as_ptr()); } cursor += 3; } Fill::Gradient(gradient) => { let color = gradient.stops.first().unwrap().0.to_linear_premult_rgba(); unsafe { spn_styling_layer_fill_rgba_encoder(&mut cmds[cursor], color.as_ptr()); } cursor += 3; } } cmds[cursor] = match style.blend_mode { BlendMode::Over => SpnCommand::SpnStylingOpcodeBlendOver, _ => SpnCommand::SpnStylingOpcodeBlendOver, } } } #[derive(Clone, Debug)] pub struct SpinelComposition { pub(crate) layers: Vec<Layer<Spinel>>, pub(crate) background_color: [f32; 4], } impl SpinelComposition { 
pub(crate) fn set_up_spn_composition( &self, context: &InnerContext, raster_builder: SpnRasterBuilder, composition: SpnComposition, previous_rasters: &mut Vec<SpnRaster>, size: Size2D<u32>, display_rotation: DisplayRotation, clip: Rect<u32>, ) { unsafe { let clip = [clip.min_x(), clip.min_y(), clip.max_x(), clip.max_y()]; spn!(spn_composition_reset(composition)); spn!(spn_composition_set_clip(composition, clip.as_ptr(),)); } for raster in previous_rasters.drain(..) { let i = self.layers.len(); unsafe { spn!(spn_composition_place(composition, &raster, &(i as u32), ptr::null(), 1)); } context.get_checked().map(|context| unsafe { spn!(spn_raster_release(context, &raster as *const _, 1)) }); } for (i, Layer { raster, .. }) in self.layers.iter().enumerate() { for (paths, txty) in raster.rasters.iter() { unsafe { spn!(spn_raster_builder_begin(raster_builder)); } for (path, transform) in paths.iter() { const SPINEL_TRANSFORM_MULTIPLIER: f32 = 32.0; let transform = transform .then_translate(*txty) .then(&display_rotation.transform(&size.to_f32())); let transform = SpnTransform { sx: transform.m11 * SPINEL_TRANSFORM_MULTIPLIER, shx: transform.m21 * SPINEL_TRANSFORM_MULTIPLIER, tx: transform.m31 * SPINEL_TRANSFORM_MULTIPLIER, shy: transform.m12 * SPINEL_TRANSFORM_MULTIPLIER, sy: transform.m22 * SPINEL_TRANSFORM_MULTIPLIER, ty: transform.m32 * SPINEL_TRANSFORM_MULTIPLIER, w0: 0.0, w1: 0.0, }; let clip = SpnClip { x0: std::f32::MIN, y0: std::f32::MIN, x1: std::f32::MAX, y1: std::f32::MAX, }; unsafe { spn!(spn_raster_builder_add( raster_builder, &*path.path, ptr::null_mut(), &transform, ptr::null_mut(), &clip, 1, )); } } let raster = unsafe { init(|ptr| spn!(spn_raster_builder_end(raster_builder, ptr))) }; unsafe { spn!(spn_composition_place(composition, &raster, &(i as u32), ptr::null(), 1)); } previous_rasters.push(raster); } } } pub(crate) fn spn_styling( &self, context: &InnerContext, needs_linear_to_srgb_opcode: bool, ) -> Option<SpnStyling> { const PARENTS: u32 = 0; const 
ENTER_CMDS: u32 = 1; const GROUP_SIZE: u32 = 6; const MAX_LAYER_CMDS: u32 = 6; let leave_cmds: u32 = if needs_linear_to_srgb_opcode { 5 } else { 4 }; let num_clear_layers = 1; let len = self.layers.len() as u32 + num_clear_layers; let styling_len = len * MAX_LAYER_CMDS + PARENTS + ENTER_CMDS + leave_cmds + GROUP_SIZE; let spn_styling = context.get_checked().map(|context| unsafe { init(|ptr| spn!(spn_styling_create(context, ptr, len, styling_len))) })?; let top_group = unsafe { init(|ptr| spn!(spn_styling_group_alloc(spn_styling, ptr))) }; unsafe { spn!(spn_styling_group_parents(spn_styling, top_group, PARENTS, ptr::null_mut())); if len != 0 { spn!(spn_styling_group_range_lo(spn_styling, top_group, 0)); spn!(spn_styling_group_range_hi(spn_styling, top_group, len - 1)); } } let cmds_enter = unsafe { let data = init(|ptr| spn!(spn_styling_group_enter(spn_styling, top_group, ENTER_CMDS, ptr))); slice::from_raw_parts_mut(data, 1) }; cmds_enter[0] = SpnCommand::SpnStylingOpcodeColorAccZero; let cmds_leave = unsafe { let data = init(|ptr| spn!(spn_styling_group_leave(spn_styling, top_group, leave_cmds, ptr))); slice::from_raw_parts_mut(data, leave_cmds as usize) }; unsafe { spn_styling_background_over_encoder(&mut cmds_leave[0], self.background_color.as_ptr()); } if needs_linear_to_srgb_opcode { cmds_leave[3] = SpnCommand::SpnStylingOpcodeColorAccLinearToSrgb; cmds_leave[4] = SpnCommand::SpnStylingOpcodeColorAccStoreToSurface; } else { cmds_leave[3] = SpnCommand::SpnStylingOpcodeColorAccStoreToSurface; } group_layers(spn_styling, top_group, &self.layers, 0); let clear_cmds = unsafe { let data = init(|ptr| { let len = 5; spn!(spn_styling_group_layer( spn_styling, top_group, self.layers.len() as u32, len, ptr )) }); slice::from_raw_parts_mut(data, len as usize) }; clear_cmds[0] = SpnCommand::SpnStylingOpcodeCoverWipZero; unsafe { spn_styling_layer_fill_rgba_encoder(&mut clear_cmds[1], self.background_color.as_ptr()); } clear_cmds[4] = SpnCommand::SpnStylingOpcodeBlendOver; 
unsafe { spn!(spn_styling_seal(spn_styling)); } Some(spn_styling) } } impl Composition<Spinel> for SpinelComposition { fn new(background_color: Color) -> Self { Self { layers: vec![], background_color: background_color.to_linear_premult_rgba() } } fn with_layers( layers: impl IntoIterator<Item = Layer<Spinel>>, background_color: Color, ) -> Self { Self { layers: layers.into_iter().collect(), background_color: background_color.to_linear_premult_rgba(), } } fn clear(&mut self) { self.layers.clear(); } fn replace<R, I>(&mut self, range: R, with: I) where R: RangeBounds<usize>, I: IntoIterator<Item = Layer<Spinel>>, { self.layers.splice(range, with); } }
use std::{ops::RangeBounds, ptr, slice}; use euclid::default::{Rect, Size2D}; use spinel_rs_sys::*; use crate::{ color::Color, drawing::DisplayRotation, render::generic::{ spinel::{init, InnerContext, Spinel}, BlendMode, Composition, Fill, FillRule, Layer, Style, }, }; fn group_layers( spn_styling: SpnStyling, top_group: SpnGroupId, layers: &[Layer<Spinel>], layer_id_start: u32, ) { fn cmds_len(style: &Style) -> usize { let fill_rule_len = match style.fill_rule { FillRule::NonZero => 1, FillRule::EvenOdd => 1, }; let fill_len = match &style.fill { Fill::Solid(..) => 3, Fill::Gradient(..) => 3, }; let blend_mode_len = match style.blend_mode { BlendMode::Over => 1, _ => 1, }; 1 + fill_rule_len + fill_len + blend_mode_len } for (i, Layer { style, .. }) in layers.iter().enumerate() { let cmds = unsafe { let len = cmds_len(style); let data =
; slice::from_raw_parts_mut(data, len) }; cmds[0] = SpnCommand::SpnStylingOpcodeCoverWipZero; let mut cursor = 1; match style.fill_rule { FillRule::NonZero => { cmds[cursor] = SpnCommand::SpnStylingOpcodeCoverNonzero; cursor += 1; } FillRule::EvenOdd => { cmds[cursor] = SpnCommand::SpnStylingOpcodeCoverEvenodd; cursor += 1; } } match &style.fill { Fill::Solid(color) => { let color = color.to_linear_premult_rgba(); unsafe { spn_styling_layer_fill_rgba_encoder(&mut cmds[cursor], color.as_ptr()); } cursor += 3; } Fill::Gradient(gradient) => { let color = gradient.stops.first().unwrap().0.to_linear_premult_rgba(); unsafe { spn_styling_layer_fill_rgba_encoder(&mut cmds[cursor], color.as_ptr()); } cursor += 3; } } cmds[cursor] = match style.blend_mode { BlendMode::Over => SpnCommand::SpnStylingOpcodeBlendOver, _ => SpnCommand::SpnStylingOpcodeBlendOver, } } } #[derive(Clone, Debug)] pub struct SpinelComposition { pub(crate) layers: Vec<Layer<Spinel>>, pub(crate) background_color: [f32; 4], } impl SpinelComposition { pub(crate) fn set_up_spn_composition( &self, context: &InnerContext, raster_builder: SpnRasterBuilder, composition: SpnComposition, previous_rasters: &mut Vec<SpnRaster>, size: Size2D<u32>, display_rotation: DisplayRotation, clip: Rect<u32>, ) { unsafe { let clip = [clip.min_x(), clip.min_y(), clip.max_x(), clip.max_y()]; spn!(spn_composition_reset(composition)); spn!(spn_composition_set_clip(composition, clip.as_ptr(),)); } for raster in previous_rasters.drain(..) { let i = self.layers.len(); unsafe { spn!(spn_composition_place(composition, &raster, &(i as u32), ptr::null(), 1)); } context.get_checked().map(|context| unsafe { spn!(spn_raster_release(context, &raster as *const _, 1)) }); } for (i, Layer { raster, .. 
}) in self.layers.iter().enumerate() { for (paths, txty) in raster.rasters.iter() { unsafe { spn!(spn_raster_builder_begin(raster_builder)); } for (path, transform) in paths.iter() { const SPINEL_TRANSFORM_MULTIPLIER: f32 = 32.0; let transform = transform .then_translate(*txty) .then(&display_rotation.transform(&size.to_f32())); let transform = SpnTransform { sx: transform.m11 * SPINEL_TRANSFORM_MULTIPLIER, shx: transform.m21 * SPINEL_TRANSFORM_MULTIPLIER, tx: transform.m31 * SPINEL_TRANSFORM_MULTIPLIER, shy: transform.m12 * SPINEL_TRANSFORM_MULTIPLIER, sy: transform.m22 * SPINEL_TRANSFORM_MULTIPLIER, ty: transform.m32 * SPINEL_TRANSFORM_MULTIPLIER, w0: 0.0, w1: 0.0, }; let clip = SpnClip { x0: std::f32::MIN, y0: std::f32::MIN, x1: std::f32::MAX, y1: std::f32::MAX, }; unsafe { spn!(spn_raster_builder_add( raster_builder, &*path.path, ptr::null_mut(), &transform, ptr::null_mut(), &clip, 1, )); } } let raster = unsafe { init(|ptr| spn!(spn_raster_builder_end(raster_builder, ptr))) }; unsafe { spn!(spn_composition_place(composition, &raster, &(i as u32), ptr::null(), 1)); } previous_rasters.push(raster); } } } pub(crate) fn spn_styling( &self, context: &InnerContext, needs_linear_to_srgb_opcode: bool, ) -> Option<SpnStyling> { const PARENTS: u32 = 0; const ENTER_CMDS: u32 = 1; const GROUP_SIZE: u32 = 6; const MAX_LAYER_CMDS: u32 = 6; let leave_cmds: u32 = if needs_linear_to_srgb_opcode { 5 } else { 4 }; let num_clear_layers = 1; let len = self.layers.len() as u32 + num_clear_layers; let styling_len = len * MAX_LAYER_CMDS + PARENTS + ENTER_CMDS + leave_cmds + GROUP_SIZE; let spn_styling = context.get_checked().map(|context| unsafe { init(|ptr| spn!(spn_styling_create(context, ptr, len, styling_len))) })?; let top_group = unsafe { init(|ptr| spn!(spn_styling_group_alloc(spn_styling, ptr))) }; unsafe { spn!(spn_styling_group_parents(spn_styling, top_group, PARENTS, ptr::null_mut())); if len != 0 { spn!(spn_styling_group_range_lo(spn_styling, top_group, 0)); 
spn!(spn_styling_group_range_hi(spn_styling, top_group, len - 1)); } } let cmds_enter = unsafe { let data = init(|ptr| spn!(spn_styling_group_enter(spn_styling, top_group, ENTER_CMDS, ptr))); slice::from_raw_parts_mut(data, 1) }; cmds_enter[0] = SpnCommand::SpnStylingOpcodeColorAccZero; let cmds_leave = unsafe { let data = init(|ptr| spn!(spn_styling_group_leave(spn_styling, top_group, leave_cmds, ptr))); slice::from_raw_parts_mut(data, leave_cmds as usize) }; unsafe { spn_styling_background_over_encoder(&mut cmds_leave[0], self.background_color.as_ptr()); } if needs_linear_to_srgb_opcode { cmds_leave[3] = SpnCommand::SpnStylingOpcodeColorAccLinearToSrgb; cmds_leave[4] = SpnCommand::SpnStylingOpcodeColorAccStoreToSurface; } else { cmds_leave[3] = SpnCommand::SpnStylingOpcodeColorAccStoreToSurface; } group_layers(spn_styling, top_group, &self.layers, 0); let clear_cmds = unsafe { let data = init(|ptr| { let len = 5; spn!(spn_styling_group_layer( spn_styling, top_group, self.layers.len() as u32, len, ptr )) }); slice::from_raw_parts_mut(data, len as usize) }; clear_cmds[0] = SpnCommand::SpnStylingOpcodeCoverWipZero; unsafe { spn_styling_layer_fill_rgba_encoder(&mut clear_cmds[1], self.background_color.as_ptr()); } clear_cmds[4] = SpnCommand::SpnStylingOpcodeBlendOver; unsafe { spn!(spn_styling_seal(spn_styling)); } Some(spn_styling) } } impl Composition<Spinel> for SpinelComposition { fn new(background_color: Color) -> Self { Self { layers: vec![], background_color: background_color.to_linear_premult_rgba() } } fn with_layers( layers: impl IntoIterator<Item = Layer<Spinel>>, background_color: Color, ) -> Self { Self { layers: layers.into_iter().collect(), background_color: background_color.to_linear_premult_rgba(), } } fn clear(&mut self) { self.layers.clear(); } fn replace<R, I>(&mut self, range: R, with: I) where R: RangeBounds<usize>, I: IntoIterator<Item = Layer<Spinel>>, { self.layers.splice(range, with); } }
init(|ptr| { spn!(spn_styling_group_layer( spn_styling, top_group, layer_id_start + i as u32, len as u32, ptr )) })
call_expression
[]
Rust
bsync/src/db.rs
losfair/blkredo
1151cd23acfd231ce10fedce4401a00e2339ed93
use std::{ convert::TryInto, path::Path, sync::{ atomic::{AtomicU64, Ordering}, Arc, }, time::{Duration, Instant, SystemTime, UNIX_EPOCH}, }; use anyhow::Result; use parking_lot::Mutex; use rusqlite::{params, Connection, OpenFlags, OptionalExtension, TransactionBehavior}; use thiserror::Error; use crate::{blob::ZERO_BLOCK_HASH, util::align_block}; macro_rules! migration { ($id:ident, $($version:expr,)*) => { static $id: &'static [(&'static str, &'static str)] = &[ $(($version, include_str!(concat!("./migration/", $version, ".sql"))),)* ]; }; } migration!(VERSIONS, "000001", "000002", "000003",); static SNAPSHOT_ID: AtomicU64 = AtomicU64::new(0); #[derive(Clone)] pub struct Database { db: Arc<Mutex<Connection>>, instance_id: Arc<str>, } #[derive(Clone)] pub struct ConsistentPoint { pub lsn: u64, pub size: u64, pub created_at: u64, } pub enum RedoContentOrHash<'a> { Content(&'a [u8]), Hash([u8; 32]), } impl Database { pub fn open_file(path: &Path, create: bool) -> Result<Self> { #[derive(Error, Debug)] #[error("migration failed: {0}")] struct MigrationError(anyhow::Error); let mut flags: OpenFlags = OpenFlags::SQLITE_OPEN_READ_WRITE; if create { flags |= OpenFlags::SQLITE_OPEN_CREATE; } let mut db = Connection::open_with_flags(path, flags)?; db.execute_batch( r#" pragma journal_mode = wal; "#, )?; db.busy_handler(Some(|i| { log::debug!("Waiting for lock on database (attempt {})", i); std::thread::sleep(Duration::from_millis(100)); true }))?; run_migration(&mut db).map_err(MigrationError)?; let instance_id: String = db .query_row( "select v from bsync_config where k = 'instance_id'", params![], |r| r.get(0), ) .expect("missing instance_id in bsync_config"); log::info!( "Opened database at {:?} with instance id {}.", path, instance_id ); Ok(Self { db: Arc::new(Mutex::new(db)), instance_id: Arc::from(instance_id.as_str()), }) } pub fn instance_id(&self) -> &str { &*self.instance_id } pub fn snapshot(&self, lsn: u64) -> Result<Snapshot> { let id = 
SNAPSHOT_ID.fetch_add(1, Ordering::Relaxed); let table_name = format!("snapshot_{}", id); let db = self.db.lock(); let start = Instant::now(); db.execute_batch(&format!( r#" create temp table {} ( block_id integer not null primary key, hash blob not null ); insert into temp.{} (block_id, hash) select block_id, hash from redo_v1 where lsn in ( select max(lsn) from redo_v1 where lsn <= {} group by block_id ); "#, table_name, table_name, lsn ))?; log::info!( "Materialized snapshot at LSN {} in {:?}.", lsn, start.elapsed() ); Ok(Snapshot { db: self.clone(), table_name, }) } pub fn write_redo<'a>( &self, base_lsn: u64, data: impl IntoIterator<Item = (u64, RedoContentOrHash<'a>)>, ) -> Result<u64> { #[derive(Error, Debug)] #[error("base lsn mismatch: expecting {0}, got {1}")] struct LsnMismatch(u64, u64); #[derive(Error, Debug)] #[error("block with hash {0} was assumed to exist in CAS but does not exist anymore - did you run `bsync squash` just now? please retry.")] struct MissingHash(String); let mut db = self.db.lock(); let txn = db.transaction_with_behavior(TransactionBehavior::Immediate)?; let max_lsn: Option<u64>; { let mut get_max_lsn_stmt = txn.prepare_cached("select max(lsn) from redo_v1").unwrap(); let mut has_cas_stmt = txn .prepare_cached("select hash from cas_v1 where hash = ?") .unwrap(); let mut insert_cas_compressed_stmt = txn .prepare_cached("insert into cas_v1 (hash, content, compressed) values(?, ?, 1)") .unwrap(); let mut insert_redo_stmt = txn .prepare_cached("insert into redo_v1 (block_id, hash) values(?, ?)") .unwrap(); let prev_max_lsn: Option<u64> = get_max_lsn_stmt.query_row(params![], |r| r.get(0)).unwrap(); let prev_max_lsn = prev_max_lsn.unwrap_or(0); if prev_max_lsn != base_lsn { return Err(LsnMismatch(base_lsn, prev_max_lsn).into()); } for (block_id, body) in data { let hash: [u8; 32] = match body { RedoContentOrHash::Content(x) => blake3::hash(x).into(), RedoContentOrHash::Hash(x) => x, }; let has_cas: Option<Vec<u8>> = has_cas_stmt 
.query_row(params![&hash[..]], |r| r.get(0)) .optional() .unwrap(); if has_cas.is_none() { match body { RedoContentOrHash::Content(content) => { let content = align_block(content); let content = zstd::encode_all(&*content, 3)?; insert_cas_compressed_stmt .execute(params![&hash[..], &content[..]]) .unwrap(); } RedoContentOrHash::Hash(_) => return Err(MissingHash(hex::encode(&hash)).into()), } } insert_redo_stmt .execute(params![block_id, &hash[..]]) .unwrap(); } max_lsn = get_max_lsn_stmt .query_row(params![], |r| r.get(0)) .optional() .unwrap(); } txn.commit().unwrap(); Ok(max_lsn.unwrap_or(0)) } pub fn max_lsn(&self) -> u64 { let x: Option<u64> = self .db .lock() .prepare_cached("select max(lsn) from redo_v1") .unwrap() .query_row(params![], |r| r.get(0)) .unwrap(); x.unwrap_or(0) } pub fn exists_in_cas(&self, hash: &[u8; 32]) -> bool { let v: Option<u32> = self .db .lock() .query_row( "select 1 from cas_v1 where hash = ?", params![&hash[..]], |r| r.get(0), ) .optional() .unwrap(); v.is_some() } pub fn list_consistent_point(&self) -> Vec<ConsistentPoint> { let db = self.db.lock(); let mut stmt = db .prepare_cached("select lsn, size, created_at from consistent_point_v1 order by lsn asc") .unwrap(); stmt .query_map(params![], |r| { Ok(ConsistentPoint { lsn: r.get(0)?, size: r.get(1)?, created_at: r.get(2)?, }) }) .unwrap() .collect::<Result<_, rusqlite::Error>>() .unwrap() } pub fn add_consistent_point(&self, lsn: u64, size: u64) { let db = self.db.lock(); let now = SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs(); let mut stmt = db .prepare_cached( "insert or ignore into consistent_point_v1 (lsn, size, created_at) values(?, ?, ?)", ) .unwrap(); stmt.execute(params![lsn, size, now]).unwrap(); } pub fn squash(&self, start_lsn: u64, end_lsn: u64) -> Result<()> { let mut db = self.db.lock(); let txn = db.transaction_with_behavior(TransactionBehavior::Immediate)?; txn.execute_batch(&format!(r#" delete from consistent_point_v1 where lsn > {from} and lsn 
< {to}; create temp table squash ( `lsn` integer not null primary key ); insert into temp.squash (lsn) select max(lsn) from redo_v1 where lsn > {from} and lsn <= {to} group by block_id; delete from redo_v1 where lsn > {from} and lsn <= {to} and not exists (select * from temp.squash where lsn = redo_v1.lsn); drop table temp.squash; "#, from = start_lsn, to = end_lsn)).unwrap(); txn.commit().unwrap(); Ok(()) } pub fn cas_gc(&self) { let db = self.db.lock(); db.execute_batch( r#" delete from cas_v1 where hash not in (select hash from redo_v1); "#, ) .unwrap(); } pub fn vacuum(&self) { self.db.lock().execute_batch("vacuum;").unwrap(); } } pub struct Snapshot { db: Database, table_name: String, } impl Snapshot { pub fn read_block(&self, block_id: u64) -> Option<Vec<u8>> { let hash = self.read_block_hash(block_id)?; if hash == *ZERO_BLOCK_HASH { return None; } let db = self.db.db.lock(); let mut stmt = db .prepare_cached( r#" select content, compressed from cas_v1 where hash = ? "#, ) .unwrap(); let (content, compressed): (Vec<u8>, bool) = stmt .query_row(params![&hash[..]], |r| Ok((r.get(0)?, r.get(1)?))) .optional() .unwrap()?; if compressed { let content = zstd::decode_all(&content[..]).expect("read_block: decompression failed"); Some(content) } else { Some(content) } } pub fn read_block_hash(&self, block_id: u64) -> Option<[u8; 32]> { let db = self.db.db.lock(); let mut stmt = db .prepare_cached(&format!( "select hash from temp.{} where block_id = ?", self.table_name )) .unwrap(); let hash: Vec<u8> = stmt .query_row(params![block_id], |r| r.get(0)) .optional() .unwrap()?; Some(hash.try_into().unwrap()) } } impl Drop for Snapshot { fn drop(&mut self) { self .db .db .lock() .execute_batch(&format!( r#" drop table temp.{}; "#, &self.table_name )) .unwrap(); } } fn run_migration(db: &mut Connection) -> Result<()> { #[derive(Error, Debug)] #[error("database schema version is newer than the supported version")] struct SchemaTooNew; let txn = 
db.transaction_with_behavior(TransactionBehavior::Immediate)?; let table_exists: u32 = txn.query_row( "select count(*) from sqlite_master where type='table' and name='bsync_config'", params![], |r| r.get(0), )?; let current_version: Option<String> = if table_exists == 1 { Some(txn.query_row( "select v from bsync_config where k = 'schema_version'", params![], |r| r.get(0), )?) } else { None }; let current_version: u64 = current_version.map(|x| x.parse()).transpose()?.unwrap_or(0); let latest_version: u64 = VERSIONS.last().unwrap().0.parse().unwrap(); if current_version > latest_version { return Err(SchemaTooNew.into()); } for &(version, sql) in VERSIONS { let version: u64 = version.parse().unwrap(); if version > current_version { txn.execute_batch(sql)?; log::info!("Applied migration {}.", version); } } txn.execute( "replace into bsync_config (k, v) values('schema_version', ?)", params![format!("{}", latest_version)], )?; txn.commit()?; Ok(()) }
use std::{ convert::TryInto, path::Path, sync::{ atomic::{AtomicU64, Ordering}, Arc, }, time::{Duration, Instant, SystemTime, UNIX_EPOCH}, }; use anyhow::Result; use parking_lot::Mutex; use rusqlite::{params, Connection, OpenFlags, OptionalExtension, TransactionBehavior}; use thiserror::Error; use crate::{blob::ZERO_BLOCK_HASH, util::align_block}; macro_rules! migration { ($id:ident, $($version:expr,)*) => { static $id: &'static [(&'static str, &'static str)] = &[ $(($version, include_str!(concat!("./migration/", $version, ".sql"))),)* ]; }; } migration!(VERSIONS, "000001", "000002", "000003",); static SNAPSHOT_ID: AtomicU64 = AtomicU64::new(0); #[derive(Clone)] pub struct Database { db: Arc<Mutex<Connection>>, instance_id: Arc<str>, } #[derive(Clone)] pub struct ConsistentPoint { pub lsn: u64, pub size: u64, pub created_at: u64, } pub enum RedoContentOrHash<'a> { Content(&'a [u8]), Hash([u8; 32]), } impl Database { pub fn open_file(path: &Path, create: bool) -> Result<Self> { #[derive(Error, Debug)] #[error("migration failed: {0}")] struct MigrationError(anyhow::Error); let mut flags: OpenFlags = OpenFlags::SQLITE_OPEN_READ_WRITE; if create { flags |= OpenFlags::SQLITE_OPEN_CREATE; } let mut db = Connection::open_with_flags(path, flags)?; db.execute_batch( r#" pragma journal_mode = wal; "#, )?; db.busy_handler(Some(|i| { log::debug!("Waiting for lock on database (attempt {})", i); std::thread::sleep(Duration::from_millis(100)); true }))?; run_migration(&mut db).map_err(MigrationError)?; let instance_id: String = db .query_row( "select v from bsync_config where k = 'instance_id'", params![], |r| r.get(0), ) .expect("missing instance_id in bsync_config"); log::info!( "Opened database at {:?} with instance id {}.", path, instance_id ); Ok(Self { db: Arc::new(Mutex::new(db)), instance_id: Arc::from(instance_id.as_str()), }) } pub fn instance_id(&self) -> &str { &*self.instance_id } pub fn snapshot(&self, lsn: u64) -> Result<Snapshot> { let id = 
SNAPSHOT_ID.fetch_add(1, Ordering::Relaxed); let table_name = format!("snapshot_{}", id); let db = self.db.lock(); let start = Instant::now(); db.execute_batch(&format!( r#" create temp table {} ( block_id integer not null primary key, hash blob not null ); insert into temp.{} (block_id, hash) select block_id, hash from redo_v1 where lsn in ( select max(lsn) from redo_v1 where lsn <= {} group by block_id ); "#, table_name, table_name, lsn ))?; log::info!( "Materialized snapshot at LSN {} in {:?}.", lsn, start.elapsed() ); Ok(Snapshot { db: self.clone(), table_name, }) } pub fn write_redo<'a>( &self, base_lsn: u64, data: impl IntoIterator<Item = (u64, RedoContentOrHash<'a>)>, ) -> Result<u64> { #[derive(Error, Debug)] #[error("base lsn mismatch: expecting {0}, got {1}")] struct LsnMismatch(u64, u64); #[derive(Error, Debug)] #[error("block with hash {0} was assumed to exist in CAS but does not exist anymore - did you run `bsync squash` just now? please retry.")] struct MissingHash(String); let mut db = self.db.lock(); let txn = db.transaction_with_behavior(TransactionBehavior::Immediate)?; let max_lsn: Option<u64>; { let mut get_max_lsn_stmt = txn.prepare_cached("select max(lsn) from redo_v1").unwrap(); let mut has_cas_stmt = txn .prepare_cached("select hash from cas_v1 where hash = ?") .unwrap(); let mut insert_cas_compressed_stmt = txn .prepare_cached("insert into cas_v1 (hash, content, compressed) values(?, ?, 1)") .unwrap(); let mut insert_redo_stmt = txn .prepare_cache
lsn > {from} and lsn <= {to} and not exists (select * from temp.squash where lsn = redo_v1.lsn); drop table temp.squash; "#, from = start_lsn, to = end_lsn)).unwrap(); txn.commit().unwrap(); Ok(()) } pub fn cas_gc(&self) { let db = self.db.lock(); db.execute_batch( r#" delete from cas_v1 where hash not in (select hash from redo_v1); "#, ) .unwrap(); } pub fn vacuum(&self) { self.db.lock().execute_batch("vacuum;").unwrap(); } } pub struct Snapshot { db: Database, table_name: String, } impl Snapshot { pub fn read_block(&self, block_id: u64) -> Option<Vec<u8>> { let hash = self.read_block_hash(block_id)?; if hash == *ZERO_BLOCK_HASH { return None; } let db = self.db.db.lock(); let mut stmt = db .prepare_cached( r#" select content, compressed from cas_v1 where hash = ? "#, ) .unwrap(); let (content, compressed): (Vec<u8>, bool) = stmt .query_row(params![&hash[..]], |r| Ok((r.get(0)?, r.get(1)?))) .optional() .unwrap()?; if compressed { let content = zstd::decode_all(&content[..]).expect("read_block: decompression failed"); Some(content) } else { Some(content) } } pub fn read_block_hash(&self, block_id: u64) -> Option<[u8; 32]> { let db = self.db.db.lock(); let mut stmt = db .prepare_cached(&format!( "select hash from temp.{} where block_id = ?", self.table_name )) .unwrap(); let hash: Vec<u8> = stmt .query_row(params![block_id], |r| r.get(0)) .optional() .unwrap()?; Some(hash.try_into().unwrap()) } } impl Drop for Snapshot { fn drop(&mut self) { self .db .db .lock() .execute_batch(&format!( r#" drop table temp.{}; "#, &self.table_name )) .unwrap(); } } fn run_migration(db: &mut Connection) -> Result<()> { #[derive(Error, Debug)] #[error("database schema version is newer than the supported version")] struct SchemaTooNew; let txn = db.transaction_with_behavior(TransactionBehavior::Immediate)?; let table_exists: u32 = txn.query_row( "select count(*) from sqlite_master where type='table' and name='bsync_config'", params![], |r| r.get(0), )?; let current_version: 
Option<String> = if table_exists == 1 { Some(txn.query_row( "select v from bsync_config where k = 'schema_version'", params![], |r| r.get(0), )?) } else { None }; let current_version: u64 = current_version.map(|x| x.parse()).transpose()?.unwrap_or(0); let latest_version: u64 = VERSIONS.last().unwrap().0.parse().unwrap(); if current_version > latest_version { return Err(SchemaTooNew.into()); } for &(version, sql) in VERSIONS { let version: u64 = version.parse().unwrap(); if version > current_version { txn.execute_batch(sql)?; log::info!("Applied migration {}.", version); } } txn.execute( "replace into bsync_config (k, v) values('schema_version', ?)", params![format!("{}", latest_version)], )?; txn.commit()?; Ok(()) }
d("insert into redo_v1 (block_id, hash) values(?, ?)") .unwrap(); let prev_max_lsn: Option<u64> = get_max_lsn_stmt.query_row(params![], |r| r.get(0)).unwrap(); let prev_max_lsn = prev_max_lsn.unwrap_or(0); if prev_max_lsn != base_lsn { return Err(LsnMismatch(base_lsn, prev_max_lsn).into()); } for (block_id, body) in data { let hash: [u8; 32] = match body { RedoContentOrHash::Content(x) => blake3::hash(x).into(), RedoContentOrHash::Hash(x) => x, }; let has_cas: Option<Vec<u8>> = has_cas_stmt .query_row(params![&hash[..]], |r| r.get(0)) .optional() .unwrap(); if has_cas.is_none() { match body { RedoContentOrHash::Content(content) => { let content = align_block(content); let content = zstd::encode_all(&*content, 3)?; insert_cas_compressed_stmt .execute(params![&hash[..], &content[..]]) .unwrap(); } RedoContentOrHash::Hash(_) => return Err(MissingHash(hex::encode(&hash)).into()), } } insert_redo_stmt .execute(params![block_id, &hash[..]]) .unwrap(); } max_lsn = get_max_lsn_stmt .query_row(params![], |r| r.get(0)) .optional() .unwrap(); } txn.commit().unwrap(); Ok(max_lsn.unwrap_or(0)) } pub fn max_lsn(&self) -> u64 { let x: Option<u64> = self .db .lock() .prepare_cached("select max(lsn) from redo_v1") .unwrap() .query_row(params![], |r| r.get(0)) .unwrap(); x.unwrap_or(0) } pub fn exists_in_cas(&self, hash: &[u8; 32]) -> bool { let v: Option<u32> = self .db .lock() .query_row( "select 1 from cas_v1 where hash = ?", params![&hash[..]], |r| r.get(0), ) .optional() .unwrap(); v.is_some() } pub fn list_consistent_point(&self) -> Vec<ConsistentPoint> { let db = self.db.lock(); let mut stmt = db .prepare_cached("select lsn, size, created_at from consistent_point_v1 order by lsn asc") .unwrap(); stmt .query_map(params![], |r| { Ok(ConsistentPoint { lsn: r.get(0)?, size: r.get(1)?, created_at: r.get(2)?, }) }) .unwrap() .collect::<Result<_, rusqlite::Error>>() .unwrap() } pub fn add_consistent_point(&self, lsn: u64, size: u64) { let db = self.db.lock(); let now = 
SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs(); let mut stmt = db .prepare_cached( "insert or ignore into consistent_point_v1 (lsn, size, created_at) values(?, ?, ?)", ) .unwrap(); stmt.execute(params![lsn, size, now]).unwrap(); } pub fn squash(&self, start_lsn: u64, end_lsn: u64) -> Result<()> { let mut db = self.db.lock(); let txn = db.transaction_with_behavior(TransactionBehavior::Immediate)?; txn.execute_batch(&format!(r#" delete from consistent_point_v1 where lsn > {from} and lsn < {to}; create temp table squash ( `lsn` integer not null primary key ); insert into temp.squash (lsn) select max(lsn) from redo_v1 where lsn > {from} and lsn <= {to} group by block_id; delete from redo_v1 where
random
[ { "content": "pub fn sha256hash(data: &[u8]) -> [u8; 32] {\n\n let mut h = Sha256::new();\n\n h.update(data);\n\n h.finalize().into()\n\n}\n", "file_path": "bsync/src/util.rs", "rank": 1, "score": 161150.78567438372 }, { "content": "pub fn align_block(data: &[u8]) -> Cow<[u8]> {\n\n let ...
Rust
src/network/message/coinbase/mod.rs
yobicash/yobi
20e639f8ffcf0ba8dea4123d3d4ef5a7a815e072
use libyobicash::errors::YErrorKind as LibErrorKind; use libyobicash::utils::random::*; use libyobicash::utils::time::*; use libyobicash::utils::version::*; use libyobicash::crypto::hash::digest::YDigest64; use libyobicash::crypto::hash::sha::YSHA512; use libyobicash::coinbase::YCoinbase; use bytes::{BytesMut, BufMut, BigEndian, ByteOrder}; use network::rpc_method::YRPCMethod; use version::*; use errors::*; #[derive(Clone, Eq, PartialEq, Debug, Default, Serialize, Deserialize)] pub struct YGetCbReq { pub id: YDigest64, pub version: YVersion, pub time: YTime, pub nonce: u32, pub method: YRPCMethod, pub cb_id: YDigest64, } impl YGetCbReq { pub fn new(cb_id: YDigest64) -> YHResult<YGetCbReq> { let mut req = YGetCbReq { id: YDigest64::default(), version: default_version(), time: YTime::now(), nonce: YRandom::u32(), method: YRPCMethod::GetCb, cb_id: cb_id, }; req.id = req.calc_id()?; Ok(req) } pub fn check(&self) -> YHResult<()> { if self.id != self.calc_id()? { return Err(YHErrorKind::Lib(LibErrorKind::InvalidChecksum).into()); } if self.version.major() > default_version().major() { return Err(YHErrorKind::Lib(LibErrorKind::InvalidVersion(self.version.to_string())).into()); } if self.time > YTime::now() { return Err(YHErrorKind::Lib(LibErrorKind::InvalidTime).into()); } if self.method != YRPCMethod::GetCb { return Err(YHErrorKind::InvalidRPCMethod.into()); } Ok(()) } pub fn calc_id(&self) -> YHResult<YDigest64> { let mut buf = BytesMut::new(); buf.put(&self.version.to_bytes()?[..]); buf.put(&self.time.to_bytes()[..]); buf.put_u32::<BigEndian>(self.nonce); buf.put(self.method.to_bytes()); buf.put(self.cb_id.to_bytes()); Ok(YSHA512::hash(&buf.to_vec())) } pub fn to_bytes(&self) -> YHResult<Vec<u8>> { self.check()?; let mut buf = BytesMut::new(); buf.put(self.id.to_bytes()); buf.put(&self.version.to_bytes()?[..]); buf.put(&self.time.to_bytes()[..]); buf.put_u32::<BigEndian>(self.nonce); buf.put(self.method.to_bytes()); buf.put(self.cb_id.to_bytes()); Ok(buf.to_vec()) } 
pub fn from_bytes(buf: &[u8]) -> YHResult<YGetCbReq> { if buf.len() != 156 { return Err(YHErrorKind::InvalidLength.into()); } let mut b = BytesMut::new(); b.extend_from_slice(buf); let id = YDigest64::from_bytes(b.get(0..64).unwrap())?; let version = YVersion::from_bytes(b.get(64..76).unwrap())?; let time = YTime::from_bytes(b.get(76..84).unwrap())?; let nonce = BigEndian::read_u32(b.get(84..88).unwrap()); let method = BigEndian::read_u32(b.get(88..92).unwrap()).into(); let cb_id = YDigest64::from_bytes(b.get(92..156).unwrap())?; let get_cb_req = YGetCbReq { id: id, version: version, time: time, nonce: nonce, method: method, cb_id: cb_id, }; get_cb_req.check()?; Ok(get_cb_req) } } #[derive(Clone, Eq, PartialEq, Default, Debug, Serialize, Deserialize)] pub struct YGetCbRes { pub id: YDigest64, pub version: YVersion, pub time: YTime, pub nonce: u32, pub method: YRPCMethod, pub cb: YCoinbase, } impl YGetCbRes { pub fn new(cb: &YCoinbase) -> YHResult<YGetCbRes> { let mut res = YGetCbRes { id: YDigest64::default(), version: default_version(), time: YTime::now(), nonce: YRandom::u32(), method: YRPCMethod::GetCb, cb: cb.clone(), }; res.id = res.calc_id()?; Ok(res) } pub fn check(&self) -> YHResult<()> { if self.id != self.calc_id()? 
{ return Err(YHErrorKind::Lib(LibErrorKind::InvalidChecksum).into()); } if self.version.major() > default_version().major() { return Err(YHErrorKind::Lib(LibErrorKind::InvalidVersion(self.version.to_string())).into()); } if self.time > YTime::now() { return Err(YHErrorKind::Lib(LibErrorKind::InvalidTime).into()); } if self.method != YRPCMethod::GetCb { return Err(YHErrorKind::InvalidRPCMethod.into()); } self.cb.check()?; Ok(()) } pub fn calc_id(&self) -> YHResult<YDigest64> { let mut buf = BytesMut::new(); buf.put(&self.version.to_bytes()?[..]); buf.put(&self.time.to_bytes()[..]); buf.put_u32::<BigEndian>(self.nonce); buf.put(self.method.to_bytes()); buf.put(self.cb.to_bytes()?); Ok(YSHA512::hash(&buf.to_vec())) } pub fn to_bytes(&self) -> YHResult<Vec<u8>> { self.check()?; let mut buf = BytesMut::new(); buf.put(self.id.to_bytes()); buf.put(&self.version.to_bytes()?[..]); buf.put(&self.time.to_bytes()[..]); buf.put_u32::<BigEndian>(self.nonce); buf.put(self.method.to_bytes()); buf.put(self.cb.to_bytes()?); Ok(buf.to_vec()) } pub fn from_bytes(buf: &[u8]) -> YHResult<YGetCbRes> { if buf.len() < 192 { return Err(YHErrorKind::InvalidLength.into()); } let mut b = BytesMut::new(); b.extend_from_slice(buf); let id = YDigest64::from_bytes(b.get(0..64).unwrap())?; let version = YVersion::from_bytes(b.get(64..76).unwrap())?; let time = YTime::from_bytes(b.get(76..84).unwrap())?; let nonce = BigEndian::read_u32(b.get(84..88).unwrap()); let method = BigEndian::read_u32(b.get(88..92).unwrap()).into(); let cb = YCoinbase::from_bytes(b.get(92..).unwrap())?; let get_cb_res = YGetCbRes { id: id, version: version, time: time, nonce: nonce, method: method, cb: cb, }; get_cb_res.check()?; Ok(get_cb_res) } }
use libyobicash::errors::YErrorKind as LibErrorKind; use libyobicash::utils::random::*; use libyobicash::utils::time::*; use libyobicash::utils::version::*; use libyobicash::crypto::hash::digest::YDigest64; use libyobicash::crypto::hash::sha::YSHA512; use libyobicash::coinbase::YCoinbase; use bytes::{BytesMut, BufMut, BigEndian, ByteOrder}; use network::rpc_method::YRPCMethod; use version::*; use errors::*; #[derive(Clone, Eq, PartialEq, Debug, Default, Serialize, Deserialize)] pub struct YGetCbReq { pub id: YDigest64, pub version: YVersion, pub time: YTime, pub nonce: u32, pub method: YRPCMethod, pub cb_id: YDigest64, } impl YGetCbReq { pub fn new(cb_id: YDigest64) -> YHResult<YGetCbReq> { let mut req = YGetCbReq { id: YDigest64::default(), version: default_version(), time: YTime::now(), nonce: YRandom::u32(), method: YRPCMethod::GetCb, cb_id: cb_id, }; req.id = req.calc_id()?; Ok(req) } pub fn check(&self) -> YHResult<()> { if self.id != self.calc_id()? { return Err(YHErrorKind::Lib(LibErrorKind::InvalidChecksum).into()); } if self.version.major() > default_version().major() { return Err(YHErrorKind::Lib(LibErrorKind::InvalidVersion(self.version.to_string())).into()); } if self.time > YTime::now() { return Err(YHErrorKind::Lib(LibErrorKind::InvalidTime).into()); } if self.method != YRPCMethod::GetCb { return Err(YHErrorKind::InvalidRPCMethod.into()); } Ok(()) } pub fn calc_id(&self) -> YHResult<YDigest64> { let mut buf = BytesMut::new(); buf.put(&self.version.to_bytes()?[..]); buf.put(&self.time.to_bytes()[..]); buf.put_u32::<BigEndian>(self.nonce); buf.put(self.method.to_bytes()); buf.put(self.cb_id.to_bytes()); Ok(YSHA512::hash(&buf.to_vec())) } pub fn to_bytes(&self) -> YHResult<Vec<u8>> { self.check()?; let mut buf = BytesMut::new(); buf.put(self.id.to_bytes()); buf.put(&self.version.to_bytes()?[..]); buf.put(&self.time.to_bytes()[..]); buf.put_u32::<BigEndian>(self.nonce); buf.put(self.method.to_bytes()); buf.put(self.cb_id.to_bytes()); Ok(buf.to_vec()) } 
pub fn from_bytes(buf: &[u8]) -> YHResult<YGetCbReq> { if buf.len() != 156 { return Err(YHErrorKind::InvalidLength.into()); } let mut b = BytesMut::new(); b.extend_from_slice(buf); let id = YDigest64::from_bytes(b.get(0..64).unwrap())?; let version = YVersion::from_bytes(b.get(64..76).unwrap())?; let time = YTime::from_bytes(b.get(76..84).unwrap())?; let nonce = BigEndian::read_u32(b.get(84..88).unwrap()); let method = BigEndian::read_u32(b.get(88..92).unwrap()).into(); let cb_id = YDigest64::from_bytes(b.get(92..156).unwrap())?; let get_cb_req = YGetCbReq { id: id, version: version, time: time, nonce: nonce, method: method, cb_id: cb_id, }; get_cb_req.check()?; Ok(get_cb_req) } } #[derive(Clone, Eq, PartialEq, Default, Debug, Serialize, Deserialize)] pub struct YGetCbRes { pub id: YDigest64, pub version: YVersion, pub time: YTime, pub nonce: u32, pub method: YRPCMethod, pub cb: YCoinbase, } impl YGetCbRes { pub fn new(cb: &YCoinbase) -> YHResult<YGetCbRes> { let mut res = YGetCbRes { id: YDigest64::default(), version: default_version(), time: YTime::now(), nonce: YRandom::u32(), method: YRPCMethod::GetCb, cb: cb.clone(), }; res.id = res.calc_id()?; Ok(res) } pub fn check(&self) -> YHResult<()> { if self.id != self.calc_id()? { return Err(YHErrorKind::Lib(LibErrorKind::InvalidChecksum).into()); } if self.version.major() > default_version().major() { return Err(YHErrorKind::Lib(LibErrorKind::InvalidVersion(self.version.to_string())).into()); } if self.time > YTime::now() { return Err(YHErrorKind::Lib(LibErrorKind::InvalidTime).into()); } if self.method != YRPCMethod::GetCb { return Err(YHErrorKind::InvalidRPCMethod.into()); } self.cb.check()?; Ok(()) } pub fn
BigEndian::read_u32(b.get(88..92).unwrap()).into(); let cb = YCoinbase::from_bytes(b.get(92..).unwrap())?; let get_cb_res = YGetCbRes { id: id, version: version, time: time, nonce: nonce, method: method, cb: cb, }; get_cb_res.check()?; Ok(get_cb_res) } }
calc_id(&self) -> YHResult<YDigest64> { let mut buf = BytesMut::new(); buf.put(&self.version.to_bytes()?[..]); buf.put(&self.time.to_bytes()[..]); buf.put_u32::<BigEndian>(self.nonce); buf.put(self.method.to_bytes()); buf.put(self.cb.to_bytes()?); Ok(YSHA512::hash(&buf.to_vec())) } pub fn to_bytes(&self) -> YHResult<Vec<u8>> { self.check()?; let mut buf = BytesMut::new(); buf.put(self.id.to_bytes()); buf.put(&self.version.to_bytes()?[..]); buf.put(&self.time.to_bytes()[..]); buf.put_u32::<BigEndian>(self.nonce); buf.put(self.method.to_bytes()); buf.put(self.cb.to_bytes()?); Ok(buf.to_vec()) } pub fn from_bytes(buf: &[u8]) -> YHResult<YGetCbRes> { if buf.len() < 192 { return Err(YHErrorKind::InvalidLength.into()); } let mut b = BytesMut::new(); b.extend_from_slice(buf); let id = YDigest64::from_bytes(b.get(0..64).unwrap())?; let version = YVersion::from_bytes(b.get(64..76).unwrap())?; let time = YTime::from_bytes(b.get(76..84).unwrap())?; let nonce = BigEndian::read_u32(b.get(84..88).unwrap()); let method =
random
[ { "content": "pub fn default_version() -> YVersion {\n\n YVersion::from_str(VERSION).unwrap()\n\n}\n", "file_path": "src/version/mod.rs", "rank": 0, "score": 144484.94624858562 }, { "content": "fn main() {\n\n /*\n\n let opt = YNodeOpt::from_args();\n\n println!(\"yobicashd opt: ...
Rust
src/syndication.rs
qezz/rjbot
14ae18305ea36588750b8c34f49c749bd98217c3
use crate::context::Context; use atom_syndication::{Error as AtomError, Feed as AtomFeed}; use bytes::buf::BufExt; use carapax::{methods::SendMessage, types::ParseMode, ExecuteError}; use reqwest::{Error as HttpError, StatusCode}; use rss::{self, Channel as RssChannel, Error as RssError}; use std::{error::Error, fmt, str::FromStr, time::Duration}; use tokio::time::error::Elapsed; use tokio_postgres::Error as PostgresError; pub struct Syndication { context: Context, } impl Syndication { pub fn new(context: Context) -> Self { Self { context } } async fn get_feeds(&self) -> Result<Vec<Feed>, SyndicationError> { let mut result = Vec::new(); let rows = self .context .pg_client .query( "SELECT id, url, kind, last_entry FROM feeds WHERE extract(epoch from (now() - last_update)) >= timeout OR last_update IS NULL", &[], ) .await .map_err(SyndicationError::GetFeeds)?; for row in rows { let id: i32 = row.get(0); let url: String = row.get(1); let kind: String = row.get(2); let last_entry: Option<String> = row.get(3); result.push(Feed { id, url, kind: kind.parse()?, last_entry, }) } Ok(result) } async fn get_last_entries( &self, url: &str, kind: FeedKind, last_id: Option<String>, ) -> Result<Vec<Post>, SyndicationError> { let rep = self.context.http_client.get(url).send().await?; let status = rep.status(); if !status.is_success() { return Err(SyndicationError::BadStatus(status)); } let data = rep.bytes().await?; let new_entries = match kind { FeedKind::Rss => { let channel = RssChannel::read_from(data.reader())?; let items = channel.into_items(); if items.is_empty() { vec![] } else { let pos = items.iter().position(|e| match (e.guid(), last_id.clone()) { (Some(guid), Some(last_id)) => guid.value() == last_id, _ => false, }); if let Some(pos) = pos { items[..pos].iter().rev().filter_map(Post::try_from_rss).collect() } else { items[..=0].iter().filter_map(Post::try_from_rss).collect() } } } FeedKind::Atom => { let feed = AtomFeed::read_from(data.reader())?; let entries = 
feed.entries(); if entries.is_empty() { vec![] } else { let pos = entries.iter().position(|e| { if let Some(last_id) = last_id.clone() { return e.id() == last_id; } false }); if let Some(pos) = pos { entries[..pos].iter().rev().filter_map(Post::try_from_atom).collect() } else { entries[..=0].iter().filter_map(Post::try_from_atom).collect() } } } }; Ok(new_entries) } pub async fn run(self) -> Result<(), SyndicationError> { let interval = Duration::from_secs(60); let fetch_timeout = Duration::from_secs(600); loop { for feed in self.get_feeds().await? { let _feed_clone = feed.clone(); let last_entry_future = self.get_last_entries(&feed.url, feed.kind, feed.last_entry); let timeout_result = tokio::time::timeout(fetch_timeout, last_entry_future).await; let result = || -> Result<Vec<Post>, SyndicationError> { let new_entries = timeout_result??; Ok(new_entries) }; match result() { Ok(ref entries) => { for entry in entries { let formatted_entry = format!( r#"<a href="{}">{}</a>"#, entry.link, ParseMode::Html.escape(entry.title.clone()), ); self.context .api .execute( SendMessage::new(self.context.config.chat_id, formatted_entry) .parse_mode(ParseMode::Html), ) .await .map_err(SyndicationError::SendMessage)?; self.context .pg_client .execute("UPDATE feeds SET last_entry = $2 WHERE id = $1", &[&feed.id, &entry.id]) .await .map_err(SyndicationError::UpdateFeed)?; } self.context .pg_client .execute("UPDATE feeds SET last_update = now() WHERE id = $1", &[&feed.id]) .await .map_err(SyndicationError::UpdateFeed)?; } Err(err) => { log::error!("inner syndication error: {}, ({:?})", err, err); } } } tokio::time::sleep(interval).await } } } #[derive(Clone, Debug)] struct Feed { id: i32, url: String, kind: FeedKind, last_entry: Option<String>, } #[derive(Clone, Debug)] enum FeedKind { Atom, Rss, } impl FromStr for FeedKind { type Err = SyndicationError; fn from_str(raw: &str) -> Result<Self, Self::Err> { Ok(match raw { "atom" => FeedKind::Atom, "rss" => FeedKind::Rss, _ => return 
Err(SyndicationError::UnknownFeedKind(String::from(raw))), }) } } #[derive(Debug)] pub enum SyndicationError { Atom(AtomError), BadStatus(StatusCode), GetFeeds(PostgresError), HttpRequest(HttpError), Rss(RssError), SendMessage(ExecuteError), Timeout(Elapsed), UpdateFeed(PostgresError), UnknownFeedKind(String), } impl From<AtomError> for SyndicationError { fn from(err: AtomError) -> Self { SyndicationError::Atom(err) } } impl From<HttpError> for SyndicationError { fn from(err: HttpError) -> Self { SyndicationError::HttpRequest(err) } } impl From<RssError> for SyndicationError { fn from(err: RssError) -> Self { SyndicationError::Rss(err) } } impl From<Elapsed> for SyndicationError { fn from(err: Elapsed) -> Self { SyndicationError::Timeout(err) } } impl Error for SyndicationError { fn source(&self) -> Option<&(dyn Error + 'static)> { match self { SyndicationError::GetFeeds(err) => Some(err), SyndicationError::HttpRequest(err) => Some(err), SyndicationError::Rss(err) => Some(err), SyndicationError::SendMessage(err) => Some(err), SyndicationError::UpdateFeed(err) => Some(err), _ => None, } } } impl fmt::Display for SyndicationError { fn fmt(&self, out: &mut fmt::Formatter) -> fmt::Result { match self { SyndicationError::Atom(err) => write!(out, "failed to parse atom feed: {}", err), SyndicationError::BadStatus(status) => write!(out, "server repsond with {} status code", status), SyndicationError::GetFeeds(err) => write!(out, "failed to get feeds: {}", err), SyndicationError::HttpRequest(err) => write!(out, "http request error: {}", err), SyndicationError::Rss(err) => write!(out, "failed to parse RSS: {}", err), SyndicationError::SendMessage(err) => write!(out, "failed to send message: {}", err), SyndicationError::Timeout(elapsed) => write!(out, "timeout elapsed: {}", elapsed), SyndicationError::UpdateFeed(err) => write!(out, "failed to update feed: {}", err), SyndicationError::UnknownFeedKind(kind) => write!(out, "unknown feed kind: {}", kind), } } } #[derive(Debug)] 
pub struct Post { link: String, title: String, id: Option<String>, } impl Post { pub fn try_from_rss(item: &rss::Item) -> Option<Self> { match (item.title(), item.link()) { (Some(title), Some(link)) => Some(Post { title: title.into(), link: link.into(), id: item.guid().map(|x| x.value().to_string()), }), _ => None, } } pub fn try_from_atom(entry: &atom_syndication::Entry) -> Option<Self> { let links = entry.links(); if links.is_empty() { None } else { let link = &links[0]; let title = link.title().unwrap_or_else(|| entry.title()); Some(Post { title: title.into(), link: link.href().into(), id: Some(entry.id().to_string()), }) } } }
use crate::context::Context; use atom_syndication::{Error as AtomError, Feed as AtomFeed}; use bytes::buf::BufExt; use carapax::{methods::SendMessage, types::ParseMode, ExecuteError}; use reqwest::{Error as HttpError, StatusCode}; use rss::{self, Channel as RssChannel, Error as RssError}; use std::{error::Error, fmt, str::FromStr, time::Duration}; use tokio::time::error::Elapsed; use tokio_postgres::Error as PostgresError; pub struct Syndication { context: Context, } impl Syndication { pub fn new(context: Context) -> Self { Self { context } } async fn get_feeds(&self) -> Result<Vec<Feed>, SyndicationError> { let mut result = Vec::new(); let rows = self .context .pg_client .query( "SELECT id, url, kind, last_entry FROM feeds WHERE extract(epoch from (now() - last_update)) >= timeout OR last_update IS NULL", &[], ) .await .map_err(SyndicationError::GetFeeds)?; for row in rows { let id: i32 = row.get(0); let url: String = row.get(1); let kind: String = row.get(2); let last_entry: Option<String> = row.get(3); result.push(Feed { id, url, kind: kind.parse()?, last_entry, }) } Ok(result) } async fn get_last_entries( &self, url: &str, kind: FeedKind, last_id: Option<String>, ) -> Result<Vec<Post>, SyndicationError> { let rep = self.context.http_client.get(url).send().await?; let status = rep.status(); if !status.is_success() { return Err(SyndicationError::BadStatus(status)); } let data = rep.bytes().await?; let new_entries = match kind { FeedKind::Rss => { let channel = RssChannel::read_from(data.reader())?; let items = channel.into_items(); if items.is_empty() { vec![] } else { let pos = items.iter().position(|e| match (e.guid(), last_id.clone()) { (Some(guid), Some(last_id)) => guid.value() == last_id, _ => false, }); if let Some(pos) = pos { items[..pos].iter().rev().filter_map(Post::try_from_rss).collect() } else { items[..=0].iter().filter_map(Post::try_from_rss).collect() } } } FeedKind::Atom => { let feed = AtomFeed::read_from(data.reader())?; let entries = 
feed.entries(); if entries.is_empty() { vec![] } else { let pos = entries.iter().position(|e| { if let Some(last_id) = last_id.clone() { return e.id() == last_id; } false }); if let Some(pos) = pos { entries[..pos].iter().rev().filter_map(Post::try_from_atom).collect() } else { entries[..=0].iter().filter_map(Post::try_from_atom).collect() } } } }; Ok(new_entries) } pub async fn run(self) -> Result<(), SyndicationError> { let interval = Duration::from_secs(60); let fetch_timeout = Duration::from_secs(600); loop { for feed in self.get_feeds().await? { let _feed_clone = feed.clone(); let last_entry_future = self.get_last_entries(&feed.url, feed.kind, feed.last_entry); let timeout_result = tokio::time::timeout(fetch_timeout, last_entry_future).await; let result = || -> Result<Vec<Post>, SyndicationError> { let new_entries = timeout_result??; Ok(new_entries) }; match result() { Ok(ref entries) => { for entry in entries { let formatted_entry = format!( r#"<a href="{}">{}</a>"#, entry.link, ParseMode::Html.escape(entry.title.clone()), ); self.context .api .execute( SendMessage::new(self.context.config.chat_id, formatted_entry) .parse_mode(ParseMode::Html), ) .await .map_err(SyndicationError::SendMessage)?; self.context .pg_client .execute("UPDATE feeds SET last_entry = $2 WHERE id = $1", &[&feed.id, &entry.id]) .await .map_err(SyndicationError::UpdateFeed)?; } self.context .pg_client .execute("UPDATE feeds SET last_update = now() WHERE id = $1", &[&feed.id]) .await .map_err(SyndicationError::UpdateFeed)?; } Err(err) => { log::error!("inner syndication error: {}, ({:?})", err, err); } } } tokio::time::sleep(interval).await } } } #[derive(Clone, Debug)] struct Feed { id: i32, url: String, kind: FeedKind, last_entry: Option<String>, } #[derive(Clone, Debug)] enum FeedKind { Atom, Rss, } impl FromStr for FeedKind { type Err = SyndicationError; fn from_str(raw: &str) -> Result<Self, Self::Err> { Ok(match raw { "atom" => FeedKind::Atom, "rss" => FeedKind::Rss, _ => return 
Err(SyndicationError::UnknownFeedKind(String::from(raw))), }) } } #[derive(Debug)] pub enum SyndicationError { Atom(AtomError), BadStatus(StatusCode), GetFeeds(PostgresError), HttpRequest(HttpError), Rss(RssError), SendMessage(ExecuteError), Timeout(Elapsed), UpdateFeed(PostgresError), UnknownFeedKind(String), } impl From<AtomError> for SyndicationError { fn from(err: AtomError) -> Self { SyndicationError::Atom(err) } } impl From<HttpError> for SyndicationError { fn from(err: HttpError) -> Self { SyndicationError::HttpRequest(err) } } impl From<RssError> for SyndicationError { fn from(err: RssError) -> Self { SyndicationError::Rss(err) } } impl From<Elapsed> for SyndicationError { fn from(err: Elapsed) -> Self { SyndicationError::Timeout(err) } } impl Error for SyndicationError { fn source(&self) -> Option<&(dyn Error + 'static)> { match self { SyndicationError::GetFeeds(err) => Some(err), SyndicationError::HttpRequest(err) => Some(err), SyndicationError::Rss(err) => Some(err), SyndicationError::SendMessage(err) => Some(err), SyndicationError::UpdateFeed(err) => Some(err), _ => None, } } } impl fmt::Display for SyndicationError { fn fmt(&self, out: &mut fmt::Formatter) -> fmt::Result { match self { SyndicationError::Atom(err) => write!(out, "failed to parse atom feed: {}", err), SyndicationError::BadStatus(status) => write!(out, "server repsond with {} status code", status), SyndicationError::GetFeeds(err) => write!(out, "failed to get feeds: {}", err), SyndicationError::HttpRequest(err) => write!(out, "http request error: {}", err), SyndicationError::Rss(err) => write!(out, "failed to parse RSS: {}", err), SyndicationError::SendMessage(err) => write!(out, "failed to send message: {}", err), SyndicationError::Timeout(elapsed) => write!(out, "timeout elapsed: {}", elapsed), SyndicationError::UpdateFeed(err) => write!(out, "failed to update feed: {}", err), SyndicationError::UnknownFeedKind(kind) => write!(out, "unknown feed kind: {}", kind), } } } #[derive(Debug)] 
pub struct Post { link: String, title: String, id: Option<String>, } impl Post { pub fn try_from_rss(item: &rss::Item) -> Option<Self> { match (item.title(), item.link()) { (Some(title), Some(link)) => Some(Post { title: title.into(), link: link.into(), id: item.guid().map(|x| x.value().to_string()), }), _ => None, } } pub fn try_from_atom(entry: &atom_syndication::Entry) -> Option<Self> { let links = entry.links(); if links.is_empty() { None } else { let link = &links[0];
}
let title = link.title().unwrap_or_else(|| entry.title()); Some(Post { title: title.into(), link: link.href().into(), id: Some(entry.id().to_string()), }) } }
function_block-function_prefix_line
[]
Rust
src/utils.rs
matteopolak/stock-display
d03ad470d7ef786f3652dd8ea5ba5523316d0f3a
use colored::{ColoredString, Colorize}; use plotlib::page::Page; use plotlib::repr::Plot; use plotlib::style::{PointMarker, PointStyle}; use plotlib::view::ContinuousView; use reqwest::{header, Client, Error, Response}; use std::collections::VecDeque; use std::io::{self, Write}; use std::str; use std::time::{Duration, SystemTime}; use termsize::{self, Size}; use crate::constants; use crate::structs; pub fn get_input_string(phrase: &str, input_length: usize) -> String { let mut input: String = String::with_capacity(input_length); print!("{}", phrase); io::stdout().flush().ok(); io::stdin() .read_line(&mut input) .expect("Could not read from stdin"); if input.ends_with('\n') { input.pop(); if input.ends_with('\r') { input.pop(); } } input } pub fn pretty_print_data( ticker: &str, points: &VecDeque<(f64, f64)>, current_price: f64, last_price: f64, average_price: f64, width: u32, height: u32, index: u32, (mtd, qtd, ytd): (f64, f64, f64), ) -> () { let plot: Plot = Plot::new(Vec::from_iter(points.clone().into_iter())).point_style( PointStyle::new() .marker(PointMarker::Circle) .colour("#DD3355"), ); let view: ContinuousView = ContinuousView::new().add(plot).x_range( (if index <= width { 0 } else { index - width }) as f64, width as f64, ); println!( "{}", Page::single(&view) .dimensions(width, height) .to_text() .unwrap() ); println!( " {} | Price: {} | Last: {} | Average: {} | Change: {} | MTD: {} | QTD: {} | YTD: {}", ticker.cyan(), round_and_whiten(current_price), round_and_whiten(last_price), diff_without_sign(average_price, current_price), diff_with_sign(last_price, current_price), diff_with_sign_percent(mtd, current_price), diff_with_sign_percent(qtd, current_price), diff_with_sign_percent(ytd, current_price), ); } pub fn round_and_whiten(num: f64) -> ColoredString { format!("${:.2}", num).white() } pub fn diff_with_sign(old: f64, new: f64) -> ColoredString { let diff = new - old; let greater = diff >= 0.; let string = format!("{}${:.2}", if greater { '+' } else { 
'-' }, diff.abs()); if greater { string.green() } else { string.red() } } pub fn diff_without_sign(old: f64, new: f64) -> ColoredString { let diff = new - old; let string = format!("${:.2}", old); if diff >= 0. { string.green() } else { string.red() } } pub fn diff_with_sign_percent(old: f64, new: f64) -> ColoredString { let diff = new - old; let string = format!("{:+.2}%", diff / old * 100.); if diff >= 0. { string.green() } else { string.red() } } pub fn sleep(s: u64) -> tokio::time::Sleep { tokio::time::sleep(tokio::time::Duration::from_secs(s)) } pub fn terminal_size() -> (u32, u32) { let Size { cols: x, rows: y } = termsize::get().unwrap(); (x as u32 - 15, y as u32 - 6) } pub async fn stock_price(uri: &str, client: &Client) -> Option<f64> { let request: Result<Response, Error> = client .get(uri) .header(header::ACCEPT_LANGUAGE, "en-US;q=0.9") .header(header::ACCEPT_ENCODING, "text") .header(header::USER_AGENT, constants::USER_AGENT_HEADER) .send() .await; if let Ok(response) = request { let json: structs::NasdaqDataWrap = match response.json::<structs::NasdaqDataWrap>().await { Ok(j) => j, Err(_) => return None, }; let raw: Vec<u8> = json .data .primaryData .lastSalePrice .into_bytes() .into_iter() .skip(1) .collect::<Vec<u8>>(); let price: f64 = str::from_utf8(&raw).unwrap().parse::<f64>().unwrap(); return Some(price); } None } pub async fn is_valid_ticker(ticker: &str, client: &Client) -> bool { if !(1..5).contains(&ticker.len()) { return false; } let uri: String = constants::NASDAQ_API_ENDPOINT.replace("{ticker}", ticker); let request: Result<Response, Error> = client .get(uri) .header(header::ACCEPT_LANGUAGE, "en-US;q=0.9") .header(header::ACCEPT_ENCODING, "text") .header(header::USER_AGENT, constants::USER_AGENT_HEADER) .send() .await; if let Ok(response) = request { let status = match response.json::<structs::NasdaqStatusWrap>().await { Ok(j) => j.status.rCode, Err(_) => return false, }; return status == 200; } false } pub async fn ticker_history(ticker: 
&str, client: &Client) -> Option<(f64, f64, f64)> { let year = current_year(); let uri = constants::MARKETSTACK_API_ENDPOINT .replace("{ticker}", ticker) .replace("{start}", &format!("{}-01-01", year)) .replace("{end}", &format!("{}-12-31", year)); let request: Result<Response, Error> = client.get(uri).send().await; if let Ok(response) = request { let days = match response.json::<structs::NameStackDataWrap>().await { Ok(j) => j.data, Err(_) => return None, }; let length = days.len(); let last = match days.get(length - 1) { Some(d) => d, None => return None, }; let mtd = days.get(29).unwrap_or(last).open; let qtd = days.get(90).unwrap_or(last).open; let ytd = days.get(364).unwrap_or(last).open; return Some((mtd, qtd, ytd)); } None } pub fn current_year() -> u64 { let now: Duration = SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .expect("We must be in the past..."); 1970 + now.as_secs() / 31536000 }
use colored::{ColoredString, Colorize}; use plotlib::page::Page; use plotlib::repr::Plot; use plotlib::style::{PointMarker, PointStyle}; use plotlib::view::ContinuousView; use reqwest::{header, Client, Error, Response}; use std::collections::VecDeque; use std::io::{self, Write}; use std::str; use std::time::{Duration, SystemTime}; use termsize::{self, Size}; use crate::constants; use crate::structs; pub fn get_input_string(phrase: &str, input_length: usize) -> String { let mut input: String = String::with_capacity(input_length); print!("{}", phrase); io::stdout().flush().ok(); io::stdin() .read_line(&mut input) .expect("Could not read from stdin"); if input.ends_with('\n') { input.pop(); if input.ends_with('\r') { input.pop(); } } input } pub fn pretty_print_data( ticker: &str, points: &VecDeque<(f64, f64)>, current_price: f64, last_price: f64, average_price: f64, width: u32, height: u32, index: u32, (mtd, qtd, ytd): (f64, f64, f64), ) -> () { let plot: Plot = Plot::new(Vec::from_iter(points.clone().into_iter())).point_style( PointStyle::new() .marker(PointMarker::Circle) .colour("#DD3355"), ); let view: ContinuousView = ContinuousView::new().add(plot).x_range( (if index <= width { 0 } else { index - width }) as f64, width as f64, ); println!( "{}", Page::single(&view) .dimensions(width, height) .to_text() .unwrap() ); println!( " {} | Price: {} | Last: {} | Average: {} | Change: {} | MTD: {} | QTD: {} | YTD: {}", ticker.cyan(), round_and_whiten(current_price), round_and_whiten(last_price), diff_without_sign(average_price, current_price), diff_with_sign(last_price, current_price), diff_with_sign_percent(mtd, current_price), diff_with_sign_percent(qtd, current_price), diff_with_sign_percent(ytd, current_price), ); } pub fn round_and_whiten(num: f64) -> ColoredString { format!("${:.2}", num).white() } pub fn diff_with_sign(old: f64, new: f64) -> ColoredString { let diff = new - old; let greater = diff >= 0.; let string = format!("{}${:.2}", if greater { '+' } else { 
'-' }, diff.abs()); if greater { string.green() } else { string.red() } } pub fn diff_without_sign(old: f64, new: f64) -> ColoredString { let diff = new - old; let string = format!("${:.2}", old); if diff >= 0. { string.green() } else { string.red() } } pub fn diff_with_sign_percent(old: f64, new: f64) -> ColoredString { let diff = new - old; let string = format!("{:+.2}%", diff / old * 100.); if diff >= 0. { string.green() } else { string.red() } } pub fn sleep(s: u64) -> tokio::time::Sleep { tokio::time::sleep(tokio::time::Duration::from_secs(s)) } pub fn terminal_size() -> (u32, u32) { let Size { cols: x, rows: y } = termsize::get().unwrap(); (x as u32 - 15, y as u32 - 6) } pub async fn stock_price(uri: &str, client: &Client) -> Option<f64> { let request: Result<Response, Error> = client .get(uri) .header(header::ACCEPT_LANGUAGE, "en-US;q=0.9") .header(header::ACCEPT_ENCODING, "text") .header(header::USER_AGENT, constants::USER_AGENT_HEADER) .send() .await; if let Ok(response) = request { let json: structs::NasdaqDataWrap = match response.json::<structs::NasdaqDataWrap>().await { Ok(j) => j, Err(_) => return None, }; let raw: Vec<u8> =
; } let uri: String = constants::NASDAQ_API_ENDPOINT.replace("{ticker}", ticker); let request: Result<Response, Error> = client .get(uri) .header(header::ACCEPT_LANGUAGE, "en-US;q=0.9") .header(header::ACCEPT_ENCODING, "text") .header(header::USER_AGENT, constants::USER_AGENT_HEADER) .send() .await; if let Ok(response) = request { let status = match response.json::<structs::NasdaqStatusWrap>().await { Ok(j) => j.status.rCode, Err(_) => return false, }; return status == 200; } false } pub async fn ticker_history(ticker: &str, client: &Client) -> Option<(f64, f64, f64)> { let year = current_year(); let uri = constants::MARKETSTACK_API_ENDPOINT .replace("{ticker}", ticker) .replace("{start}", &format!("{}-01-01", year)) .replace("{end}", &format!("{}-12-31", year)); let request: Result<Response, Error> = client.get(uri).send().await; if let Ok(response) = request { let days = match response.json::<structs::NameStackDataWrap>().await { Ok(j) => j.data, Err(_) => return None, }; let length = days.len(); let last = match days.get(length - 1) { Some(d) => d, None => return None, }; let mtd = days.get(29).unwrap_or(last).open; let qtd = days.get(90).unwrap_or(last).open; let ytd = days.get(364).unwrap_or(last).open; return Some((mtd, qtd, ytd)); } None } pub fn current_year() -> u64 { let now: Duration = SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .expect("We must be in the past..."); 1970 + now.as_secs() / 31536000 }
json .data .primaryData .lastSalePrice .into_bytes() .into_iter() .skip(1) .collect::<Vec<u8>>(); let price: f64 = str::from_utf8(&raw).unwrap().parse::<f64>().unwrap(); return Some(price); } None } pub async fn is_valid_ticker(ticker: &str, client: &Client) -> bool { if !(1..5).contains(&ticker.len()) { return false
random
[ { "content": "\t// create a counter\n\n\tlet mut i: u32 = 1;\n\n\n\n\t// create some utility variables for metrics\n\n\tlet mut first: bool = true;\n\n\tlet mut last_price: f64 = 0.;\n\n\tlet mut total_price: f64 = 0.;\n\n\n\n\tlet history = match utils::ticker_history(&ticker, &client).await {\n\n\t\tSome(h) =...
Rust
src/bin/nydus-image/main.rs
cloudaice/image-service
7c982b2db3282d2616d37134cef8dd9ec2df426d
#[macro_use(crate_version, crate_authors)] extern crate clap; extern crate stderrlog; mod builder; mod node; mod stargz; mod tree; mod validator; #[macro_use] extern crate log; extern crate serde; const BLOB_ID_MAXIMUM_LENGTH: usize = 1024; use clap::{App, Arg, SubCommand}; use vmm_sys_util::tempfile::TempFile; use std::collections::BTreeMap; use std::fs::metadata; use std::fs::rename; use std::io::{self, Result, Write}; use std::path::{Path, PathBuf}; use std::sync::Arc; use builder::SourceType; use nydus_utils::einval; use nydus_utils::log_level_to_verbosity; use rafs::metadata::digest; use rafs::storage::{backend, compress, factory}; use validator::Validator; fn upload_blob( backend: Arc<dyn backend::BlobBackendUploader>, blob_id: &str, blob_path: &Path, ) -> Result<()> { backend .upload(blob_id, blob_path, |(current, total)| { io::stdout().flush().unwrap(); print!("\r"); print!( "Backend blob uploading: {}/{} bytes ({}%)", current, total, current * 100 / total, ); }) .map_err(|e| { error!("upload_blob backend.upload {:?}", e); e })?; print!("\r"); io::stdout().flush().unwrap(); Ok(()) } fn get_readahead_files(source: &Path) -> Result<BTreeMap<PathBuf, Option<u64>>> { let stdin = io::stdin(); let mut files = BTreeMap::new(); let source_path = source.canonicalize().unwrap(); loop { let mut file = String::new(); let ret = stdin.read_line(&mut file); match ret { Ok(size) => { if size == 0 { break; } let file_name = file.trim(); if file_name.is_empty() { continue; } let path = Path::new(file_name); if !path.exists() { warn!("{} does not exist, ignore it!", path.to_str().unwrap()); continue; } let canonicalized_name; match path.canonicalize() { Ok(p) => { if !p.starts_with(&source_path) { continue; } canonicalized_name = p; } Err(_) => continue, } let file_name_trimmed = Path::new("/").join( canonicalized_name .strip_prefix(&source_path) .unwrap() .to_path_buf(), ); debug!( "readahead file: {}, trimmed file name {}", file_name, file_name_trimmed.to_str().unwrap() ); 
files.insert(file_name_trimmed, None); } Err(err) => { error!("Failed to parse readahead files: {}", err); } } } Ok(files) } fn main() -> Result<()> { let cmd = App::new("nydus image builder") .version(crate_version!()) .author(crate_authors!()) .about("Build image using nydus format.") .subcommand( SubCommand::with_name("create") .about("dump image bootstrap and upload blob to storage backend") .arg( Arg::with_name("SOURCE") .help("source directory") .required(true) .index(1), ) .arg( Arg::with_name("source-type") .long("source-type") .help("source type") .takes_value(true) .default_value("directory") .possible_values(&["directory", "stargz_index"]) ) .arg( Arg::with_name("blob") .long("blob") .help("blob file path") .takes_value(true), ) .arg( Arg::with_name("bootstrap") .long("bootstrap") .help("bootstrap file path (required)") .takes_value(true), ) .arg( Arg::with_name("blob-id") .long("blob-id") .help("blob id (as object id in backend)") .takes_value(true), ) .arg( Arg::with_name("compressor") .long("compressor") .help("how blob will be compressed: none, lz4_block (default)") .takes_value(true) .required(false) .default_value("lz4_block"), ) .arg( Arg::with_name("digester") .long("digester") .help("how inode and blob chunk will be digested: blake3 (default), sha256") .takes_value(true) .required(false) .default_value("blake3"), ) .arg( Arg::with_name("parent-bootstrap") .long("parent-bootstrap") .help("bootstrap file path of parent (optional)") .takes_value(true) .required(false), ) .arg( Arg::with_name("backend-type") .long("backend-type") .help("blob storage backend type (enable backend upload if specified)") .takes_value(true), ) .arg( Arg::with_name("backend-config") .long("backend-config") .help("blob storage backend config (json)") .takes_value(true), ) .arg( Arg::with_name("prefetch-policy") .long("prefetch-policy") .help("Prefetch policy: fs(issued from Fs layer), blob(issued from backend/blob layer), none(no readahead is needed)") .takes_value(true) 
.required(false) .default_value("none"), ) .arg( Arg::with_name("repeatable") .long("repeatable") .help("Produce environment independent image") .takes_value(false) .required(false), ) .arg( Arg::with_name("disable-check") .long("disable-check") .help("Disable to validate bootstrap file after building") .takes_value(false) .required(false) ) ) .subcommand( SubCommand::with_name("check") .about("validate image bootstrap") .arg( Arg::with_name("bootstrap") .long("bootstrap") .help("bootstrap file path (required)") .takes_value(true), ) ) .arg( Arg::with_name("log-level") .long("log-level") .default_value("info") .help("Specify log level: trace, debug, info, warn, error") .takes_value(true) .required(false) .global(true), ) .get_matches(); let v = cmd .value_of("log-level") .unwrap() .parse() .unwrap_or(log::LevelFilter::Warn); stderrlog::new() .quiet(false) .verbosity(log_level_to_verbosity(v)) .timestamp(stderrlog::Timestamp::Second) .init() .unwrap(); if let Some(matches) = cmd.subcommand_matches("create") { let source_path = Path::new(matches.value_of("SOURCE").expect("SOURCE is required")); let source_type: SourceType = matches .value_of("source-type") .expect("source-type is required") .parse()?; let source_file = metadata(source_path) .map_err(|e| einval!(format!("failed to get source path {:?}", e)))?; let mut blob_id = String::new(); if let Some(p_blob_id) = matches.value_of("blob-id") { blob_id = String::from(p_blob_id); if blob_id.len() > BLOB_ID_MAXIMUM_LENGTH { return Err(einval!(format!( "blob id is limited to length {}", BLOB_ID_MAXIMUM_LENGTH ))); } } let mut compressor = matches.value_of("compressor").unwrap_or_default().parse()?; let mut digester = matches.value_of("digester").unwrap_or_default().parse()?; let repeatable = matches.is_present("repeatable"); match source_type { SourceType::Directory => { if !source_file.is_dir() { return Err(einval!("source must be a directory")); } } SourceType::StargzIndex => { if !source_file.is_file() { return 
Err(einval!("source must be a JSON file")); } if blob_id.trim() == "" { return Err(einval!("blob-id can't be empty")); } if compressor != compress::Algorithm::GZip { trace!("compressor set to {}", compress::Algorithm::GZip); } compressor = compress::Algorithm::GZip; if digester != digest::Algorithm::Sha256 { trace!("digester set to {}", digest::Algorithm::Sha256); } digester = digest::Algorithm::Sha256; } } let bootstrap_path = Path::new( matches .value_of("bootstrap") .expect("bootstrap is required"), ); let temp_file = TempFile::new_with_prefix("").unwrap(); let mut blob_path = matches .value_of("blob") .map(|p| Path::new(p)) .unwrap_or_else(|| temp_file.as_path()); let mut parent_bootstrap = Path::new(""); if let Some(_parent_bootstrap) = matches.value_of("parent-bootstrap") { parent_bootstrap = Path::new(_parent_bootstrap); } let prefetch_policy = matches .value_of("prefetch-policy") .unwrap_or_default() .parse()?; let hint_readahead_files = if prefetch_policy != builder::PrefetchPolicy::None { get_readahead_files(source_path)? 
} else { BTreeMap::new() }; let mut ib = builder::Builder::new( source_type, source_path, blob_path, bootstrap_path, parent_bootstrap, blob_id, compressor, digester, hint_readahead_files, prefetch_policy, !repeatable, )?; let (blob_ids, blob_size) = ib.build()?; if !matches.is_present("disable-check") { let mut validator = Validator::new(&bootstrap_path)?; match validator.check(false) { Ok(valid) => { if !valid { return Err(einval!("Failed to build bootstrap from source")); } } Err(err) => { return Err(err); } } } if blob_size > 0 { let blob_id = blob_ids.last().unwrap(); let mut uploaded = false; if let Some(backend_type) = matches.value_of("backend-type") { if let Some(backend_config) = matches.value_of("backend-config") { let config = factory::BackendConfig { backend_type: backend_type.to_owned(), backend_config: serde_json::from_str(backend_config).map_err(|e| { error!("failed to parse backend_config json: {}", e); e })?, }; let blob_backend = factory::new_uploader(config).unwrap(); upload_blob(blob_backend, blob_id.as_str(), blob_path)?; uploaded = true; } } if !uploaded && blob_path == temp_file.as_path() { trace!("rename {:?} to {}", blob_path, blob_id); rename(blob_path, blob_id)?; blob_path = Path::new(blob_id); } } if blob_size > 0 { info!( "build finished, blob id: {:?}, blob file: {:?}", blob_ids, blob_path ); } else { info!("build finished, blob id: {:?}", blob_ids); } } if let Some(matches) = cmd.subcommand_matches("check") { let bootstrap_path = Path::new( matches .value_of("bootstrap") .expect("bootstrap is required"), ); let mut validator = Validator::new(bootstrap_path)?; match validator.check(true) { Ok(valid) => { if valid { info!("Bootstrap is valid"); } else { return Err(einval!("Bootstrap is invalid")); } } Err(err) => { return Err(err); } } } Ok(()) }
#[macro_use(crate_version, crate_authors)] extern crate clap; extern crate stderrlog; mod builder; mod node; mod stargz; mod tree; mod validator; #[macro_use] extern crate log; extern crate serde; const BLOB_ID_MAXIMUM_LENGTH: usize = 1024; use clap::{App, Arg, SubCommand}; use vmm_sys_util::tempfile::TempFile; use std::collections::BTreeMap; use std::fs::metadata; use std::fs::rename; use std::io::{self, Result, Write}; use std::path::{Path, PathBuf}; use std::sync::Arc; use builder::SourceType; use nydus_utils::einval; use nydus_utils::log_level_to_verbosity; use rafs::metadata::digest; use rafs::storage::{backend, compress, factory}; use validator::Validator; fn upload_blob( backend: Arc<dyn backend::BlobBackendUploader>, blob_id: &str, blob_path: &Path, ) -> Result<()> { backend .upload(blob_id, blob_path, |(current, total)| { io::stdout().flush().unwrap(); print!("\r"); print!( "Backend blob uploading: {}/{} bytes ({}%)", current, total, current * 100 / total, ); }) .map_err(|e| { error!("upload_blob backend.upload {:?}", e); e })?; print!("\r"); io::stdout().flush().unwrap(); Ok(()) } fn get_readahead_files(source: &Path) -> Result<BTreeMap<PathBuf, Option<u64>>> { let stdin = io::stdin(); let mut files = BTreeMap::new(); let source_path = source.canonicalize().unwrap(); loop { let mut file = String::new(); let ret = stdin.read_line(&mut file); match ret { Ok(size) => { if size == 0 { break; } let file_name = file.trim(); if file_name.is_empty() { continue; } let path = Path::new(file_name); if !path.exists() { warn!("{} does not exist, ignore it!", path.to_str().unwrap()); continue; } let canonicalized_name; match path.canonicalize() { Ok(p) => { if !p.starts_with(&source_path) { continue; } canonicalized_name = p; } Err(_) => continue, } let file_name_trimmed = Path::new("/").join( canonicalized_name .strip_prefix(&source_path) .unwrap() .to_path_buf(), ); debug!( "readahead file: {}, trimmed file name {}", file_name, file_name_trimmed.to_str().unwrap() ); 
files.insert(file_name_trimmed, None); } Err(err) => { error!("Failed to parse readahead files: {}", err); } } } Ok(files) } fn main() -> Result<()> { let cmd = App::new("nydus image builder") .version(crate_version!()) .author(crate_authors!()) .about("Build image using nydus format.") .subcommand( SubCommand::with_name("create") .about("dump image bootstrap and upload blob to storage backend") .arg( Arg::with_name("SOURCE") .help("source directory") .required(true) .index(1), ) .arg( Arg::with_name("source-type") .long("source-type") .help("source type") .takes_value(true) .default_value("directory") .possible_values(&["directory", "stargz_index"]) ) .arg( Arg::with_name("blob") .long("blob") .help("blob file path") .takes_value(true), ) .arg( Arg::with_name("bootstrap") .long("bootstrap") .help("bootstrap file path (required)") .takes_value(true), ) .arg( Arg::with_name("blob-id") .long("blob-id") .help("blob id (as object id in backend)") .takes_value(true), ) .arg( Arg::with_name("compressor") .long("compressor") .help("how blob will be compressed: none, lz4_block (default)") .takes_value(true) .required(false) .default_value("lz4_block"), ) .arg( Arg::with_name("digester") .long("digester") .help("how inode and blob chunk will be digested: blake3 (default), sha256") .takes_value(true) .required(false) .default_value("blake3"), ) .arg( Arg::with_name("parent-bootstrap") .long("parent-bootstrap") .help("bootstrap file path of parent (optional)") .takes_value(true) .required(false), ) .arg( Arg::with_name("backend-type") .long("backend-type") .help("blob storage backend type (enable backend upload if specified)") .takes_value(true), ) .arg( Arg::with_name("backend-config") .long("backend-config") .help("blob storage backend config (json)") .takes_value(true), ) .arg( Arg::with_name("prefetch-policy") .long("prefetch-policy") .help("Prefetch policy: fs(issued from Fs layer), blob(issued from backend/blob layer), none(no readahead is needed)") .takes_value(true) 
.required(false) .default_value("none"), ) .arg( Arg::with_name("repeatable") .long("repeatable") .help("Produce environment independent image") .takes_value(false) .required(false), ) .arg( Arg::with_name("disable-check") .long("disable-check") .help("Disable to validate bootstrap file after building") .takes_value(false) .required(false) ) ) .subcommand( SubCommand::with_name("check") .about("validate image bootstrap") .arg( Arg::with_name("bootstrap") .long("bootstrap") .help("bootstrap file path (required)") .takes_value(true), ) ) .arg( Arg::with_name("log-level") .long("log-level") .default_value("info") .help("Specify log level: trace, debug, info, warn, error") .takes_value(true) .required(false) .global(true), ) .get_matches(); let v = cmd .value_of("log-level") .unwrap() .parse() .unwrap_or(log::LevelFilter::Warn); stderrlog::new() .quiet(false) .verbosity(log_level_to_verbosity(v)) .timestamp(stderrlog::Timestamp::Second) .init() .unwrap(); if let Some(matches) = cmd.subcommand_matches("create") { let source_path = Path::new(matches.value_of("SOURCE").expect("SOURCE is required")); let source_type: SourceType = matches .value_of("source-type") .expect("source-type is required") .parse()?; let source_file = metadata(source_path) .map_err(|e| einval!(format!("failed to get source path {:?}", e)))?; let mut blob_id = String::new(); if let Some(p_blob_id) = matches.value_of("blob-id") { blob_id = String::from(p_blob_id); if blob_id.len() > BLOB_ID_MAXIMUM_LENGTH { return Err(einval!(format!( "blob id is limited to length {}", BLOB_ID_MAXIMUM_LENGTH ))); } } let mut compressor = matches.value_of("compressor").unwrap_or_default().parse()?; let mut digester = matches.value_of("digester").unwrap_or_default().parse()?; let repeatable = matches.is_present("repeatable"); match source_type { SourceType::Directory => { if !source_file.is_dir() { return Err(einval!("source must be a directory")); } } SourceType::StargzIndex => { if !source_file.is_file() { return 
Err(einval!("source must be a JSON file")); } if blob_id.trim() == "" { return Err(einval!("blob-id can't be empty")); } if compressor != compress::Algorithm::GZip { trace!("compressor set to {}", compress::Algorithm::GZip); } compressor = compress::Algorithm::GZip; if digester != digest::Algorithm::Sha256 { trace!("digester set to {}", digest::Algorithm::Sha256); } digester = digest::Algorithm::Sha256; } } let bootstrap_path = Path::new( matches .value_of("bootstrap") .expect("bootstrap is required"), ); let temp_file = TempFile::new_with_prefix("").unwrap(); let mut blob_path = matches .value_of("blob") .map(|p| Path::new(p)) .unwrap_or_else(|| temp_file.as_path()); let mut parent_bootstrap = Path::new(""); if let Some(_parent_bootstrap) = matches.value_of("parent-bootstrap") { parent_bootstrap = Path::new(_parent_bootstrap); } let prefetch_policy = matches .value_of("prefetch-policy") .unwrap_or_default() .parse()?; let hint_readahead_files = if prefetch_policy != builder::PrefetchPolicy::None { get_readahead_files(source_path)? } else { BTreeMap::new() }; let mut ib = builder::Builder::new( source_type, source_path, blob_path, bootstrap_path, parent_bootstrap, blob_id, compressor, digester, hint_readahead_files, prefetch_policy, !repeatable, )?; let (blob_ids, blob_size) = ib.build()?; if !matches.is_present("disable-check") { let mut validator = Validator::new(&bootstrap_path)?; match validator.check(false) { Ok(valid) => { if !valid { return Err(einval!("Failed to build bootstrap from source")); } } Err(err) => { return Err(err); } } } if blob_size > 0 { let blob_id = blob_ids.last().unwrap(); let mut uploaded = false;
if !uploaded && blob_path == temp_file.as_path() { trace!("rename {:?} to {}", blob_path, blob_id); rename(blob_path, blob_id)?; blob_path = Path::new(blob_id); } } if blob_size > 0 { info!( "build finished, blob id: {:?}, blob file: {:?}", blob_ids, blob_path ); } else { info!("build finished, blob id: {:?}", blob_ids); } } if let Some(matches) = cmd.subcommand_matches("check") { let bootstrap_path = Path::new( matches .value_of("bootstrap") .expect("bootstrap is required"), ); let mut validator = Validator::new(bootstrap_path)?; match validator.check(true) { Ok(valid) => { if valid { info!("Bootstrap is valid"); } else { return Err(einval!("Bootstrap is invalid")); } } Err(err) => { return Err(err); } } } Ok(()) }
if let Some(backend_type) = matches.value_of("backend-type") { if let Some(backend_config) = matches.value_of("backend-config") { let config = factory::BackendConfig { backend_type: backend_type.to_owned(), backend_config: serde_json::from_str(backend_config).map_err(|e| { error!("failed to parse backend_config json: {}", e); e })?, }; let blob_backend = factory::new_uploader(config).unwrap(); upload_blob(blob_backend, blob_id.as_str(), blob_path)?; uploaded = true; } }
if_condition
[ { "content": "pub fn new_uploader(mut config: BackendConfig) -> Result<Arc<dyn BlobBackendUploader>> {\n\n // Disable http timeout for upload request\n\n config.backend_config[\"connect_timeout\"] = 0.into();\n\n config.backend_config[\"timeout\"] = 0.into();\n\n match config.backend_type.as_str() {...
Rust
vehicle-information-service/examples/server.rs
buesima/vehicle-information-service
086b9ba38947cc266867c4700e11f4e9ea727810
#[macro_use] extern crate log; extern crate structopt; use actix::prelude::*; use actix_web::{middleware, web, App, HttpResponse, HttpServer}; use futures::prelude::*; use futures_util::compat::Stream01CompatExt; use serde_json::json; use std::net::{IpAddr, Ipv4Addr, SocketAddr}; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::time::Duration; use structopt::StructOpt; use tokio_socketcan; use vehicle_information_service::{AppState, KnownError, Router, Set, SignalManager, UpdateSignal}; const PATH_PRIVATE_EXAMPLE_PRINT_SET: &str = "Private.Example.Print.Set"; const PATH_PRIVATE_EXAMPLE_INTERVAL: &str = "Private.Example.Interval"; const PATH_PRIVATE_EXAMPLE_SOCKETCAN_LAST_FRAME_ID: &str = "Private.Example.SocketCan.Last.Frame.Id"; #[derive(StructOpt, Debug, Clone)] #[structopt(name = "Vehicle Information Service Demo")] struct Opt { #[structopt( short = "c", long = "can", default_value = "vcan0", help = "CAN Interface" )] can_interface: String, #[structopt( short = "p", long = "port", default_value = "14430", help = "Websocket Port" )] port: u16, } fn main() { env_logger::init(); let sys = actix::System::new("vis-server-example"); let opt = Opt::from_args(); let socket_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), opt.port); info!("Starting server"); HttpServer::new(move || { let app_state: AppState = Default::default(); let interval_signal_source = IntervalSignalSource::new(app_state.signal_manager_addr().clone()); interval_signal_source.start(); let can_id_stream = tokio_socketcan::CANSocket::open(&opt.can_interface) .unwrap() .compat() .map_ok(|frame| frame.id()); app_state.spawn_stream_signal_source( PATH_PRIVATE_EXAMPLE_SOCKETCAN_LAST_FRAME_ID.into(), can_id_stream, ); let example_set = PrintSetRecipient::start_default(); app_state.add_set_recipient( PATH_PRIVATE_EXAMPLE_PRINT_SET.into(), example_set.recipient().clone(), ); App::new() .data(app_state) .wrap(middleware::Logger::default()) 
.configure(Router::configure_routes) .default_service(web::route().to(|| HttpResponse::NotFound())) }) .bind(socket_addr) .unwrap() .start(); let _ = sys.run(); } pub(crate) struct IntervalSignalSource { signal_manager_addr: Addr<SignalManager>, interval_handle: Option<SpawnHandle>, count: Arc<AtomicUsize>, } impl IntervalSignalSource { pub fn new(signal_manager_addr: Addr<SignalManager>) -> Self { IntervalSignalSource { signal_manager_addr, interval_handle: None, count: Default::default(), } } } impl Actor for IntervalSignalSource { type Context = Context<Self>; fn started(&mut self, ctx: &mut Context<Self>) { self.interval_handle = self.interval_handle.or_else(|| { Some(ctx.run_interval(Duration::from_secs(1), |act, _ctx| { let v = act.count.fetch_add(1, Ordering::SeqCst); let update = UpdateSignal { path: PATH_PRIVATE_EXAMPLE_INTERVAL.into(), value: json!(v), }; act.signal_manager_addr.do_send(update); })) }); } } #[derive(Default)] struct PrintSetRecipient {} impl Actor for PrintSetRecipient { type Context = Context<Self>; fn started(&mut self, _ctx: &mut Context<Self>) { info!( "Print `set`-recipient started, PATH: {}", PATH_PRIVATE_EXAMPLE_PRINT_SET ); } fn stopped(&mut self, _ctx: &mut Context<Self>) { info!( "Print `set`-recipient stopped, PATH: {}", PATH_PRIVATE_EXAMPLE_PRINT_SET ); } } impl Handler<Set> for PrintSetRecipient { type Result = Result<(), KnownError>; fn handle(&mut self, msg: Set, _ctx: &mut Context<Self>) -> Result<(), KnownError> { info!("Received SET for path `{}`, value: {}", msg.path, msg.value); Ok(()) } }
#[macro_use] extern crate log; extern crate structopt; use actix::prelude::*; use actix_web::{middleware, web, App, HttpResponse, HttpServer}; use futures::prelude::*; use futures_util::compat::Stream01CompatExt; use serde_json::json; use std::net::{IpAddr, Ipv4Addr, SocketAddr}; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::time::Duration; use structopt::StructOpt; use tokio_socketcan; use vehicle_information_service::{AppState, KnownError, Router, Set, SignalManager, UpdateSignal}; const PATH_PRIVATE_EXAMPLE_PRINT_SET: &str = "Private.Example.Print.Set"; const PATH_PRIVATE_EXAMPLE_INTERVAL: &str = "Private.Example.Interval"; const PATH_PRIVATE_EXAMPLE_SOCKETCAN_LAST_FRAME_ID: &str = "Private.Example.SocketCan.Last.Frame.Id"; #[derive(StructOpt, Debug, Clone)] #[structopt(name = "Vehicle Information Service Demo")] struct Opt { #[structopt( short = "c", long = "can", default_value = "vcan0", help = "CAN Interface" )] can_interface: String, #[structopt( short = "p", long = "port", default_value = "14430", help = "Websocket Port" )] port: u16, } fn main() { env_logger::init(); let sys = actix::System::new("vis-server-example"); let opt = Opt::from_args(); let socket_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), opt.port); info!("Starting server"); HttpServer::new(move || { let app_state: AppState = Default::default(); let interval_signal_source = IntervalSignalSource::new(app_state.signal_manager_addr().clone()); interval_signal_source.start(); let can_id_stream = tokio_socketcan::CANSocket::open(&opt.can_interface) .unwrap() .compat() .map_ok(|frame| frame.id()); app_state.spawn_stream_signal_source( PATH_PRIVATE_EXAMPLE_SOCKETCAN_LAST_FRAME_ID.into(), can_id_stream, ); let example_set = PrintSetRecipient::start_default(); app_state.add_set_recipient( PATH_PRIVATE_EXAMPLE_PRINT_SET.into(), example_set.recipient().clone(), ); App::new() .data(app_state) .wrap(middleware::Logger::default()) 
.configure(Router::configure_routes) .default_service(web::route().to(|| HttpResponse::NotFound())) }) .bind(socket_addr) .unwrap() .start(); let _ = sys.run(); } pub(crate) struct IntervalSignalSource { signal_manager_addr: Addr<SignalManager>, interval_handle: Option<SpawnHandle>, count: Arc<AtomicUsize>, } impl IntervalSignalSource { pub fn new(signal_manager_addr: Addr<SignalManager>) -> Self { IntervalSignalSource { signal_manager_addr, interval_handle: None, count: Default::default(), } } } impl Actor for IntervalSignalSource { type Context = Context<Self>; fn started(&mut self, ctx: &mut Context<Self>) { self.interval_handle = self.interval_handle.or_else(|| { Some(ctx.run_interval(Duration::from_secs(1), |act, _ctx| { let v = act.count.fetch_add(1, Ordering::SeqCst); let update = UpdateSignal { path: PATH_PRIVATE_EXAMPLE_INTERVAL.int
} #[derive(Default)] struct PrintSetRecipient {} impl Actor for PrintSetRecipient { type Context = Context<Self>; fn started(&mut self, _ctx: &mut Context<Self>) { info!( "Print `set`-recipient started, PATH: {}", PATH_PRIVATE_EXAMPLE_PRINT_SET ); } fn stopped(&mut self, _ctx: &mut Context<Self>) { info!( "Print `set`-recipient stopped, PATH: {}", PATH_PRIVATE_EXAMPLE_PRINT_SET ); } } impl Handler<Set> for PrintSetRecipient { type Result = Result<(), KnownError>; fn handle(&mut self, msg: Set, _ctx: &mut Context<Self>) -> Result<(), KnownError> { info!("Received SET for path `{}`, value: {}", msg.path, msg.value); Ok(()) } }
o(), value: json!(v), }; act.signal_manager_addr.do_send(update); })) }); }
function_block-function_prefixed
[ { "content": "///\n\n/// Does the val match the filter criteria\n\n/// Returns:\n\n/// Ok(true) : E.g. value changed sufficiently or there was no filter set\n\n/// Ok(false) : Did not reach change threshold\n\n/// Err(...): Occurs when the value is not an integer, filters only work for ints\n\n///\n\npub fn mat...
Rust
tests/common/mod.rs
jedel1043/regress
2c3de40bc72b1875b47fdca77b23a4c6ce22c6f9
pub fn test_parse_fails(pattern: &str) { let res = regress::Regex::new(pattern); assert!(res.is_err(), "Pattern should not have parsed: {}", pattern); } pub fn test_parse_fails_flags(pattern: &str, flags: &str) { let res = regress::Regex::with_flags(pattern, flags); assert!(res.is_err(), "Pattern should not have parsed: {}", pattern); } fn format_match(r: &regress::Match, input: &str) -> String { let mut result = input[r.range()].to_string(); for cg in r.captures.iter() { result.push(','); if let Some(cg) = cg { result.push_str(&input[cg.clone()]) } } result } pub trait StringTestHelpers { fn test_eq(&self, s: &str); } impl StringTestHelpers for String { fn test_eq(&self, rhs: &str) { assert_eq!(self.as_str(), rhs) } } pub trait VecTestHelpers { fn test_eq(&self, rhs: Vec<&str>); } impl VecTestHelpers for Vec<&str> { fn test_eq(&self, rhs: Vec<&str>) { assert_eq!(*self, rhs) } } #[derive(Debug, Clone)] pub struct TestCompiledRegex { re: regress::Regex, tc: TestConfig, } impl TestCompiledRegex { pub fn matches(&'_ self, input: &'_ str, start: usize) -> Vec<regress::Match> { use regress::backends as rbe; match (self.tc.use_ascii(input), self.tc.backend) { (true, Backend::PikeVM) => { rbe::find::<rbe::PikeVMExecutor>(&self.re, input, start).collect() } (false, Backend::PikeVM) => { rbe::find::<rbe::PikeVMExecutor>(&self.re, input, start).collect() } (true, Backend::Backtracking) => { rbe::find_ascii::<rbe::BacktrackExecutor>(&self.re, input, start).collect() } (false, Backend::Backtracking) => { rbe::find::<rbe::BacktrackExecutor>(&self.re, input, start).collect() } } } pub fn find(&self, input: &str) -> Option<regress::Match> { self.matches(input, 0).into_iter().next() } pub fn match1f(&self, input: &str) -> String { match self.find(input) { Some(m) => format_match(&m, input), None => panic!("Failed to match {}", input), } } pub fn match1_named_group(&self, input: &str, group: &str) -> String { match self.find(input) { Some(m) => match m.named_group(group) { Some(r) 
=> match input.get(r.clone()) { Some(str) => str.to_string(), None => panic!("Cannot get range from string input {:?}", r), }, None => panic!("Named capture group does not exist {}", group), }, None => panic!("Failed to match {}", input), } } pub fn match1_vec<'a, 'b>(&'a self, input: &'b str) -> Vec<Option<&'b str>> { let mut result = Vec::new(); let m: regress::Match = self.find(input).expect("Failed to match"); result.push(Some(&input[m.range()])); for cr in m.captures { result.push(cr.map(|r| &input[r])); } result } pub fn test_fails(&self, input: &str) { assert!(self.find(input).is_none(), "Should not have matched") } pub fn test_succeeds(&self, input: &str) { assert!(self.find(input).is_some(), "Should have matched") } pub fn match_all_from(&'_ self, input: &'_ str, start: usize) -> Vec<regress::Range> { self.matches(input, start) .into_iter() .map(move |m| m.range()) .collect() } pub fn match_all<'a, 'b>(&'a self, input: &'b str) -> Vec<&'b str> { self.matches(input, 0) .into_iter() .map(move |m| &input[m.range()]) .collect() } pub fn run_global_match(&self, input: &str) -> String { self.matches(input, 0) .into_iter() .map(move |m| format_match(&m, input)) .collect::<Vec<String>>() .join(",") } } #[derive(Debug, Copy, Clone)] enum Backend { PikeVM, Backtracking, } #[derive(Debug, Copy, Clone)] pub struct TestConfig { ascii: bool, optimize: bool, backend: Backend, } impl TestConfig { pub fn use_ascii(&self, s: &str) -> bool { self.ascii && s.is_ascii() } pub fn compile(&self, pattern: &str) -> TestCompiledRegex { self.compilef(pattern, "") } pub fn compilef(&self, pattern: &str, flags_str: &str) -> TestCompiledRegex { let mut flags = regress::Flags::from(flags_str); flags.no_opt = !self.optimize; let re = regress::Regex::with_flags(pattern, flags); assert!( re.is_ok(), "Failed to parse! 
flags: {} pattern: {}, error: {}", flags_str, pattern, re.unwrap_err() ); TestCompiledRegex { re: re.unwrap(), tc: *self, } } pub fn test_match_succeeds(&self, pattern: &str, flags_str: &str, input: &str) { let cr = self.compilef(pattern, flags_str); cr.test_succeeds(input) } pub fn test_match_fails(&self, pattern: &str, flags_str: &str, input: &str) { let cr = self.compilef(pattern, flags_str); cr.test_fails(input) } } pub fn test_with_configs<F>(func: F) where F: Fn(TestConfig), { func(TestConfig { ascii: true, optimize: false, backend: Backend::PikeVM, }); func(TestConfig { ascii: false, optimize: false, backend: Backend::PikeVM, }); func(TestConfig { ascii: true, optimize: false, backend: Backend::Backtracking, }); func(TestConfig { ascii: false, optimize: false, backend: Backend::Backtracking, }); func(TestConfig { ascii: true, optimize: true, backend: Backend::Backtracking, }); func(TestConfig { ascii: false, optimize: true, backend: Backend::Backtracking, }); }
pub fn test_parse_fails(pattern: &str) { let res = regress::Regex::new(pattern); assert!(res.is_err(), "Pattern should not have parsed: {}", pattern); } pub fn test_parse_fails_flags(pattern: &str, flags: &str) { let res = regress::Regex::with_flags(pattern, flags); assert!(res.is_err(), "Pattern should not have parsed: {}", pattern); } fn format_match(r: &regress::Match, input: &str) -> String { let mut result = input[r.range()].to_string(); for cg in r.captures.iter() { result.push(','); if let Some(cg) = cg { result.push_str(&input[cg.clone()]) } } result } pub trait StringTestHelpers { fn test_eq(&self, s: &str); } impl StringTestHelpers for String { fn test_eq(&self, rhs: &str) { assert_eq!(self.as_str(), rhs) } } pub trait VecTestHelpers { fn test_eq(&self, rhs: Vec<&str>); } impl VecTestHelpers for Vec<&str> { fn test_eq(&self, rhs: Vec<&str>) { assert_eq!(*self, rhs) } } #[derive(Debug, Clone)] pub struct TestCompiledRegex { re: regress::Regex, tc: TestConfig, } impl TestCompiledRegex { pub fn matches(&'_ self, input: &'_ str, start: usize) -> Vec<regress::Match> { use regress::backends as rbe; match (self.tc.use_ascii(input), self.tc.backend) { (true, Backend::PikeVM) => { rbe::find::<rbe::PikeVMExecutor>(&self.re, input, start).collect() } (false, Backend::PikeVM) => { rbe::find::<rbe::PikeVMExecutor>(&self.re, input, start).collect() } (true, Backend::Backtracking) => { rbe::find_ascii::<rbe::BacktrackExecutor>(&self.re, input, start).collect() } (false, Backend::Backtracking) => { rbe::find::<rbe::BacktrackExecutor>(&self.re, input, start).collect() } } } pub fn find(&self, input: &str) -> Option<regress::Match> { self.matches(input, 0).into_iter().next() } pub fn match1f(&self, input: &str) -> String { match self.find(input) { Some(m) => format_match(&m, input), None => panic!("Failed to match {}", input), } } pub fn match1_named_group(&self, input: &str, group: &str) -> String { match self.find(input) { Some(m) => match m.named_group(group) {
pub fn match1_vec<'a, 'b>(&'a self, input: &'b str) -> Vec<Option<&'b str>> { let mut result = Vec::new(); let m: regress::Match = self.find(input).expect("Failed to match"); result.push(Some(&input[m.range()])); for cr in m.captures { result.push(cr.map(|r| &input[r])); } result } pub fn test_fails(&self, input: &str) { assert!(self.find(input).is_none(), "Should not have matched") } pub fn test_succeeds(&self, input: &str) { assert!(self.find(input).is_some(), "Should have matched") } pub fn match_all_from(&'_ self, input: &'_ str, start: usize) -> Vec<regress::Range> { self.matches(input, start) .into_iter() .map(move |m| m.range()) .collect() } pub fn match_all<'a, 'b>(&'a self, input: &'b str) -> Vec<&'b str> { self.matches(input, 0) .into_iter() .map(move |m| &input[m.range()]) .collect() } pub fn run_global_match(&self, input: &str) -> String { self.matches(input, 0) .into_iter() .map(move |m| format_match(&m, input)) .collect::<Vec<String>>() .join(",") } } #[derive(Debug, Copy, Clone)] enum Backend { PikeVM, Backtracking, } #[derive(Debug, Copy, Clone)] pub struct TestConfig { ascii: bool, optimize: bool, backend: Backend, } impl TestConfig { pub fn use_ascii(&self, s: &str) -> bool { self.ascii && s.is_ascii() } pub fn compile(&self, pattern: &str) -> TestCompiledRegex { self.compilef(pattern, "") } pub fn compilef(&self, pattern: &str, flags_str: &str) -> TestCompiledRegex { let mut flags = regress::Flags::from(flags_str); flags.no_opt = !self.optimize; let re = regress::Regex::with_flags(pattern, flags); assert!( re.is_ok(), "Failed to parse! 
flags: {} pattern: {}, error: {}", flags_str, pattern, re.unwrap_err() ); TestCompiledRegex { re: re.unwrap(), tc: *self, } } pub fn test_match_succeeds(&self, pattern: &str, flags_str: &str, input: &str) { let cr = self.compilef(pattern, flags_str); cr.test_succeeds(input) } pub fn test_match_fails(&self, pattern: &str, flags_str: &str, input: &str) { let cr = self.compilef(pattern, flags_str); cr.test_fails(input) } } pub fn test_with_configs<F>(func: F) where F: Fn(TestConfig), { func(TestConfig { ascii: true, optimize: false, backend: Backend::PikeVM, }); func(TestConfig { ascii: false, optimize: false, backend: Backend::PikeVM, }); func(TestConfig { ascii: true, optimize: false, backend: Backend::Backtracking, }); func(TestConfig { ascii: false, optimize: false, backend: Backend::Backtracking, }); func(TestConfig { ascii: true, optimize: true, backend: Backend::Backtracking, }); func(TestConfig { ascii: false, optimize: true, backend: Backend::Backtracking, }); }
Some(r) => match input.get(r.clone()) { Some(str) => str.to_string(), None => panic!("Cannot get range from string input {:?}", r), }, None => panic!("Named capture group does not exist {}", group), }, None => panic!("Failed to match {}", input), } }
function_block-function_prefix_line
[ { "content": "/// Try parsing a given pattern.\n\n/// Return the resulting IR regex, or an error.\n\npub fn try_parse(pattern: &str, flags: api::Flags) -> Result<ir::Regex, Error> {\n\n // for q in 0..=0x10FFFF {\n\n // if let Some(c) = core::char::from_u32(q) {\n\n // let cc = folds::fold(...
Rust
mqtt/mqtt-policy/src/substituter.rs
dmolokanov/iotedge
a42fe5abbb98b6de32fd832ac75e0e8ebd740a10
use mqtt_broker::auth::Activity; use policy::{Request, Result, Substituter}; #[allow(clippy::doc_markdown)] #[derive(Debug)] pub struct MqttSubstituter { device_id: String, } impl MqttSubstituter { pub fn new(device_id: impl Into<String>) -> Self { Self { device_id: device_id.into(), } } fn device_id(&self) -> &str { &self.device_id } fn replace_variable(&self, value: &str, context: &Request<Activity>) -> String { match context.context() { Some(context) => { let mut result = value.to_owned(); for variable in VariableIter::new(value) { result = match variable { crate::CLIENT_ID_VAR => replace( &result, variable, context.client_info().client_id().as_str(), ), crate::IDENTITY_VAR => { replace(&result, variable, context.client_info().auth_id().as_str()) } crate::DEVICE_ID_VAR => { replace(&result, variable, extract_device_id(context)) } crate::MODULE_ID_VAR => { replace(&result, variable, extract_module_id(context)) } crate::EDGEHUB_ID_VAR => replace(&result, variable, self.device_id()), _ => result, }; } result } None => value.to_owned(), } } } impl Substituter for MqttSubstituter { type Context = Activity; fn visit_identity(&self, value: &str, context: &Request<Self::Context>) -> Result<String> { Ok(self.replace_variable(value, context)) } fn visit_resource(&self, value: &str, context: &Request<Self::Context>) -> Result<String> { Ok(self.replace_variable(value, context)) } } #[derive(Debug)] pub(super) struct VariableIter<'a> { value: &'a str, index: usize, } impl<'a> VariableIter<'a> { pub fn new(value: &'a str) -> Self { Self { value, index: 0 } } } impl<'a> Iterator for VariableIter<'a> { type Item = &'a str; fn next(&mut self) -> Option<Self::Item> { let value = &self.value[self.index..]; if let Some(start) = value.find("{{") { if let Some(end) = value.find("}}") { if start < end { self.index = self.index + end + 2; return Some(&value[start..end + 2]); } } } None } } fn replace(value: &str, variable: &str, substitution: &str) -> String { value.replace(variable, 
substitution) } fn extract_device_id(activity: &Activity) -> &str { let auth_id = activity.client_info().auth_id().as_str(); auth_id.split('/').next().unwrap_or_default() } fn extract_module_id(activity: &Activity) -> &str { let auth_id = activity.client_info().auth_id().as_str(); auth_id.split('/').nth(1).unwrap_or_default() } #[cfg(test)] mod tests { use proptest::prelude::*; use test_case::test_case; use crate::tests; use super::*; #[test_case("{{iot:identity}}", "test_device_auth_id", "test_device_client_id", "test_device_auth_id"; "iot:identity variable")] #[test_case("namespace-{{iot:identity}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-test_device_auth_id-suffix"; "iot:identity variable substring")] #[test_case("{{mqtt:client_id}}", "test_device_auth_id", "test_device_client_id", "test_device_client_id"; "mqtt:client_id variable")] #[test_case("namespace-{{mqtt:client_id}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-test_device_client_id-suffix"; "mqtt:client_id variable substring")] #[test_case("{{iot:device_id}}", "test_device_auth_id", "test_device_client_id", "test_device_auth_id"; "iot:device_id variable")] #[test_case("namespace-{{iot:device_id}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-test_device_auth_id-suffix"; "iot:device_id variable substring")] #[test_case("{{iot:module_id}}", "test_device_id/test_module_id", "test_device_client_id", "test_module_id"; "iot:module_id variable")] #[test_case("namespace-{{iot:module_id}}-suffix", "test_device_id/test_module_id", "test_device_client_id", "namespace-test_module_id-suffix"; "iot:module_id variable substring")] #[test_case("{{iot:this_device_id}}", "test_device_auth_id", "test_device_client_id", "edge_device"; "iot:this_device_id variable")] #[test_case("namespace-{{iot:this_device_id}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-edge_device-suffix"; "iot:this_device_id variable substring")] 
#[test_case("{{invalid}}", "test_device_auth_id", "test_device_client_id", "{{invalid}}"; "invalid variable")] #[test_case("{{{}bad}}}", "test_device_auth_id", "test_device_client_id", "{{{}bad}}}"; "bad variable")] #[test_case("{{{}bad}", "test_device_auth_id", "test_device_client_id", "{{{}bad}"; "bad variable 2")] #[test_case("{}bad}}", "test_device_auth_id", "test_device_client_id", "{}bad}}"; "bad variable 3")] #[test_case("{{iot:this_device_id}}{{iot:module_id}}", "test_device_auth_id/test_module", "test_device_client_id", "edge_devicetest_module"; "multiple variable")] #[test_case("namespace-{{iot:this_device_id}}/{{iot:module_id}}-suffix", "test_device_auth_id/test_module", "test_device_client_id", "namespace-edge_device/test_module-suffix"; "multiple variable substring")] fn visit_identity_test(input: &str, auth_id: &str, client_id: &str, expected: &str) { let request = Request::with_context( "some_identity", "some_operation", "some_resource", tests::create_connect_activity(client_id, auth_id), ) .unwrap(); assert_eq!( expected, MqttSubstituter::new("edge_device") .visit_identity(input, &request) .unwrap() ); } #[test_case("{{iot:identity}}", "test_device_auth_id", "test_device_client_id", "test_device_auth_id"; "iot:identity variable")] #[test_case("namespace-{{iot:identity}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-test_device_auth_id-suffix"; "iot:identity variable substring")] #[test_case("{{mqtt:client_id}}", "test_device_auth_id", "test_device_client_id", "test_device_client_id"; "mqtt:client_id variable")] #[test_case("namespace-{{mqtt:client_id}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-test_device_client_id-suffix"; "mqtt:client_id variable substring")] #[test_case("{{iot:device_id}}", "test_device_auth_id", "test_device_client_id", "test_device_auth_id"; "iot:device_id variable")] #[test_case("namespace-{{iot:device_id}}-suffix", "test_device_auth_id", "test_device_client_id", 
"namespace-test_device_auth_id-suffix"; "iot:device_id variable substring")] #[test_case("{{iot:module_id}}", "test_device_id/test_module_id", "test_device_client_id", "test_module_id"; "iot:module_id variable")] #[test_case("namespace-{{iot:module_id}}-suffix", "test_device_id/test_module_id", "test_device_client_id", "namespace-test_module_id-suffix"; "iot:module_id variable substring")] #[test_case("{{iot:this_device_id}}", "test_device_auth_id", "test_device_client_id", "edge_device"; "iot:this_device_id variable")] #[test_case("namespace-{{iot:this_device_id}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-edge_device-suffix"; "iot:this_device_id variable substring")] #[test_case("{{invalid}}", "test_device_auth_id", "test_device_client_id", "{{invalid}}"; "invalid variable")] #[test_case("{{{}bad}}}", "test_device_auth_id", "test_device_client_id", "{{{}bad}}}"; "bad variable")] #[test_case("{{{}bad}", "test_device_auth_id", "test_device_client_id", "{{{}bad}"; "bad variable 2")] #[test_case("{}bad}}", "test_device_auth_id", "test_device_client_id", "{}bad}}"; "bad variable 3")] #[test_case("{{iot:this_device_id}}{{iot:module_id}}", "test_device_auth_id/test_module", "test_device_client_id", "edge_devicetest_module"; "multiple variable")] #[test_case("namespace-{{iot:this_device_id}}/{{iot:module_id}}-suffix", "test_device_auth_id/test_module", "test_device_client_id", "namespace-edge_device/test_module-suffix"; "multiple variable substring")] fn visit_resource_test(input: &str, auth_id: &str, client_id: &str, expected: &str) { let request = Request::with_context( "some_identity", "some_operation", "some_resource", tests::create_publish_activity(client_id, auth_id), ) .unwrap(); assert_eq!( expected, MqttSubstituter::new("edge_device") .visit_resource(input, &request) .unwrap() ); } proptest! { #[test] fn iterator_does_not_crash(value in "[a-z\\{\\}]+") { drop(VariableIter::new(&value).collect::<Vec<_>>()); } } }
use mqtt_broker::auth::Activity; use policy::{Request, Result, Substituter}; #[allow(clippy::doc_markdown)] #[derive(Debug)] pub struct MqttSubstituter { device_id: String, } impl MqttSubstituter { pub fn new(device_id: impl Into<String>) -> Self { Self { device_id: device_id.into(), } } fn device_id(&self) -> &str { &self.device_id } fn replace_variable(&self, value: &str, context: &Request<Activity>) -> String { match context.context() { Some(context) => { let mut result = value.to_owned(); for variable in VariableIter::new(value) { result = match variable { crate::CLIENT_ID_VAR => replace( &result, variable, context.client_info().client_id().as_str(), ), crate::IDENTITY_VAR => { replace(&result, variable, context.client_info().auth_id().as_str()) } crate::DEVICE_ID_VAR => { replace(&result, variable, extract_device_id(context)) } crate::MODULE_ID_VAR => { replace(&result, variable, extract_module_id(context)) } crate::EDGEHUB_ID_VAR => replace(&result, variable, self.device_id()), _ => result, }; } result } None => value.to_owned(), } } } impl Substituter for MqttSubstituter { type Context = Activity; fn visit_identity(&self, va
:device_id variable")] #[test_case("namespace-{{iot:device_id}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-test_device_auth_id-suffix"; "iot:device_id variable substring")] #[test_case("{{iot:module_id}}", "test_device_id/test_module_id", "test_device_client_id", "test_module_id"; "iot:module_id variable")] #[test_case("namespace-{{iot:module_id}}-suffix", "test_device_id/test_module_id", "test_device_client_id", "namespace-test_module_id-suffix"; "iot:module_id variable substring")] #[test_case("{{iot:this_device_id}}", "test_device_auth_id", "test_device_client_id", "edge_device"; "iot:this_device_id variable")] #[test_case("namespace-{{iot:this_device_id}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-edge_device-suffix"; "iot:this_device_id variable substring")] #[test_case("{{invalid}}", "test_device_auth_id", "test_device_client_id", "{{invalid}}"; "invalid variable")] #[test_case("{{{}bad}}}", "test_device_auth_id", "test_device_client_id", "{{{}bad}}}"; "bad variable")] #[test_case("{{{}bad}", "test_device_auth_id", "test_device_client_id", "{{{}bad}"; "bad variable 2")] #[test_case("{}bad}}", "test_device_auth_id", "test_device_client_id", "{}bad}}"; "bad variable 3")] #[test_case("{{iot:this_device_id}}{{iot:module_id}}", "test_device_auth_id/test_module", "test_device_client_id", "edge_devicetest_module"; "multiple variable")] #[test_case("namespace-{{iot:this_device_id}}/{{iot:module_id}}-suffix", "test_device_auth_id/test_module", "test_device_client_id", "namespace-edge_device/test_module-suffix"; "multiple variable substring")] fn visit_identity_test(input: &str, auth_id: &str, client_id: &str, expected: &str) { let request = Request::with_context( "some_identity", "some_operation", "some_resource", tests::create_connect_activity(client_id, auth_id), ) .unwrap(); assert_eq!( expected, MqttSubstituter::new("edge_device") .visit_identity(input, &request) .unwrap() ); } #[test_case("{{iot:identity}}", 
"test_device_auth_id", "test_device_client_id", "test_device_auth_id"; "iot:identity variable")] #[test_case("namespace-{{iot:identity}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-test_device_auth_id-suffix"; "iot:identity variable substring")] #[test_case("{{mqtt:client_id}}", "test_device_auth_id", "test_device_client_id", "test_device_client_id"; "mqtt:client_id variable")] #[test_case("namespace-{{mqtt:client_id}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-test_device_client_id-suffix"; "mqtt:client_id variable substring")] #[test_case("{{iot:device_id}}", "test_device_auth_id", "test_device_client_id", "test_device_auth_id"; "iot:device_id variable")] #[test_case("namespace-{{iot:device_id}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-test_device_auth_id-suffix"; "iot:device_id variable substring")] #[test_case("{{iot:module_id}}", "test_device_id/test_module_id", "test_device_client_id", "test_module_id"; "iot:module_id variable")] #[test_case("namespace-{{iot:module_id}}-suffix", "test_device_id/test_module_id", "test_device_client_id", "namespace-test_module_id-suffix"; "iot:module_id variable substring")] #[test_case("{{iot:this_device_id}}", "test_device_auth_id", "test_device_client_id", "edge_device"; "iot:this_device_id variable")] #[test_case("namespace-{{iot:this_device_id}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-edge_device-suffix"; "iot:this_device_id variable substring")] #[test_case("{{invalid}}", "test_device_auth_id", "test_device_client_id", "{{invalid}}"; "invalid variable")] #[test_case("{{{}bad}}}", "test_device_auth_id", "test_device_client_id", "{{{}bad}}}"; "bad variable")] #[test_case("{{{}bad}", "test_device_auth_id", "test_device_client_id", "{{{}bad}"; "bad variable 2")] #[test_case("{}bad}}", "test_device_auth_id", "test_device_client_id", "{}bad}}"; "bad variable 3")] #[test_case("{{iot:this_device_id}}{{iot:module_id}}", 
"test_device_auth_id/test_module", "test_device_client_id", "edge_devicetest_module"; "multiple variable")] #[test_case("namespace-{{iot:this_device_id}}/{{iot:module_id}}-suffix", "test_device_auth_id/test_module", "test_device_client_id", "namespace-edge_device/test_module-suffix"; "multiple variable substring")] fn visit_resource_test(input: &str, auth_id: &str, client_id: &str, expected: &str) { let request = Request::with_context( "some_identity", "some_operation", "some_resource", tests::create_publish_activity(client_id, auth_id), ) .unwrap(); assert_eq!( expected, MqttSubstituter::new("edge_device") .visit_resource(input, &request) .unwrap() ); } proptest! { #[test] fn iterator_does_not_crash(value in "[a-z\\{\\}]+") { drop(VariableIter::new(&value).collect::<Vec<_>>()); } } }
lue: &str, context: &Request<Self::Context>) -> Result<String> { Ok(self.replace_variable(value, context)) } fn visit_resource(&self, value: &str, context: &Request<Self::Context>) -> Result<String> { Ok(self.replace_variable(value, context)) } } #[derive(Debug)] pub(super) struct VariableIter<'a> { value: &'a str, index: usize, } impl<'a> VariableIter<'a> { pub fn new(value: &'a str) -> Self { Self { value, index: 0 } } } impl<'a> Iterator for VariableIter<'a> { type Item = &'a str; fn next(&mut self) -> Option<Self::Item> { let value = &self.value[self.index..]; if let Some(start) = value.find("{{") { if let Some(end) = value.find("}}") { if start < end { self.index = self.index + end + 2; return Some(&value[start..end + 2]); } } } None } } fn replace(value: &str, variable: &str, substitution: &str) -> String { value.replace(variable, substitution) } fn extract_device_id(activity: &Activity) -> &str { let auth_id = activity.client_info().auth_id().as_str(); auth_id.split('/').next().unwrap_or_default() } fn extract_module_id(activity: &Activity) -> &str { let auth_id = activity.client_info().auth_id().as_str(); auth_id.split('/').nth(1).unwrap_or_default() } #[cfg(test)] mod tests { use proptest::prelude::*; use test_case::test_case; use crate::tests; use super::*; #[test_case("{{iot:identity}}", "test_device_auth_id", "test_device_client_id", "test_device_auth_id"; "iot:identity variable")] #[test_case("namespace-{{iot:identity}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-test_device_auth_id-suffix"; "iot:identity variable substring")] #[test_case("{{mqtt:client_id}}", "test_device_auth_id", "test_device_client_id", "test_device_client_id"; "mqtt:client_id variable")] #[test_case("namespace-{{mqtt:client_id}}-suffix", "test_device_auth_id", "test_device_client_id", "namespace-test_device_client_id-suffix"; "mqtt:client_id variable substring")] #[test_case("{{iot:device_id}}", "test_device_auth_id", "test_device_client_id", 
"test_device_auth_id"; "iot
random
[ { "content": "pub fn ensure_not_empty_with_context<D, F>(value: &str, context: F) -> Result<(), Context<D>>\n\nwhere\n\n D: fmt::Display + Send + Sync,\n\n F: FnOnce() -> D,\n\n{\n\n if value.trim().is_empty() {\n\n return Err(ErrorKind::ArgumentEmpty(String::new()).context(context()));\n\n }...
Rust
libtransact/src/context/manager/sync.rs
leebradley/transact
6aca715a5cd5b89e08e2906e48e046f210c56387
/* * Copyright 2019 Bitwise IO, Inc. * Copyright 2019 Cargill Incorporated * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ----------------------------------------------------------------------------- */ use std::sync::{Arc, Mutex}; use crate::context::error::ContextManagerError; use crate::context::{manager, ContextId, ContextLifecycle}; use crate::protocol::receipt::{Event, TransactionReceipt}; use crate::state::Read; #[derive(Clone)] pub struct ContextManager { internal_manager: Arc<Mutex<manager::ContextManager>>, } impl ContextManager { pub fn new(database: Box<dyn Read<StateId = String, Key = String, Value = Vec<u8>>>) -> Self { ContextManager { internal_manager: Arc::new(Mutex::new(manager::ContextManager::new(database))), } } pub fn get( &self, context_id: &ContextId, keys: &[String], ) -> Result<Vec<(String, Vec<u8>)>, ContextManagerError> { self.internal_manager .lock() .expect("Lock in the get method was poisoned") .get(context_id, keys) } pub fn set_state( &self, context_id: &ContextId, key: String, value: Vec<u8>, ) -> Result<(), ContextManagerError> { self.internal_manager .lock() .expect("Lock in set_state was poisoned") .set_state(context_id, key, value) } pub fn delete_state( &self, context_id: &ContextId, key: &str, ) -> Result<Option<Vec<u8>>, ContextManagerError> { self.internal_manager .lock() .expect("Lock in delete_state was poisoned") .delete_state(context_id, key) } pub fn add_event( &self, context_id: &ContextId, event: Event, ) -> Result<(), 
ContextManagerError> { self.internal_manager .lock() .expect("Lock in add_event was poisoned") .add_event(context_id, event) } pub fn add_data( &self, context_id: &ContextId, data: Vec<u8>, ) -> Result<(), ContextManagerError> { self.internal_manager .lock() .expect("Lock in add_data was poisoned") .add_data(context_id, data) } } impl ContextLifecycle for ContextManager { fn create_context(&mut self, dependent_contexts: &[ContextId], state_id: &str) -> ContextId { self.internal_manager .lock() .expect("Lock in create_context was poisoned") .create_context(dependent_contexts, state_id) } fn drop_context(&mut self, context_id: ContextId) { self.internal_manager .lock() .expect("Lock in drop_context was poisoned") .drop_context(context_id) } fn get_transaction_receipt( &self, context_id: &ContextId, transaction_id: &str, ) -> Result<TransactionReceipt, ContextManagerError> { self.internal_manager .lock() .expect("Lock in get_transaction_receipt was poisoned") .get_transaction_receipt(context_id, transaction_id) } fn clone_box(&self) -> Box<dyn ContextLifecycle> { Box::new(self.clone()) } }
/* * Copyright 2019 Bitwise IO, Inc. * Copyright 2019 Cargill Incorporated * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * --------------------------------------------------------------
pub fn get( &self, context_id: &ContextId, keys: &[String], ) -> Result<Vec<(String, Vec<u8>)>, ContextManagerError> { self.internal_manager .lock() .expect("Lock in the get method was poisoned") .get(context_id, keys) } pub fn set_state( &self, context_id: &ContextId, key: String, value: Vec<u8>, ) -> Result<(), ContextManagerError> { self.internal_manager .lock() .expect("Lock in set_state was poisoned") .set_state(context_id, key, value) } pub fn delete_state( &self, context_id: &ContextId, key: &str, ) -> Result<Option<Vec<u8>>, ContextManagerError> { self.internal_manager .lock() .expect("Lock in delete_state was poisoned") .delete_state(context_id, key) } pub fn add_event( &self, context_id: &ContextId, event: Event, ) -> Result<(), ContextManagerError> { self.internal_manager .lock() .expect("Lock in add_event was poisoned") .add_event(context_id, event) } pub fn add_data( &self, context_id: &ContextId, data: Vec<u8>, ) -> Result<(), ContextManagerError> { self.internal_manager .lock() .expect("Lock in add_data was poisoned") .add_data(context_id, data) } } impl ContextLifecycle for ContextManager { fn create_context(&mut self, dependent_contexts: &[ContextId], state_id: &str) -> ContextId { self.internal_manager .lock() .expect("Lock in create_context was poisoned") .create_context(dependent_contexts, state_id) } fn drop_context(&mut self, context_id: ContextId) { self.internal_manager .lock() .expect("Lock in drop_context was poisoned") .drop_context(context_id) } fn get_transaction_receipt( &self, context_id: &ContextId, transaction_id: &str, ) -> Result<TransactionReceipt, ContextManagerError> { self.internal_manager .lock() .expect("Lock in get_transaction_receipt was poisoned") .get_transaction_receipt(context_id, transaction_id) } fn clone_box(&self) -> Box<dyn ContextLifecycle> { Box::new(self.clone()) } }
--------------- */ use std::sync::{Arc, Mutex}; use crate::context::error::ContextManagerError; use crate::context::{manager, ContextId, ContextLifecycle}; use crate::protocol::receipt::{Event, TransactionReceipt}; use crate::state::Read; #[derive(Clone)] pub struct ContextManager { internal_manager: Arc<Mutex<manager::ContextManager>>, } impl ContextManager { pub fn new(database: Box<dyn Read<StateId = String, Key = String, Value = Vec<u8>>>) -> Self { ContextManager { internal_manager: Arc::new(Mutex::new(manager::ContextManager::new(database))), } }
random
[ { "content": "-- Copyright 2021 Cargill Incorporated\n", "file_path": "libtransact/src/state/merkle/sql/migration/postgres/migrations/2021-07-29-105100-change-log/up.sql", "rank": 0, "score": 165368.19925065702 }, { "content": "-- Copyright 2021 Cargill Incorporated\n", "file_path": "lib...
Rust
src/bin/aoc2021/day12.rs
knutwalker/aoc
711aa804ab14fc2c376db5a4140a845fa902068d
use aoc::ProcessInput; use indexmap::IndexSet; use std::{convert::Infallible, str::FromStr}; type Input = Cave; type Output = usize; register!( "input/day12.txt"; (cave: input!(process Input)) -> Output { cave.count_paths(false); cave.count_paths(true); } ); #[derive(Clone, Copy, Debug)] pub struct Cave { graph: [u16; 16], start: u8, end: u8, on_visit: u16, } impl Cave { fn count_paths(self, can_visit_twice: bool) -> usize { fn iterate(c: &Cave, node: u8, visited: u16, twice: bool) -> usize { let mut to_visit = c.graph[usize::from(node)]; to_visit &= !visited | [0, c.on_visit][usize::from(twice)]; let mut paths = 0; while to_visit != 0 { let next = to_visit & to_visit.wrapping_neg(); to_visit &= to_visit - 1; let next_node = next.trailing_zeros() as u8; if next_node == c.end { paths += 1; } else { let next_twice = twice && next & visited != next; let next_visited = visited | (c.on_visit & next); paths += iterate(c, next_node, next_visited, next_twice); } } paths } iterate( &self, self.start, 1 << self.start, can_visit_twice, ) } } impl ProcessInput for Cave { type In = input!(parse Connection); type Out = Self; fn process(input: <Self::In as aoc::PuzzleInput>::Out) -> Self::Out { let mut ids = IndexSet::new(); let mut graph = [0; 16]; for path in input { let source = ids.insert_full(path.source).0; let target = ids.insert_full(path.target).0; graph[source] |= 1 << target; graph[target] |= 1 << source; } let on_visit = ids .iter() .map(|id| match id { CaveType::Small(_) => 1, _ => 0, }) .enumerate() .fold(0, |on_visit, (id, ov)| on_visit | (ov << id)); let start = ids.get_index_of(&CaveType::Start).unwrap() as u8; let end = ids.get_index_of(&CaveType::End).unwrap() as u8; Self { graph, start, end, on_visit, } } } #[derive(Clone, Debug)] pub struct Connection { source: CaveType, target: CaveType, } impl FromStr for Connection { type Err = Infallible; fn from_str(s: &str) -> Result<Self, Self::Err> { let (source, target) = s.split_once('-').unwrap(); Ok(Self { source: 
source.parse()?, target: target.parse()?, }) } } #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum CaveType { Start, End, Small(String), Big(String), } impl FromStr for CaveType { type Err = Infallible; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(match s { "start" => Self::Start, "end" => Self::End, small if small.as_bytes()[0].is_ascii_lowercase() => Self::Small(small.to_string()), big => Self::Big(big.to_string()), }) } } #[cfg(test)] mod tests { use super::*; use aoc::{Solution, SolutionExt}; use test::Bencher; #[test] fn test_ex() { let input = r#" start-A start-b A-c A-b b-d A-end b-end "#; let (res1, res2) = Solver::run_on(input); assert_eq!(res1, 10); assert_eq!(res2, 36); } #[test] fn test_ex2() { let input = r#" dc-end HN-start start-kj dc-start dc-HN LN-dc HN-end kj-sa kj-HN kj-dc "#; let (res1, res2) = Solver::run_on(input); assert_eq!(res1, 19); assert_eq!(res2, 103); } #[test] fn test_ex3() { let input = r#" fs-end he-DX fs-he start-DX pj-DX end-zg zg-sl zg-pj pj-he RW-he fs-DX pj-RW zg-RW start-pj he-WI zg-he pj-fs start-RW "#; let (res1, res2) = Solver::run_on(input); assert_eq!(res1, 226); assert_eq!(res2, 3509); } #[test] fn test() { let (res1, res2) = Solver::run_on_input(); assert_eq!(res1, 5756); assert_eq!(res2, 144_603); } #[bench] fn bench_parsing(b: &mut Bencher) { let input = Solver::puzzle_input(); b.bytes = input.len() as u64; b.iter(|| Solver::parse_input(input)); } #[bench] fn bench_pt1(b: &mut Bencher) { let cave = Solver::parse_input(Solver::puzzle_input()); b.iter(|| cave.count_paths(false)); } #[bench] fn bench_pt2(b: &mut Bencher) { let cave = Solver::parse_input(Solver::puzzle_input()); b.iter(|| cave.count_paths(true)); } }
use aoc::ProcessInput; use indexmap::IndexSet; use std::{convert::Infallible, str::FromStr}; type Input = Cave; type Output = usize; register!( "input/day12.txt"; (cave: input!(process Input)) -> Output { cave.count_paths(false); cave.count_paths(true); } ); #[derive(Clone, Copy, Debug)] pub struct Cave { graph: [u16; 16], start: u8, end: u8, on_visit: u16, } impl Cave { fn count_paths(self, can_visit_twice: bool) -> usize { fn iterate(c: &Cave, node: u8, visited: u16, twice: bool) -> usize { let mut to_visit = c.graph[usize::from(node)]; to_visit &= !visited | [0, c.on_visit][usize::from(twice)]; let mut paths = 0; while to_visit != 0 { let next = to_visit & to_visit.wrapping_neg(); to_visit &= to_visit - 1; let next_node = next.trailing_zeros() as u8; if next_node == c.end { paths += 1; } else { let next_twice = twice && next & visited != next; let next_visited = visited | (c.on_visit & next); paths += iterate(c, next_node, next_visited, next_twice); } } paths } iterate( &self, self.start, 1 << self.start, can_visit_twice, ) } } impl ProcessInput for Cave { type In = input!(parse Connection); type Out = Self; fn process(input: <Self::In as aoc::PuzzleInput>::Out) -> Self::Out { let mut ids = IndexSet::new(); let mut graph = [0; 16]; for path in input { let source = ids.insert_full(path.source).0; let target = ids.insert_full(path.target).0; graph[source] |= 1 << target; graph[target] |= 1 << source; } let on_visit = ids .iter() .map(|id| match id { CaveType::Small(_) => 1, _ => 0, }) .enumerate() .fold(0, |on_visit, (id, ov)| on_visit | (ov << id)); let start = ids.get_index_of(&CaveType::Start).unwrap() as u8; let end = ids.get_index_of(&CaveType::End).unwrap() as u8; Self { graph, start, end, on_visit, } } } #[derive(Clone, Debug)] pub struct Connection { source: CaveType, target: CaveType, } impl FromStr for Connection { type Err = Infallible; fn from_str(s: &str) -> Result<Self, Self::Err> { let (source, target) = s.split_once('-').unwrap(); Ok(Self { source: 
source.parse()?, target: target.parse()?, }) } } #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum CaveType { Start, End, Small(String), Big(String), } impl FromStr for CaveType { type Err = Infallible; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(match s { "start" => Self::Start, "end" => Self::End, small if small.as_bytes()[0].is_ascii_lowercase() => Self::Small(small.to_string()), big => Self::Big(big.to_string()), }) } } #[cfg(test)] mod tests { use super::*; use aoc::{Solution, SolutionExt}; use test::Bencher; #[test] fn test_ex() { let input = r#" start-A start-b A-c A-b b-d A-end b-end "#; let (res1, res2) = Solver::run_on(input); assert_eq!(res1, 10); assert_eq!(res2, 36); } #[test] fn test_ex2() { let input = r#" dc-end HN-start start-kj dc-start dc-HN LN-dc HN-end kj-sa kj-HN kj-dc "#; let (res1, res2) = Solver::run_on(input); assert_eq!(res1, 19); assert_eq!(res2, 103); } #[test] fn test_ex3() { let input = r#" fs-end he-DX fs-he start-DX pj-DX end-zg zg-sl zg-pj pj-he RW-
#[test] fn test() { let (res1, res2) = Solver::run_on_input(); assert_eq!(res1, 5756); assert_eq!(res2, 144_603); } #[bench] fn bench_parsing(b: &mut Bencher) { let input = Solver::puzzle_input(); b.bytes = input.len() as u64; b.iter(|| Solver::parse_input(input)); } #[bench] fn bench_pt1(b: &mut Bencher) { let cave = Solver::parse_input(Solver::puzzle_input()); b.iter(|| cave.count_paths(false)); } #[bench] fn bench_pt2(b: &mut Bencher) { let cave = Solver::parse_input(Solver::puzzle_input()); b.iter(|| cave.count_paths(true)); } }
he fs-DX pj-RW zg-RW start-pj he-WI zg-he pj-fs start-RW "#; let (res1, res2) = Solver::run_on(input); assert_eq!(res1, 226); assert_eq!(res2, 3509); }
function_block-function_prefixed
[ { "content": "pub fn lines(s: &str) -> impl Iterator<Item = &str> + '_ {\n\n s.lines().map(str::trim).filter(|line| !line.is_empty())\n\n}\n\n\n\npub struct PuzzleSolution<T> {\n\n pub part1: T,\n\n pub part2: T,\n\n pub parse_time: Duration,\n\n pub part1_time: Duration,\n\n pub part2_time: D...
Rust
Assembler/src/asm/statement.rs
karannewatia/SCALE-MAMBA
467b33a6c80050789204ea3ee3b5cf0113354f85
use super::{Instruction, IoInstruction, Statement}; use crate::binary::instructions::RegisterMode; use crate::compiler::Compiler; use crate::lexer::{MapAllValues, Register}; use crate::span::Spanned; use crate::transforms::vectorize; use std::num::NonZeroU32; struct RegisterModeRead; struct RegisterModeWrite; trait RegisterModeTrait { const MODE: RegisterMode; } impl RegisterModeTrait for RegisterModeRead { const MODE: RegisterMode = RegisterMode::Read; } impl RegisterModeTrait for RegisterModeWrite { const MODE: RegisterMode = RegisterMode::Write; } #[derive(Clone, Debug)] pub struct RegisterIterator { v: NonZeroU32, registers: Vec<Register>, } impl RegisterIterator { pub fn is_empty(&self) -> bool { self.registers.is_empty() } } impl<'a> IntoIterator for &'a RegisterIterator { type Item = Register; type IntoIter = Box<dyn Iterator<Item = Register> + 'a>; fn into_iter(self) -> Self::IntoIter { let v = self.v; Box::new(self.registers.iter().flat_map(move |&r| vectorize(v, r))) } } impl IntoIterator for RegisterIterator { type Item = Register; type IntoIter = Box<dyn Iterator<Item = Register>>; fn into_iter(self) -> Self::IntoIter { let v = self.v; Box::new( self.registers .into_iter() .flat_map(move |r| vectorize(v, r)), ) } } impl<'a> Statement<'a> { pub fn write_registers(&self, cx: &Compiler) -> RegisterIterator { RegisterIterator { v: self.vectorized.elem, registers: self.write_registers_base(cx), } } pub fn read_registers(&self, cx: &Compiler) -> RegisterIterator { RegisterIterator { v: self.vectorized.elem, registers: self.read_registers_base(cx), } } pub fn write_registers_base(&self, cx: &Compiler) -> Vec<Register> { self.read_registers_generic::<RegisterModeWrite>(cx) } pub fn read_registers_base(&self, cx: &Compiler) -> Vec<Register> { self.read_registers_generic::<RegisterModeRead>(cx) } fn read_registers_generic<T: RegisterModeTrait>(&self, cx: &Compiler) -> Vec<Register> { use crate::binary::instructions::ArgTy; let relexed = self.relex(cx).0; let 
(instr, args) = match relexed.fetch_instr(cx) { Some(i) => i, None => return Vec::new(), }; let mut list = Vec::new(); let mut args_iter = args.iter(); for decl_arg in instr.args { match decl_arg.ty { ArgTy::Register(_, mode) if mode == T::MODE => { list.push(args_iter.next().unwrap().require(cx).elem) } ArgTy::List { element_type, len_arg, } => { let arg_pos = instr .args .iter() .position(|arg| arg.name == len_arg) .unwrap(); let len: Spanned<i32> = args[arg_pos].require(cx); let len = match &instr.args[arg_pos].ty { ArgTy::Int { signed: false, offset, } => len.elem - offset, ty => panic!("invalid array length type {:?}", ty), }; match element_type { ArgTy::List { .. } => unimplemented!("nested lists"), ArgTy::Register(_, mode) if *mode == T::MODE => { for _ in 0..len { list.push(args_iter.next().unwrap().require(cx).elem); } } _ => { for _ in 0..len { args_iter.next().unwrap(); } } } } _ => { args_iter.next().unwrap(); } } } assert_eq!(args_iter.next(), None, "{:?}", relexed); list } pub fn memory_read(&self, cx: &Compiler) -> bool { let relexed = self.relex(cx).0; let (instr, _) = match relexed.fetch_instr(cx) { Some(i) => i, None => return false, }; instr.mem_read } pub fn memory_write(&self, cx: &Compiler) -> bool { let relexed = self.relex(cx).0; let (instr, _) = match relexed.fetch_instr(cx) { Some(i) => i, None => return false, }; instr.mem_write } pub fn replace_registers(&mut self, cx: &Compiler, mut f: impl FnMut(Register) -> Register) { match &mut self.instr { Instruction::Assign { destination, value } => { destination.map_all_values(cx, &mut f); value.map_all_values(cx, &mut f); } Instruction::Io { instr: IoInstruction::OutputShares { registers }, .. 
} => { for register in registers { register.map_all_values(cx, &mut f); } } Instruction::StartOpen { registers } => { for register in registers { register.map_all_values(cx, &mut f); } } Instruction::StopOpen { registers } => { for reg in registers { reg.map_all_values(cx, &mut f); } } Instruction::General { destinations, values, .. } => { for value in values { value.map_all_values(cx, &mut f); } for dest in destinations { dest.map_all_values(cx, &mut f); } } Instruction::Nop => {} Instruction::Io { instr, .. } => instr.replace_registers(cx, &mut f), } } pub fn is_barrier(&self, cx: &Compiler) -> bool { let relexed = self.relex(cx).0; let (instr, _) = match relexed.fetch_instr(cx) { Some(i) => i, None => return false, }; instr.barrier } pub fn is_startopen(&self) -> bool { matches!(self.instr, Instruction::StartOpen { .. }) } pub fn is_stopopen(&self) -> bool { matches!(self.instr, Instruction::StopOpen { .. }) } } impl IoInstruction { pub fn replace_registers(&mut self, cx: &Compiler, mut f: impl FnMut(Register) -> Register) { match self { IoInstruction::InputShares { registers } => { for reg in registers { reg.map_all_values(cx, &mut f); } } IoInstruction::OutputShares { registers } => { for reg in registers { reg.map_all_values(cx, &mut f); } } } } }
use super::{Instruction, IoInstruction, Statement}; use crate::binary::instructions::RegisterMode; use crate::compiler::Compiler; use crate::lexer::{MapAllValues, Register}; use crate::span::Spanned; use crate::transforms::vectorize; use std::num::NonZeroU32; struct RegisterModeRead; struct RegisterModeWrite; trait RegisterModeTrait { const MODE: RegisterMode; } impl RegisterModeTrait for RegisterModeRead { const MODE: RegisterMode = RegisterMode::Read; } impl RegisterModeTrait for RegisterModeWrite { const MODE: RegisterMode = RegisterMode::Write; } #[derive(Clone, Debug)] pub struct RegisterIterator { v: NonZeroU32, registers: Vec<Register>, } impl RegisterIterator { pub fn is_empty(&self) -> bool { self.registers.is_empty() } } impl<'a> IntoIterator for &'a RegisterIterator { type Item = Register; type IntoIter = Box<dyn Iterator<Item = Register> + 'a>; fn into_iter(self) -> Self::IntoIter { let v = self.v; Box::new(self.registers.iter().flat_map(move |&r| vectorize(v, r))) } } impl IntoIterator for RegisterIterator { type Item = Register; type IntoIter = Box<dyn Iterator<Item = Register>>; fn into_iter(self) -> Self::IntoIter { let v = self.v; Box::new( self.registers .into_iter() .flat_map(move |r| vectorize(v, r)), ) } } impl<'a> Statement<'a> { pub fn write_registers(&self, cx: &Compiler) -> RegisterIterator { RegisterIterator { v: self.vectorized.elem, registers: self.write_registers_base(cx), } } pub fn read_registers(&self, cx: &Compiler) -> RegisterIterator { RegisterIterator { v: self.vectorized.elem, registers: self.read_registers_base(cx), } } pub fn write_registers_base(&self, cx: &Compiler) -> Vec<Register> { self.read_registers_generic::<RegisterModeWrite>(cx) } pub fn read_registers_base(&self, cx: &Compiler) -> Vec<Register> { self.read_registers_generic::<RegisterModeRead>(cx) } fn read_registers_generic<T: RegisterModeTrait>(&self, cx: &Compiler) -> Vec<Register> { use crate::binary::instructions::ArgTy; let relexed = self.relex(cx).0; let 
(instr, args) = match relexed.fetch_instr(cx) { Some(i) => i, None => return Vec::new(), }; let mut list = Vec::new(); let mut args_iter = args.iter(); for decl_arg in instr.args { match decl_arg.ty { ArgTy::Register(_, mode) if mode == T::MODE => { list.push(args_iter.next().unwrap().require(cx).elem) } ArgTy::List { element_type, len_arg, } => { let arg_pos = instr .args .iter() .position(|arg| arg.name == len_arg) .unwrap(); let len: Spanned<i32> = args[arg_pos].require(cx); let len = match &instr.args[arg_pos].ty { ArgTy::Int { signed: false, offset, } => len.elem - offset, ty => panic!("invalid array length type {:?}", ty), }; match element_type { ArgTy::List { .. } => unimplemented!("nested lists"), ArgTy::Register(_, mode) if *mode == T::MODE => { for _ in 0..len { list.push(args_iter.next().unwrap().require(cx).elem); } } _ => { for _ in 0..len { args_iter.next().unwrap(); } } } } _ => { args_iter.next().unwrap(); } } } assert_eq!(args_iter.next(), None, "{:?}", relexed); list } pub fn memory_read(&self, cx: &Compiler) -> bool { let relexed = self.relex(cx).0; let (instr, _) = match relexed.fetch_instr(cx) { Some(i) => i, None => return false, }; instr.mem_read } pub fn memory_write(&self, cx: &Compiler) -> bool { let relexed = self.relex(cx).0; let (instr, _) = match relexed.fetch_instr(cx) { Some(i) => i, None => return false, }; instr.mem_write } pub fn replace_registers(&mut self, cx: &Compiler, mut f: impl FnMut(Register) -> Register) { match &mut self.instr { Instruction::Assign { destination, value } => { destination.map_all_values(cx, &mut f); value.map_all_values(cx, &mut f); } Instruction::Io { instr: IoInstruction::OutputShares { registers }, .. 
} => { for register in registers { register.map_all_values(cx, &mut f); } } Instruction::StartOpen { registers } => { for register in registers { register.map_all_values(cx, &mut f); } } Instruction::StopOpen { registers } => { for reg in registers { reg.map_all_values(cx, &mut f); } } Instruction::General { destinations, values, .. } => { for value in values { value.map_all_values(cx, &mut f); } for dest in destinations { dest.map_all_values(cx, &mut f); } } Instruction::Nop => {} Instruction::Io { instr, .. } => instr.replace_registers(cx, &mut f), } } pub fn is_barrier(&self, cx: &Compiler) -> bool { let relexed = self.relex(cx).0; let (instr, _) = match relexed.fetch_instr(cx) { Some(i) => i, None => return false, }; instr.barrier } pub fn is_startopen(&self) -> bool { matches!(self.instr, Instruction::StartOpen { .. }) } pub fn is_stopopen(&self) -> bool { matches!(self.instr, Instruction::StopOpen { .. }) } } impl IoInstruction { pub fn replace_registers(&mut self, cx: &Compiler, mut f: impl FnMut(Register) -> Register) {
}
match self { IoInstruction::InputShares { registers } => { for reg in registers { reg.map_all_values(cx, &mut f); } } IoInstruction::OutputShares { registers } => { for reg in registers { reg.map_all_values(cx, &mut f); } } } }
function_block-function_prefix_line
[ { "content": "/// Returns `true` if the statement is valid\n\npub fn validate(cx: &Compiler, stmt: &Statement<'_>) -> bool {\n\n let relexed = stmt.relex(cx);\n\n let (instr, args) = match relexed.0.fetch_instr(cx) {\n\n Some(i) => i,\n\n None => return true,\n\n };\n\n for (arg_index,...
Rust
firmware_rust/AirQualitySensor/src/bin/firmware.rs
Tao173/AirQualitySensor
735faece883f3b21394d61d2431b15c6accc54d8
#![no_main] #![no_std] use firmware as _; use firmware::hal; #[rtic::app(device = firmware::hal::stm32, peripherals = true, dispatchers = [USART1, USART2])] mod app { use super::hal; use hal::gpio; use hal::prelude::*; use hal::stm32; use hal::timer; use systick_monotonic::*; use firmware::subsystems::{Galvos, LEDs, Sensor, GALVO_CALIBRATION}; #[monotonic(binds = SysTick, default = true)] type SystickMono = Systick<1000>; #[local] struct LocalResources { next_sensor_tick: <SystickMono as rtic::Monotonic>::Instant, next_led_tick: <SystickMono as rtic::Monotonic>::Instant, next_galvo_tick: <SystickMono as rtic::Monotonic>::Instant, watchdog: hal::watchdog::IndependedWatchdog, } #[shared] struct SharedResources { sensor_subsystem: Sensor, galvo_subsystem: Galvos< timer::pwm::PwmPin<stm32::TIM16, timer::Channel1>, timer::pwm::PwmPin<stm32::TIM14, timer::Channel1>, timer::pwm::PwmPin<stm32::TIM1, timer::Channel1>, >, led_subsystem: LEDs< gpio::gpioa::PA0<gpio::Output<gpio::PushPull>>, gpio::gpioa::PA1<gpio::Output<gpio::PushPull>>, gpio::gpioa::PA2<gpio::Output<gpio::PushPull>>, >, i2c: hal::i2c::I2c< stm32::I2C2, gpio::gpioa::PA12<gpio::Output<gpio::OpenDrain>>, gpio::gpioa::PA11<gpio::Output<gpio::OpenDrain>>, >, } #[init] fn init(ctx: init::Context) -> (SharedResources, LocalResources, init::Monotonics) { ctx.device.RCC.ahbenr.modify(|_, w| w.dmaen().set_bit()); defmt::println!("Finomnis' AirQualitySensor - Galvo Version"); let mut rcc = ctx.device.RCC.constrain(); let gpioa = ctx.device.GPIOA.split(&mut rcc); let i2c_sda = gpioa.pa12.into_open_drain_output(); let i2c_scl = gpioa.pa11.into_open_drain_output(); let delay = ctx.core.SYST.delay(&mut rcc); let i2c = ctx .device .I2C2 .i2c(i2c_sda, i2c_scl, hal::i2c::Config::new(100.khz()), &mut rcc); let now = monotonics::now(); let sensor_subsystem = Sensor::new(); let next_sensor_tick = now; sensor_tick::spawn_at(next_sensor_tick).unwrap(); let led_subsystem = LEDs::new( gpioa.pa0.into_push_pull_output(), 
gpioa.pa1.into_push_pull_output(), gpioa.pa2.into_push_pull_output(), ); let next_led_tick = now + 1.millis(); led_tick::spawn_at(next_led_tick).unwrap(); let pwm1 = ctx.device.TIM16.pwm(1.khz(), &mut rcc).bind_pin(gpioa.pa6); let pwm2 = ctx.device.TIM14.pwm(1.khz(), &mut rcc).bind_pin(gpioa.pa4); let pwm3 = ctx.device.TIM1.pwm(1.khz(), &mut rcc).bind_pin(gpioa.pa7); let galvo_subsystem = Galvos::new(( (pwm1, false, GALVO_CALIBRATION.0), (pwm2, false, GALVO_CALIBRATION.1), (pwm3, true, GALVO_CALIBRATION.2), )); let next_galvo_tick = now + 2.millis(); galvo_tick::spawn_at(next_galvo_tick).unwrap(); let mut watchdog = ctx.device.IWDG.constrain(); watchdog.start(2000.ms()); watchdog_wagger::spawn_at(monotonics::now()).unwrap(); ( SharedResources { i2c, sensor_subsystem, led_subsystem, galvo_subsystem, }, LocalResources { next_sensor_tick, next_led_tick, next_galvo_tick, watchdog, }, init::Monotonics(Systick::new(delay.release(), rcc.clocks.sys_clk.0)), ) } #[task(shared = [i2c, sensor_subsystem, led_subsystem, galvo_subsystem], local = [next_sensor_tick])] fn sensor_tick(mut ctx: sensor_tick::Context) { let next_sensor_tick = ctx.local.next_sensor_tick; let delay = ctx.shared.i2c.lock(|i2c| { ctx.shared.sensor_subsystem.lock(|sensor_subsystem| { let (delay, changed) = sensor_subsystem.tick(i2c); if changed { ctx.shared.led_subsystem.lock(|led_subsystem| { led_subsystem.update_leds(sensor_subsystem.get_value()) }); ctx.shared.galvo_subsystem.lock(|galvo_subsystem| { galvo_subsystem.update_values(sensor_subsystem.get_value()) }); } delay }) }); *next_sensor_tick = *next_sensor_tick + delay.millis(); sensor_tick::spawn_at(*next_sensor_tick).unwrap(); } #[task(shared = [sensor_subsystem, led_subsystem], local = [next_led_tick])] fn led_tick(mut ctx: led_tick::Context) { let next_led_tick = ctx.local.next_led_tick; let sensor_value = ctx.shared.sensor_subsystem.lock(|s| s.get_value().clone()); let delay = ctx.shared.led_subsystem.lock(|s| s.tick(&sensor_value)); 
*next_led_tick = *next_led_tick + delay.millis(); led_tick::spawn_at(*next_led_tick).unwrap(); } #[task(shared = [galvo_subsystem], local = [next_galvo_tick])] fn galvo_tick(mut ctx: galvo_tick::Context) { let next_galvo_tick = ctx.local.next_galvo_tick; let delay = ctx.shared.galvo_subsystem.lock(|s| s.tick()); *next_galvo_tick = *next_galvo_tick + delay.millis(); galvo_tick::spawn_at(*next_galvo_tick).unwrap(); } #[task(local = [watchdog])] fn watchdog_wagger(ctx: watchdog_wagger::Context) { ctx.local.watchdog.feed(); watchdog_wagger::spawn_at(monotonics::now() + 100.millis()).unwrap(); } }
#![no_main] #![no_std] use firmware as _; use firmware::hal; #[rtic::app(device = firmware::hal::stm32, peripherals = true, dispatchers = [USART1, USART2])] mod app { use super::hal; use hal::gpio; use hal::prelude::*; use hal::stm32; use hal::timer; use systick_monotonic::*; use firmware::subsystems::{Galvos, LEDs, Sensor, GALVO_CALIBRATION}; #[monotonic(binds = SysTick, default = true)] type SystickMono = Systick<1000>; #[local] struct LocalResources { next_sensor_tick: <SystickMono as rtic::Monotonic>::Instant, next_led_tick: <SystickMono as rtic::Monotonic>::Instant, next_galvo_tick: <SystickMono as rtic::Monotonic>::Instant, watchdog: hal::watchdog::IndependedWatchdog, } #[shared] struct SharedResources { sensor_subsystem: Sensor, galvo_subsystem: Galvos< timer::pwm::PwmPin<stm32::TIM16, timer::Channel1>, timer::pwm::PwmPin<stm32::TIM14, timer::Channel1>, timer::pwm::PwmPin<stm32::TIM1, timer::Channel1>, >, led_subsystem: LEDs< gpio::gpioa::PA0<gpio::Output<gpio::PushPull>>, gpio::gpioa::PA1<gpio::Output<gpio::PushPull>>, gpio::gpioa::PA2<gpio::Output<gpio::PushPull>>, >, i2c: hal::i2c::I2c< stm32::I2C2, gpio::gpioa::PA12<gpio::Output<gpio::OpenDrain>>, gpio::gpioa::PA11<gpio::Output<gpio::OpenDrain>>, >, } #[init] fn init(ctx: init::Context) -> (SharedResources, LocalResources, init::Monotonics) { ctx.device.RCC.ahbenr.modify(|_, w| w.dmaen().set_bit()); defmt::println!("Finomnis' AirQualitySensor - Galvo Version"); let mut rcc = ctx.device.RCC.constrain(); let gpioa = ctx.device.GPIOA.split(&mut rcc); let i2c_sda = gpioa.pa12.into_open_drain_output(); let i2c_scl = gpioa.pa11.into_open_drain_output(); l
into_push_pull_output(), gpioa.pa1.into_push_pull_output(), gpioa.pa2.into_push_pull_output(), ); let next_led_tick = now + 1.millis(); led_tick::spawn_at(next_led_tick).unwrap(); let pwm1 = ctx.device.TIM16.pwm(1.khz(), &mut rcc).bind_pin(gpioa.pa6); let pwm2 = ctx.device.TIM14.pwm(1.khz(), &mut rcc).bind_pin(gpioa.pa4); let pwm3 = ctx.device.TIM1.pwm(1.khz(), &mut rcc).bind_pin(gpioa.pa7); let galvo_subsystem = Galvos::new(( (pwm1, false, GALVO_CALIBRATION.0), (pwm2, false, GALVO_CALIBRATION.1), (pwm3, true, GALVO_CALIBRATION.2), )); let next_galvo_tick = now + 2.millis(); galvo_tick::spawn_at(next_galvo_tick).unwrap(); let mut watchdog = ctx.device.IWDG.constrain(); watchdog.start(2000.ms()); watchdog_wagger::spawn_at(monotonics::now()).unwrap(); ( SharedResources { i2c, sensor_subsystem, led_subsystem, galvo_subsystem, }, LocalResources { next_sensor_tick, next_led_tick, next_galvo_tick, watchdog, }, init::Monotonics(Systick::new(delay.release(), rcc.clocks.sys_clk.0)), ) } #[task(shared = [i2c, sensor_subsystem, led_subsystem, galvo_subsystem], local = [next_sensor_tick])] fn sensor_tick(mut ctx: sensor_tick::Context) { let next_sensor_tick = ctx.local.next_sensor_tick; let delay = ctx.shared.i2c.lock(|i2c| { ctx.shared.sensor_subsystem.lock(|sensor_subsystem| { let (delay, changed) = sensor_subsystem.tick(i2c); if changed { ctx.shared.led_subsystem.lock(|led_subsystem| { led_subsystem.update_leds(sensor_subsystem.get_value()) }); ctx.shared.galvo_subsystem.lock(|galvo_subsystem| { galvo_subsystem.update_values(sensor_subsystem.get_value()) }); } delay }) }); *next_sensor_tick = *next_sensor_tick + delay.millis(); sensor_tick::spawn_at(*next_sensor_tick).unwrap(); } #[task(shared = [sensor_subsystem, led_subsystem], local = [next_led_tick])] fn led_tick(mut ctx: led_tick::Context) { let next_led_tick = ctx.local.next_led_tick; let sensor_value = ctx.shared.sensor_subsystem.lock(|s| s.get_value().clone()); let delay = ctx.shared.led_subsystem.lock(|s| 
s.tick(&sensor_value)); *next_led_tick = *next_led_tick + delay.millis(); led_tick::spawn_at(*next_led_tick).unwrap(); } #[task(shared = [galvo_subsystem], local = [next_galvo_tick])] fn galvo_tick(mut ctx: galvo_tick::Context) { let next_galvo_tick = ctx.local.next_galvo_tick; let delay = ctx.shared.galvo_subsystem.lock(|s| s.tick()); *next_galvo_tick = *next_galvo_tick + delay.millis(); galvo_tick::spawn_at(*next_galvo_tick).unwrap(); } #[task(local = [watchdog])] fn watchdog_wagger(ctx: watchdog_wagger::Context) { ctx.local.watchdog.feed(); watchdog_wagger::spawn_at(monotonics::now() + 100.millis()).unwrap(); } }
et delay = ctx.core.SYST.delay(&mut rcc); let i2c = ctx .device .I2C2 .i2c(i2c_sda, i2c_scl, hal::i2c::Config::new(100.khz()), &mut rcc); let now = monotonics::now(); let sensor_subsystem = Sensor::new(); let next_sensor_tick = now; sensor_tick::spawn_at(next_sensor_tick).unwrap(); let led_subsystem = LEDs::new( gpioa.pa0.
function_block-random_span
[]
Rust
third-party/rust/shed/futures_stats/src/futures01.rs
baioc/antlir
e3b47407b72c4aee835adf4e68fccd9abff457f2
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under both the MIT license found in the * LICENSE-MIT file in the root directory of this source tree and the Apache * License, Version 2.0 found in the LICENSE-APACHE file in the root directory * of this source tree. */ use futures_01_ext::{BoxFuture, BoxFutureNonSend, BoxStream, FutureExt, StreamExt}; use futures_old::{Async, Future, IntoFuture, Poll, Stream}; use std::time::{Duration, Instant}; use super::{FutureStats, StreamStats}; pub struct TimedFuture<F> { inner: F, start: Option<Instant>, poll_count: u64, poll_time: Duration, } impl<F> TimedFuture<F> { fn new(future: F) -> Self { TimedFuture { inner: future, start: None, poll_count: 0, poll_time: Duration::from_secs(0), } } } impl<F: Future> Future for TimedFuture<F> { type Item = (Result<F::Item, F::Error>, FutureStats); type Error = !; fn poll(&mut self) -> Poll<Self::Item, Self::Error> { let _ = self.start.get_or_insert_with(Instant::now); self.poll_count += 1; let poll_start = Instant::now(); let poll = self.inner.poll(); self.poll_time += poll_start.elapsed(); let res = match poll { Ok(Async::NotReady) => return Ok(Async::NotReady), Ok(Async::Ready(v)) => Ok(v), Err(e) => Err(e), }; let stats = FutureStats { completion_time: self.start.expect("start time not set").elapsed(), poll_time: self.poll_time, poll_count: self.poll_count, }; Ok(Async::Ready((res, stats))) } } pub struct TimedStream<S, C, R> where R: IntoFuture<Item = (), Error = ()> + 'static, S: Stream, { inner: S, callback: Option<C>, callback_future: Option<R::Future>, start: Option<Instant>, stream_result: Option<Result<(), S::Error>>, count: usize, poll_count: u64, poll_time: Duration, first_item_time: Option<Duration>, } impl<S, C, R> TimedStream<S, C, R> where R: IntoFuture<Item = (), Error = ()> + 'static, S: Stream, { fn new(stream: S, callback: C) -> Self { TimedStream { inner: stream, callback: Some(callback), callback_future: None, start: None, 
stream_result: None, count: 0, poll_count: 0, poll_time: Duration::from_secs(0), first_item_time: None, } } } impl<S, C, R> Stream for TimedStream<S, C, R> where S: Stream, C: FnOnce(StreamStats, Result<(), &S::Error>) -> R, R: IntoFuture<Item = (), Error = ()> + 'static, { type Item = S::Item; type Error = S::Error; fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> { if self.callback_future.is_some() { return self.poll_callback_future(); } let _ = self.start.get_or_insert_with(Instant::now); self.poll_count += 1; let poll_start = Instant::now(); let poll = self.inner.poll(); self.poll_time += poll_start.elapsed(); match poll { Ok(Async::NotReady) => Ok(Async::NotReady), notfinished @ Ok(Async::Ready(Some(_))) => { self.count += 1; if self.count == 1 { self.first_item_time = Some(self.start.expect("start time not set").elapsed()); } notfinished } Ok(Async::Ready(None)) => { let callback_future = self.run_callback(Ok(())); self.stream_result = Some(Ok(())); self.callback_future = Some(callback_future.into_future()); self.poll_callback_future() } Err(err) => { let callback_future = self.run_callback(Err(&err)); self.stream_result = Some(Err(err)); self.callback_future = Some(callback_future.into_future()); self.poll_callback_future() } } } } impl<S, C, R> TimedStream<S, C, R> where S: Stream, C: FnOnce(StreamStats, Result<(), &S::Error>) -> R, R: IntoFuture<Item = (), Error = ()> + 'static, { fn run_callback(&mut self, res: Result<(), &S::Error>) -> R { let stats = StreamStats { completion_time: self.start.expect("start time not set").elapsed(), poll_time: self.poll_time, poll_count: self.poll_count, count: self.count, first_item_time: self.first_item_time, }; let callback = self.callback.take().expect("callback was already called"); callback(stats, res) } fn poll_callback_future( &mut self, ) -> Poll<Option<<Self as Stream>::Item>, <Self as Stream>::Error> { if let Some(ref mut fut) = self.callback_future { let poll = fut.poll(); if poll == 
Ok(Async::NotReady) { return Ok(Async::NotReady); } let stream_result = self .stream_result .take() .expect("stream result should have been set"); match stream_result { Ok(()) => Ok(Async::Ready(None)), Err(err) => Err(err), } } else { panic!("callback future is not set!"); } } } fn time_future<F, C, R>(future: F, callback: C) -> impl Future<Item = F::Item, Error = F::Error> where F: Future, C: FnOnce(FutureStats, Result<&F::Item, &F::Error>) -> R, R: IntoFuture<Item = (), Error = ()> + 'static, R::Future: 'static, { TimedFuture::new(future).then(|res| { let (res, stats) = res.expect("unexpected unreachable err"); callback(stats, res.as_ref()).into_future().then(|_| res) }) } fn future_with_timing<F>( future: F, ) -> impl Future<Item = (FutureStats, F::Item), Error = (FutureStats, F::Error)> where F: Future, { TimedFuture::new(future).then(|res| { let (real_res, stats) = res.expect("unexpected unreachable err"); match real_res { Ok(r) => Ok((stats, r)), Err(e) => Err((stats, e)), } }) } pub trait Timed: Future + Sized + Send + 'static { fn timed<C, R>(self, callback: C) -> BoxFuture<Self::Item, Self::Error> where C: FnOnce(FutureStats, Result<&Self::Item, &Self::Error>) -> R + Send + 'static, R: IntoFuture<Item = (), Error = ()> + 'static, R::Future: Send + 'static, Self::Item: Send, Self::Error: Send, { time_future(self, callback).boxify() } fn collect_timing(self) -> BoxFuture<(FutureStats, Self::Item), (FutureStats, Self::Error)> where Self::Item: Send, Self::Error: Send, { future_with_timing(self).boxify() } } pub trait TimedNonSend: Future + Sized + 'static { fn timed_nonsend<C, R>(self, callback: C) -> BoxFutureNonSend<Self::Item, Self::Error> where C: FnOnce(FutureStats, Result<&Self::Item, &Self::Error>) -> R + 'static, R: IntoFuture<Item = (), Error = ()> + 'static, R::Future: 'static, { time_future(self, callback).boxify_nonsend() } fn collect_timing( self, ) -> BoxFutureNonSend<(FutureStats, Self::Item), (FutureStats, Self::Error)> { 
future_with_timing(self).boxify_nonsend() } } impl<T: Future + Send + 'static> Timed for T {} impl<T: Future + 'static> TimedNonSend for T {} pub trait TimedStreamTrait: Stream + Sized + Send + 'static { fn timed<C, R>(self, callback: C) -> BoxStream<Self::Item, Self::Error> where C: FnOnce(StreamStats, Result<(), &Self::Error>) -> R + Send + 'static, R: IntoFuture<Item = (), Error = ()> + Send + 'static, R::Future: 'static, <R as futures_old::IntoFuture>::Future: Send, Self::Item: Send, Self::Error: Send, { TimedStream::new(self, callback).boxify() } } impl<T: Stream + Send + 'static> TimedStreamTrait for T {} #[cfg(test)] mod tests { use super::*; use anyhow::Error; use futures_old::future::{err, ok}; use futures_old::stream::{iter_ok, once}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; #[test] fn test_timed_stream_simple() { let callback_called = Arc::new(AtomicBool::new(false)); const TEST_COUNT: usize = 3; let s: BoxStream<_, ()> = iter_ok([0; TEST_COUNT].iter()) .timed({ let callback_called = callback_called.clone(); move |stats, _| { assert_eq!(stats.count, TEST_COUNT); callback_called.store(true, Ordering::SeqCst); Ok(()) } }) .boxify(); tokio_old::run(s.collect().map(|_| ())); assert!(callback_called.load(Ordering::SeqCst)); } #[test] fn test_timed_stream_error() { let callback_called = Arc::new(AtomicBool::new(false)); let err_happened = Arc::new(AtomicBool::new(false)); let err_reported = Arc::new(AtomicBool::new(false)); let s: BoxStream<(), _> = once(Err(Error::msg("err"))) .timed({ let callback_called = callback_called.clone(); let err_reported = err_reported.clone(); move |_, res| { callback_called.store(true, Ordering::SeqCst); err_reported.store(res.is_err(), Ordering::SeqCst); Ok(()) } }) .boxify(); tokio_old::run(s.collect().map(|_| ()).map_err({ let err_happened = err_happened.clone(); move |_| err_happened.store(true, Ordering::SeqCst) })); assert!(callback_called.load(Ordering::SeqCst)); 
assert!(err_happened.load(Ordering::SeqCst)); assert!(err_reported.load(Ordering::SeqCst)); } #[test] fn test_timed_with_future() { let sleep_fut = tokio_timer::sleep(Duration::from_millis(300)); let future_called = Arc::new(AtomicBool::new(false)); let s: BoxStream<_, ()> = iter_ok([1, 2, 3].iter()) .timed({ let future_called = future_called.clone(); move |_, _| { sleep_fut .map(move |_| { future_called.store(true, Ordering::SeqCst); () }) .map_err(|_| ()) } }) .boxify(); tokio_old::run(s.collect().map(|_| ())); assert!(future_called.load(Ordering::SeqCst)); } #[test] fn test_timed_with_err_and_future() { let sleep_fut = tokio_timer::sleep(Duration::from_millis(300)); let future_called = Arc::new(AtomicBool::new(false)); let err_happened = Arc::new(AtomicBool::new(false)); let err_reported = Arc::new(AtomicBool::new(false)); let s: BoxStream<(), _> = once(Err(Error::msg("err"))) .timed({ let err_reported = err_reported.clone(); let future_called = future_called.clone(); move |_, res| { err_reported.store(res.is_err(), Ordering::SeqCst); sleep_fut .map(move |_| { future_called.store(true, Ordering::SeqCst); () }) .map_err(|_| ()) } }) .boxify(); tokio_old::run(s.collect().map(|_| ()).map_err({ let err_happened = err_happened.clone(); move |_| { err_happened.store(true, Ordering::SeqCst); () } })); assert!(err_happened.load(Ordering::SeqCst)); assert!(err_reported.load(Ordering::SeqCst)); assert!(future_called.load(Ordering::SeqCst)); } #[test] fn test_collect_timings_with_future_ok() { let result_ok = Arc::new(AtomicBool::new(false)); let f: BoxFuture<u32, ()> = ok(123).boxify(); let f = Timed::collect_timing(f) .map({ let result_ok = result_ok.clone(); move |(_, r)| { result_ok.store(r == 123, Ordering::SeqCst); () } }) .map(|_| ()) .map_err(|_| ()) .boxify(); tokio_old::run(f); assert!(result_ok.load(Ordering::SeqCst)); } #[test] fn test_collect_timings_with_future_error() { let err_ok = Arc::new(AtomicBool::new(false)); let f: BoxFuture<(), u32> = 
err(123).boxify(); let f = Timed::collect_timing(f) .map_err({ let err_ok = err_ok.clone(); move |(_, r)| { err_ok.store(r == 123, Ordering::SeqCst); () } }) .map(|_| ()) .map_err(|_| ()) .boxify(); tokio_old::run(f); assert!(err_ok.load(Ordering::SeqCst)); } }
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under both the MIT license found in the * LICENSE-MIT file in the root directory of this source tree and the Apache * License, Version 2.0 found in the LICENSE-APACHE file in the root directory * of this source tree. */ use futures_01_ext::{BoxFuture, BoxFutureNonSend, BoxStream, FutureExt, StreamExt}; use futures_old::{Async, Future, IntoFuture, Poll, Stream}; use std::time::{Duration, Instant}; use super::{FutureStats, StreamStats}; pub struct TimedFuture<F> { inner: F, start: Option<Instant>, poll_count: u64, poll_time: Duration, } impl<F> TimedFuture<F> { fn new(future: F) -> Self { TimedFuture { inner: future, start: None, poll_count: 0, poll_time: Duration::from_secs(0), } } } impl<F: Future> Future for TimedFuture<F> { type Item = (Result<F::Item, F::Error>, FutureStats); type Error = !; fn poll(&mut self) -> Poll<Self::Item, Self::Error> { let _ = self.start.get_or_insert_with(Instant::now); self.poll_count += 1; let poll_start = Instant::now(); let poll = self.inner.poll(); self.poll_time += poll_start.elapsed(); let res = match poll { Ok(Async::NotReady) => return Ok(Async::NotReady), Ok(Async::Ready(v)) => Ok(v), Err(e) => Err(e), }; let stats = FutureStats { completion_time: self.start.expect("start time not set").elapsed(), poll_time: self.poll_time, poll_count: self.poll_count, }; Ok(Async::Ready((res, stats))) } } pub struct TimedStream<S, C, R> where R: IntoFuture<Item = (), Error = ()> + 'static, S: Stream, { inner: S, callback: Option<C>, callback_future: Option<R::Future>, start: Option<Instant>, stream_result: Option<Result<(), S::Error>>, count: usize, poll_count: u64, poll_time: Duration, first_item_time: Option<Duration>, } impl<S, C, R> TimedStream<S, C, R> where R: IntoFuture<Item = (), Error = ()> + 'static, S: Stream, { fn new(stream: S, callback: C) -> Self { TimedStream { inner: stream, callback: Some(callback), callback_future: None, start: None, 
stream_result: None, count: 0, poll_count: 0, poll_time: Duration::from_secs(0), first_item_time: None, } } } impl<S, C, R> Stream for TimedStream<S, C, R> where S: Stream, C: FnOnce(StreamStats, Result<(), &S::Error>) -> R, R: IntoFuture<Item = (), Error = ()> + 'static, { type Item = S::Item; type Error = S::Error; fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> { if self.callback_future.is_some() { return self.poll_callback_future(); } let _ = self.start.get_or_insert_with(Instant::now); self.poll_count += 1; let poll_start = Instant::now(); let poll = self.inner.poll(); self.poll_time += poll_start.elapsed(); match poll { Ok(Async::NotReady) => Ok(Async::NotReady), notfinished @ Ok(Async::Ready(Some(_))) => { self.count += 1; if self.count == 1 { self.first_item_time = Some(self.start.expect("start time not set").elapsed()); } notfinished } Ok(Async::Ready(None)) => { let callback_future = self.run_callback(Ok(())); self.stream_result = Some(Ok(())); self.callback_future = Some(callback_future.into_future()); self.poll_callback_future() } Err(err) => { let callback_future = self.run_callback(Err(&err)); self.stream_result = Some(Err(err)); self.callback_future = Some(callback_future.into_future()); self.poll_callback_future() } } } } impl<S, C, R> TimedStream<S, C, R> where S: Stream, C: FnOnce(StreamStats, Result<(), &S::Error>) -> R, R: IntoFuture<Item = (), Error = ()> + 'static, { fn run_callback(&mut self, res: Result<(), &S::Error>) -> R { let stats = StreamStats { completion_time: self.start.expect("start time not set").elapsed(), poll_time: self.poll_time, poll_count: self.poll_count, count: self.count, first_item_time: self.first_item_time, }; let callback = self.callback.take().expect("callback was already called"); callback(stats, res) } fn poll_callback_future( &mut self, ) -> Poll<Option<<Self as Stream>::Item>, <Self as Stream>::Error> { if let Some(ref mut fut) = self.callback_future { let poll = fut.poll(); if poll == 
Ok(Async::NotReady) { return Ok(Async::NotReady); } let stream_result = self .stream_result .take() .expect("stream result should have been set"); match stream_result { Ok(()) => Ok(Async::Ready(None)), Err(err) => Err(err), } } else { panic!("callback future is not set!"); } } } fn time_future<F, C, R>(future: F, callback: C) -> impl Future<Item = F::Item, Error = F::Error> where F: Future, C: FnOnce(FutureStats, Result<&F::Item, &F::Error>) -> R, R: IntoFuture<Item = (), Error = ()> + 'static, R::Future: 'static, { TimedFuture::new(future).then(|res| { let (res, stats) = res.expect("unexpected unreachable err"); callback(stats, res.as_ref()).into_future().then(|_| res) }) } fn future_with_timing<F>( future: F, ) -> impl Future<Item = (FutureStats, F::Item), Error = (FutureStats, F::Error)> where F: Future, { TimedFuture::new(future).then(|res| { let (real_res, stats) = res.expect("unexpected unreachable err"); match real_res { Ok(r) => Ok((stats, r)), Err(e) => Err((stats, e)), } }) } pub trait Timed: Future + Sized + Send + 'static { fn timed<C, R>(self, callback: C) -> BoxFuture<Self::Item, Self::Error> where C: FnOnce(FutureStats, Result<&Self::Item, &Self::Error>) -> R + Send + 'static, R: IntoFuture<Item = (), Error = ()> + 'static, R::Future: Send + 'static, Self::Item: Send, Self::Error: Send, { time_future(self, callback).boxify() } fn collect_timing(self) -> BoxFuture<(FutureStats, Self::Item), (FutureStats, Self::Error)> where Self::Item: Send, Self::Error: Send, { future_with_timing(self).boxify() } } pub trait TimedNonSend: Future + Sized + 'static { fn timed_nonsend<C, R>(self, callback: C) -> BoxFutureNonSend<Self::Item, Self::Error> where C: FnOnce(FutureStats, Result<&Self::Item, &Self::Error>) -> R + 'static, R: IntoFuture<Item = (), Error = ()> + 'static, R::Future: 'static, { time_future(self, callback).boxify_nonsend() } fn collect_timing( self, ) -> BoxFutureNonSend<(FutureStats, Self::Item), (FutureStats, Self::Error)> { 
future_with_timing(self).boxify_nonsend() } } impl<T: Future + Send + 'static> Timed for T {} impl<T: Future + 'static> TimedNonSend for T {} pub trait TimedStreamTrait: Stream + Sized + Send + 'static { fn timed<C, R>(self, callback: C) -> BoxStream<Self::Item, Self::Error> where C: FnOnce(StreamStats, Result<(), &Self::Error>) -> R + Send + 'static, R: IntoFuture<Item = (), Error = ()> + Send + 'static, R::Future: 'static, <R as futures_old::IntoFuture>::Future: Send, Self::Item: Send, Self::Error: Send, { TimedStream::new(self, callback).boxify() } } impl<T: Stream + Send + 'static> TimedStreamTrait for T {} #[cfg(test)] mod tests { use super::*; use anyhow::Error; use futures_old::future::{err, ok}; use futures_old::stream::{iter_ok, once}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; #[test] fn test_timed_stream_simple() { let callback_called = Arc::new(AtomicBool::new(false)); const TEST_COUNT: usize
; move |stats, _| { assert_eq!(stats.count, TEST_COUNT); callback_called.store(true, Ordering::SeqCst); Ok(()) } }) .boxify(); tokio_old::run(s.collect().map(|_| ())); assert!(callback_called.load(Ordering::SeqCst)); } #[test] fn test_timed_stream_error() { let callback_called = Arc::new(AtomicBool::new(false)); let err_happened = Arc::new(AtomicBool::new(false)); let err_reported = Arc::new(AtomicBool::new(false)); let s: BoxStream<(), _> = once(Err(Error::msg("err"))) .timed({ let callback_called = callback_called.clone(); let err_reported = err_reported.clone(); move |_, res| { callback_called.store(true, Ordering::SeqCst); err_reported.store(res.is_err(), Ordering::SeqCst); Ok(()) } }) .boxify(); tokio_old::run(s.collect().map(|_| ()).map_err({ let err_happened = err_happened.clone(); move |_| err_happened.store(true, Ordering::SeqCst) })); assert!(callback_called.load(Ordering::SeqCst)); assert!(err_happened.load(Ordering::SeqCst)); assert!(err_reported.load(Ordering::SeqCst)); } #[test] fn test_timed_with_future() { let sleep_fut = tokio_timer::sleep(Duration::from_millis(300)); let future_called = Arc::new(AtomicBool::new(false)); let s: BoxStream<_, ()> = iter_ok([1, 2, 3].iter()) .timed({ let future_called = future_called.clone(); move |_, _| { sleep_fut .map(move |_| { future_called.store(true, Ordering::SeqCst); () }) .map_err(|_| ()) } }) .boxify(); tokio_old::run(s.collect().map(|_| ())); assert!(future_called.load(Ordering::SeqCst)); } #[test] fn test_timed_with_err_and_future() { let sleep_fut = tokio_timer::sleep(Duration::from_millis(300)); let future_called = Arc::new(AtomicBool::new(false)); let err_happened = Arc::new(AtomicBool::new(false)); let err_reported = Arc::new(AtomicBool::new(false)); let s: BoxStream<(), _> = once(Err(Error::msg("err"))) .timed({ let err_reported = err_reported.clone(); let future_called = future_called.clone(); move |_, res| { err_reported.store(res.is_err(), Ordering::SeqCst); sleep_fut .map(move |_| { 
future_called.store(true, Ordering::SeqCst); () }) .map_err(|_| ()) } }) .boxify(); tokio_old::run(s.collect().map(|_| ()).map_err({ let err_happened = err_happened.clone(); move |_| { err_happened.store(true, Ordering::SeqCst); () } })); assert!(err_happened.load(Ordering::SeqCst)); assert!(err_reported.load(Ordering::SeqCst)); assert!(future_called.load(Ordering::SeqCst)); } #[test] fn test_collect_timings_with_future_ok() { let result_ok = Arc::new(AtomicBool::new(false)); let f: BoxFuture<u32, ()> = ok(123).boxify(); let f = Timed::collect_timing(f) .map({ let result_ok = result_ok.clone(); move |(_, r)| { result_ok.store(r == 123, Ordering::SeqCst); () } }) .map(|_| ()) .map_err(|_| ()) .boxify(); tokio_old::run(f); assert!(result_ok.load(Ordering::SeqCst)); } #[test] fn test_collect_timings_with_future_error() { let err_ok = Arc::new(AtomicBool::new(false)); let f: BoxFuture<(), u32> = err(123).boxify(); let f = Timed::collect_timing(f) .map_err({ let err_ok = err_ok.clone(); move |(_, r)| { err_ok.store(r == 123, Ordering::SeqCst); () } }) .map(|_| ()) .map_err(|_| ()) .boxify(); tokio_old::run(f); assert!(err_ok.load(Ordering::SeqCst)); } }
= 3; let s: BoxStream<_, ()> = iter_ok([0; TEST_COUNT].iter()) .timed({ let callback_called = callback_called.clone()
random
[ { "content": "/// A trait that provides the `timed` method to [futures_old::Stream] for gathering stats\n\npub trait TimedStreamExt: Stream + Sized {\n\n /// Combinator that returns a stream that will gather some statistics and\n\n /// pass them for inspection to the provided callback when the stream\n\n ...
Rust
src/io/seq.rs
lskatz/ROSS.rs
6dfc202ac7e2b94769657f53c450954daa7ce267
use std::collections::HashMap; use std::clone::Clone; #[test] fn test_new_seq() { let id = "MY_ID".to_string(); let seq = "AATNGGCC".to_string(); let qual = "#ABCDE!!".to_string(); let cleanable = Seq::new(&id,&seq,&qual); let formatted = format!("@{}\n{}\n+\n{}", &id, &seq, &qual); assert_eq!(cleanable.to_string(), formatted); } #[test] fn test_cleanable() { let id = "MY_ID".to_string(); let seq = "AATNGGCC".to_string(); let qual = "#ABCDE!!".to_string(); let mut cleanable = Seq::new(&id,&seq,&qual); cleanable.lower_ambiguity_q(); cleanable.trim(); assert_eq!(cleanable.to_string(), "@MY_ID\nATNGG\n+\nAB!DE".to_string()); } #[derive(Debug)] pub struct Seq { pub id: String, pub seq: String, pub qual: String, pub pairid: String, pub thresholds: HashMap<String,f32>, } pub trait Cleanable{ fn new (id: &String, seq: &String, qual: &String) -> Seq; fn blank () -> Seq; fn is_blank (&self) -> bool; fn from_string (seq_str: &String) -> Seq; fn sanitize_id(id: &String) -> String; fn lower_ambiguity_q(&mut self) -> (); fn trim(&mut self) -> (); fn is_high_quality(&mut self) -> bool; fn to_string(&self) -> String; fn print(&self) -> (); } impl Cleanable for Seq { fn new (id: &String, seq: &String, qual: &String) -> Seq{ let id_copy = Self::sanitize_id(&id); let mut thresholds = HashMap::new(); thresholds.insert("min_avg_qual".to_string(),20.0); thresholds.insert("min_length".to_string(),100.0); thresholds.insert("min_trim_qual".to_string(),20.0); return Seq{ id: id_copy, seq: seq.clone(), qual: qual.clone(), pairid: String::new(), thresholds: thresholds, }; } fn blank () -> Seq{ return Seq::new(&String::new(),&String::new(),&String::new()); } fn is_blank (&self) -> bool { if self.seq.len() == 0 && self.qual.len() == 0 { return true; } return false; } fn from_string (seq_str: &String) -> Seq { let mut lines = seq_str.lines(); let id = lines.next().expect("Could not parse ID"); let seq = lines.next().expect("Could not parse sequence"); lines.next().expect("Could not parse +"); 
let qual_opt = lines.next(); if qual_opt == None { return Seq::blank(); } let qual = qual_opt.expect("Could not read the qual line"); return Seq{ id: id.to_string(), seq: seq.to_string(), qual: qual.to_string(), pairid: String::new(), thresholds: HashMap::new(), } } fn sanitize_id(id: &String) -> String { if id.len() == 0 { return String::new(); } let mut id_copy = id.clone(); if id_copy.chars().nth(0).expect("ID was empty") == '@' { id_copy.pop(); } return id_copy; } fn lower_ambiguity_q(&mut self){ let zero_score:char = 33 as char; let low_score :char = (33 + 0) as u8 as char; let mut low_qual_idx = vec![false; self.seq.len()]; for (i,nt) in self.seq.chars().enumerate(){ if nt == 'N' || nt == 'n' || self.qual.chars().nth(i).expect("Expected a char") < low_score { low_qual_idx[i] = true; } } let mut new_seq =String::new(); let mut new_qual=String::new(); for (i,nt) in self.seq.chars().enumerate(){ if low_qual_idx[i] { new_seq.push('N'); new_qual.push(zero_score); } else{ new_seq.push(nt); new_qual.push_str(&self.qual[i..i+1]); } } self.seq=new_seq; self.qual=new_qual; } fn trim(&mut self) { let min_qual = *self.thresholds.entry("min_trim_qual".to_string()) .or_insert(0.0) as u8; let mut trim5=0; let mut trim3=&self.qual.len()-0; for qual in self.qual.chars(){ if qual as u8 - 33 < min_qual { trim5+=1; } else { break; } } for qual in self.qual.chars().rev() { if qual as u8 - 33 < min_qual { trim3-=1; } else { break; } } if trim5 >= trim3 { self.qual = String::new(); self.seq = String::new(); } else { self.qual = self.qual[trim5..trim3].to_string(); self.seq = self.seq[trim5..trim3].to_string(); } } fn is_high_quality(&mut self) -> bool { let min_length = self.thresholds.get(&"min_length".to_string()).expect("min_length does not look like a number"); let seq_len = self.seq.len() as f32; if seq_len < *min_length { return false; } let mut total_qual = 0; for qual in self.qual.chars() { total_qual += qual as u32; } let avg_qual = (total_qual as f32/seq_len) - 33.0; let 
min_qual = self.thresholds.get(&"min_avg_qual".to_string()).expect("min_avg_qual does not look like a number"); if avg_qual < *min_qual { return false; } return true; } fn to_string(&self) -> String { let mut entry = String::new(); if self.id.len() > 0 && self.id.chars().nth(0).expect("Seq ID was not set") != '@' { entry.push('@'); } entry.push_str(self.id.trim()); entry.push_str("\n"); entry.push_str(self.seq.trim()); entry.push_str("\n+\n"); entry.push_str(&self.qual.trim()); return entry; } fn print(&self) -> () { println!("{}",self.to_string()); } } impl Clone for Seq { fn clone(&self) -> Seq { return Seq{ id: self.id.clone(), seq: self.seq.clone(), qual: self.qual.clone(), pairid: self.pairid.clone(), thresholds: self.thresholds.clone(), } } }
use std::collections::HashMap; use std::clone::Clone; #[test] fn test_new_seq() { let id = "MY_ID".to_string(); let seq = "AATNGGCC".to_string(); let qual = "#ABCDE!!".to_string(); let cleanable = Seq::new(&id,&seq,&qual); let formatted = format!("@{}\n{}\n+\n{}", &id, &seq, &qual); assert_eq!(cleanable.to_string(), formatted); } #[test] fn test_cleanable() { let id = "MY_ID".to_string(); let seq = "AATNGGCC".to_string(); let qual = "#ABCDE!!".to_string(); let mut cleanable = Seq::new(&id,&seq,&qual); cleanable.lower_ambiguity_q(); cleanable.trim(); assert_eq!(cleanable.to_string(), "@MY_ID\nATNGG\n+\nAB!DE".to_string()); } #[derive(Debug)] pub struct Seq { pub id: String, pub seq: String, pub qual: String, pub pairid: String, pub thresholds: HashMap<String,f32>, } pub trait Cleanable{ fn new (id: &String, seq: &String, qual: &String) -> Seq; fn blank () -> Seq; fn is_blank (&self) -> bool; fn from_string (seq_str: &String) -> Seq; fn sanitize_id(id: &String) -> String; fn lower_ambiguity_q(&mut self) -> (); fn trim(&mut self) -> (); fn is_high_quality(&mut self) -> bool; fn to_string(&self) -> String; fn print(&self) -> (); } impl Cleanable for Seq { fn new (id: &String, seq: &String, qual: &String) -> Seq{ let id_copy = Self::sanitize_id(&id); let mut thresholds = HashMap::new(); thresholds.insert("min_avg_qual".to_string(),20.0); thresholds.insert("min_length".to_string(),100.0); thresholds.insert("min_trim_qual".to_string(),20.0); return Seq{ id: id_copy, seq: seq.clone(), qual: qual.clone(), pairid: String::new(), thresholds: thresholds, }; } fn blank () -> Seq{ return Seq::new(&String::new(),&String::new(),&String::new()); } fn is_blank (&self) -> bool { if self.seq.len() == 0 && self.qual.len() == 0 { return true; }
mber"); let seq_len = self.seq.len() as f32; if seq_len < *min_length { return false; } let mut total_qual = 0; for qual in self.qual.chars() { total_qual += qual as u32; } let avg_qual = (total_qual as f32/seq_len) - 33.0; let min_qual = self.thresholds.get(&"min_avg_qual".to_string()).expect("min_avg_qual does not look like a number"); if avg_qual < *min_qual { return false; } return true; } fn to_string(&self) -> String { let mut entry = String::new(); if self.id.len() > 0 && self.id.chars().nth(0).expect("Seq ID was not set") != '@' { entry.push('@'); } entry.push_str(self.id.trim()); entry.push_str("\n"); entry.push_str(self.seq.trim()); entry.push_str("\n+\n"); entry.push_str(&self.qual.trim()); return entry; } fn print(&self) -> () { println!("{}",self.to_string()); } } impl Clone for Seq { fn clone(&self) -> Seq { return Seq{ id: self.id.clone(), seq: self.seq.clone(), qual: self.qual.clone(), pairid: self.pairid.clone(), thresholds: self.thresholds.clone(), } } }
return false; } fn from_string (seq_str: &String) -> Seq { let mut lines = seq_str.lines(); let id = lines.next().expect("Could not parse ID"); let seq = lines.next().expect("Could not parse sequence"); lines.next().expect("Could not parse +"); let qual_opt = lines.next(); if qual_opt == None { return Seq::blank(); } let qual = qual_opt.expect("Could not read the qual line"); return Seq{ id: id.to_string(), seq: seq.to_string(), qual: qual.to_string(), pairid: String::new(), thresholds: HashMap::new(), } } fn sanitize_id(id: &String) -> String { if id.len() == 0 { return String::new(); } let mut id_copy = id.clone(); if id_copy.chars().nth(0).expect("ID was empty") == '@' { id_copy.pop(); } return id_copy; } fn lower_ambiguity_q(&mut self){ let zero_score:char = 33 as char; let low_score :char = (33 + 0) as u8 as char; let mut low_qual_idx = vec![false; self.seq.len()]; for (i,nt) in self.seq.chars().enumerate(){ if nt == 'N' || nt == 'n' || self.qual.chars().nth(i).expect("Expected a char") < low_score { low_qual_idx[i] = true; } } let mut new_seq =String::new(); let mut new_qual=String::new(); for (i,nt) in self.seq.chars().enumerate(){ if low_qual_idx[i] { new_seq.push('N'); new_qual.push(zero_score); } else{ new_seq.push(nt); new_qual.push_str(&self.qual[i..i+1]); } } self.seq=new_seq; self.qual=new_qual; } fn trim(&mut self) { let min_qual = *self.thresholds.entry("min_trim_qual".to_string()) .or_insert(0.0) as u8; let mut trim5=0; let mut trim3=&self.qual.len()-0; for qual in self.qual.chars(){ if qual as u8 - 33 < min_qual { trim5+=1; } else { break; } } for qual in self.qual.chars().rev() { if qual as u8 - 33 < min_qual { trim3-=1; } else { break; } } if trim5 >= trim3 { self.qual = String::new(); self.seq = String::new(); } else { self.qual = self.qual[trim5..trim3].to_string(); self.seq = self.seq[trim5..trim3].to_string(); } } fn is_high_quality(&mut self) -> bool { let min_length = self.thresholds.get(&"min_length".to_string()).expect("min_length does 
not look like a nu
random
[ { "content": "/// Propagate an error by printing invalid read(s)\n\npub fn eexit() -> () {\n\n println!(\"{}\\n{}\\n{}\\n{}\",INVALID_ID,INVALID_SEQ,INVALID_PLUS,INVALID_QUAL);\n\n std::process::exit(1);\n\n}\n\n\n\n/// Rewrite print!() so that it doesn't panic on broken\n\n/// pipe.\n\n#[macro_export]\n\...
Rust
build/header_generator.rs
u1roh/dxf-rs
76c334dce6bd863b847882a9581b1fdba67d2b68
extern crate xmltree; use self::xmltree::Element; use crate::ExpectedType; use crate::other_helpers::*; use crate::xml_helpers::*; use std::collections::HashSet; use std::fs::File; use std::io::{BufReader, Write}; use std::iter::Iterator; use std::path::Path; pub fn generate_header(generated_dir: &Path) { let element = load_xml(); let mut fun = String::new(); fun.push_str(" // The contents of this file are automatically generated and should not be modified directly. See the `build` directory. // types from `lib.rs`. use crate::{ CodePair, Color, DxfError, DxfResult, Handle, LineWeight, Point, Vector, }; use crate::code_pair_writer::CodePairWriter; use crate::helper_functions::*; use crate::enums::*; use crate::enum_primitive::FromPrimitive; use std::io::Write; use std::time::Duration; extern crate chrono; use self::chrono::{DateTime, Local, Utc}; extern crate uuid; use self::uuid::Uuid; ".trim_start()); generate_struct(&mut fun, &element); generate_default(&mut fun, &element); fun.push_str("impl Header {\n"); generate_flags(&mut fun, &element); generate_set_defaults(&mut fun, &element); generate_set_header_value(&mut fun, &element); generate_add_code_pairs(&mut fun, &element); fun.push_str("}\n"); let mut file = File::create(generated_dir.join("header.rs")).ok().unwrap(); file.write_all(fun.as_bytes()).ok().unwrap(); } fn generate_struct(fun: &mut String, element: &Element) { let mut seen_fields = HashSet::new(); fun.push_str("/// Contains common properties for the DXF file.\n"); fun.push_str("#[cfg_attr(feature = \"serialize\", derive(Serialize, Deserialize))]\n"); fun.push_str("pub struct Header {\n"); for v in &element.children { let field_name = field(v); if !seen_fields.contains(&field_name) { seen_fields.insert(field_name.clone()); let mut comment = format!("The ${} header variable. 
{}", name(&v), comment(&v)); if !min_version(&v).is_empty() { comment.push_str(&format!(" Minimum AutoCAD version: {}.", min_version(&v))); } if !max_version(&v).is_empty() { comment.push_str(&format!(" Maximum AutoCAD version: {}.", max_version(&v))); } fun.push_str(&format!(" /// {}\n", comment)); fun.push_str(&format!( " pub {field}: {typ},\n", field = field(&v), typ = typ(&v) )); } } fun.push_str("}\n"); fun.push_str("\n"); } fn generate_default(fun: &mut String, element: &Element) { let mut seen_fields = HashSet::new(); fun.push_str("impl Default for Header {\n"); fun.push_str(" fn default() -> Self {\n"); fun.push_str(" Header {\n"); for v in &element.children { if !seen_fields.contains(&field(&v)) { seen_fields.insert(field(&v)); fun.push_str(&format!( " {field}: {default_value}, // ${name}\n", field = field(&v), default_value = default_value(&v), name = name(&v) )); } } fun.push_str(" }\n"); fun.push_str(" }\n"); fun.push_str("}\n"); fun.push_str("\n"); } fn generate_flags(fun: &mut String, element: &Element) { let mut seen_fields = HashSet::new(); for v in &element.children { if !seen_fields.contains(&field(&v)) { seen_fields.insert(field(&v)); if v.children.len() > 0 { fun.push_str(&format!(" // {} flags\n", field(&v))); } for f in &v.children { let mut comment = format!("{}", comment(&f)); if !min_version(&v).is_empty() { comment.push_str(&format!(" Minimum AutoCAD version: {}.", min_version(&v))); } if !max_version(&v).is_empty() { comment.push_str(&format!(" Maximum AutoCAD version: {}.", max_version(&v))); } fun.push_str(&format!(" /// {}\n", comment)); fun.push_str(&format!( " pub fn get_{flag}(&self) -> bool {{\n", flag = name(&f) )); fun.push_str(&format!( " self.{field} & {mask} != 0\n", field = field(&v), mask = mask(&f) )); fun.push_str(" }\n"); fun.push_str(&format!(" /// {}\n", comment)); fun.push_str(&format!( " pub fn set_{flag}(&mut self, val: bool) {{\n", flag = name(&f) )); fun.push_str(&format!(" if val {{\n")); fun.push_str(&format!( " 
self.{field} |= {mask};\n", field = field(&v), mask = mask(&f) )); fun.push_str(" }\n"); fun.push_str(" else {\n"); fun.push_str(&format!( " self.{field} &= !{mask};\n", field = field(&v), mask = mask(&f) )); fun.push_str(" }\n"); fun.push_str(" }\n"); } } } } fn generate_set_defaults(fun: &mut String, element: &Element) { let mut seen_fields = HashSet::new(); fun.push_str(" /// Sets the default values on the header.\n"); fun.push_str(" pub fn set_defaults(&mut self) {\n"); for v in &element.children { if !seen_fields.contains(&field(&v)) { seen_fields.insert(field(&v)); fun.push_str(&format!( " self.{field} = {default_value}; // ${name}\n", field = field(&v), default_value = default_value(&v), name = name(&v) )); } } fun.push_str(" }\n"); } fn generate_set_header_value(fun: &mut String, element: &Element) { let mut seen_fields = HashSet::new(); fun.push_str(" #[allow(clippy::cognitive_complexity)] // generated method\n"); fun.push_str(" pub(crate) fn set_header_value(&mut self, variable: &str, pair: &CodePair) -> DxfResult<()> {\n"); fun.push_str(" match variable {\n"); for v in &element.children { if !seen_fields.contains(&field(&v)) { seen_fields.insert(field(&v)); fun.push_str(&format!(" \"${name}\" => {{", name = name(&v))); let variables_with_name: Vec<&Element> = element .children .iter() .filter(|&vv| name(&vv) == name(&v)) .collect(); if variables_with_name.len() == 1 { fun.push_str(" "); if code(&v) < 0 { fun.push_str(&format!("self.{field}.set(&pair)?;", field = field(&v))); } else { let read_cmd = get_read_command(&v); fun.push_str(&format!( "verify_code(&pair, {code})?; self.{field} = {cmd};", code = code(&v), field = field(&v), cmd = read_cmd )); } fun.push_str(" "); } else { fun.push_str("\n"); fun.push_str(" match pair.code {\n"); let expected_codes: Vec<i32> = variables_with_name.iter().map(|&vv| code(&vv)).collect(); for v in &variables_with_name { let read_cmd = get_read_command(&v); fun.push_str(&format!( " {code} => self.{field} = {cmd},\n", 
code = code(&v), field = field(&v), cmd = read_cmd )); } fun.push_str(&format!(" _ => return Err(DxfError::UnexpectedCodePair(pair.clone(), String::from(\"expected code {:?}\"))),\n", expected_codes)); fun.push_str(" }\n"); fun.push_str(" "); } fun.push_str("},\n"); } } fun.push_str(" _ => (),\n"); fun.push_str(" }\n"); fun.push_str("\n"); fun.push_str(" Ok(())\n"); fun.push_str(" }\n"); } fn get_read_command(element: &Element) -> String { let reader_override = reader_override(&element); if !reader_override.is_empty() { reader_override } else { let expected_type = ExpectedType::get_expected_type(code(element)).unwrap(); let reader_fun = get_reader_function(&expected_type); let converter = if read_converter(&element).is_empty() { String::from("{}") } else { read_converter(&element).clone() }; converter.replace("{}", &format!("pair.{}()?", reader_fun)) } } fn generate_add_code_pairs(fun: &mut String, element: &Element) { fun.push_str(" #[allow(clippy::cognitive_complexity)] // long function, no good way to simplify this\n"); fun.push_str(" pub(crate) fn write_code_pairs<T>(&self, writer: &mut CodePairWriter<T>) -> DxfResult<()>\n"); fun.push_str(" where T: Write + ?Sized {\n"); fun.push_str("\n"); for v in &element.children { if suppress_writing(&v) { continue; } let mut parts = vec![]; if !min_version(&v).is_empty() { parts.push(format!("self.version >= AcadVersion::{}", min_version(&v))); } if !max_version(&v).is_empty() { parts.push(format!("self.version <= AcadVersion::{}", max_version(&v))); } if dont_write_default(&v) { parts.push(format!("self.{} != {}", field(&v), default_value(&v))); } let indent = match parts.len() { 0 => "", _ => " ", }; fun.push_str(&format!(" // ${}\n", name(&v))); if parts.len() > 0 { fun.push_str(&format!(" if {} {{\n", parts.join(" && "))); } fun.push_str(&format!( " {indent}writer.write_code_pair(&CodePair::new_str(9, \"${name}\"))?;\n", name = name(&v), indent = indent )); let write_converter = if write_converter(&v).is_empty() { 
String::from("{}") } else { write_converter(&v).clone() }; if code(&v) > 0 { let expected_type = get_code_pair_type(&ExpectedType::get_expected_type(code(&v)).unwrap()); let field_name = field(&v); let value = format!("self.{}", field_name); let value = write_converter.replace("{}", &*value); fun.push_str(&format!( " {indent}writer.write_code_pair(&CodePair::new_{typ}({code}, {value}))?;\n", code = code(&v), value = value, typ = expected_type, indent = indent )); } else { for i in 0..code(&v).abs() { let (code, fld) = match i { 0 => (10, "x"), 1 => (20, "y"), 2 => (30, "z"), _ => panic!("unexpected number of values"), }; let value = write_converter.replace("{}", &format!("self.{}.{}", field(&v), fld)); fun.push_str(&format!(" {indent}writer.write_code_pair(&CodePair::new_f64({code}, {value}))?;\n", code=code, value=value, indent=indent)); } } if parts.len() > 0 { fun.push_str(" }\n"); } fun.push_str("\n"); } fun.push_str(" Ok(())\n"); fun.push_str(" }\n"); } fn load_xml() -> Element { let file = File::open("spec/HeaderVariablesSpec.xml").unwrap(); let file = BufReader::new(file); Element::parse(file).unwrap() } fn dont_write_default(element: &Element) -> bool { attr(element, "DontWriteDefault") == "true" } fn field(element: &Element) -> String { attr(element, "Field") } fn mask(element: &Element) -> String { attr(element, "Mask") } fn read_converter(element: &Element) -> String { attr(element, "ReadConverter") } fn reader_override(element: &Element) -> String { attr(element, "ReaderOverride") } fn write_converter(element: &Element) -> String { attr(element, "WriteConverter") }
extern crate xmltree; use self::xmltree::Element; use crate::ExpectedType; use crate::other_helpers::*; use crate::xml_helpers::*; use std::collections::HashSet; use std::fs::File; use std::io::{BufReader, Write}; use std::iter::Iterator; use std::path::Path; pub fn generate_header(generated_dir: &Path) { let element = load_xml(); let mut fun = String::new(); fun.push_str(" // The contents of this file are automatically generated and should not be modified directly. See the `build` directory. // types from `lib.rs`. use crate::{ CodePair, Color, DxfError, DxfResult, Handle, LineWeight, Point, Vector, }; use crate::code_pair_writer::CodePairWriter; use crate::helper_functions::*; use crate::enums::*; use crate::enum_primitive::FromPrimitive; use std::io::Write; use std::time::Duration; extern crate chrono; use self::chrono::{DateTime, Local, Utc}; extern crate uuid; use self::uuid::Uuid; ".trim_start()); generate_struct(&mut fun, &element); generate_default(&mut fun, &element); fun.push_str("impl Header {\n"); generate_flags(&mut fun, &element); generate_set_defaults(&mut fun, &element); generate_set_header_value(&mut fun, &element); generate_add_code_pairs(&mut fun, &element); fun.push_str("}\n"); let mut file = File::create(generated_dir.join("header.rs")).ok().unwrap(); file.write_all(fun.as_bytes()).ok().unwrap(); } fn generate_struct(fun: &mut String, element: &Element) { let mut seen_fields = HashSet::new(); fun.push_str("/// Contains common properties for the DXF file.\n"); fun.push_str("#[cfg_attr(feature = \"serialize\", derive(Serialize, Deserialize))]\n"); fun.push_str("pub struct Header {\n"); for v in &element.children { let field_name = field(v); if !seen_fields.contains(&field_name) { seen_fields.insert(field_name.clone()); let mut comment = format!("The ${} header variable. 
{}", name(&v), comment(&v)); if !min_version(&v).is_empty() { comment.push_str(&format!(" Minimum AutoCAD version: {}.", min_version(&v))); } if !max_version(&v).is_empty() { comment.push_str(&format!(" Maximum AutoCAD version: {}.", max_version(&v))); } fun.push_str(&format!(" /// {}\n", comment)); fun.push_str(&format!( " pub {field}: {typ},\n", field = field(&v), typ = typ(&v) )); } } fun.push_str("}\n"); fun.push_str("\n"); } fn generate_default(fun: &mut String, element: &Element) { let mut seen_fields = HashSet::new(); fun.push_str("impl Default for Header {\n"); fun.push_str(" fn default() -> Self {\n"); fun.push_str(" Header {\n"); for v in &element.children { if !seen_fields.contains(&field(&v)) { seen_fields.insert(field(&v)); fun.push_str(&format!( " {field}: {default_value}, // ${name}\n", field = field(&v), default_value = default_value(&v), name = name(&v) )); } } fun.push_str(" }\n"); fun.push_str(" }\n"); fun.push_str("}\n"); fun.push_str("\n"); } fn generate_flags(fun: &mut String, element: &Element) { let mut seen_fields = HashSet::new(); for v in &element.children { if !seen_fields.contains(&field(&v)) { seen_fields.insert(field(&v)); if v.children.len() > 0 { fun.push_str(&format!(" // {} flags\n", field(&v))); } for f in &v.children { let mut comment = format!("{}", comment(&f)); if !min_version(&v).is_empty() { comment.push_str(&format!(" Minimum AutoCAD version: {}.", min_version(&v))); } if !max_version(&v).is_empty() { comment.push_str(&format!(" Maximum AutoCAD version: {}.", max_version(&v))); } fun.push_str(&format!(" /// {}\n", comment)); fun.push_str(&format!( " pub fn get_{flag}(&self) -> bool {{\n", flag = name(&f) )); fun.push_str(&format!( " self.{field} & {mask} != 0\n", field = field(&v), mask = mask(&f) )); fun.push_str(" }\n"); fun.push_str(&format!(" /// {}\n", comment)); fun.push_str(&format!( " pub fn set_{flag}(&mut self, val: bool) {{\n", flag = name(&f) )); fun.push_str(&format!(" if val {{\n")); fun.push_str(&format!( " 
self.{field} |= {mask};\n", field = field(&v), mask = mask(&f) )); fun.push_str(" }\n"); fun.push_str(" else {\n"); fun.push_str(&format!( " self.{field} &= !{mask};\n", field = field(&v), mask = mask(&f) )); fun.push_str(" }\n"); fun.push_str(" }\n"); } } } } fn generate_set_defaults(fun: &mut String, element: &Element) { let mut seen_fields = HashSet::new(); fun.push_str(" /// Sets the default values on the header.\n"); fun.push_str(" pub fn set_defaults(&mut self) {\n"); for v in &element.children { if !seen_fields.contains(&field(&v)) { seen_fields.insert(field(&v)); fun.push_str(&format!( " self.{field} = {default_value}; // ${name}\n", field = field(&v), default_value = default_value(&v), name = name(&v) )); } } fun.push_str(" }\n"); } fn generate_set_header_value(fun: &mut String, element: &Element) { let mut seen_fields = HashSet::new(); fun.push_str(" #[allow(clippy::cognitive_complexity)] // generated method\n"); fun.push_str(" pub(crate) fn set_header_value(&mut self, variable: &str, pair: &CodePair) -> DxfResult<()> {\n"); fun.push_str(" match variable {\n"); for v in &element.children { if !seen_fields.contains(&field(&v)) { seen_fields.insert(field(&v)); fun.push_str(&format!(" \"${name}\" => {{", name = name(&v))); let variables_with_name: Vec<&Element> = element .children .iter() .filter(|&vv| name(&vv) == name(&v)) .collect(); if variables_with_name.len() == 1 { fun.push_str(" ");
fun.push_str(" "); } else { fun.push_str("\n"); fun.push_str(" match pair.code {\n"); let expected_codes: Vec<i32> = variables_with_name.iter().map(|&vv| code(&vv)).collect(); for v in &variables_with_name { let read_cmd = get_read_command(&v); fun.push_str(&format!( " {code} => self.{field} = {cmd},\n", code = code(&v), field = field(&v), cmd = read_cmd )); } fun.push_str(&format!(" _ => return Err(DxfError::UnexpectedCodePair(pair.clone(), String::from(\"expected code {:?}\"))),\n", expected_codes)); fun.push_str(" }\n"); fun.push_str(" "); } fun.push_str("},\n"); } } fun.push_str(" _ => (),\n"); fun.push_str(" }\n"); fun.push_str("\n"); fun.push_str(" Ok(())\n"); fun.push_str(" }\n"); } fn get_read_command(element: &Element) -> String { let reader_override = reader_override(&element); if !reader_override.is_empty() { reader_override } else { let expected_type = ExpectedType::get_expected_type(code(element)).unwrap(); let reader_fun = get_reader_function(&expected_type); let converter = if read_converter(&element).is_empty() { String::from("{}") } else { read_converter(&element).clone() }; converter.replace("{}", &format!("pair.{}()?", reader_fun)) } } fn generate_add_code_pairs(fun: &mut String, element: &Element) { fun.push_str(" #[allow(clippy::cognitive_complexity)] // long function, no good way to simplify this\n"); fun.push_str(" pub(crate) fn write_code_pairs<T>(&self, writer: &mut CodePairWriter<T>) -> DxfResult<()>\n"); fun.push_str(" where T: Write + ?Sized {\n"); fun.push_str("\n"); for v in &element.children { if suppress_writing(&v) { continue; } let mut parts = vec![]; if !min_version(&v).is_empty() { parts.push(format!("self.version >= AcadVersion::{}", min_version(&v))); } if !max_version(&v).is_empty() { parts.push(format!("self.version <= AcadVersion::{}", max_version(&v))); } if dont_write_default(&v) { parts.push(format!("self.{} != {}", field(&v), default_value(&v))); } let indent = match parts.len() { 0 => "", _ => " ", }; 
fun.push_str(&format!(" // ${}\n", name(&v))); if parts.len() > 0 { fun.push_str(&format!(" if {} {{\n", parts.join(" && "))); } fun.push_str(&format!( " {indent}writer.write_code_pair(&CodePair::new_str(9, \"${name}\"))?;\n", name = name(&v), indent = indent )); let write_converter = if write_converter(&v).is_empty() { String::from("{}") } else { write_converter(&v).clone() }; if code(&v) > 0 { let expected_type = get_code_pair_type(&ExpectedType::get_expected_type(code(&v)).unwrap()); let field_name = field(&v); let value = format!("self.{}", field_name); let value = write_converter.replace("{}", &*value); fun.push_str(&format!( " {indent}writer.write_code_pair(&CodePair::new_{typ}({code}, {value}))?;\n", code = code(&v), value = value, typ = expected_type, indent = indent )); } else { for i in 0..code(&v).abs() { let (code, fld) = match i { 0 => (10, "x"), 1 => (20, "y"), 2 => (30, "z"), _ => panic!("unexpected number of values"), }; let value = write_converter.replace("{}", &format!("self.{}.{}", field(&v), fld)); fun.push_str(&format!(" {indent}writer.write_code_pair(&CodePair::new_f64({code}, {value}))?;\n", code=code, value=value, indent=indent)); } } if parts.len() > 0 { fun.push_str(" }\n"); } fun.push_str("\n"); } fun.push_str(" Ok(())\n"); fun.push_str(" }\n"); } fn load_xml() -> Element { let file = File::open("spec/HeaderVariablesSpec.xml").unwrap(); let file = BufReader::new(file); Element::parse(file).unwrap() } fn dont_write_default(element: &Element) -> bool { attr(element, "DontWriteDefault") == "true" } fn field(element: &Element) -> String { attr(element, "Field") } fn mask(element: &Element) -> String { attr(element, "Mask") } fn read_converter(element: &Element) -> String { attr(element, "ReadConverter") } fn reader_override(element: &Element) -> String { attr(element, "ReaderOverride") } fn write_converter(element: &Element) -> String { attr(element, "WriteConverter") }
if code(&v) < 0 { fun.push_str(&format!("self.{field}.set(&pair)?;", field = field(&v))); } else { let read_cmd = get_read_command(&v); fun.push_str(&format!( "verify_code(&pair, {code})?; self.{field} = {cmd};", code = code(&v), field = field(&v), cmd = read_cmd )); }
if_condition
[ { "content": "/// Formats an `f64` value with up to 12 digits of precision, ensuring at least one trailing digit after the decimal.\n\nfn format_f64(val: f64) -> String {\n\n // format with 12 digits of precision\n\n let mut val = format!(\"{:.12}\", val);\n\n\n\n // trim trailing zeros\n\n while va...
Rust
src/lib.rs
liborty/sets
bef54da79d35c0a56305d122471a7cc386d20bd4
pub mod traitimpls; pub mod mutimpls; use std::ops::{Deref,DerefMut}; use indxvec::{MinMax,wv,Indices,merge::*}; pub fn trivindex(asc:bool,n:usize) -> Vec<usize> { if asc { (0..n).collect() } else { (0..n).rev().collect() } } pub struct Set<T> { pub v: Vec<T> } impl<T: std::fmt::Display> std::fmt::Display for Set<T> where T:Copy { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { writeln!(f, "Unordered Set:\n{}",wv(&self.v)) } } impl<T> Deref for Set<T> { type Target = Vec<T>; fn deref(&self) -> &Self::Target { &self.v } } impl<T> DerefMut for Set<T> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.v } } impl<T> Set<T> where T: Copy { pub fn from_slice(s: &[T]) -> Self { Set { v: s.to_vec() } } pub fn from_indexed(s: &IndexedSet<T>) -> Self { Set{ v: s.v.to_vec() } } pub fn from_ranked(s: &RankedSet<T>) -> Self { Set{ v: s.v.to_vec() } } } pub struct OrderedSet<T> { pub ascending: bool, pub v: Vec<T>, } impl<T: std::fmt::Display> std::fmt::Display for OrderedSet<T> where T:Copy { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let n = self.v.len(); if n == 0 { return writeln!(f,"[]") } let s = if self.ascending { String::from("Ascending") } else { String::from("Descending") }; writeln!(f, "{} Ordered Set:\n{}", s, wv(&self.v) ) } } impl<T> Deref for OrderedSet<T> { type Target = Vec<T>; fn deref(&self) -> &Self::Target { &self.v } } impl<T> DerefMut for OrderedSet<T> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.v } } impl<T> OrderedSet<T> { pub fn from_asc_slice(s: &[T]) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:true, v: s.to_vec() } } pub fn from_desc_slice(s: &[T]) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:false, v: s.to_vec() } } pub fn from_slice(s: &[T], asc: bool) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:asc, v: sortm(s,asc) } } pub fn from_set(s: &Set<T>, asc: bool) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:asc, v: sortm(&s.v,asc) } } pub fn 
from_indexed(s: &IndexedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:asc, v: s.i.unindex(&s.v,asc == s.ascending) } } pub fn from_ranked(s: &RankedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:asc, v: s.i.invindex().unindex(&s.v,asc == s.ascending) } } } pub struct IndexedSet<T> { pub ascending: bool, pub v: Vec<T>, pub i: Vec<usize>, } impl<'a,T: std::fmt::Display> std::fmt::Display for IndexedSet<T> where T:Copy { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let n = self.v.len(); if n == 0 { return writeln!(f,"[]") } let s = if self.ascending { String::from("Ascending") } else { String::from("Descending") }; writeln!(f, "{} Indexed Set\nSet: {}\nIndex: {}", s, wv(&self.v), wv(&self.i) ) } } impl<'a,T> IndexedSet<T> { pub fn from_slice(s: &'a[T], asc:bool) -> Self where T:PartialOrd+Copy { if asc { IndexedSet{ ascending:true, v:s.to_vec(), i:sortidx(s) } } else { IndexedSet{ ascending:false, v:s.to_vec(), i:sortidx(s).revindex() } } } pub fn from_set(s: &'a Set<T>, asc: bool) -> Self where T:PartialOrd+Copy { if asc { IndexedSet{ ascending:true, v:s.v.to_vec(), i:sortidx(&s.v) } } else { IndexedSet{ ascending:false, v:s.v.to_vec(), i:sortidx(&s.v).revindex() } } } pub fn from_ordered(s: &'a OrderedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { IndexedSet{ ascending:asc, v:s.v.to_vec(), i:trivindex(asc == s.ascending,s.len()) } } pub fn from_ranked(s: &'a RankedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { if asc == s.ascending { IndexedSet{ ascending: asc, v: s.v.to_vec(), i:s.i.invindex() } } else { IndexedSet{ ascending: asc, v: s.v.to_vec(), i:s.i.complindex().invindex() } } } } pub struct RankedSet<T> { pub ascending: bool, pub v: Vec<T>, pub i: Vec<usize>, } impl<'a,T: std::fmt::Display> std::fmt::Display for RankedSet<T> where T:Copy { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let n = self.v.len(); if n == 0 { return writeln!(f,"[]") } let s = 
if self.ascending { String::from("Ascending") } else { String::from("Descending") }; writeln!(f, "{} Ranked Set\nSet: {}\nRanks: {}", s, wv(&self.v), wv(&self.i) ) } } impl<T> RankedSet<T> { pub fn from_slice(s: &[T], asc:bool) -> Self where T:PartialOrd+Copy { RankedSet{ ascending:asc, v:s.to_vec(), i:rank(s,asc) } } pub fn from_set(s: &Set<T>, asc: bool) -> Self where T:PartialOrd+Copy { RankedSet{ ascending:asc, v:s.v.to_vec(), i:rank(s,asc) } } pub fn from_ordered(s: &OrderedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { RankedSet{ ascending:asc, v:s.v.to_vec(), i:trivindex(asc == s.ascending,s.len()) } } pub fn from_indexed(s: &IndexedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { if asc == s.ascending { RankedSet{ ascending: asc, v: s.v.to_vec(), i:s.i.invindex() } } else { RankedSet{ ascending: asc, v: s.v.to_vec(), i:s.i.invindex().complindex() } } } } pub trait SetOps<T> { fn reverse(&self) -> Self; fn nonrepeat(&self) -> Self; fn infsup(&self) -> MinMax<T>; fn member(&self, m: T) -> bool; fn search(&self, m: T) -> Option<usize>; fn union(&self, s: &Self) -> OrderedSet<T>; fn intersection(&self, s: &Self) -> OrderedSet<T>; fn difference(&self, s: &Self) -> OrderedSet<T>; } pub trait MutSetOps<T> { fn mreverse(&mut self); fn mnonrepeat(&mut self); fn munion(&mut self, s: &Self); fn mintersection(&mut self, s: &Self); fn mdifference(&mut self, s: &Self); }
pub mod traitimpls; pub mod mutimpls; use std::ops::{Deref,DerefMut}; use indxvec::{MinMax,wv,Indices,merge::*}; pub fn trivindex(asc:bool,n:usize) -> Vec<usize> { if asc { (0..n).collect() } else { (0..n).rev().collect() } } pub struct Set<T> { pub v: Vec<T> } impl<T: std::fmt::Display> std::fmt::Display for Set<T> where T:Copy { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { writeln!(f, "Unordered Set:\n{}",wv(&self.v)) } } impl<T> Deref for Set<T> { type Target = Vec<T>; fn deref(&self) -> &Self::Target { &self.v } } impl<T> DerefMut for Set<T> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.v } } impl<T> Set<T> where T: Copy { pub fn from_slice(s: &[T]) -> Self { Set { v: s.to_vec() } } pub fn from_indexed(s: &IndexedSet<T>) -> Self { Set{ v: s.v.to_vec() } } pub fn from_ranked(s: &RankedSet<T>) -> Self { Set{ v: s.v.to_vec() } } } pub struct OrderedSet<T> { pub ascending: bool, pub v: Vec<T>, } impl<T: std::fmt::Display> std::fmt::Display for OrderedSet<T> where T:Copy { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let n = self.v.len(); if n == 0 { return writeln!(
} impl<T> Deref for OrderedSet<T> { type Target = Vec<T>; fn deref(&self) -> &Self::Target { &self.v } } impl<T> DerefMut for OrderedSet<T> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.v } } impl<T> OrderedSet<T> { pub fn from_asc_slice(s: &[T]) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:true, v: s.to_vec() } } pub fn from_desc_slice(s: &[T]) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:false, v: s.to_vec() } } pub fn from_slice(s: &[T], asc: bool) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:asc, v: sortm(s,asc) } } pub fn from_set(s: &Set<T>, asc: bool) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:asc, v: sortm(&s.v,asc) } } pub fn from_indexed(s: &IndexedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:asc, v: s.i.unindex(&s.v,asc == s.ascending) } } pub fn from_ranked(s: &RankedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { OrderedSet{ ascending:asc, v: s.i.invindex().unindex(&s.v,asc == s.ascending) } } } pub struct IndexedSet<T> { pub ascending: bool, pub v: Vec<T>, pub i: Vec<usize>, } impl<'a,T: std::fmt::Display> std::fmt::Display for IndexedSet<T> where T:Copy { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let n = self.v.len(); if n == 0 { return writeln!(f,"[]") } let s = if self.ascending { String::from("Ascending") } else { String::from("Descending") }; writeln!(f, "{} Indexed Set\nSet: {}\nIndex: {}", s, wv(&self.v), wv(&self.i) ) } } impl<'a,T> IndexedSet<T> { pub fn from_slice(s: &'a[T], asc:bool) -> Self where T:PartialOrd+Copy { if asc { IndexedSet{ ascending:true, v:s.to_vec(), i:sortidx(s) } } else { IndexedSet{ ascending:false, v:s.to_vec(), i:sortidx(s).revindex() } } } pub fn from_set(s: &'a Set<T>, asc: bool) -> Self where T:PartialOrd+Copy { if asc { IndexedSet{ ascending:true, v:s.v.to_vec(), i:sortidx(&s.v) } } else { IndexedSet{ ascending:false, v:s.v.to_vec(), i:sortidx(&s.v).revindex() } } } pub fn from_ordered(s: &'a 
OrderedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { IndexedSet{ ascending:asc, v:s.v.to_vec(), i:trivindex(asc == s.ascending,s.len()) } } pub fn from_ranked(s: &'a RankedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { if asc == s.ascending { IndexedSet{ ascending: asc, v: s.v.to_vec(), i:s.i.invindex() } } else { IndexedSet{ ascending: asc, v: s.v.to_vec(), i:s.i.complindex().invindex() } } } } pub struct RankedSet<T> { pub ascending: bool, pub v: Vec<T>, pub i: Vec<usize>, } impl<'a,T: std::fmt::Display> std::fmt::Display for RankedSet<T> where T:Copy { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let n = self.v.len(); if n == 0 { return writeln!(f,"[]") } let s = if self.ascending { String::from("Ascending") } else { String::from("Descending") }; writeln!(f, "{} Ranked Set\nSet: {}\nRanks: {}", s, wv(&self.v), wv(&self.i) ) } } impl<T> RankedSet<T> { pub fn from_slice(s: &[T], asc:bool) -> Self where T:PartialOrd+Copy { RankedSet{ ascending:asc, v:s.to_vec(), i:rank(s,asc) } } pub fn from_set(s: &Set<T>, asc: bool) -> Self where T:PartialOrd+Copy { RankedSet{ ascending:asc, v:s.v.to_vec(), i:rank(s,asc) } } pub fn from_ordered(s: &OrderedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { RankedSet{ ascending:asc, v:s.v.to_vec(), i:trivindex(asc == s.ascending,s.len()) } } pub fn from_indexed(s: &IndexedSet<T>, asc: bool) -> Self where T:PartialOrd+Copy { if asc == s.ascending { RankedSet{ ascending: asc, v: s.v.to_vec(), i:s.i.invindex() } } else { RankedSet{ ascending: asc, v: s.v.to_vec(), i:s.i.invindex().complindex() } } } } pub trait SetOps<T> { fn reverse(&self) -> Self; fn nonrepeat(&self) -> Self; fn infsup(&self) -> MinMax<T>; fn member(&self, m: T) -> bool; fn search(&self, m: T) -> Option<usize>; fn union(&self, s: &Self) -> OrderedSet<T>; fn intersection(&self, s: &Self) -> OrderedSet<T>; fn difference(&self, s: &Self) -> OrderedSet<T>; } pub trait MutSetOps<T> { fn mreverse(&mut self); fn mnonrepeat(&mut self); fn 
munion(&mut self, s: &Self); fn mintersection(&mut self, s: &Self); fn mdifference(&mut self, s: &Self); }
f,"[]") } let s = if self.ascending { String::from("Ascending") } else { String::from("Descending") }; writeln!(f, "{} Ordered Set:\n{}", s, wv(&self.v) ) }
function_block-function_prefixed
[ { "content": "#[test]\n\nfn conversions() { \n\n let v = vec![1.,14.,2.,13.,3.,12.,4.,11.,5.,10.,10.,6.,9.,7.,8.,16.];\n\n let setv = Set::from_slice(&v); \n\n println!(\"{}\",setv); // Display of Set \n\n println!(\"Slice-> {}\",OrderedSet::from_slice(&v,true)); // sorted data but index lost\n\n pr...
Rust
vrp-cli/src/extensions/generate/plan.rs
valerivp/vrp
27ee30e5f4c44e051e5cec1248e606305b52fc00
#[cfg(test)] #[path = "../../../tests/unit/extensions/generate/plan_test.rs"] mod plan_test; use super::get_random_item; use vrp_core::utils::{DefaultRandom, Random}; use vrp_pragmatic::format::problem::{Job, JobPlace, JobTask, Plan, Problem}; use vrp_pragmatic::format::Location; pub(crate) fn generate_plan( problem_proto: &Problem, locations: Option<Vec<Location>>, jobs_size: usize, area_size: Option<f64>, ) -> Result<Plan, String> { let rnd = DefaultRandom::default(); let get_location_fn = get_location_fn(problem_proto, locations, area_size)?; let time_windows = get_plan_time_windows(&problem_proto.plan); let demands = get_plan_demands(&problem_proto.plan); let durations = get_plan_durations(&problem_proto.plan); let generate_tasks = |tasks: &Option<Vec<JobTask>>, keep_original_demand: bool| { tasks.as_ref().map(|tasks| { tasks .iter() .map(|task| JobTask { places: task .places .iter() .map(|_| JobPlace { location: get_location_fn(&rnd), duration: get_random_item(durations.as_slice(), &rnd).cloned().unwrap(), times: get_random_item(time_windows.as_slice(), &rnd).cloned(), }) .collect(), demand: if keep_original_demand { task.demand.clone() } else { get_random_item(demands.as_slice(), &rnd).cloned() }, tag: None, }) .collect::<Vec<_>>() }) }; let jobs = (1..=jobs_size) .map(|job_idx| { let job_proto = get_random_item(problem_proto.plan.jobs.as_slice(), &rnd).unwrap(); let keep_original_demand = job_proto.pickups.as_ref().map_or(false, |t| !t.is_empty()) && job_proto.deliveries.as_ref().map_or(false, |t| !t.is_empty()); Job { id: format!("job{}", job_idx), pickups: generate_tasks(&job_proto.pickups, keep_original_demand), deliveries: generate_tasks(&job_proto.deliveries, keep_original_demand), replacements: generate_tasks(&job_proto.replacements, false), services: generate_tasks(&job_proto.services, true), priority: job_proto.priority, skills: job_proto.skills.clone(), value: job_proto.value, } }) .collect(); Ok(Plan { jobs, relations: None }) } fn get_location_fn( 
problem_proto: &Problem, locations: Option<Vec<Location>>, area_size: Option<f64>, ) -> Result<Box<dyn Fn(&DefaultRandom) -> Location>, String> { if let Some(locations) = locations { Ok(Box::new(move |rnd| get_random_item(locations.as_slice(), &rnd).cloned().expect("cannot get any location"))) } else { let bounding_box = if let Some(area_size) = area_size { if area_size > 0. { get_bounding_box_from_size(&problem_proto.plan, area_size) } else { return Err("area size must be positive".to_string()); } } else { get_bounding_box_from_plan(&problem_proto.plan) }; Ok(Box::new(move |rnd| { let lat = rnd.uniform_real((bounding_box.0).0, (bounding_box.1).0); let lng = rnd.uniform_real((bounding_box.0).1, (bounding_box.1).1); Location::Coordinate { lat, lng } })) } } fn get_bounding_box_from_plan(plan: &Plan) -> ((f64, f64), (f64, f64)) { let mut lat_min = f64::MAX; let mut lat_max = f64::MIN; let mut lng_min = f64::MAX; let mut lng_max = f64::MIN; get_plan_places(&plan).map(|job_place| job_place.location.to_lat_lng()).for_each(|(lat, lng)| { lat_min = lat_min.min(lat); lat_max = lat_max.max(lat); lng_min = lng_min.min(lng); lng_max = lng_max.max(lng); }); ((lat_min, lng_min), (lat_max, lng_max)) } fn get_bounding_box_from_size(plan: &Plan, area_size: f64) -> ((f64, f64), (f64, f64)) { const WGS84_A: f64 = 6_378_137.0; const WGS84_B: f64 = 6_356_752.3; let deg_to_rad = |deg| std::f64::consts::PI * deg / 180.; let rad_to_deg = |rad| 180. 
* rad / std::f64::consts::PI; let ((min_lat, min_lng), (max_lat, max_lng)) = get_bounding_box_from_plan(plan); let center_lat = min_lat + (max_lat - min_lat) / 2.; let center_lng = min_lng + (max_lng - min_lng) / 2.; let lat = deg_to_rad(center_lat); let lng = deg_to_rad(center_lng); let an = WGS84_A * WGS84_A * lat.cos(); let bn = WGS84_B * WGS84_B * lat.sin(); let ad = WGS84_A * lat.cos(); let bd = WGS84_B * lat.sin(); let half_size = area_size; let radius = ((an * an + bn * bn) / (ad * ad + bd * bd)).sqrt(); let pradius = radius * lat.cos(); let lat_min = rad_to_deg(lat - half_size / radius); let lat_max = rad_to_deg(lat + half_size / radius); let lon_min = rad_to_deg(lng - half_size / pradius); let lon_max = rad_to_deg(lng + half_size / pradius); ((lat_min, lon_min), (lat_max, lon_max)) } fn get_plan_time_windows(plan: &Plan) -> Vec<Vec<Vec<String>>> { get_plan_places(&plan).flat_map(|job_place| job_place.times.iter()).cloned().collect() } fn get_plan_demands(plan: &Plan) -> Vec<Vec<i32>> { plan.jobs .iter() .flat_map(|job| get_job_tasks(job)) .filter_map(|job_task| job_task.demand.as_ref()) .cloned() .collect() } fn get_plan_durations(plan: &Plan) -> Vec<f64> { get_plan_places(&plan).map(|job_place| job_place.duration).collect() } fn get_plan_places(plan: &Plan) -> impl Iterator<Item = &JobPlace> { plan.jobs.iter().flat_map(|job| get_job_tasks(job)).flat_map(|job_task| job_task.places.iter()) } fn get_job_tasks(job: &Job) -> impl Iterator<Item = &JobTask> { job.pickups .iter() .flat_map(|tasks| tasks.iter()) .chain(job.deliveries.iter().flat_map(|tasks| tasks.iter())) .chain(job.replacements.iter().flat_map(|tasks| tasks.iter())) .chain(job.services.iter().flat_map(|tasks| tasks.iter())) }
#[cfg(test)] #[path = "../../../tests/unit/extensions/generate/plan_test.rs"] mod plan_test; use super::get_random_item; use vrp_core::utils::{DefaultRandom, Random}; use vrp_pragmatic::format::problem::{Job, JobPlace, JobTask, Plan, Problem}; use vrp_pragmatic::format::Location; pub(crate) fn generate_plan( problem_proto: &Problem, locations: Option<Vec<Location>>, jobs_size: usize, area_size: Option<f64>, ) -> Result<Plan, String> { let rnd = DefaultRandom::default(); let get_location_fn = get_location_fn(problem_proto, locations, area_size)?; let time_windows = get_plan_time_windows(&problem_proto.plan); let demands = get_plan_demands(&problem_proto.plan); let durations = get_plan_durations(&problem_proto.plan); let generate_tasks = |tasks: &Option<Vec<JobTask>>, keep_original_demand: bool| { tasks.as_ref().map(|tasks| { tasks .iter() .map(|task| JobTask { places: task .places .iter() .map(|_| JobPlace { location: get_location_fn(&rnd), duration: get_random_item(durations.as_slice(), &rnd).cloned().unwrap(), times: get_random_item(time_windows.as_slice(), &rnd).cloned(), }) .collect(), demand: if keep_original_demand { task.demand.clone() } else { get_random_item(demands.as_slice(), &rnd).cloned() }, tag: None, }) .collect::<Vec<_>>() }) }; let jobs = (1..=jobs_size) .map(|job_idx| { let job_proto = get_random_item(problem_proto.plan.jobs.as_slice(), &rnd).unwrap(); let keep_original_demand = job_proto.pickups.as_ref().map_or(false, |t| !t.is_empty()) && job_proto.deliveries.as_ref().map_or(false, |t| !t.is_empty()); Job { id: format!("job{}", job_idx), pickups: generate_tasks(&job_proto.pickups, keep_original_demand), deliveries: generate_tasks(&job_proto.deliveries, keep_original_demand), replacements: generate_tasks(&job_proto.replacements, false), services: generate_tasks(&job_proto.services, true), priority: job_proto.priority, skills: job_proto.skills.clone(), value: job_proto.value, } }) .collect(); Ok(Plan { jobs, relations: None }) } fn get_location_fn( 
problem_proto: &Problem, locations: Option<Vec<Location>>, area_size: Option<f64>, ) -> Result<Box<dyn Fn(&DefaultRandom) -> Location>, String> { if let Some(locations) = locations { Ok(Box::new(move |rnd| get_random_item(locations.as_slice(), &rnd).cloned().expect("cannot get any location"))) } else { let bounding_box = if let Some(area_size) = area_size { if area_size > 0. { get_bounding_box_from_size(&problem_proto.plan, area_size) } else { return Err("area size must be positive".to_string()); } } else { get_bounding_box_from_plan(&problem_proto.plan) }; Ok(Box::new(move |rnd| { let lat = rnd.uniform_real((bounding_box.0).0, (bounding_box.1).0); let lng = rnd.uniform_real((bounding_box.0).1, (bounding_box.1).1); Location::Coordinate { lat, lng } })) } } fn get_bounding_box_from_plan(plan: &Plan) -> ((f64, f64), (f64, f64)) { let mut lat_min = f64::MAX; let mut lat_max = f64::MIN; let mut lng_min = f64::MAX; let mut lng_max = f64::MIN; get_plan_places(&plan).map(|job_place| job_place.location.to_lat_lng()).for_each(|(lat, lng)| { lat_min = lat_min.min(lat); lat_max = lat_max.max(lat); lng_min = lng_min.min(lng); lng_max = lng_max.max(lng); }); ((lat_min, lng_min), (lat_max, lng_max)) } fn get_bounding_box_from_size(plan: &Plan, area_size: f64) -> ((f64, f64), (f64, f64)) { const WGS84_A: f64 = 6_378_137.0; const WGS84_B: f64 = 6_356_752.3; let deg_to_rad = |deg| std::f64::consts::PI * deg / 180.; let rad_to_deg = |rad| 180. * rad / std::f64::consts::PI; let ((min_lat, min_lng), (max_lat, max_lng)) = get_bounding_box_from_plan(plan); let center_lat = min_lat + (max_lat - min_lat) / 2.; let center_lng = min_lng + (max_lng - min_lng) / 2.; let lat = deg_to_rad(center_lat); let lng = deg_to_rad(center_lng);
fn get_plan_time_windows(plan: &Plan) -> Vec<Vec<Vec<String>>> { get_plan_places(&plan).flat_map(|job_place| job_place.times.iter()).cloned().collect() } fn get_plan_demands(plan: &Plan) -> Vec<Vec<i32>> { plan.jobs .iter() .flat_map(|job| get_job_tasks(job)) .filter_map(|job_task| job_task.demand.as_ref()) .cloned() .collect() } fn get_plan_durations(plan: &Plan) -> Vec<f64> { get_plan_places(&plan).map(|job_place| job_place.duration).collect() } fn get_plan_places(plan: &Plan) -> impl Iterator<Item = &JobPlace> { plan.jobs.iter().flat_map(|job| get_job_tasks(job)).flat_map(|job_task| job_task.places.iter()) } fn get_job_tasks(job: &Job) -> impl Iterator<Item = &JobTask> { job.pickups .iter() .flat_map(|tasks| tasks.iter()) .chain(job.deliveries.iter().flat_map(|tasks| tasks.iter())) .chain(job.replacements.iter().flat_map(|tasks| tasks.iter())) .chain(job.services.iter().flat_map(|tasks| tasks.iter())) }
let an = WGS84_A * WGS84_A * lat.cos(); let bn = WGS84_B * WGS84_B * lat.sin(); let ad = WGS84_A * lat.cos(); let bd = WGS84_B * lat.sin(); let half_size = area_size; let radius = ((an * an + bn * bn) / (ad * ad + bd * bd)).sqrt(); let pradius = radius * lat.cos(); let lat_min = rad_to_deg(lat - half_size / radius); let lat_max = rad_to_deg(lat + half_size / radius); let lon_min = rad_to_deg(lng - half_size / pradius); let lon_max = rad_to_deg(lng + half_size / pradius); ((lat_min, lon_min), (lat_max, lon_max)) }
function_block-function_prefix_line
[ { "content": "pub fn create_pickup_delivery_job(id: &str, pickup_location: Vec<f64>, delivery_location: Vec<f64>) -> Job {\n\n Job {\n\n pickups: Some(vec![JobTask { tag: Some(\"p1\".to_string()), ..create_task(pickup_location.clone()) }]),\n\n deliveries: Some(vec![JobTask { tag: Some(\"d1\".t...
Rust
src/container/initials.rs
olehbozhok/rsmorphy
fa23ba0306af4df8b1cd867d46415e2baac82837
use std::{borrow::Cow, fmt}; use crate::{ analyzer::MorphAnalyzer, container::{abc::*, decode::*, paradigm::ParadigmId, stack::StackSource, Lex, Score}, opencorpora::tag::OpencorporaTagReg, }; #[derive(Debug, Clone, Copy, PartialEq)] pub enum InitialsKind { FirstName, Patronym, } #[derive(Debug, Clone, PartialEq)] pub struct Initials { pub letter: Cow<'static, str>, pub kind: InitialsKind, pub tag_idx: u8, } impl Initials { pub fn iter_lexeme<'s: 'i, 'm: 'i, 'i>( &'s self, morph: &'m MorphAnalyzer, ) -> impl Iterator<Item = Lex> + 'i { let base: u8 = match self.kind { InitialsKind::FirstName => 0, InitialsKind::Patronym => 12, }; (0..morph.units.initials.tags.len() / 2).map(move |tag_idx| { let container = Initials { tag_idx: base + tag_idx as u8, ..self.clone() }; Lex::from_stack(morph, StackSource::from(container)) }) } } impl Source for Initials { fn score(&self) -> Score { Score::Real(0.1) } fn is_lemma(&self) -> bool { unimplemented!() } fn is_known(&self) -> bool { unimplemented!() } fn get_word(&self) -> Cow<str> { self.letter.clone() } fn get_normal_form(&self, _morph: &MorphAnalyzer) -> Cow<str> { self.letter.clone() } fn get_tag<'a>(&self, morph: &'a MorphAnalyzer) -> &'a OpencorporaTagReg { &morph.units.initials.tags[self.tag_idx as usize].0 } fn try_get_para_id(&self) -> Option<ParadigmId> { None } fn write_word<W: fmt::Write>(&self, f: &mut W) -> fmt::Result { write!(f, "{}", self.letter) } fn write_normal_form<W: fmt::Write>(&self, f: &mut W, _morph: &MorphAnalyzer) -> fmt::Result { write!(f, "{}", self.letter) } fn get_lexeme(&self, morph: &MorphAnalyzer) -> Vec<Lex> { self.iter_lexeme(morph).collect() } fn get_lemma(&self, morph: &MorphAnalyzer) -> Lex { self.iter_lexeme(morph).next().unwrap() } } impl MorphySerde for Initials { fn encode<W: fmt::Write>(&self, f: &mut W) -> fmt::Result { write!( f, "i:{}{}{},{}", match self.kind { InitialsKind::FirstName => "n", InitialsKind::Patronym => "p", }, match (self.tag_idx / 6) % 2 { 0 => "m", 1 => "f", _ 
=> unreachable!(), }, self.tag_idx % 6, self.letter ) } fn decode(s: &str) -> Result<(&str, Self), DecodeError> { let s = follow_str(s, "i").map_err(|_| DecodeError::UnknownPartType)?; let s = follow_str(s, ":")?; let (s, kind) = take_1_char(s)?; let (s, gender) = take_1_char(s)?; let (s, case) = take_1_char(s)?; let (s, word) = take_str_until_char_is(follow_str(s, ",")?, ';')?; let letter = Cow::from(word.to_string()); let tag_idx = decode_tag_idx(kind, gender, case)?; let kind = match kind { 'n' => InitialsKind::FirstName, 'p' => InitialsKind::Patronym, _ => Err(DecodeError::UnknownPartType)?, }; Ok(( s, Initials { kind, tag_idx, letter, }, )) } } fn decode_tag_idx(kind: char, gender: char, case: char) -> Result<u8, DecodeError> { let kind = match kind { 'n' => 0, 'p' => 1, _ => Err(DecodeError::UnknownPartType)?, }; let gender = match gender { 'm' => 0, 'f' => 1, _ => Err(DecodeError::UnknownPartType)?, }; let case = match case { '0'..='5' => case as u8 - b'0', _ => Err(DecodeError::UnknownPartType)?, }; Ok(kind * 12 + gender * 6 + case) }
use std::{borrow::Cow, fmt}; use crate::{ analyzer::MorphAnalyzer, container::{abc::*, decode::*, paradigm::ParadigmId, stack::StackSource, Lex, Score}, opencorpora::tag::OpencorporaTagReg, }; #[derive(Debug, Clone, Copy, PartialEq)] pub enum InitialsKind { FirstName, Patronym, } #[derive(Debug, Clone, PartialEq)] pub struct Initials { pub letter: Cow<'static, str>, pub kind: InitialsKind, pub tag_idx: u8, } impl Initials { pub fn iter_lexeme<'s: 'i, 'm: 'i, 'i>( &'s self, morph: &'m MorphAnalyzer, ) -> impl Iterator<Item = Lex> + 'i { let base: u8 = match self.kind { InitialsKind::FirstName => 0, InitialsKind::Patronym => 12, }; (0..morph.units.initials.tags.len() / 2).map(move |tag_idx| { let container = Initials { tag_idx: base + tag_idx as u8, ..self.clone() }; Lex::from_stack(morph, StackSource::from(container)) }) } } impl Source for Initials { fn score(&self) -> Score { Score::Real(0.1) } fn is_lemma(&self) -> bool { unimplemented!() } fn is_known(&self) -> bool { unimplemented!() } fn get_word(&self) -> Cow<str> { self.letter.clone() } fn get_normal_form(&self, _morph: &MorphAnalyzer) -> Cow<str> { self.letter.clone() } fn get_tag<'a>(&self, morph: &'a MorphAnalyzer) -> &'a OpencorporaTagReg { &morph.units.initials.tags[self.tag_idx as usize].0 } fn try_get_para_id(&self) -> Option<ParadigmId> { None } fn write_word<W: fmt::Write>(&self, f: &mut W) -> fmt::Result { write!(f, "{}", self.letter) } fn write_normal_form<W: fmt::Write>(&self, f: &mut W, _morph: &MorphAnalyzer) -> fmt::Result { write!(f, "{}", self.letter) } fn get_lexeme(&self, morph: &MorphAnalyzer) -> Vec<Lex> { self.iter_lexeme(morph).collect() } fn get_lemma(&self, morph: &MorphAnalyzer) -> Lex { self.iter_lexeme(morph).next().unwrap() } } impl MorphySerde for Initials { fn encode<W: fmt::Write>(&self, f: &mut W) -> fmt::Result { write!( f, "i:{}{}{},{}", match self.kind { InitialsKind::FirstName => "n", InitialsKind::Patronym => "p", }, match (self.tag_idx / 6) % 2 { 0 => "m", 1 => "f", _ 
=> unreachable!(), }, self.tag_idx % 6, self.letter ) }
} fn decode_tag_idx(kind: char, gender: char, case: char) -> Result<u8, DecodeError> { let kind = match kind { 'n' => 0, 'p' => 1, _ => Err(DecodeError::UnknownPartType)?, }; let gender = match gender { 'm' => 0, 'f' => 1, _ => Err(DecodeError::UnknownPartType)?, }; let case = match case { '0'..='5' => case as u8 - b'0', _ => Err(DecodeError::UnknownPartType)?, }; Ok(kind * 12 + gender * 6 + case) }
fn decode(s: &str) -> Result<(&str, Self), DecodeError> { let s = follow_str(s, "i").map_err(|_| DecodeError::UnknownPartType)?; let s = follow_str(s, ":")?; let (s, kind) = take_1_char(s)?; let (s, gender) = take_1_char(s)?; let (s, case) = take_1_char(s)?; let (s, word) = take_str_until_char_is(follow_str(s, ",")?, ';')?; let letter = Cow::from(word.to_string()); let tag_idx = decode_tag_idx(kind, gender, case)?; let kind = match kind { 'n' => InitialsKind::FirstName, 'p' => InitialsKind::Patronym, _ => Err(DecodeError::UnknownPartType)?, }; Ok(( s, Initials { kind, tag_idx, letter, }, )) }
function_block-full_function
[ { "content": "pub fn take_str_until<P>(s: &str, mut predicate: P) -> Result<(&str, &str), DecodeError>\n\nwhere\n\n P: FnMut(char) -> bool,\n\n{\n\n let mut pos = 0;\n\n for ch in s.chars() {\n\n if (predicate)(ch) {\n\n break;\n\n } else {\n\n pos += ch.len_utf8();\...
Rust
tests/unit_test.rs
ShadowPower/shadow-music-cloud
719b0e3aa59126efdf54020213b629e1e7072452
use std::{ collections::HashMap, fs, path::{Path, PathBuf}, }; use anyhow::Result; use radix_fmt::radix; use rayon::prelude::*; use shadow_music_cloud::repository::file_info; use shadow_music_cloud::{ action, command::actor::act, infra::transcoder, model::dto::FileInfo, }; use shadow_music_cloud::{ command::{ action::{Action, ContextData}, command::Command, }, config::app_config, infra::{file_utils, hash_utils}, }; struct WriteValueCommand; impl Command for WriteValueCommand { fn execute(&self, context: &mut HashMap<&str, ContextData>) -> Result<()> { println!("execute TestCommand"); std::thread::sleep(std::time::Duration::from_millis(1000)); context.insert( "data", ContextData::String("string from another command".to_string()), ); Ok(()) } } struct ReadValueCommand; impl Command for ReadValueCommand { fn execute(&self, context: &mut HashMap<&str, ContextData>) -> Result<()> { match context.get("data") { Some(ContextData::String(s)) => println!("{}", s), _ => println!("no value"), } Ok(()) } } #[test] fn test_action() { let test_action = action![WriteValueCommand, ReadValueCommand]; act(test_action); std::thread::sleep(std::time::Duration::from_millis(1000)); } #[test] fn test_file_hash() { let audio_file_info_list = file_utils::list_audio_file(); for audio_file_info in audio_file_info_list { let hash = hash_utils::hash_media_file_info(&audio_file_info); println!("{}", base62::encode(hash)); println!("{}", radix(hash, 36)); } } #[test] fn test_audio_hash() -> Result<()> { let audio_file_info_list = file_utils::list_audio_file(); audio_file_info_list.par_iter().for_each(|audio_file_info| { let mut path = PathBuf::new(); path.push(Path::new(app_config::AUDIO_PATH)); path.push(audio_file_info.path.clone()); println!("{}", audio_file_info.path.display()); match hash_utils::hash_audio_data(&path) { Ok(hash) => println!("{}", base62::encode(hash)), Err(e) => println!("{}", e), } }); Ok(()) } #[test] fn test_audio_transcode() -> Result<()> { 
ffmpeg_next::util::log::set_level(ffmpeg_next::util::log::Level::Error); let audio_file_info_list = file_utils::list_audio_file(); audio_file_info_list.par_iter().for_each(|audio_file_info| { let path = PathBuf::from(app_config::AUDIO_PATH).join(&audio_file_info.path); let transcoder = transcoder::Transcoder { output_filter_spec: None, codec: Some("libopus".to_string()), channels: Some(2), sample_rate: Some(48000), bit_rate: Some(96000), max_bit_rate: Some(320000), }; let mut output_path = PathBuf::new(); output_path.push(Path::new(app_config::OTHER_AUDIO_QUALITY_PATH)); let mut output_file_path = audio_file_info.path.clone(); output_file_path.set_extension("opus"); output_path.push(output_file_path); fs::create_dir_all(&output_path.parent().unwrap()).unwrap(); let start = std::time::SystemTime::now(); transcoder.transcode(&path, &output_path).unwrap(); let end = std::time::SystemTime::now(); println!("{}, {}", end.duration_since(start).unwrap().as_secs(), audio_file_info.path.display()); }); Ok(()) } #[test] fn test_storage() { let test_data = FileInfo { path: ["test", "test2"] .into_iter() .map(|s| s.to_string()) .collect(), file_type: "audio".to_string(), size: 1000, last_modified: 2000, file_info_hash: "TestData".to_string(), cue_media_path: None, cue_media_file_info_hash: None, cover_hash: Some("TestData".to_string()), medias: vec![], }; file_info::set(&"TestData".to_string(), &test_data); let data_from_storage = file_info::get(&"TestData".to_string()).unwrap(); println!("{:?}", data_from_storage); } #[test] fn test_media_info() { let audio_file_info_list = file_utils::list_audio_file(); audio_file_info_list.par_iter().for_each(|audio_file_info| { println!("{:?} \n", FileInfo::from_simple(audio_file_info)); }); }
use std::{ collections::HashMap, fs, path::{Path, PathBuf}, }; use anyhow::Result; use radix_fmt::radix; use rayon::prelude::*; use shadow_music_cloud::repository::file_info; use shadow_music_cloud::{ action, command::actor::act, infra::transcoder, model::dto::FileInfo, }; use shadow_music_cloud::{ command::{ action::{Action, ContextData}, command::Command, }, config::app_config, infra::{file_utils, hash_utils}, }; struct WriteValueCommand; impl Command for WriteValueCommand { fn execute(&self, context: &mut HashMap<&str, ContextData>) -> Result<()> { println!("execute TestCommand"); std::thread::sleep(std::time::Duration::from_millis(1000)); context.insert( "data", ContextData::String("string from another command".to_string()), ); Ok(()) } } struct ReadValueCommand; impl Command for ReadValueCommand { fn execute(&self, context: &mut HashMap<&str, ContextData>) -> Result<()> { match context.get("data") { Some(ContextData::String(s)) => println!("{}", s), _ => println!("no value"), } Ok(()) } } #[test] fn test_action() { let test_action = action![WriteValueCommand, ReadValueCommand]; act(test_action); std::thread::sleep(std::time::Duration::from_millis(1000)); } #[test] fn test_file_hash() { let audio_file_info_list = file_utils::list_audio_file(); for audio_file_info in audio_file_info_list { let hash = hash_utils::hash_media_file_info(&audi
tils::list_audio_file(); audio_file_info_list.par_iter().for_each(|audio_file_info| { let path = PathBuf::from(app_config::AUDIO_PATH).join(&audio_file_info.path); let transcoder = transcoder::Transcoder { output_filter_spec: None, codec: Some("libopus".to_string()), channels: Some(2), sample_rate: Some(48000), bit_rate: Some(96000), max_bit_rate: Some(320000), }; let mut output_path = PathBuf::new(); output_path.push(Path::new(app_config::OTHER_AUDIO_QUALITY_PATH)); let mut output_file_path = audio_file_info.path.clone(); output_file_path.set_extension("opus"); output_path.push(output_file_path); fs::create_dir_all(&output_path.parent().unwrap()).unwrap(); let start = std::time::SystemTime::now(); transcoder.transcode(&path, &output_path).unwrap(); let end = std::time::SystemTime::now(); println!("{}, {}", end.duration_since(start).unwrap().as_secs(), audio_file_info.path.display()); }); Ok(()) } #[test] fn test_storage() { let test_data = FileInfo { path: ["test", "test2"] .into_iter() .map(|s| s.to_string()) .collect(), file_type: "audio".to_string(), size: 1000, last_modified: 2000, file_info_hash: "TestData".to_string(), cue_media_path: None, cue_media_file_info_hash: None, cover_hash: Some("TestData".to_string()), medias: vec![], }; file_info::set(&"TestData".to_string(), &test_data); let data_from_storage = file_info::get(&"TestData".to_string()).unwrap(); println!("{:?}", data_from_storage); } #[test] fn test_media_info() { let audio_file_info_list = file_utils::list_audio_file(); audio_file_info_list.par_iter().for_each(|audio_file_info| { println!("{:?} \n", FileInfo::from_simple(audio_file_info)); }); }
o_file_info); println!("{}", base62::encode(hash)); println!("{}", radix(hash, 36)); } } #[test] fn test_audio_hash() -> Result<()> { let audio_file_info_list = file_utils::list_audio_file(); audio_file_info_list.par_iter().for_each(|audio_file_info| { let mut path = PathBuf::new(); path.push(Path::new(app_config::AUDIO_PATH)); path.push(audio_file_info.path.clone()); println!("{}", audio_file_info.path.display()); match hash_utils::hash_audio_data(&path) { Ok(hash) => println!("{}", base62::encode(hash)), Err(e) => println!("{}", e), } }); Ok(()) } #[test] fn test_audio_transcode() -> Result<()> { ffmpeg_next::util::log::set_level(ffmpeg_next::util::log::Level::Error); let audio_file_info_list = file_u
random
[ { "content": "/// 计算 Hash 值\n\nfn hash(f: &dyn Fn(&mut Xxh3)) -> u128 {\n\n let mut hasher = Xxh3::with_seed(HASH_SEED);\n\n f(&mut hasher);\n\n hasher.digest128()\n\n}\n\n\n", "file_path": "src/infra/hash_utils.rs", "rank": 3, "score": 111744.02618786754 }, { "content": "/// 计算媒体文件...
Rust
qsharp-ast/src/ast/specialization.rs
msoeken/qsharp
1c1d9b81b7e97af516749574bf92eb99d420d2a5
use itertools::Itertools; use proc_macro2::Ident; use syn::{ parenthesized, parse::{Parse, ParseStream}, punctuated::Punctuated, token::Paren, Result, Token, }; use crate::ast::{kw, utilities::peek_and_consume, Scope}; #[derive(Debug, PartialEq, Clone)] pub enum SpecializationParameter { Identifier(String), Dots, } #[derive(Debug, PartialEq)] pub enum SpecializationGenerator { Auto, Self_, Invert, Distribute, Intrinsic, Provided(Option<Vec<SpecializationParameter>>, Scope), } impl SpecializationGenerator { pub fn scope(&self) -> Option<&Scope> { match self { Self::Provided(_, scope) => Some(scope), _ => None, } } } #[derive(Debug, PartialEq, PartialOrd, Clone, Copy)] pub enum SpecializationKind { Body, Adjoint, Controlled, ControlledAdjoint, } #[derive(Debug, PartialEq)] pub struct Specialization { kind: SpecializationKind, generator: SpecializationGenerator, } impl PartialOrd for Specialization { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { self.kind.partial_cmp(&other.kind) } } impl Specialization { pub fn body(scope: Scope) -> Self { Self { kind: SpecializationKind::Body, generator: SpecializationGenerator::Provided(None, scope), } } pub fn adjoint(scope: Scope) -> Self { Self { kind: SpecializationKind::Adjoint, generator: SpecializationGenerator::Provided(None, scope), } } pub fn controlled(scope: Scope) -> Self { Self { kind: SpecializationKind::Controlled, generator: SpecializationGenerator::Provided( Some(vec![SpecializationParameter::Identifier("ctls".into())]), scope, ), } } pub fn controlled_adjoint(scope: Scope) -> Self { Self { kind: SpecializationKind::ControlledAdjoint, generator: SpecializationGenerator::Provided( Some(vec![SpecializationParameter::Identifier("ctls".into())]), scope, ), } } pub fn body_with(generator: SpecializationGenerator) -> Self { Self { kind: SpecializationKind::Body, generator, } } pub fn adjoint_with(generator: SpecializationGenerator) -> Self { Self { kind: SpecializationKind::Adjoint, generator, } } 
pub fn controlled_with(generator: SpecializationGenerator) -> Self { Self { kind: SpecializationKind::Controlled, generator, } } pub fn controlled_adjoint_with(generator: SpecializationGenerator) -> Self { Self { kind: SpecializationKind::ControlledAdjoint, generator, } } pub fn kind(&self) -> SpecializationKind { self.kind } pub fn generator(&self) -> &SpecializationGenerator { &self.generator } pub fn generator_mut(&mut self) -> &mut SpecializationGenerator { &mut self.generator } pub fn set_generator(&mut self, generator: SpecializationGenerator) { self.generator = generator; } } impl Parse for SpecializationParameter { fn parse(input: ParseStream) -> Result<Self> { if input.peek(Token![.]) { input.parse::<Token![.]>()?; input.parse::<Token![.]>()?; input.parse::<Token![.]>()?; Ok(SpecializationParameter::Dots) } else { let ident: Ident = input.parse()?; Ok(SpecializationParameter::Identifier(ident.to_string())) } } } impl Parse for SpecializationGenerator { fn parse(input: ParseStream) -> Result<Self> { if input.peek(kw::auto) { input.parse::<kw::auto>()?; input.parse::<Token![;]>()?; Ok(SpecializationGenerator::Auto) } else if input.peek(Token![self]) { input.parse::<Token![self]>()?; input.parse::<Token![;]>()?; Ok(SpecializationGenerator::Self_) } else if input.peek(kw::invert) { input.parse::<kw::invert>()?; input.parse::<Token![;]>()?; Ok(SpecializationGenerator::Invert) } else if input.peek(kw::distribute) { input.parse::<kw::distribute>()?; input.parse::<Token![;]>()?; Ok(SpecializationGenerator::Distribute) } else if input.peek(kw::intrinsic) { input.parse::<kw::intrinsic>()?; input.parse::<Token![;]>()?; Ok(SpecializationGenerator::Intrinsic) } else { let tuple = if input.peek(Paren) { let buffer; parenthesized!(buffer in input); let items: Punctuated<SpecializationParameter, Token![,]> = Punctuated::parse_separated_nonempty(&buffer)?; Some(items.into_iter().collect_vec()) } else { None }; let scope = input.parse()?; 
Ok(SpecializationGenerator::Provided(tuple, scope)) } } } impl Parse for Specialization { fn parse(input: ParseStream) -> Result<Self> { let mut is_body = 0; let mut is_adjoint = 0; let mut is_controlled = 0; loop { if peek_and_consume(input, kw::body)? { is_body += 1; } else if peek_and_consume(input, kw::adjoint)? { is_adjoint += 1; } else if peek_and_consume(input, kw::controlled)? { is_controlled += 1; } else { break; } } let kind = match (is_body, is_adjoint, is_controlled) { (1, 0, 0) => SpecializationKind::Body, (0, 1, 0) => SpecializationKind::Adjoint, (0, 0, 1) => SpecializationKind::Controlled, (0, 1, 1) => SpecializationKind::ControlledAdjoint, _ => { return Err(input.error("invalid specialization keyword")); } }; let generator: SpecializationGenerator = input.parse()?; Ok(Specialization { kind, generator }) } } #[cfg(test)] mod tests { use super::*; use syn::Result; fn parse_specialization(s: &str) -> Result<Specialization> { syn::parse_str(s) } #[test] fn test_specializations() -> Result<()> { parse_specialization("body intrinsic;")?; parse_specialization("adjoint controlled distribute;")?; Ok(()) } }
use itertools::Itertools; use proc_macro2::Ident; use syn::{ parenthesized, parse::{Parse, ParseStream}, punctuated::Punctuated, token::Paren, Result, Token, }; use crate::ast::{kw, utilities::peek_and_consume, Scope}; #[derive(Debug, PartialEq, Clone)] pub enum SpecializationParameter { Identifier(String), Dots, } #[derive(Debug, PartialEq)] pub enum SpecializationGenerator { Auto, Self_, Invert, Distribute, Intrinsic, Provided(Option<Vec<SpecializationParameter>>, Scope), } impl SpecializationGenerator { pub fn scope(&self) -> Option<&Scope> { match self { Self::Provided(_, scope) => Some(scope), _ => None, } } } #[derive(Debug, PartialEq, PartialOrd, Clone, Copy)] pub enum SpecializationKind { Body, Adjoint, Controlled, ControlledAdjoint, } #[derive(Debug, PartialEq)] pub struct Specialization { kind: SpecializationKind, generator: SpecializationGenerator, } impl PartialOrd for Specialization { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { self.kind.partial_cmp(&other.kind) } } impl Specialization { pub fn body(scope: Scope) -> Self { Self { kind: SpecializationKind::Body, generator: SpecializationGenerator::Provided(None, scope), } } pub fn adjoint(scope: Scope) -> Self { Self { kind: SpecializationKind::Adjoint, generator: SpecializationGenerator::Provided(None, scope), } }
pub fn controlled_adjoint(scope: Scope) -> Self { Self { kind: SpecializationKind::ControlledAdjoint, generator: SpecializationGenerator::Provided( Some(vec![SpecializationParameter::Identifier("ctls".into())]), scope, ), } } pub fn body_with(generator: SpecializationGenerator) -> Self { Self { kind: SpecializationKind::Body, generator, } } pub fn adjoint_with(generator: SpecializationGenerator) -> Self { Self { kind: SpecializationKind::Adjoint, generator, } } pub fn controlled_with(generator: SpecializationGenerator) -> Self { Self { kind: SpecializationKind::Controlled, generator, } } pub fn controlled_adjoint_with(generator: SpecializationGenerator) -> Self { Self { kind: SpecializationKind::ControlledAdjoint, generator, } } pub fn kind(&self) -> SpecializationKind { self.kind } pub fn generator(&self) -> &SpecializationGenerator { &self.generator } pub fn generator_mut(&mut self) -> &mut SpecializationGenerator { &mut self.generator } pub fn set_generator(&mut self, generator: SpecializationGenerator) { self.generator = generator; } } impl Parse for SpecializationParameter { fn parse(input: ParseStream) -> Result<Self> { if input.peek(Token![.]) { input.parse::<Token![.]>()?; input.parse::<Token![.]>()?; input.parse::<Token![.]>()?; Ok(SpecializationParameter::Dots) } else { let ident: Ident = input.parse()?; Ok(SpecializationParameter::Identifier(ident.to_string())) } } } impl Parse for SpecializationGenerator { fn parse(input: ParseStream) -> Result<Self> { if input.peek(kw::auto) { input.parse::<kw::auto>()?; input.parse::<Token![;]>()?; Ok(SpecializationGenerator::Auto) } else if input.peek(Token![self]) { input.parse::<Token![self]>()?; input.parse::<Token![;]>()?; Ok(SpecializationGenerator::Self_) } else if input.peek(kw::invert) { input.parse::<kw::invert>()?; input.parse::<Token![;]>()?; Ok(SpecializationGenerator::Invert) } else if input.peek(kw::distribute) { input.parse::<kw::distribute>()?; input.parse::<Token![;]>()?; 
Ok(SpecializationGenerator::Distribute) } else if input.peek(kw::intrinsic) { input.parse::<kw::intrinsic>()?; input.parse::<Token![;]>()?; Ok(SpecializationGenerator::Intrinsic) } else { let tuple = if input.peek(Paren) { let buffer; parenthesized!(buffer in input); let items: Punctuated<SpecializationParameter, Token![,]> = Punctuated::parse_separated_nonempty(&buffer)?; Some(items.into_iter().collect_vec()) } else { None }; let scope = input.parse()?; Ok(SpecializationGenerator::Provided(tuple, scope)) } } } impl Parse for Specialization { fn parse(input: ParseStream) -> Result<Self> { let mut is_body = 0; let mut is_adjoint = 0; let mut is_controlled = 0; loop { if peek_and_consume(input, kw::body)? { is_body += 1; } else if peek_and_consume(input, kw::adjoint)? { is_adjoint += 1; } else if peek_and_consume(input, kw::controlled)? { is_controlled += 1; } else { break; } } let kind = match (is_body, is_adjoint, is_controlled) { (1, 0, 0) => SpecializationKind::Body, (0, 1, 0) => SpecializationKind::Adjoint, (0, 0, 1) => SpecializationKind::Controlled, (0, 1, 1) => SpecializationKind::ControlledAdjoint, _ => { return Err(input.error("invalid specialization keyword")); } }; let generator: SpecializationGenerator = input.parse()?; Ok(Specialization { kind, generator }) } } #[cfg(test)] mod tests { use super::*; use syn::Result; fn parse_specialization(s: &str) -> Result<Specialization> { syn::parse_str(s) } #[test] fn test_specializations() -> Result<()> { parse_specialization("body intrinsic;")?; parse_specialization("adjoint controlled distribute;")?; Ok(()) } }
pub fn controlled(scope: Scope) -> Self { Self { kind: SpecializationKind::Controlled, generator: SpecializationGenerator::Provided( Some(vec![SpecializationParameter::Identifier("ctls".into())]), scope, ), } }
function_block-full_function
[ { "content": "pub fn peek_and_consume<T: Peek>(input: ParseStream, token: T) -> Result<bool>\n\nwhere\n\n T::Token: Parse,\n\n{\n\n Ok(if input.peek(token) {\n\n input.parse::<T::Token>()?;\n\n true\n\n } else {\n\n false\n\n })\n\n}\n\n\n", "file_path": "qsharp-ast/src/ast/...
Rust
src/lib/ui/carnelian/src/app/strategies/base.rs
dahlia-os/fuchsia-pine64-pinephone
57aace6f0b0bd75306426c98ab9eb3ff4524a61d
use crate::{ app::{ strategies::{framebuffer::FrameBufferAppStrategy, scenic::ScenicAppStrategy}, AppAssistantPtr, FrameBufferPtr, InternalSender, MessageInternal, RenderOptions, }, geometry::IntSize, input::{self}, view::{ strategies::base::{FrameBufferParams, ViewStrategyParams, ViewStrategyPtr}, ViewKey, }, }; use anyhow::Error; use async_trait::async_trait; use fidl_fuchsia_input_report as hid_input_report; use fidl_fuchsia_ui_scenic::ScenicMarker; use fidl_fuchsia_ui_scenic::ScenicProxy; use fuchsia_async::{self as fasync}; use fuchsia_component::client::connect_to_service; use fuchsia_framebuffer::{FrameBuffer, FrameUsage, VSyncMessage}; use fuchsia_zircon::{Duration, Time}; use futures::{ channel::mpsc::{unbounded, UnboundedSender}, StreamExt, TryFutureExt, }; use std::{cell::RefCell, collections::HashMap, rc::Rc}; #[async_trait(?Send)] pub(crate) trait AppStrategy { async fn create_view_strategy( &self, key: ViewKey, render_options: RenderOptions, app_sender: UnboundedSender<MessageInternal>, strategy_params: ViewStrategyParams, ) -> Result<ViewStrategyPtr, Error>; fn supports_scenic(&self) -> bool; fn create_view_for_testing(&self, _: &UnboundedSender<MessageInternal>) -> Result<(), Error> { Ok(()) } fn start_services( &self, _outgoing_services_names: Vec<&'static str>, _app_sender: UnboundedSender<MessageInternal>, ) -> Result<(), Error> { Ok(()) } fn get_scenic_proxy(&self) -> Option<&ScenicProxy>; fn get_frame_buffer(&self) -> Option<FrameBufferPtr> { None } fn get_frame_buffer_size(&self) -> Option<IntSize>; fn get_pixel_size(&self) -> u32; fn get_pixel_format(&self) -> fuchsia_framebuffer::PixelFormat; fn get_linear_stride_bytes(&self) -> u32; async fn post_setup( &mut self, _pixel_format: fuchsia_framebuffer::PixelFormat, _internal_sender: &InternalSender, ) -> Result<(), Error>; fn handle_input_report( &mut self, _device_id: &input::DeviceId, _input_report: &hid_input_report::InputReport, ) -> Vec<input::Event> { Vec::new() } fn 
handle_register_input_device( &mut self, _device_id: &input::DeviceId, _device_descriptor: &hid_input_report::DeviceDescriptor, ) { } } pub(crate) type AppStrategyPtr = Box<dyn AppStrategy>; pub(crate) async fn create_app_strategy( assistant: &AppAssistantPtr, next_view_key: ViewKey, internal_sender: &InternalSender, ) -> Result<AppStrategyPtr, Error> { let render_options = assistant.get_render_options(); let usage = if render_options.use_spinel { FrameUsage::Gpu } else { FrameUsage::Cpu }; let (sender, mut receiver) = unbounded::<VSyncMessage>(); let fb = FrameBuffer::new(usage, None, Some(sender)).await; if fb.is_err() { let scenic = connect_to_service::<ScenicMarker>()?; Ok::<AppStrategyPtr, Error>(Box::new(ScenicAppStrategy { scenic })) } else { let fb = fb.unwrap(); let vsync_interval = Duration::from_nanos(100_000_000_000 / fb.get_config().refresh_rate_e2 as i64); let vsync_internal_sender = internal_sender.clone(); fasync::Task::local( async move { while let Some(VSyncMessage { display_id: _, timestamp, cookie, .. 
}) = receiver.next().await { vsync_internal_sender .unbounded_send(MessageInternal::HandleVSyncParametersChanged( Time::from_nanos(timestamp as i64), vsync_interval, cookie, )) .expect("unbounded_send"); vsync_internal_sender .unbounded_send(MessageInternal::RenderAllViews) .expect("unbounded_send"); } Ok(()) } .unwrap_or_else(|e: anyhow::Error| { println!("error {:#?}", e); }), ) .detach(); let config = fb.get_config(); let size = IntSize::new(config.width as i32, config.height as i32); let frame_buffer_ptr = Rc::new(RefCell::new(fb)); let strat = FrameBufferAppStrategy { frame_buffer: frame_buffer_ptr.clone(), view_key: next_view_key, input_report_handlers: HashMap::new(), }; internal_sender .unbounded_send(MessageInternal::CreateView(ViewStrategyParams::FrameBuffer( FrameBufferParams { frame_buffer: frame_buffer_ptr, pixel_format: strat.get_pixel_format(), size, }, ))) .unwrap_or_else(|err| panic!("unbounded send failed: {}", err)); Ok(Box::new(strat)) } }
use crate::{ app::{ strategies::{framebuffer::FrameBufferAppStrategy, scenic::ScenicAppStrategy}, AppAssistantPtr, FrameBufferPtr, InternalSender, MessageInternal, RenderOptions, }, geometry::IntSize, input::{self}, view::{ strategies::base::{FrameBufferParams, ViewStrategyParams, ViewStrategyPtr}, ViewKey, }, }; use anyhow::Error; use async_trait::async_trait; use fidl_fuchsia_input_report as hid_input_report; use fidl_fuchsia_ui_scenic::ScenicMarker; use fidl_fuchsia_ui_scenic::ScenicProxy; use fuchsia_async::{self as fasync}; use fuchsia_component::client::connect_to_service; use fuchsia_framebuffer::{FrameBuffer, FrameUsage, VSyncMessage}; use fuchsia_zircon::{Duration, Time}; use futures::{ channel::mpsc::{unbounded, UnboundedSender}, StreamExt, TryFutureExt, }; use std::{cell::RefCell, collections::HashMap, rc::Rc}; #[async_trait(?Send)] pub(crate) trait AppStrategy { async fn create_view_strategy( &self, key: ViewKey, render_options: RenderOptions, app_sender: UnboundedSender<MessageInternal>, strategy_params: ViewStrategyParams, ) -> Result<ViewStrategyPtr, Error>; fn supports_scenic(&self) -> bool; fn create_view_for_testing(&self, _: &UnboundedSender<MessageInternal>) -> Result<(), Error> { Ok(()) } fn start_services( &self, _outgoing_services_names: Vec<&'static str>, _app_sender: UnboundedSender<MessageInternal>, ) -> Result<(), Error> { Ok(()) } fn get_scenic_proxy(&self) -> Option<&ScenicProxy>; fn get_frame_buffer(&self) -> Option<FrameBufferPtr> { None } fn get_frame_buffer_size(&self) -> Option<IntSize>; fn get_pixel_size(&self) -> u32; fn get_pixel_format(&self) -> fuchsia_framebuffer::PixelFormat; fn get_linear_stride_bytes(&self) -> u32; async fn post_setup( &mut self, _pixel_format: fuchsia_framebuffer::PixelFormat, _internal_sender: &InternalSender, ) -> Result<(), Error>; fn handle_input_report( &mut self, _device_id: &input::DeviceId, _input_report: &hid_input_report::InputReport, ) -> Vec<input::Event> { Vec::new() } fn 
handle_register_input_device( &mut self, _device_id: &input::DeviceId, _device_descriptor: &hid_input_report::DeviceDescriptor, ) { } } pub(crate) type AppStrategyPtr = Box<dyn AppStrategy>;
pub(crate) async fn create_app_strategy( assistant: &AppAssistantPtr, next_view_key: ViewKey, internal_sender: &InternalSender, ) -> Result<AppStrategyPtr, Error> { let render_options = assistant.get_render_options(); let usage = if render_options.use_spinel { FrameUsage::Gpu } else { FrameUsage::Cpu }; let (sender, mut receiver) = unbounded::<VSyncMessage>(); let fb = FrameBuffer::new(usage, None, Some(sender)).await; if fb.is_err() { let scenic = connect_to_service::<ScenicMarker>()?; Ok::<AppStrategyPtr, Error>(Box::new(ScenicAppStrategy { scenic })) } else { let fb = fb.unwrap(); let vsync_interval = Duration::from_nanos(100_000_000_000 / fb.get_config().refresh_rate_e2 as i64); let vsync_internal_sender = internal_sender.clone(); fasync::Task::local( async move { while let Some(VSyncMessage { display_id: _, timestamp, cookie, .. }) = receiver.next().await { vsync_internal_sender .unbounded_send(MessageInternal::HandleVSyncParametersChanged( Time::from_nanos(timestamp as i64), vsync_interval, cookie, )) .expect("unbounded_send"); vsync_internal_sender .unbounded_send(MessageInternal::RenderAllViews) .expect("unbounded_send"); } Ok(()) } .unwrap_or_else(|e: anyhow::Error| { println!("error {:#?}", e); }), ) .detach(); let config = fb.get_config(); let size = IntSize::new(config.width as i32, config.height as i32); let frame_buffer_ptr = Rc::new(RefCell::new(fb)); let strat = FrameBufferAppStrategy { frame_buffer: frame_buffer_ptr.clone(), view_key: next_view_key, input_report_handlers: HashMap::new(), }; internal_sender .unbounded_send(MessageInternal::CreateView(ViewStrategyParams::FrameBuffer( FrameBufferParams { frame_buffer: frame_buffer_ptr, pixel_format: strat.get_pixel_format(), size, }, ))) .unwrap_or_else(|err| panic!("unbounded send failed: {}", err)); Ok(Box::new(strat)) } }
function_block-full_function
[]
Rust
mailin/src/smtp.rs
trevyn/mailin
368bfe2b97b94ca19b9234d05ba0030cfacc6b3f
use std::net::IpAddr; use std::str; use crate::fsm::StateMachine; use crate::response::*; use crate::{AuthMechanism, Handler}; use either::{Left, Right}; #[derive(Clone)] pub enum Cmd<'a> { Ehlo { domain: &'a str, }, Helo { domain: &'a str, }, Mail { reverse_path: &'a str, is8bit: bool, }, Rcpt { forward_path: &'a str, }, Data, Rset, Noop, StartTls, Quit, Vrfy, AuthPlain { authorization_id: String, authentication_id: String, password: String, }, AuthPlainEmpty, AuthResponse { response: &'a [u8], }, DataEnd, StartedTls, } pub(crate) struct Credentials { pub authorization_id: String, pub authentication_id: String, pub password: String, } pub struct Session<H: Handler> { name: String, handler: H, fsm: StateMachine, } #[derive(Clone)] pub struct SessionBuilder { name: String, start_tls_extension: bool, auth_mechanisms: Vec<AuthMechanism>, } impl SessionBuilder { pub fn new<S: Into<String>>(name: S) -> Self { Self { name: name.into(), start_tls_extension: false, auth_mechanisms: Vec::with_capacity(4), } } pub fn enable_start_tls(&mut self) -> &mut Self { self.start_tls_extension = true; self } pub fn enable_auth(&mut self, auth: AuthMechanism) -> &mut Self { self.auth_mechanisms.push(auth); self } pub fn build<H: Handler>(&self, remote: IpAddr, handler: H) -> Session<H> { Session { name: self.name.clone(), handler, fsm: StateMachine::new( remote, self.auth_mechanisms.clone(), self.start_tls_extension, ), } } } impl<H: Handler> Session<H> { pub fn greeting(&self) -> Response { Response::dynamic(220, format!("{} ESMTP", self.name), Vec::new()) } pub fn tls_active(&mut self) { self.command(Cmd::StartedTls); } pub fn process(&mut self, line: &[u8]) -> Response { let response = match self.fsm.process_line(&mut self.handler, line) { Left(cmd) => self.command(cmd), Right(res) => res, }; response.log(); response } fn command(&mut self, cmd: Cmd) -> Response { self.fsm.command(&mut self.handler, cmd) } } #[cfg(test)] mod tests { use super::*; use crate::fsm::SmtpState; use 
std::net::Ipv4Addr; use ternop::ternary; struct EmptyHandler {} impl Handler for EmptyHandler {} struct DataHandler(Vec<u8>); impl Handler for DataHandler { fn data(&mut self, buf: &[u8]) -> std::io::Result<()> { self.0.extend(buf); Ok(()) } } macro_rules! assert_state { ($val:expr, $n:pat ) => {{ assert!( match $val { $n => true, _ => false, }, "{:?} !~ {}", $val, stringify!($n) ) }}; } fn new_session() -> Session<EmptyHandler> { let addr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)); SessionBuilder::new("some.name").build(addr, EmptyHandler {}) } fn new_data_session() -> Session<DataHandler> { let addr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)); SessionBuilder::new("some.name").build(addr, DataHandler(vec![])) } #[test] fn helo_ehlo() { let mut session = new_session(); let res1 = session.process(b"helo a.domain\r\n"); assert_eq!(res1.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); let res2 = session.process(b"ehlo b.domain\r\n"); assert_eq!(res2.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); } #[test] fn mail_from() { let mut session = new_session(); session.process(b"helo a.domain\r\n"); let res = session.process(b"mail from:<ship@sea.com>\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Mail); } #[test] fn domain_badchars() { let mut session = new_session(); let res = session.process(b"helo world\x40\xff\r\n"); assert_eq!(res.code, 500); assert_state!(session.fsm.current_state(), SmtpState::Idle); } #[test] fn rcpt_to() { let mut session = new_session(); session.process(b"helo a.domain\r\n"); session.process(b"mail from:<ship@sea.com>\r\n"); let res1 = session.process(b"rcpt to:<fish@sea.com>\r\n"); assert_eq!(res1.code, 250); let res2 = session.process(b"rcpt to:<kraken@sea.com>\r\n"); assert_eq!(res2.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Rcpt); } #[test] fn data() { let mut session = new_data_session(); session.process(b"helo a.domain\r\n"); 
session.process(b"mail from:<ship@sea.com>\r\n"); session.process(b"rcpt to:<fish@sea.com>\r\n"); let res1 = session.process(b"data\r\n"); assert_eq!(res1.code, 354); let res2 = session.process(b"Hello World\r\n"); assert_eq!(res2.action, Action::NoReply); let res3 = session.process(b".\r\n"); assert_eq!(res3.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); assert_eq!(&session.handler.0, b"Hello World\r\n"); } #[test] fn dot_stuffed_data() { let mut session = new_data_session(); session.process(b"helo a.domain\r\n"); session.process(b"mail from:<ship@sea.com>\r\n"); session.process(b"rcpt to:<fish@sea.com>\r\n"); let res1 = session.process(b"data\r\n"); assert_eq!(res1.code, 354); let res2 = session.process(b"Hello World\r\n"); assert_eq!(res2.action, Action::NoReply); let res3 = session.process(b"..\r\n"); assert_eq!(res3.action, Action::NoReply); let res3 = session.process(b".\r\n"); assert_eq!(res3.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); assert_eq!(&session.handler.0, b"Hello World\r\n.\r\n"); } #[test] fn data_8bit() { let mut session = new_session(); session.process(b"helo a.domain\r\n"); session.process(b"mail from:<ship@sea.com> body=8bitmime\r\n"); session.process(b"rcpt to:<fish@sea.com>\r\n"); let res1 = session.process(b"data\r\n"); assert_eq!(res1.code, 354); let res2 = session.process(b"Hello 8bit world \x40\x7f\r\n"); assert_eq!(res2.action, Action::NoReply); let res3 = session.process(b".\r\n"); assert_eq!(res3.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); } #[test] fn rset_hello() { let mut session = new_session(); session.process(b"helo some.domain\r\n"); session.process(b"mail from:<ship@sea.com>\r\n"); let res = session.process(b"rset\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); } #[test] fn rset_idle() { let mut session = new_session(); let res = session.process(b"rset\r\n"); assert_eq!(res.code, 250); 
assert_state!(session.fsm.current_state(), SmtpState::Idle); } #[test] fn quit() { let mut session = new_session(); session.process(b"helo a.domain\r\n"); session.process(b"mail from:<ship@sea.com>\r\n"); let res = session.process(b"quit\r\n"); assert_eq!(res.code, 221); assert_eq!(res.action, Action::Close); assert_state!(session.fsm.current_state(), SmtpState::Invalid); } #[test] fn vrfy() { let mut session = new_session(); session.process(b"helo a.domain\r\n"); let res1 = session.process(b"vrfy kraken\r\n"); assert_eq!(res1.code, 252); assert_state!(session.fsm.current_state(), SmtpState::Hello); session.process(b"mail from:<ship@sea.com>\r\n"); let res2 = session.process(b"vrfy boat\r\n"); assert_eq!(res2.code, 503); assert_state!(session.fsm.current_state(), SmtpState::Mail); } struct AuthHandler {} impl Handler for AuthHandler { fn auth_plain( &mut self, authorization_id: &str, authentication_id: &str, password: &str, ) -> Response { ternary!( authorization_id == "test" && authentication_id == "test" && password == "1234", AUTH_OK, INVALID_CREDENTIALS ) } } fn new_auth_session(with_start_tls: bool) -> Session<AuthHandler> { let addr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)); let mut builder = SessionBuilder::new("some.domain"); builder.enable_auth(AuthMechanism::Plain); if with_start_tls { builder.enable_start_tls(); } builder.build(addr, AuthHandler {}) } fn start_tls(session: &mut Session<AuthHandler>) { let res = session.process(b"ehlo a.domain\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); let res = session.process(b"starttls\r\n"); assert_eq!(res.code, 220); session.tls_active(); } #[test] fn noauth_denied() { let mut session = new_auth_session(true); session.process(b"ehlo a.domain\r\n"); let res = session.process(b"mail from:<ship@sea.com>\r\n"); assert_eq!(res.code, 503); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); } #[test] fn auth_plain_param() { let mut session = 
new_auth_session(true); start_tls(&mut session); let mut res = session.process(b"ehlo a.domain\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); res = session.process(b"auth plain dGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 235); assert_state!(session.fsm.current_state(), SmtpState::Hello); } #[test] fn bad_auth_plain_param() { let mut session = new_auth_session(true); start_tls(&mut session); let mut res = session.process(b"ehlo a.domain\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); res = session.process(b"auth plain eGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 535); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); } #[test] fn auth_plain_challenge() { let mut session = new_auth_session(true); start_tls(&mut session); let res = session.process(b"ehlo a.domain\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); let res = session.process(b"auth plain\r\n"); assert_eq!(res.code, 334); if res != EMPTY_AUTH_CHALLENGE { panic!("Server did not send empty challenge"); } assert_state!(session.fsm.current_state(), SmtpState::Auth); let res = session.process(b"dGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 235); assert_state!(session.fsm.current_state(), SmtpState::Hello); } #[test] fn auth_without_tls() { let mut session = new_auth_session(true); let mut res = session.process(b"ehlo a.domain\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); res = session.process(b"auth plain dGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 503); } #[test] fn bad_auth_plain_challenge() { let mut session = new_auth_session(true); start_tls(&mut session); session.process(b"ehlo a.domain\r\n"); session.process(b"auth plain\r\n"); let res = session.process(b"eGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 535); assert_state!(session.fsm.current_state(), 
SmtpState::HelloAuth); } #[test] fn rset_with_auth() { let mut session = new_auth_session(true); start_tls(&mut session); let res = session.process(b"ehlo some.domain\r\n"); assert_eq!(res.code, 250); let res = session.process(b"auth plain dGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 235); let res = session.process(b"mail from:<ship@sea.com>\r\n"); assert_eq!(res.code, 250); let res = session.process(b"rset\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); } }
use std::net::IpAddr; use std::str; use crate::fsm::StateMachine; use crate::response::*; use crate::{AuthMechanism, Handler}; use either::{Left, Right}; #[derive(Clone)] pub enum Cmd<'a> { Ehlo { domain: &'a str, }, Helo { domain: &'a str, }, Mail { reverse_path: &'a str, is8bit: bool, }, Rcpt { forward_path: &'a str, }, Data, Rset, Noop, StartTls, Quit, Vrfy, AuthPlain { authorization_id: String, authentication_id: String, password: String, }, AuthPlainEmpty, AuthResponse { response: &'a [u8], }, DataEnd, StartedTls, } pub(crate) struct Credentials { pub authorization_id: String, pub authentication_id: String, pub password: String, } pub struct Session<H: Handler> { name: String, handler: H, fsm: StateMachine, } #[derive(Clone)] pub struct SessionBuilder { name: String, start_tls_extension: bool, auth_mechanisms: Vec<AuthMechanism>, } impl SessionBuilder { pub fn new<S: Into<String>>(name: S) -> Self { Self { name: name.into(), start_tls_extension: false, auth_mech
dr; use ternop::ternary; struct EmptyHandler {} impl Handler for EmptyHandler {} struct DataHandler(Vec<u8>); impl Handler for DataHandler { fn data(&mut self, buf: &[u8]) -> std::io::Result<()> { self.0.extend(buf); Ok(()) } } macro_rules! assert_state { ($val:expr, $n:pat ) => {{ assert!( match $val { $n => true, _ => false, }, "{:?} !~ {}", $val, stringify!($n) ) }}; } fn new_session() -> Session<EmptyHandler> { let addr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)); SessionBuilder::new("some.name").build(addr, EmptyHandler {}) } fn new_data_session() -> Session<DataHandler> { let addr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)); SessionBuilder::new("some.name").build(addr, DataHandler(vec![])) } #[test] fn helo_ehlo() { let mut session = new_session(); let res1 = session.process(b"helo a.domain\r\n"); assert_eq!(res1.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); let res2 = session.process(b"ehlo b.domain\r\n"); assert_eq!(res2.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); } #[test] fn mail_from() { let mut session = new_session(); session.process(b"helo a.domain\r\n"); let res = session.process(b"mail from:<ship@sea.com>\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Mail); } #[test] fn domain_badchars() { let mut session = new_session(); let res = session.process(b"helo world\x40\xff\r\n"); assert_eq!(res.code, 500); assert_state!(session.fsm.current_state(), SmtpState::Idle); } #[test] fn rcpt_to() { let mut session = new_session(); session.process(b"helo a.domain\r\n"); session.process(b"mail from:<ship@sea.com>\r\n"); let res1 = session.process(b"rcpt to:<fish@sea.com>\r\n"); assert_eq!(res1.code, 250); let res2 = session.process(b"rcpt to:<kraken@sea.com>\r\n"); assert_eq!(res2.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Rcpt); } #[test] fn data() { let mut session = new_data_session(); session.process(b"helo a.domain\r\n"); 
session.process(b"mail from:<ship@sea.com>\r\n"); session.process(b"rcpt to:<fish@sea.com>\r\n"); let res1 = session.process(b"data\r\n"); assert_eq!(res1.code, 354); let res2 = session.process(b"Hello World\r\n"); assert_eq!(res2.action, Action::NoReply); let res3 = session.process(b".\r\n"); assert_eq!(res3.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); assert_eq!(&session.handler.0, b"Hello World\r\n"); } #[test] fn dot_stuffed_data() { let mut session = new_data_session(); session.process(b"helo a.domain\r\n"); session.process(b"mail from:<ship@sea.com>\r\n"); session.process(b"rcpt to:<fish@sea.com>\r\n"); let res1 = session.process(b"data\r\n"); assert_eq!(res1.code, 354); let res2 = session.process(b"Hello World\r\n"); assert_eq!(res2.action, Action::NoReply); let res3 = session.process(b"..\r\n"); assert_eq!(res3.action, Action::NoReply); let res3 = session.process(b".\r\n"); assert_eq!(res3.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); assert_eq!(&session.handler.0, b"Hello World\r\n.\r\n"); } #[test] fn data_8bit() { let mut session = new_session(); session.process(b"helo a.domain\r\n"); session.process(b"mail from:<ship@sea.com> body=8bitmime\r\n"); session.process(b"rcpt to:<fish@sea.com>\r\n"); let res1 = session.process(b"data\r\n"); assert_eq!(res1.code, 354); let res2 = session.process(b"Hello 8bit world \x40\x7f\r\n"); assert_eq!(res2.action, Action::NoReply); let res3 = session.process(b".\r\n"); assert_eq!(res3.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); } #[test] fn rset_hello() { let mut session = new_session(); session.process(b"helo some.domain\r\n"); session.process(b"mail from:<ship@sea.com>\r\n"); let res = session.process(b"rset\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::Hello); } #[test] fn rset_idle() { let mut session = new_session(); let res = session.process(b"rset\r\n"); assert_eq!(res.code, 250); 
assert_state!(session.fsm.current_state(), SmtpState::Idle); } #[test] fn quit() { let mut session = new_session(); session.process(b"helo a.domain\r\n"); session.process(b"mail from:<ship@sea.com>\r\n"); let res = session.process(b"quit\r\n"); assert_eq!(res.code, 221); assert_eq!(res.action, Action::Close); assert_state!(session.fsm.current_state(), SmtpState::Invalid); } #[test] fn vrfy() { let mut session = new_session(); session.process(b"helo a.domain\r\n"); let res1 = session.process(b"vrfy kraken\r\n"); assert_eq!(res1.code, 252); assert_state!(session.fsm.current_state(), SmtpState::Hello); session.process(b"mail from:<ship@sea.com>\r\n"); let res2 = session.process(b"vrfy boat\r\n"); assert_eq!(res2.code, 503); assert_state!(session.fsm.current_state(), SmtpState::Mail); } struct AuthHandler {} impl Handler for AuthHandler { fn auth_plain( &mut self, authorization_id: &str, authentication_id: &str, password: &str, ) -> Response { ternary!( authorization_id == "test" && authentication_id == "test" && password == "1234", AUTH_OK, INVALID_CREDENTIALS ) } } fn new_auth_session(with_start_tls: bool) -> Session<AuthHandler> { let addr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)); let mut builder = SessionBuilder::new("some.domain"); builder.enable_auth(AuthMechanism::Plain); if with_start_tls { builder.enable_start_tls(); } builder.build(addr, AuthHandler {}) } fn start_tls(session: &mut Session<AuthHandler>) { let res = session.process(b"ehlo a.domain\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); let res = session.process(b"starttls\r\n"); assert_eq!(res.code, 220); session.tls_active(); } #[test] fn noauth_denied() { let mut session = new_auth_session(true); session.process(b"ehlo a.domain\r\n"); let res = session.process(b"mail from:<ship@sea.com>\r\n"); assert_eq!(res.code, 503); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); } #[test] fn auth_plain_param() { let mut session = 
new_auth_session(true); start_tls(&mut session); let mut res = session.process(b"ehlo a.domain\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); res = session.process(b"auth plain dGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 235); assert_state!(session.fsm.current_state(), SmtpState::Hello); } #[test] fn bad_auth_plain_param() { let mut session = new_auth_session(true); start_tls(&mut session); let mut res = session.process(b"ehlo a.domain\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); res = session.process(b"auth plain eGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 535); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); } #[test] fn auth_plain_challenge() { let mut session = new_auth_session(true); start_tls(&mut session); let res = session.process(b"ehlo a.domain\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); let res = session.process(b"auth plain\r\n"); assert_eq!(res.code, 334); if res != EMPTY_AUTH_CHALLENGE { panic!("Server did not send empty challenge"); } assert_state!(session.fsm.current_state(), SmtpState::Auth); let res = session.process(b"dGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 235); assert_state!(session.fsm.current_state(), SmtpState::Hello); } #[test] fn auth_without_tls() { let mut session = new_auth_session(true); let mut res = session.process(b"ehlo a.domain\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); res = session.process(b"auth plain dGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 503); } #[test] fn bad_auth_plain_challenge() { let mut session = new_auth_session(true); start_tls(&mut session); session.process(b"ehlo a.domain\r\n"); session.process(b"auth plain\r\n"); let res = session.process(b"eGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 535); assert_state!(session.fsm.current_state(), 
SmtpState::HelloAuth); } #[test] fn rset_with_auth() { let mut session = new_auth_session(true); start_tls(&mut session); let res = session.process(b"ehlo some.domain\r\n"); assert_eq!(res.code, 250); let res = session.process(b"auth plain dGVzdAB0ZXN0ADEyMzQ=\r\n"); assert_eq!(res.code, 235); let res = session.process(b"mail from:<ship@sea.com>\r\n"); assert_eq!(res.code, 250); let res = session.process(b"rset\r\n"); assert_eq!(res.code, 250); assert_state!(session.fsm.current_state(), SmtpState::HelloAuth); } }
anisms: Vec::with_capacity(4), } } pub fn enable_start_tls(&mut self) -> &mut Self { self.start_tls_extension = true; self } pub fn enable_auth(&mut self, auth: AuthMechanism) -> &mut Self { self.auth_mechanisms.push(auth); self } pub fn build<H: Handler>(&self, remote: IpAddr, handler: H) -> Session<H> { Session { name: self.name.clone(), handler, fsm: StateMachine::new( remote, self.auth_mechanisms.clone(), self.start_tls_extension, ), } } } impl<H: Handler> Session<H> { pub fn greeting(&self) -> Response { Response::dynamic(220, format!("{} ESMTP", self.name), Vec::new()) } pub fn tls_active(&mut self) { self.command(Cmd::StartedTls); } pub fn process(&mut self, line: &[u8]) -> Response { let response = match self.fsm.process_line(&mut self.handler, line) { Left(cmd) => self.command(cmd), Right(res) => res, }; response.log(); response } fn command(&mut self, cmd: Cmd) -> Response { self.fsm.command(&mut self.handler, cmd) } } #[cfg(test)] mod tests { use super::*; use crate::fsm::SmtpState; use std::net::Ipv4Ad
random
[ { "content": "fn handle_rset(fsm: &StateMachine, domain: &str) -> (Response, Option<Box<dyn State>>) {\n\n match fsm.auth_state {\n\n AuthState::Unavailable => (\n\n OK,\n\n Some(Box::new(Hello {\n\n domain: domain.to_string(),\n\n })),\n\n ),\n\n...
Rust
src/output.rs
sthagen/mitsuhiko-insta
c27c8bde0bbcf38da480feefdf3eba18138ef9a3
use std::{path::Path, time::Duration}; use similar::{Algorithm, ChangeTag, TextDiff}; use crate::snapshot::Snapshot; use crate::utils::{format_rust_expression, style, term_width}; pub fn print_snapshot_summary( workspace_root: &Path, snapshot: &Snapshot, snapshot_file: Option<&Path>, mut line: Option<u32>, ) { if line.is_none() { line = snapshot.metadata().assertion_line(); } if let Some(snapshot_file) = snapshot_file { let snapshot_file = workspace_root .join(snapshot_file) .strip_prefix(workspace_root) .ok() .map(|x| x.to_path_buf()) .unwrap_or_else(|| snapshot_file.to_path_buf()); println!( "Snapshot file: {}", style(snapshot_file.display()).cyan().underlined() ); } if let Some(name) = snapshot.snapshot_name() { println!("Snapshot: {}", style(name).yellow()); } else { println!("Snapshot: {}", style("<inline>").dim()); } if let Some(ref value) = snapshot.metadata().get_relative_source(workspace_root) { println!( "Source: {}{}", style(value.display()).cyan(), if let Some(line) = line { format!(":{}", style(line).bold()) } else { "".to_string() } ); } if let Some(ref value) = snapshot.metadata().input_file() { println!("Input file: {}", style(value).cyan()); } } pub fn print_snapshot_diff( workspace_root: &Path, new: &Snapshot, old_snapshot: Option<&Snapshot>, snapshot_file: Option<&Path>, mut line: Option<u32>, ) { if line.is_none() { line = new.metadata().assertion_line(); } print_snapshot_summary(workspace_root, new, snapshot_file, line); let old_contents = old_snapshot.as_ref().map_or("", |x| x.contents_str()); let new_contents = new.contents_str(); if !old_contents.is_empty() { println!("{}", style("-old snapshot").red()); println!("{}", style("+new results").green()); } else { println!("{}", style("+new results").green()); } print_changeset( old_contents, new_contents, new.metadata().expression.as_deref(), ); } pub fn print_snapshot_diff_with_title( workspace_root: &Path, new_snapshot: &Snapshot, old_snapshot: Option<&Snapshot>, line: u32, snapshot_file: 
Option<&Path>, ) { let width = term_width(); println!( "{title:━^width$}", title = style(" Snapshot Differences ").bold(), width = width ); print_snapshot_diff( workspace_root, new_snapshot, old_snapshot, snapshot_file, Some(line), ); } pub fn print_snapshot_summary_with_title( workspace_root: &Path, new_snapshot: &Snapshot, old_snapshot: Option<&Snapshot>, line: u32, snapshot_file: Option<&Path>, ) { let _old_snapshot = old_snapshot; let width = term_width(); println!( "{title:━^width$}", title = style(" Snapshot Summary ").bold(), width = width ); print_snapshot_summary(workspace_root, new_snapshot, snapshot_file, Some(line)); println!("{title:━^width$}", title = "", width = width); } pub fn print_changeset(old: &str, new: &str, expr: Option<&str>) { let width = term_width(); let diff = TextDiff::configure() .algorithm(Algorithm::Patience) .timeout(Duration::from_millis(500)) .diff_lines(old, new); if let Some(expr) = expr { println!("{:─^1$}", "", width,); println!("{}", style(format_rust_expression(expr))); } println!("────────────┬{:─^1$}", "", width.saturating_sub(13)); let mut has_changes = false; for (idx, group) in diff.grouped_ops(4).iter().enumerate() { if idx > 0 { println!("┈┈┈┈┈┈┈┈┈┈┈┈┼{:┈^1$}", "", width.saturating_sub(13)); } for op in group { for change in diff.iter_inline_changes(&op) { match change.tag() { ChangeTag::Insert => { has_changes = true; print!( "{:>5} {:>5} │{}", "", style(change.new_index().unwrap()).cyan().dim().bold(), style("+").green(), ); for &(emphasized, change) in change.values() { if emphasized { print!("{}", style(change).green().underlined()); } else { print!("{}", style(change).green()); } } } ChangeTag::Delete => { has_changes = true; print!( "{:>5} {:>5} │{}", style(change.old_index().unwrap()).cyan().dim(), "", style("-").red(), ); for &(emphasized, change) in change.values() { if emphasized { print!("{}", style(change).red().underlined()); } else { print!("{}", style(change).red()); } } } ChangeTag::Equal => { print!( 
"{:>5} {:>5} │ ", style(change.old_index().unwrap()).cyan().dim(), style(change.new_index().unwrap()).cyan().dim().bold(), ); for &(_, change) in change.values() { print!("{}", style(change).dim()); } } } if change.missing_newline() { println!(); } } } } if !has_changes { println!( "{:>5} {:>5} │{}", "", style("-").dim(), style(" snapshots are matching").cyan(), ); } println!("────────────┴{:─^1$}", "", width.saturating_sub(13),); }
use std::{path::Path, time::Duration}; use similar::{Algorithm, ChangeTag, TextDiff}; use crate::snapshot::Snapshot; use crate::utils::{format_rust_expression, style, term_width}; pub fn print_snapshot_summary( workspace_root: &Path, snapshot: &Snapshot, snapshot_file: Option<&Path>, mut line: Option<u32>, ) { if line.is_none() { line = snapshot.metadata().assertion_line(); } if let Some(snapshot_file) = snapshot_file { let snapshot_file = workspace_root .join(snapshot_file) .strip_prefix(workspace_root) .ok() .map(|x| x.to_path_buf()) .unwrap_or_else(|| snapshot_file.to_path_buf()); println!( "Snapshot file: {}", style(snapshot_file.display()).cyan().underlined() ); } if let Some(name) = snapshot.snapshot_name() { println!("Snapshot: {}", style(name).yellow()); } else { println!("Snapshot: {}", style("<inline>").dim()); } if let Some(ref value) = snapshot.metadata().get_relative_source(workspace_root) { println!( "Source: {}{}", style(value.display()).cyan(), if let Some(line) = line { format!(":{}", style(line).bold()) } else { "".to_string() } ); } if let Some(ref value) = snapshot.metadata().input_file() { println!("Input file: {}", style(value).cyan()); } } pub fn print_snapshot_diff( workspace_root: &Path, new: &Snapshot, old_snapshot: Option<&Snapshot>, snapshot_file: Option<&Path>, mut line: Option<u32>, ) { if line.is_none() { line = new.metadata().assertion_line(); } print_snapshot_summary(workspace_root, new, snapshot_file, line); let old_contents = old_snapshot.as_ref().map_or("", |x| x.contents_str()); let new_contents = new.contents_str(); if !old_contents.is_empty() { println!("{}", style("-old snapshot").red()); println!("{}", style("+new results").green()); } else { println!("{}", style("+new results").green()); } print_changeset( old_contents, new_contents, new.metadata().expression.as_deref(), ); } pub fn print_snapshot_diff_with_title( workspace_root: &Path, new_snapshot: &Snapshot, old_snapshot: Option<&Snapshot>, line: u32, snapshot_file: 
Option<&Path>, ) { let width = term_width(); println!( "{title:━^width$}", title = style(" Snapshot Differences ").bold(), width = width ); print_snapshot_diff( workspace_root, new_snapshot, old_snapshot, snapshot_file, Some(line), ); } pub fn print_snapshot_summary_with_title( wo
width = term_width(); println!( "{title:━^width$}", title = style(" Snapshot Summary ").bold(), width = width ); print_snapshot_summary(workspace_root, new_snapshot, snapshot_file, Some(line)); println!("{title:━^width$}", title = "", width = width); } pub fn print_changeset(old: &str, new: &str, expr: Option<&str>) { let width = term_width(); let diff = TextDiff::configure() .algorithm(Algorithm::Patience) .timeout(Duration::from_millis(500)) .diff_lines(old, new); if let Some(expr) = expr { println!("{:─^1$}", "", width,); println!("{}", style(format_rust_expression(expr))); } println!("────────────┬{:─^1$}", "", width.saturating_sub(13)); let mut has_changes = false; for (idx, group) in diff.grouped_ops(4).iter().enumerate() { if idx > 0 { println!("┈┈┈┈┈┈┈┈┈┈┈┈┼{:┈^1$}", "", width.saturating_sub(13)); } for op in group { for change in diff.iter_inline_changes(&op) { match change.tag() { ChangeTag::Insert => { has_changes = true; print!( "{:>5} {:>5} │{}", "", style(change.new_index().unwrap()).cyan().dim().bold(), style("+").green(), ); for &(emphasized, change) in change.values() { if emphasized { print!("{}", style(change).green().underlined()); } else { print!("{}", style(change).green()); } } } ChangeTag::Delete => { has_changes = true; print!( "{:>5} {:>5} │{}", style(change.old_index().unwrap()).cyan().dim(), "", style("-").red(), ); for &(emphasized, change) in change.values() { if emphasized { print!("{}", style(change).red().underlined()); } else { print!("{}", style(change).red()); } } } ChangeTag::Equal => { print!( "{:>5} {:>5} │ ", style(change.old_index().unwrap()).cyan().dim(), style(change.new_index().unwrap()).cyan().dim().bold(), ); for &(_, change) in change.values() { print!("{}", style(change).dim()); } } } if change.missing_newline() { println!(); } } } } if !has_changes { println!( "{:>5} {:>5} │{}", "", style("-").dim(), style(" snapshots are matching").cyan(), ); } println!("────────────┴{:─^1$}", "", width.saturating_sub(13),); }
rkspace_root: &Path, new_snapshot: &Snapshot, old_snapshot: Option<&Snapshot>, line: u32, snapshot_file: Option<&Path>, ) { let _old_snapshot = old_snapshot; let
function_block-random_span
[ { "content": "/// Memoizes a snapshot file in the reference file.\n\npub fn memoize_snapshot_file(snapshot_file: &Path) {\n\n if let Ok(path) = env::var(\"INSTA_SNAPSHOT_REFERENCES_FILE\") {\n\n let mut f = fs::OpenOptions::new()\n\n .write(true)\n\n .append(true)\n\n ...
Rust
tests/geometry/dual_quaternion.rs
zyansheep/nalgebra
e913beca889dc278d1c0d6cadd2008d3f9bcc0af
#![cfg(feature = "proptest-support")] #![allow(non_snake_case)] use na::{DualQuaternion, Point3, Unit, UnitDualQuaternion, UnitQuaternion, Vector3}; use crate::proptest::*; use proptest::{prop_assert, proptest}; proptest!( #[test] fn isometry_equivalence(iso in isometry3(), p in point3(), v in vector3()) { let dq = UnitDualQuaternion::from_isometry(&iso); prop_assert!(relative_eq!(iso * p, dq * p, epsilon = 1.0e-7)); prop_assert!(relative_eq!(iso * v, dq * v, epsilon = 1.0e-7)); } #[test] fn inverse_is_identity(i in unit_dual_quaternion(), p in point3(), v in vector3()) { let ii = i.inverse(); prop_assert!(relative_eq!(i * ii, UnitDualQuaternion::identity(), epsilon = 1.0e-7) && relative_eq!(ii * i, UnitDualQuaternion::identity(), epsilon = 1.0e-7) && relative_eq!((i * ii) * p, p, epsilon = 1.0e-7) && relative_eq!((ii * i) * p, p, epsilon = 1.0e-7) && relative_eq!((i * ii) * v, v, epsilon = 1.0e-7) && relative_eq!((ii * i) * v, v, epsilon = 1.0e-7)); } #[cfg_attr(rustfmt, rustfmt_skip)] #[test] fn multiply_equals_alga_transform( dq in unit_dual_quaternion(), v in vector3(), p in point3() ) { prop_assert!(dq * v == dq.transform_vector(&v) && dq * p == dq.transform_point(&p) && relative_eq!( dq.inverse() * v, dq.inverse_transform_vector(&v), epsilon = 1.0e-7 ) && relative_eq!( dq.inverse() * p, dq.inverse_transform_point(&p), epsilon = 1.0e-7 )); } #[cfg_attr(rustfmt, rustfmt_skip)] #[test] fn composition( dq in unit_dual_quaternion(), uq in unit_quaternion(), t in translation3(), v in vector3(), p in point3() ) { prop_assert!(relative_eq!((uq * dq) * v, uq * (dq * v), epsilon = 1.0e-7)); prop_assert!(relative_eq!((uq * dq) * p, uq * (dq * p), epsilon = 1.0e-7)); prop_assert!(relative_eq!((dq * uq) * v, dq * (uq * v), epsilon = 1.0e-7)); prop_assert!(relative_eq!((dq * uq) * p, dq * (uq * p), epsilon = 1.0e-7)); prop_assert!(relative_eq!((t * dq) * v, (dq * v), epsilon = 1.0e-7)); prop_assert!(relative_eq!((t * dq) * p, t * (dq * p), epsilon = 1.0e-7)); 
prop_assert!(relative_eq!((dq * t) * v, dq * v, epsilon = 1.0e-7)); prop_assert!(relative_eq!((dq * t) * p, dq * (t * p), epsilon = 1.0e-7)); } #[cfg_attr(rustfmt, rustfmt_skip)] #[test] fn sclerp_is_defined_for_identical_orientations( dq in unit_dual_quaternion(), s in -1.0f64..2.0f64, t in translation3(), ) { prop_assert!(relative_eq!(dq.sclerp(&dq, 0.0), dq, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq, 0.5), dq, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq, 1.0), dq, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq, s), dq, epsilon = 1.0e-7)); let unit = UnitDualQuaternion::identity(); prop_assert!(relative_eq!(unit.sclerp(&unit, 0.0), unit, epsilon = 1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit, 0.5), unit, epsilon = 1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit, 1.0), unit, epsilon = 1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit, s), unit, epsilon = 1.0e-7)); let dq2 = t * dq; prop_assert!(relative_eq!(dq.sclerp(&dq2, 0.0).real, dq.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq2, 0.5).real, dq.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq2, 1.0).real, dq.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq2, s).real, dq.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!( dq.sclerp(&dq2, s).translation().vector, dq.translation().vector.lerp(&dq2.translation().vector, s), epsilon = 1.0e-7 )); let unit2 = t * unit; prop_assert!(relative_eq!(unit.sclerp(&unit2, 0.0).real, unit.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit2, 0.5).real, unit.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit2, 1.0).real, unit.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit2, s).real, unit.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!( unit.sclerp(&unit2, s).translation().vector, unit.translation().vector.lerp(&unit2.translation().vector, s), epsilon = 1.0e-7 )); } #[cfg_attr(rustfmt, 
rustfmt_skip)] #[test] fn sclerp_is_not_defined_for_opposite_orientations( dq in unit_dual_quaternion(), s in 0.1f64..0.9f64, t in translation3(), t2 in translation3(), v in vector3(), ) { let iso = dq.to_isometry(); let rot = iso.rotation; if let Some((axis, angle)) = rot.axis_angle() { let flipped = UnitQuaternion::from_axis_angle(&axis, angle + std::f64::consts::PI); let dqf = flipped * rot.inverse() * dq.clone(); prop_assert!(dq.try_sclerp(&dqf, 0.5, 1.0e-7).is_none()); prop_assert!(dq.try_sclerp(&dqf, s, 1.0e-7).is_none()); } let dq2 = t * dq; let iso2 = dq2.to_isometry(); let rot2 = iso2.rotation; if let Some((axis, angle)) = rot2.axis_angle() { let flipped = UnitQuaternion::from_axis_angle(&axis, angle + std::f64::consts::PI); let dq3f = t2 * flipped * rot.inverse() * dq.clone(); prop_assert!(dq2.try_sclerp(&dq3f, 0.5, 1.0e-7).is_none()); prop_assert!(dq2.try_sclerp(&dq3f, s, 1.0e-7).is_none()); } if let Some(axis) = Unit::try_new(v, 1.0e-7) { let unit = UnitDualQuaternion::identity(); let flip = UnitQuaternion::from_axis_angle(&axis, std::f64::consts::PI); let unitf = flip * unit; prop_assert!(unit.try_sclerp(&unitf, 0.5, 1.0e-7).is_none()); prop_assert!(unit.try_sclerp(&unitf, s, 1.0e-7).is_none()); let unit2f = t * unit * flip; prop_assert!(unit.try_sclerp(&unit2f, 0.5, 1.0e-7).is_none()); prop_assert!(unit.try_sclerp(&unit2f, s, 1.0e-7).is_none()); } } #[cfg_attr(rustfmt, rustfmt_skip)] #[test] fn all_op_exist( dq in dual_quaternion(), udq in unit_dual_quaternion(), uq in unit_quaternion(), s in PROPTEST_F64, t in translation3(), v in vector3(), p in point3() ) { let dqMs: DualQuaternion<_> = dq * s; let dqMdq: DualQuaternion<_> = dq * dq; let dqMudq: DualQuaternion<_> = dq * udq; let udqMdq: DualQuaternion<_> = udq * dq; let iMi: UnitDualQuaternion<_> = udq * udq; let iMuq: UnitDualQuaternion<_> = udq * uq; let iDi: UnitDualQuaternion<_> = udq / udq; let iDuq: UnitDualQuaternion<_> = udq / uq; let iMp: Point3<_> = udq * p; let iMv: Vector3<_> = udq * v; 
let iMt: UnitDualQuaternion<_> = udq * t; let tMi: UnitDualQuaternion<_> = t * udq; let uqMi: UnitDualQuaternion<_> = uq * udq; let uqDi: UnitDualQuaternion<_> = uq / udq; let mut dqMs1 = dq; let mut dqMdq1 = dq; let mut dqMdq2 = dq; let mut dqMudq1 = dq; let mut dqMudq2 = dq; let mut iMt1 = udq; let mut iMt2 = udq; let mut iMi1 = udq; let mut iMi2 = udq; let mut iMuq1 = udq; let mut iMuq2 = udq; let mut iDi1 = udq; let mut iDi2 = udq; let mut iDuq1 = udq; let mut iDuq2 = udq; dqMs1 *= s; dqMdq1 *= dq; dqMdq2 *= &dq; dqMudq1 *= udq; dqMudq2 *= &udq; iMt1 *= t; iMt2 *= &t; iMi1 *= udq; iMi2 *= &udq; iMuq1 *= uq; iMuq2 *= &uq; iDi1 /= udq; iDi2 /= &udq; iDuq1 /= uq; iDuq2 /= &uq; prop_assert!(dqMs == dqMs1 && dqMdq == dqMdq1 && dqMdq == dqMdq2 && dqMudq == dqMudq1 && dqMudq == dqMudq2 && iMt == iMt1 && iMt == iMt2 && iMi == iMi1 && iMi == iMi2 && iMuq == iMuq1 && iMuq == iMuq2 && iDi == iDi1 && iDi == iDi2 && iDuq == iDuq1 && iDuq == iDuq2 && dqMs == &dq * s && dqMdq == &dq * &dq && dqMdq == dq * &dq && dqMdq == &dq * dq && dqMudq == &dq * &udq && dqMudq == dq * &udq && dqMudq == &dq * udq && udqMdq == &udq * &dq && udqMdq == udq * &dq && udqMdq == &udq * dq && iMi == &udq * &udq && iMi == udq * &udq && iMi == &udq * udq && iMuq == &udq * &uq && iMuq == udq * &uq && iMuq == &udq * uq && iDi == &udq / &udq && iDi == udq / &udq && iDi == &udq / udq && iDuq == &udq / &uq && iDuq == udq / &uq && iDuq == &udq / uq && iMp == &udq * &p && iMp == udq * &p && iMp == &udq * p && iMv == &udq * &v && iMv == udq * &v && iMv == &udq * v && iMt == &udq * &t && iMt == udq * &t && iMt == &udq * t && tMi == &t * &udq && tMi == t * &udq && tMi == &t * udq && uqMi == &uq * &udq && uqMi == uq * &udq && uqMi == &uq * udq && uqDi == &uq / &udq && uqDi == uq / &udq && uqDi == &uq / udq) } );
#![cfg(feature = "proptest-support")] #![allow(non_snake_case)] use na::{DualQuaternion, Point3, Unit, UnitDualQuaternion, UnitQuaternion, Vector3}; use crate::proptest::*; use proptest::{prop_assert, proptest}; proptest!( #[test] fn isometry_equivalence(iso in isometry3(), p in point3(), v in vector3()) { let dq = UnitDualQuaternion::from_isometry(&iso); prop_assert!(relative_eq!(iso * p, dq * p, epsilon = 1.0e-7)); prop_assert!(relative_eq!(iso * v, dq * v, epsilon = 1.0e-7)); } #[test] fn inverse_is_identity(i in unit_dual_quaternion(), p in point3(), v in vector3()) { let ii = i.inverse(); prop_assert!(relative_eq!(i * ii, UnitDualQuaternion::identity(), epsilon = 1.0e-7) && relative_eq!(ii * i, UnitDualQuaternion::identity(), epsilon = 1.0e-7) && relative_eq!((i * ii) * p, p, epsilon = 1.0e-7) && relative_eq!((ii * i) * p, p, epsilon = 1.0e-7) && relative_eq!((i * ii) * v, v, epsilon = 1.0e-7) && relative_eq!((ii * i) * v, v, epsilon = 1.0e-7)); } #[cfg_attr(rustfmt, rustfmt_skip)] #[test] fn multiply_equals_alga_transform( dq in unit_dual_quaternion(), v in vector3(), p in point3() ) { prop_assert!(dq * v == dq.transform_vector(&v) && dq * p == dq.transform_point(&p) && relative_eq!( dq.inverse() * v, dq.inverse_transform_vector(&v), epsilon = 1.0e-7 ) && relative_eq!( dq.inverse() * p, dq.inverse_transform_point(&p), epsilon = 1.0e-7 )); } #[cfg_attr(rustfmt, rustfmt_skip)] #[test] fn composition( dq in unit_dual_quaternion(), uq in unit_quaternion(), t in translation3(), v in vector3(), p in point3() ) { prop_assert!(relative_eq!((uq * dq) * v, uq * (dq * v), epsilon = 1.0e-7)); prop_assert!(relative_eq!((uq * dq) * p, uq * (dq * p), epsilon = 1.0e-7)); prop_assert!(relative_eq!((dq * uq) * v, dq * (uq * v), epsilon = 1.0e-7)); prop_assert!(relative_eq!((dq * uq) * p, dq * (uq * p), epsilon = 1.0e-7)); prop_assert!(relative_eq!((t * dq) * v, (dq * v), epsilon = 1.0e-7)); prop_assert!(relative_eq!((t * dq) * p, t * (dq * p), epsilon = 1.0e-7)); 
prop_assert!(relative_eq!((dq * t) * v, dq * v, epsilon = 1.0e-7)); prop_assert!(relative_eq!((dq * t) * p, dq * (t * p), epsilon = 1.0e-7)); } #[cfg_attr(rustfmt, rustfmt_skip)] #[test] fn sclerp_is_defined_for_identical_orientations( dq in unit_dual_quaternion(), s in -1.0f64..2.0f64, t in translation3(), ) { prop_assert!(relative_eq!(dq.sclerp(&dq, 0.0), dq, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq, 0.5), dq, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq, 1.0), dq, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq, s), dq, epsilon = 1.0e-7)); let unit = UnitDualQuaternion::identity(); prop_assert!(relative_eq!(unit.sclerp(&unit, 0.0), unit, epsilon = 1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit, 0.5), unit, epsilon = 1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit, 1.0), unit, epsilon = 1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit, s), unit, epsilon = 1.0e-7)); let dq2 = t * dq; prop_assert!(relative_eq!(dq.sclerp(&dq2, 0.0).real, dq.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq2, 0.5).real, dq.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq2, 1.0).real, dq.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!(dq.sclerp(&dq2, s).real, dq.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!( dq.sclerp(&dq2, s).translation().vector, dq.translation().vector.lerp(&dq2.translation().vector, s), epsilon = 1.0e-7 )); let unit2 = t * unit; prop_assert!(relative_eq!(unit.sclerp(&unit2, 0.0).real, unit.real, epsilon =
fn all_op_exist( dq in dual_quaternion(), udq in unit_dual_quaternion(), uq in unit_quaternion(), s in PROPTEST_F64, t in translation3(), v in vector3(), p in point3() ) { let dqMs: DualQuaternion<_> = dq * s; let dqMdq: DualQuaternion<_> = dq * dq; let dqMudq: DualQuaternion<_> = dq * udq; let udqMdq: DualQuaternion<_> = udq * dq; let iMi: UnitDualQuaternion<_> = udq * udq; let iMuq: UnitDualQuaternion<_> = udq * uq; let iDi: UnitDualQuaternion<_> = udq / udq; let iDuq: UnitDualQuaternion<_> = udq / uq; let iMp: Point3<_> = udq * p; let iMv: Vector3<_> = udq * v; let iMt: UnitDualQuaternion<_> = udq * t; let tMi: UnitDualQuaternion<_> = t * udq; let uqMi: UnitDualQuaternion<_> = uq * udq; let uqDi: UnitDualQuaternion<_> = uq / udq; let mut dqMs1 = dq; let mut dqMdq1 = dq; let mut dqMdq2 = dq; let mut dqMudq1 = dq; let mut dqMudq2 = dq; let mut iMt1 = udq; let mut iMt2 = udq; let mut iMi1 = udq; let mut iMi2 = udq; let mut iMuq1 = udq; let mut iMuq2 = udq; let mut iDi1 = udq; let mut iDi2 = udq; let mut iDuq1 = udq; let mut iDuq2 = udq; dqMs1 *= s; dqMdq1 *= dq; dqMdq2 *= &dq; dqMudq1 *= udq; dqMudq2 *= &udq; iMt1 *= t; iMt2 *= &t; iMi1 *= udq; iMi2 *= &udq; iMuq1 *= uq; iMuq2 *= &uq; iDi1 /= udq; iDi2 /= &udq; iDuq1 /= uq; iDuq2 /= &uq; prop_assert!(dqMs == dqMs1 && dqMdq == dqMdq1 && dqMdq == dqMdq2 && dqMudq == dqMudq1 && dqMudq == dqMudq2 && iMt == iMt1 && iMt == iMt2 && iMi == iMi1 && iMi == iMi2 && iMuq == iMuq1 && iMuq == iMuq2 && iDi == iDi1 && iDi == iDi2 && iDuq == iDuq1 && iDuq == iDuq2 && dqMs == &dq * s && dqMdq == &dq * &dq && dqMdq == dq * &dq && dqMdq == &dq * dq && dqMudq == &dq * &udq && dqMudq == dq * &udq && dqMudq == &dq * udq && udqMdq == &udq * &dq && udqMdq == udq * &dq && udqMdq == &udq * dq && iMi == &udq * &udq && iMi == udq * &udq && iMi == &udq * udq && iMuq == &udq * &uq && iMuq == udq * &uq && iMuq == &udq * uq && iDi == &udq / &udq && iDi == udq / &udq && iDi == &udq / udq && iDuq == &udq / &uq && iDuq == udq / &uq && iDuq == &udq / 
uq && iMp == &udq * &p && iMp == udq * &p && iMp == &udq * p && iMv == &udq * &v && iMv == udq * &v && iMv == &udq * v && iMt == &udq * &t && iMt == udq * &t && iMt == &udq * t && tMi == &t * &udq && tMi == t * &udq && tMi == &t * udq && uqMi == &uq * &udq && uqMi == uq * &udq && uqMi == &uq * udq && uqDi == &uq / &udq && uqDi == uq / &udq && uqDi == &uq / udq) } );
1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit2, 0.5).real, unit.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit2, 1.0).real, unit.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!(unit.sclerp(&unit2, s).real, unit.real, epsilon = 1.0e-7)); prop_assert!(relative_eq!( unit.sclerp(&unit2, s).translation().vector, unit.translation().vector.lerp(&unit2.translation().vector, s), epsilon = 1.0e-7 )); } #[cfg_attr(rustfmt, rustfmt_skip)] #[test] fn sclerp_is_not_defined_for_opposite_orientations( dq in unit_dual_quaternion(), s in 0.1f64..0.9f64, t in translation3(), t2 in translation3(), v in vector3(), ) { let iso = dq.to_isometry(); let rot = iso.rotation; if let Some((axis, angle)) = rot.axis_angle() { let flipped = UnitQuaternion::from_axis_angle(&axis, angle + std::f64::consts::PI); let dqf = flipped * rot.inverse() * dq.clone(); prop_assert!(dq.try_sclerp(&dqf, 0.5, 1.0e-7).is_none()); prop_assert!(dq.try_sclerp(&dqf, s, 1.0e-7).is_none()); } let dq2 = t * dq; let iso2 = dq2.to_isometry(); let rot2 = iso2.rotation; if let Some((axis, angle)) = rot2.axis_angle() { let flipped = UnitQuaternion::from_axis_angle(&axis, angle + std::f64::consts::PI); let dq3f = t2 * flipped * rot.inverse() * dq.clone(); prop_assert!(dq2.try_sclerp(&dq3f, 0.5, 1.0e-7).is_none()); prop_assert!(dq2.try_sclerp(&dq3f, s, 1.0e-7).is_none()); } if let Some(axis) = Unit::try_new(v, 1.0e-7) { let unit = UnitDualQuaternion::identity(); let flip = UnitQuaternion::from_axis_angle(&axis, std::f64::consts::PI); let unitf = flip * unit; prop_assert!(unit.try_sclerp(&unitf, 0.5, 1.0e-7).is_none()); prop_assert!(unit.try_sclerp(&unitf, s, 1.0e-7).is_none()); let unit2f = t * unit * flip; prop_assert!(unit.try_sclerp(&unit2f, 0.5, 1.0e-7).is_none()); prop_assert!(unit.try_sclerp(&unit2f, s, 1.0e-7).is_none()); } } #[cfg_attr(rustfmt, rustfmt_skip)] #[test]
random
[ { "content": "fn length_on_direction_with_unit(v: &Vector3<f32>, dir: &Unit<Vector3<f32>>) -> f32 {\n\n // No need to normalize `dir`: we know that it is non-zero and normalized.\n\n v.dot(dir.as_ref())\n\n}\n\n\n", "file_path": "examples/unit_wrapper.rs", "rank": 0, "score": 256287.2837648631...
Rust
tiled/src/json_parser.rs
lenscas/hv-dev
7ae07cb889b20f382acd7293d80dfe3319a91123
use crate::*; use hv::prelude::*; impl Tileset { pub fn json_parse_tileset( v: &Value, first_gid: u32, path_prefix: Option<&str>, tileset_number: u8, slab: &mut slab::Slab<Object>, filename: String, ) -> Result<Self> { let json_obj = v .as_object() .ok_or(anyhow!("Tileset file did not contain a json dictionary"))?; let tile_array = json_obj .get("tiles") .map_or::<Result<&[Value]>, _>(Ok(&[][..]), |t_arr| { Ok(t_arr .as_array() .ok_or_else(|| anyhow!("Tiles are not an array"))? .as_slice()) })?; let mut tiles = HashMap::new(); for tile_obj in tile_array.iter() { let tile = Tile::json_parse_tile(tile_obj, tileset_number, slab)?; tiles.insert(tile.id, tile); } Ok(Tileset { columns: json_obj .get("columns") .ok_or_else(|| anyhow!("Should've gotten columns"))? .as_u64() .ok_or_else(|| anyhow!("Columns value wasn't a u64"))? .try_into() .expect("Bruh how many columns does your tileset have"), images: vec![Image::from_json(json_obj, path_prefix)?], tilecount: json_obj .get("tilecount") .ok_or_else(|| anyhow!("Should've gotten tilecount"))? .as_u64() .ok_or_else(|| anyhow!("Tilecount value wasn't a u64"))? .try_into() .expect("Bruh how many tiles does your tileset have"), tile_width: json_obj .get("tilewidth") .ok_or_else(|| anyhow!("Should've gotten tilewidth"))? .as_u64() .ok_or_else(|| anyhow!("Tilewidth value wasn't a u64"))? .try_into() .expect("Tiles are too thicc"), tile_height: json_obj .get("tileheight") .ok_or_else(|| anyhow!("Should've gotten tileheight"))? .as_u64() .ok_or_else(|| anyhow!("Tileheight value wasn't a u64"))? .try_into() .expect("Tiles are too tall owo"), spacing: json_obj .get("spacing") .ok_or_else(|| anyhow!("Should've gotten spacing"))? .as_u64() .ok_or_else(|| anyhow!("Spacing value wasn't a u64"))? .try_into() .expect( "God help you if you actually have 2,147,483,647 pixels in between each tile", ), name: json_obj .get("name") .ok_or_else(|| anyhow!("Should've gotten a name"))? 
.as_str() .ok_or_else(|| anyhow!("Name wasn't a valid string"))? .to_owned(), margin: json_obj .get("margin") .ok_or_else(|| anyhow!("Should've gotten a margin"))? .as_u64() .ok_or_else(|| anyhow!("Margin value wasn't a u64"))? .try_into() .expect( "God help you if you actually have 2,147,483,647 pixels AROUND your tileset", ), properties: Properties::json_parse_properties(v)?, filename: Some(filename), tiles, first_gid, }) } } impl Animation { fn json_parse_animation(v: &[Value], tileset: u8) -> Result<Self> { let mut animation_frames = Vec::with_capacity(v.len()); for entry in v.iter() { animation_frames.push(( TileId( entry .get("tileid") .ok_or_else(|| anyhow!("Couldn't find a tileid in the animation"))? .as_u64() .ok_or_else(|| anyhow!("Tileid should be a u64"))? .try_into() .expect("Tile ids should fit into u32s probably"), TileMetaData::new(tileset, false, false, false), ), entry .get("duration") .ok_or_else(|| anyhow!("Couldn't find a duration in the animation"))? .as_u64() .ok_or_else(|| anyhow!("Duration should be a u64"))? .try_into() .expect("Duration should probably fit in a u32"), )); } Ok(Animation(animation_frames)) } } impl Properties { fn json_parse_properties(v: &Value) -> Result<Self> { let mut properties = HashMap::new(); if let Some(p) = v.get("properties") { let properties_arr = p .as_array() .ok_or_else(|| anyhow!("Couldn't turn properties into an array"))?; if properties_arr.len() > 1 { return Err(anyhow!( "Properties array was greater than 1, not sure if this is expected" )); } for (k, v) in properties_arr[0] .as_object() .ok_or_else(|| { anyhow!("Properties first element couldn't be turned into an object") })? 
.iter() { properties.insert(k.clone(), Property::from_json_entry(v)?); } } Ok(Properties(properties)) } } impl Tile { fn json_parse_tile(v: &Value, tileset_num: u8, slab: &mut slab::Slab<Object>) -> Result<Self> { let objectgroup = match v.get("objectGroup") { Some(v) => { Some(ObjectGroup::json_parse_object_group(v, u32::MAX, false, slab, None)?.0) } None => None, }; let tile_id: u32 = v .get("id") .ok_or_else(|| anyhow!("Tile entry had no tile id"))? .as_u64() .ok_or_else(|| anyhow!("Could not turn tile id into u64"))? .try_into() .expect("Tile id greater than max u32"); Ok(Tile { id: TileId( tile_id + 1, TileMetaData::new(tileset_num, false, false, false), ), tile_type: v .get("type") .map(|s| { s.as_str() .map(ToOwned::to_owned) .ok_or_else(|| anyhow!("Tile type wasn't a string")) }) .transpose()?, probability: v .get("probability") .map(Value::as_f64) .unwrap_or(Some(0.0)) .ok_or_else(|| anyhow!("Probability wasn't a float"))? as f32, properties: Properties::json_parse_properties(v)?, animation: v .get("animation") .map(|a| { Animation::json_parse_animation( a.as_array() .ok_or_else(|| anyhow!("Animation values weren't an array"))?, tileset_num, ) }) .transpose()?, objectgroup, }) } } impl ObjectGroup { fn json_parse_object_group( objg_obj: &Value, llid: u32, from_obj_layer: bool, slab: &mut slab::Slab<Object>, tileset_ids: Option<&[u8]>, ) -> Result<(ObjectGroup, Vec<(ObjectId, ObjectRef)>), Error> { let mut obj_ids_and_refs = Vec::new(); let mut object_name_map = HashMap::new(); for object in objg_obj .get("objects") .ok_or_else(|| anyhow!("Didn't find objects in the objectgroup"))? .as_array() .ok_or_else(|| anyhow!("Couldn't retrieve objects as an array"))? 
.iter() { let object = Object::json_parse_object(object, from_obj_layer, tileset_ids)?; let val = object_name_map .entry(object.name.clone()) .or_insert_with(Vec::new); val.push(object.id); obj_ids_and_refs.push((object.id, ObjectRef(slab.insert(object)))); } Ok(( ObjectGroup { name: objg_obj .get("name") .ok_or_else(|| anyhow!("Object group did not have a name"))? .as_str() .ok_or_else(|| anyhow!("Name couldn't be converted to a string"))? .to_owned(), opacity: objg_obj .get("opacity") .ok_or_else(|| anyhow!("Object group did not have an opacity"))? .as_f64() .ok_or_else(|| anyhow!("Opacity couldn't be converted to a f64"))? as f32, visible: objg_obj .get("visible") .ok_or_else(|| anyhow!("Object group did not have a visibility"))? .as_bool() .ok_or_else(|| anyhow!("Visibility couldn't be converted to a bool"))?, obj_group_type: ObjGroupType::json_parse_obj_group_type(objg_obj)?, properties: Properties::json_parse_properties(objg_obj)?, draworder: DrawOrder::json_parse_draw_order(objg_obj)?, id: ObjectLayerId { glid: objg_obj .get("id") .ok_or_else(|| anyhow!("Object group did not have an id"))? .as_u64() .ok_or_else(|| anyhow!("Id couldn't be converted to a u64"))? .try_into() .expect("Too many objects"), llid, }, layer_index: objg_obj .get("layer_index") .map(|l_i| { l_i.as_u64() .ok_or_else(|| anyhow!("Layer index couldn't be turned into a u64")) .map(|n| n.try_into().expect("Layer indexes too large")) }) .transpose()?, off_x: objg_obj .get("x") .ok_or_else(|| anyhow!("Didn't find x offset in object group"))? .as_u64() .ok_or_else(|| anyhow!("Couldn't turn x offset to u64"))? .try_into() .expect("X offset too large"), off_y: objg_obj .get("y") .ok_or_else(|| anyhow!("Didn't find y offset in object group"))? .as_u64() .ok_or_else(|| anyhow!("Couldn't turn y offset to u64"))? 
.try_into() .expect("Y offset too large"), color: objg_obj.get("color").map_or( Ok(Color::from_rgb(0xA0, 0xA0, 0xA4)), |c| { c.as_str() .ok_or_else(|| anyhow!("Color wasn't a string")) .and_then(Color::from_tiled_hex) }, )?, tintcolor: objg_obj .get("tintcolor") .map(|s| { s.as_str() .ok_or_else(|| anyhow!("Tintcolor value wasn't a string")) .and_then(Color::from_tiled_hex) }) .transpose()?, object_refs: obj_ids_and_refs.iter().map(|i| i.1).collect(), object_name_map, }, obj_ids_and_refs, )) } } impl ObjGroupType { fn json_parse_obj_group_type(v: &Value) -> Result<Self> { match v .get("type") .ok_or_else(|| anyhow!("Object group did not contain key type"))? .as_str() .ok_or_else(|| anyhow!("Object group type couldn't be turned into a string"))? { "objectgroup" => Ok(ObjGroupType::ObjectGroup), s => Err(anyhow!("Unsupported object group type: {}", s)), } } } impl DrawOrder { fn json_parse_draw_order(v: &Value) -> Result<Self> { match v .get("draworder") .ok_or_else(|| anyhow!("Object group did not contain draworder"))? .as_str() .ok_or_else(|| anyhow!("Draworder couldn't be turned into a string"))? { "index" => Ok(DrawOrder::Index), s => Err(anyhow!("Unsupported draworder: {}", s)), } } } impl Object { fn json_parse_object( object: &Value, from_obj_layer: bool, _tileset_ids: Option<&[u8]>, ) -> Result<Self> { Ok(Object { name: object .get("name") .ok_or_else(|| anyhow!("Object did not have a name"))? .as_str() .ok_or_else(|| anyhow!("Name couldn't be converted to a string"))? .to_owned(), visible: object .get("visible") .ok_or_else(|| anyhow!("Object did not have a visibility"))? .as_bool() .ok_or_else(|| anyhow!("Visibility couldn't be converted to a bool"))?, obj_type: object .get("type") .ok_or_else(|| anyhow!("Object did not have a type"))? .as_str() .ok_or_else(|| anyhow!("Visibility couldn't be converted to a bool"))? .to_owned(), height: object .get("height") .ok_or_else(|| anyhow!("Object did not have a height"))? 
.as_f64() .ok_or_else(|| anyhow!("Height couldn't be converted to a f64"))? as f32, width: object .get("width") .ok_or_else(|| anyhow!("Object did not have a width"))? .as_f64() .ok_or_else(|| anyhow!("Width couldn't be converted to a f64"))? as f32, rotation: object .get("rotation") .ok_or_else(|| anyhow!("Object did not have a rotation"))? .as_f64() .ok_or_else(|| anyhow!("Rotation couldn't be converted to a f64"))? as f32, x: object .get("x") .ok_or_else(|| anyhow!("Object did not have an x pos"))? .as_f64() .ok_or_else(|| anyhow!("X pos couldn't be converted to a f64"))? as f32, y: object .get("y") .ok_or_else(|| anyhow!("Object did not have an y pos"))? .as_f64() .ok_or_else(|| anyhow!("Y pos couldn't be converted to a f64"))? as f32, properties: Properties::json_parse_properties(object)?, text: None, tile_id: None, id: ObjectId::new( object .get("id") .ok_or_else(|| anyhow!("Object did not have an ID"))? .as_u64() .ok_or_else(|| anyhow!("ID couldn't be represented as u64"))? .try_into() .expect("ID greater than u32 MAX"), from_obj_layer, ), shape: Some(ObjectShape::from_json(object)?), }) } }
use crate::*; use hv::prelude::*; impl Tileset { pub fn json_parse_tileset( v: &Value, first_gid: u32, path_prefix: Option<&str>, tileset_number: u8, slab: &mut slab::Slab<Object>, filename: String, ) -> Result<Self> { let json_obj = v .as_object() .ok_or(anyhow!("Tileset file did not contain a json dictionary"))?; let tile_array = json_obj .get("tiles") .map_or::<Result<&[Value]>, _>(Ok(&[][..]), |t_arr| { Ok(t_arr .as_array() .ok_or_else(|| anyhow!("Tiles are not an array"))? .as_slice()) })?; let mut tiles = HashMap::new(); for tile_obj in tile_array.iter() { let tile = Tile::json_parse_tile(tile_obj, tileset_number, slab)?; tiles.insert(tile.id, tile); } Ok(Tileset { columns: json_obj .get("columns") .ok_or_else(|| anyhow!("Should've gotten columns"))? .as_u64() .ok_or_else(|| anyhow!("Columns value wasn't a u64"))? .try_into() .expect("Bruh how many columns does your tileset have"), images: vec![Image::from_json(json_obj, path_prefix)?], tilecount: json_obj .get("tilecount") .ok_or_else(|| anyhow!("Should've gotten tilecount"))? .as_u64() .ok_or_else(|| anyhow!("Tilecount value wasn't a u64"))? .try_into() .expect("Bruh how many tiles does your tileset have"), tile_width: json_obj .get("tilewidth") .ok_or_else(|| anyhow!("Should've gotten tilewidth"))? .as_u64() .ok_or_else(|| anyhow!("Tilewidth value wasn't a u64"))? .try_into() .expect("Tiles are too thicc"), tile_height: json_obj .get("tileheight") .ok_or_else(|| anyhow!("Should've gotten tileheight"))? .as_u64() .ok_or_else(|| anyhow!("Tileheight value wasn't a u64"))? .try_into() .expect("Tiles are too tall owo"), spacing: json_obj .get("spacing") .ok_or_else(|| anyhow!("Should've gotten spacing"))? .as_u64() .ok_or_else(|| anyhow!("Spacing value wasn't a u64"))? .try_into() .expect( "God help you if you actually have 2,147,483,647 pixels in between each tile", ), name: json_obj .get("name") .ok_or_else(|| anyhow!("Should've gotten a name"))? 
.as_str() .ok_or_else(|| anyhow!("Name wasn't a valid string"))? .to_owned(), margin: json_obj .get("margin") .ok_or_else(|| anyhow!("Should've gotten a margin"))? .as_u64() .ok_or_else(|| anyhow!("Margin value wasn't a u64"))? .try_into() .expect( "God help you if you actually have 2,147,483,647 pixels AROUND your tileset", ), properties: Properties::json_parse_properties(v)?, filename: Some(filename), tiles, first_gid, }) } } impl Animation { fn json_parse_animation(v: &[Value], tileset: u8) -> Result<Self> { let mut animation_frames = Vec::with_capacity(v.len()); for entry in v.iter() { animation_frames.push(( TileId( entry .get("tileid") .ok_or_else(|| anyhow!("Couldn't find a tileid in the animation"))? .as_u64() .ok_or_else(|| anyhow!("Tileid should be a u64"))? .try_into() .expect("Tile ids should fit into u32s probably"), TileMetaData::new(tileset, false, false, false), ), entry .get("duration") .ok_or_else(|| anyhow!("Couldn't find a duration in the animation"))? .as_u64() .ok_or_else(|| anyhow!("Duration should be a u64"))? .try_into() .expect("Duration should probably fit in a u32"), )); } Ok(Animation(animation_frames)) } } impl Properties { fn json_parse_properties(v: &Value) -> Result<Self> { let mut properties = HashMap::new(); if let Some(p) = v.get("properties") { let properties_arr = p .as_array() .ok_or_else(|| anyhow!("Couldn't turn properties into an array"))?; if properties_arr.len() > 1 { return Err(anyhow!( "Properties array was greater than 1, not sure if this is expected" )); } for (k, v) in properties_arr[0] .as_object() .ok_or_else(|| { anyhow!("Properties first element couldn't be turned into an object") })? 
.iter() { properties.insert(k.clone(), Property::from_json_entry(v)?); } } Ok(Properties(properties)) } } impl Tile { fn json_parse_tile(v: &Value, tileset_num: u8, slab: &mut slab::Slab<Object>) -> Result<Self> { let objectgroup = match v.get("objectGroup") { Some(v) => { Some(ObjectGroup::json_parse_object_group(v, u32::MAX, false, slab, None)?.0) } None => None, }; let tile_id: u32 = v .get("id") .ok_or_else(|| anyhow!("Tile entry had no tile id"))? .as_u64() .ok_or_else(|| anyhow!("Could not turn tile id into u64"))? .try_into() .expect("Tile id greater than max u32"); Ok(Tile { id: TileId( tile_id + 1, TileMetaData::new(tileset_num, false, false, false), ), tile_type: v .get("type") .map(|s| { s.as_str() .map(ToOwned::to_owned) .ok_or_else(|| anyhow!("Tile type wasn't a string")) }) .transpose()?, probability: v .get("probability") .map(Value::as_f64) .unwrap_or(Some(0.0)) .ok_or_else(|| anyhow!("Probability wasn't a float"))? as f32, properties: Properties::json_parse_properties(v)?, animation: v .get("animation") .map(|a| { Animation::json_parse_animation( a.as_array() .ok_or_else(|| anyhow!("Animation values weren't an array"))?, tileset_num, ) }) .transpose()?, objectgroup, }) } } impl ObjectGroup { fn json_parse_object_group( objg_obj: &Value, llid: u32, from_obj_layer: bool, slab: &mut slab::Slab<Object>, tileset_ids: Option<&[u8]>, ) -> Result<(ObjectGroup, Vec<(ObjectId, ObjectRef)>), Error> { let mut obj_ids_and_refs = Vec::new(); let mut object_name_map = HashMap::new(); for object in objg_obj .get("objects") .ok_or_else(|| anyhow!("Didn't find objects in the objectgroup"))? .as_array() .ok_or_else(|| anyhow!("Couldn't retrieve objects as an array"))? 
.iter() { let object = Object::json_parse_object(object, from_obj_layer, tileset_ids)?; let val = object_name_map .entry(object.name.clone()) .or_insert_with(Vec::new); val.push(object.id); obj_ids_and_refs.push((object.id, ObjectRef(slab.insert(object)))); } Ok(( ObjectGroup { name: objg_obj .get("name") .ok_or_else(|| anyhow!("Object group did not have a name"))? .as_str() .ok_or_else(|| anyhow!("Name couldn't be converted to a string"))? .to_owned(), opacity: objg_obj .get("opacity") .ok_or_else(|| anyhow!("Object group did not have an opacity"))? .as_f64() .ok_or_else(|| anyhow!("Opacity couldn't be converted to a f64"))? as f32, visible: objg_obj .get("visible") .ok_or_else(|| anyhow!("Object group did not have a visibility"))? .as_bool() .ok_or_else(|| anyhow!("Visibility couldn't be converted to a bool"))?, obj_group_type: ObjGroupType::json_parse_obj_group_type(objg_obj)?, properties: Properties::json_parse_properties(objg_obj)?, draworder: DrawOrder::json_parse_draw_order(objg_obj)?, id: ObjectLayerId { glid: objg_obj .get("id") .ok_or_else(|| anyhow!("Object group did not have an id"))? .as_u64() .ok_or_else(|| anyhow!("Id couldn't be converted to a u64"))? .try_into() .expect("Too many objects"), llid, }, layer_index: objg_obj .get("layer_index") .map(|l_i| { l_i.as_u64() .ok_or_else(|| anyhow!("Layer index couldn't be turned into a u64")) .map(|n| n.try_into().expect("Layer indexes too large")) }) .transpose()?, off_x: objg_obj .get("x") .ok_or_else(|| anyhow!("Didn't find x offset in object group"))? .as_u64() .ok_or_else(|| anyhow!("Couldn't turn x offset to u64"))? .try_into() .expect("X offset too large"), off_y: objg_obj .get("y") .ok_or_else(|| anyhow!("Didn't find y offset in object group"))? .as_u64() .ok_or_else(|| anyhow!("Couldn't turn y offset to u64"))? 
.try_into() .expect("Y offset too large"), color: objg_obj.get("color").map_or( Ok(Color::from_rgb(0xA0, 0xA0, 0xA4)), |c| { c.as_str() .ok_or_else(|| anyhow!("Color wasn't a string")) .and_then(Color::from_tiled_hex) }, )?, tintcolor: objg_obj .get("tintcolor") .map(|s| { s.as_str() .ok_or_else(|| anyhow!("Tintcolor value wasn't a string")) .and_then(Color::from_tiled_hex) }) .transpose()?, object_refs: obj_ids_and_refs.iter().map(|i| i.1).collect(), object_name_map, }, obj_ids_and_refs, )) } } impl ObjGroupType { fn json_parse_obj_group_type(v: &Value) -> Result<Self> { match v .get("type") .ok_or_else(|| anyhow!("Object group did not contain key type"))? .as_str() .ok_or_else(|| anyhow!("Object group type couldn't be turned into a string"))? { "objectgroup" => Ok(ObjGroupType::ObjectGroup), s => Err(anyhow!("Unsupported object group type: {}", s)), } } } impl DrawOrder { fn json_parse_draw_order(v: &Value) -> Result<Self> { match v .get("draworder") .ok_or_else(|| anyhow!("Object group did not contain draworder"))? .as_str() .ok_or_else(|| anyhow!("Draworder couldn't be turned into a string"))? { "index" => Ok(DrawOrder::Index), s => Err(anyhow!("Unsupported draworder: {}", s)), } } } impl Object { fn json_parse_object( object: &Value, from_obj_layer: bool, _tileset_ids: Option<&[u8]>, ) -> Result<Self> {
} }
Ok(Object { name: object .get("name") .ok_or_else(|| anyhow!("Object did not have a name"))? .as_str() .ok_or_else(|| anyhow!("Name couldn't be converted to a string"))? .to_owned(), visible: object .get("visible") .ok_or_else(|| anyhow!("Object did not have a visibility"))? .as_bool() .ok_or_else(|| anyhow!("Visibility couldn't be converted to a bool"))?, obj_type: object .get("type") .ok_or_else(|| anyhow!("Object did not have a type"))? .as_str() .ok_or_else(|| anyhow!("Visibility couldn't be converted to a bool"))? .to_owned(), height: object .get("height") .ok_or_else(|| anyhow!("Object did not have a height"))? .as_f64() .ok_or_else(|| anyhow!("Height couldn't be converted to a f64"))? as f32, width: object .get("width") .ok_or_else(|| anyhow!("Object did not have a width"))? .as_f64() .ok_or_else(|| anyhow!("Width couldn't be converted to a f64"))? as f32, rotation: object .get("rotation") .ok_or_else(|| anyhow!("Object did not have a rotation"))? .as_f64() .ok_or_else(|| anyhow!("Rotation couldn't be converted to a f64"))? as f32, x: object .get("x") .ok_or_else(|| anyhow!("Object did not have an x pos"))? .as_f64() .ok_or_else(|| anyhow!("X pos couldn't be converted to a f64"))? as f32, y: object .get("y") .ok_or_else(|| anyhow!("Object did not have an y pos"))? .as_f64() .ok_or_else(|| anyhow!("Y pos couldn't be converted to a f64"))? as f32, properties: Properties::json_parse_properties(object)?, text: None, tile_id: None, id: ObjectId::new( object .get("id") .ok_or_else(|| anyhow!("Object did not have an ID"))? .as_u64() .ok_or_else(|| anyhow!("ID couldn't be represented as u64"))? .try_into() .expect("ID greater than u32 MAX"), from_obj_layer, ), shape: Some(ObjectShape::from_json(object)?), })
call_expression
[ { "content": "pub fn to_chunks(data: &[TileId], width: u32, height: u32) -> Chunks {\n\n let mut chunks = Chunks::default();\n\n for y in 0..height {\n\n for x in 0..width {\n\n let (chunk_x, chunk_y, tile_x, tile_y) =\n\n to_chunk_indices_and_subindices(x as i32, y as i32...
Rust
src/sql/src/plan/scope.rs
jtcohen6/materialize
88815e32ea26a5ee3abf04c2bbb49ee27ace22d4
use itertools::Itertools; use repr::ColumnName; use crate::names::PartialName; use crate::plan::error::PlanError; use crate::plan::expr::ColumnRef; use sql_parser::ast::Raw; #[derive(Debug, Clone, PartialEq)] pub struct ScopeItemName { pub table_name: Option<PartialName>, pub column_name: Option<ColumnName>, pub priority: bool, } #[derive(Debug, Clone, PartialEq)] pub struct ScopeItem { pub names: Vec<ScopeItemName>, pub expr: Option<sql_parser::ast::Expr<Raw>>, pub nameable: bool, } #[derive(Debug, Clone, PartialEq)] pub struct Scope { pub items: Vec<ScopeItem>, pub outer_scope: Option<Box<Scope>>, } impl ScopeItem { pub fn from_column_name(column_name: Option<ColumnName>) -> Self { ScopeItem { names: vec![ScopeItemName { table_name: None, column_name, priority: false, }], expr: None, nameable: true, } } pub fn is_from_table(&self, table_name: &PartialName) -> bool { self.names.iter().find_map(|n| n.table_name.as_ref()) == Some(table_name) } pub fn short_display_name(&self) -> String { match self.names.get(0) { None => "?".into(), Some(name) => { let column_name = match &name.column_name { None => "?column?", Some(column_name) => column_name.as_str(), }; match &name.table_name { None => column_name.into(), Some(table_name) => format!("{}.{}", table_name.item, column_name), } } } } } impl Scope { pub fn empty(outer_scope: Option<Scope>) -> Self { Scope { items: vec![], outer_scope: outer_scope.map(Box::new), } } pub fn from_source<I, N>( table_name: Option<PartialName>, column_names: I, outer_scope: Option<Scope>, ) -> Self where I: IntoIterator<Item = Option<N>>, N: Into<ColumnName>, { let mut scope = Scope::empty(outer_scope); scope.items = column_names .into_iter() .map(|column_name| ScopeItem { names: vec![ScopeItemName { table_name: table_name.clone(), column_name: column_name.map(|n| n.into()), priority: false, }], expr: None, nameable: true, }) .collect(); scope } pub fn column_names(&self) -> impl Iterator<Item = Option<&ColumnName>> { 
self.items.iter().map(|item| { item.names .iter() .filter_map(|n| n.column_name.as_ref()) .next() }) } pub fn len(&self) -> usize { self.items.len() } pub fn all_items(&self) -> Vec<(usize, usize, &ScopeItem)> { let mut items = vec![]; let mut level = 0; let mut scope = self; loop { for (column, item) in scope.items.iter().enumerate() { items.push((level, column, item)); } if let Some(outer_scope) = &scope.outer_scope { scope = outer_scope; level += 1; } else { break; } } items } fn resolve<'a, Matches>( &'a self, matches: Matches, name_in_error: &str, ) -> Result<(ColumnRef, &'a ScopeItemName), PlanError> where Matches: Fn(&ScopeItemName) -> bool, { let mut results = self .all_items() .into_iter() .flat_map(|(level, column, item)| { item.names .iter() .map(move |name| (level, column, item, name)) }) .filter(|(_level, _column, item, name)| (matches)(name) && item.nameable) .sorted_by_key(|(level, _column, _item, name)| (*level, !name.priority)); match results.next() { None => Err(PlanError::UnknownColumn(name_in_error.to_owned())), Some((level, column, _item, name)) => { if results .find(|(level2, column2, item, name2)| { column != *column2 && level == *level2 && item.nameable && name.priority == name2.priority }) .is_none() { Ok((ColumnRef { level, column }, name)) } else { Err(PlanError::AmbiguousColumn(name_in_error.to_owned())) } } } } pub fn resolve_column<'a>( &'a self, column_name: &ColumnName, ) -> Result<(ColumnRef, &'a ScopeItemName), PlanError> { self.resolve( |item: &ScopeItemName| item.column_name.as_ref() == Some(column_name), column_name.as_str(), ) } pub fn resolve_table_column<'a>( &'a self, table_name: &PartialName, column_name: &ColumnName, ) -> Result<(ColumnRef, &'a ScopeItemName), PlanError> { self.resolve( |item: &ScopeItemName| { item.table_name.as_ref() == Some(table_name) && item.column_name.as_ref() == Some(column_name) }, &format!("{}.{}", table_name, column_name), ) } pub fn resolve_expr<'a>(&'a self, expr: &sql_parser::ast::Expr<Raw>) 
-> Option<ColumnRef> { self.items .iter() .enumerate() .find(|(_, item)| item.expr.as_ref() == Some(expr)) .map(|(i, _)| ColumnRef { level: 0, column: i, }) } pub fn product(self, right: Self) -> Self { Scope { items: self .items .into_iter() .chain(right.items.into_iter()) .collect(), outer_scope: self.outer_scope, } } pub fn project(&self, columns: &[usize]) -> Self { Scope { items: columns.iter().map(|&i| self.items[i].clone()).collect(), outer_scope: self.outer_scope.clone(), } } }
use itertools::Itertools; use repr::ColumnName; use crate::names::PartialName; use crate::plan::error::PlanError; use crate::plan::expr::ColumnRef; use sql_parser::ast::Raw; #[derive(Debug, Clone, PartialEq)] pub struct ScopeItemName { pub table_name: Option<PartialName>, pub column_name: Option<ColumnName>, pub priority: bool, } #[derive(Debug, Clone, PartialEq)] pub struct ScopeItem { pub names: Vec<ScopeItemName>, pub expr: Option<sql_parser::ast::Expr<Raw>>, pub nameable: bool, } #[derive(Debug, Clone, PartialEq)] pub struct Scope { pub items: Vec<ScopeItem>, pub outer_scope: Option<Box<Scope>>, } impl ScopeItem { pub fn from_column_name(column_name: Option<ColumnName>) -> Self { ScopeItem { names: vec![ScopeItemName { table_name: None, column_name, priority: false, }], expr: None, nameable: true, } } pub fn is_from_table(&self, table_name: &PartialName) -> bool { self.names.iter().find_map(|n| n.table_name.as_ref()) == Some(table_name) } pub fn short_display_name(&self) -> String { match self.names.get(0) { None => "?".into(), Some(name) => { let column_name = match &name.column_name { None => "?column?", Some(column_name) => column_name.as_str(), }; match &name.table_name { None => column_name.into(), Some(table_name) => format!("{}.{}", table_name.item, column_name), } } } } } impl Scope { pub fn empty(outer_scope: Option<Scope>) -> Self { Scope { items: vec![], outer_scope: outer_scope.map(Box::new), } } pub fn from_source<I, N>( table_name: Option<PartialName>, column_names: I, outer_scope: Option<Scope>, ) -> Self where I: IntoIterator<Item = Option<N>>, N: Into<ColumnName>, { let mut scope = Scope::empty(outer_scope); scope.items = column_names .into_iter() .map(|column_name| ScopeItem { names: vec![ScopeItemName { table_name: table_name.clone(), column_name: column_name.map(|n| n.into()), priority: false, }], expr: None, nameable: true, }) .collect(); scope } pub fn column_names(&self) -> impl Iterator<Item = Option<&ColumnName>> { 
self.items.iter().map(|item| { item.names .iter() .filter_map(|n| n.column_name.as_ref()) .next() }) } pub fn len(&self) -> usize { self.items.len() } pub fn all_items(&self) -> Vec<(usize, usize, &ScopeItem)> { let mut items = vec![]; let mut level = 0; let mut scope = self; loop { for (column, item) in scope.items.iter().enumerate() { items.push((level, column, item)); } if let Some(outer_scope) = &scope.outer_scope { scope = outer_scope; level += 1; } else { break; } } items } fn resolve<'a, Matches>( &'a self, matches: Matches, name_in_error: &str, ) -> Result<(ColumnRef, &'a ScopeItemName), PlanError> where Matches: Fn(&ScopeItemName) -> bool, { let mut results = self .all_items() .into_iter() .flat_map(|(level, column, item)| { item.names .iter() .map(move |name| (level, column, item, name)) }) .filter(|(_level, _column, item, name)| (matches)(name) && item.nameable) .sorted_by_key(|(level, _column, _item, name)| (*level, !name.priority)); match results.next() { None => Err(PlanError::UnknownColumn(name_in_error.to_owned())), Some((level, column, _item, name)) => { if results .find(|(level2, column2, item, name2)| { column != *column2 && level == *level2 && item.nameable && name.priority == name2.priority }) .is_none() { Ok((ColumnRef { level, column }, name)) } else { Err(PlanError::AmbiguousColumn(name_in_error.to_owned())) } } } }
pub fn resolve_table_column<'a>( &'a self, table_name: &PartialName, column_name: &ColumnName, ) -> Result<(ColumnRef, &'a ScopeItemName), PlanError> { self.resolve( |item: &ScopeItemName| { item.table_name.as_ref() == Some(table_name) && item.column_name.as_ref() == Some(column_name) }, &format!("{}.{}", table_name, column_name), ) } pub fn resolve_expr<'a>(&'a self, expr: &sql_parser::ast::Expr<Raw>) -> Option<ColumnRef> { self.items .iter() .enumerate() .find(|(_, item)| item.expr.as_ref() == Some(expr)) .map(|(i, _)| ColumnRef { level: 0, column: i, }) } pub fn product(self, right: Self) -> Self { Scope { items: self .items .into_iter() .chain(right.items.into_iter()) .collect(), outer_scope: self.outer_scope, } } pub fn project(&self, columns: &[usize]) -> Self { Scope { items: columns.iter().map(|&i| self.items[i].clone()).collect(), outer_scope: self.outer_scope.clone(), } } }
pub fn resolve_column<'a>( &'a self, column_name: &ColumnName, ) -> Result<(ColumnRef, &'a ScopeItemName), PlanError> { self.resolve( |item: &ScopeItemName| item.column_name.as_ref() == Some(column_name), column_name.as_str(), ) }
function_block-full_function
[ { "content": "fn pad_formats(formats: Vec<pgrepr::Format>, n: usize) -> Result<Vec<pgrepr::Format>, String> {\n\n match (formats.len(), n) {\n\n (0, e) => Ok(vec![pgrepr::Format::Text; e]),\n\n (1, e) => Ok(iter::repeat(formats[0]).take(e).collect()),\n\n (a, e) if a == e => Ok(formats),...
Rust
matrix_sdk_appservice/tests/tests.rs
DevinR528/matrix-rust-sdk
4c09c6272bb3636e20d99177357cd31b80a2c1bf
use std::env; use matrix_sdk::{ api_appservice, api_appservice::Registration, async_trait, events::{room::member::MemberEventContent, AnyEvent, AnyStateEvent, SyncStateEvent}, room::Room, EventHandler, Raw, }; use matrix_sdk_appservice::*; use matrix_sdk_test::async_test; use serde_json::json; fn registration_string() -> String { include_str!("../tests/registration.yaml").to_owned() } async fn appservice(registration: Option<Registration>) -> Result<Appservice> { env::set_var("RUST_LOG", "mockito=debug,matrix_sdk=debug"); let _ = tracing_subscriber::fmt::try_init(); let registration = match registration { Some(registration) => registration.into(), None => AppserviceRegistration::try_from_yaml_str(registration_string()).unwrap(), }; let homeserver_url = mockito::server_url(); let server_name = "localhost"; Ok(Appservice::new(homeserver_url.as_ref(), server_name, registration).await?) } fn member_json() -> serde_json::Value { json!({ "content": { "avatar_url": null, "displayname": "example", "membership": "join" }, "event_id": "$151800140517rfvjc:localhost", "membership": "join", "origin_server_ts": 151800140, "room_id": "!ahpSDaDUPCCqktjUEF:localhost", "sender": "@example:localhost", "state_key": "@example:localhost", "type": "m.room.member", "prev_content": { "avatar_url": null, "displayname": "example", "membership": "invite" }, "unsigned": { "age": 297036, "replaces_state": "$151800111315tsynI:localhost" } }) } #[async_test] async fn test_event_handler() -> Result<()> { let appservice = appservice(None).await?; struct Example {} impl Example { pub fn new() -> Self { Self {} } } #[async_trait] impl EventHandler for Example { async fn on_state_member(&self, room: Room, event: &SyncStateEvent<MemberEventContent>) { dbg!(room, event); } } appservice .client() .set_event_handler(Box::new(Example::new())) .await; let event = serde_json::from_value::<AnyStateEvent>(member_json()).unwrap(); let event: Raw<AnyEvent> = AnyEvent::State(event).into(); let events = 
vec![event]; let incoming = api_appservice::event::push_events::v1::IncomingRequest::new( "any_txn_id".to_owned(), events, ); appservice.client().receive_transaction(incoming).await?; Ok(()) } #[async_test] async fn test_transaction() -> Result<()> { let appservice = appservice(None).await?; let event = serde_json::from_value::<AnyStateEvent>(member_json()).unwrap(); let event: Raw<AnyEvent> = AnyEvent::State(event).into(); let events = vec![event]; let incoming = api_appservice::event::push_events::v1::IncomingRequest::new( "any_txn_id".to_owned(), events, ); appservice.client().receive_transaction(incoming).await?; Ok(()) } #[async_test] async fn test_verify_hs_token() -> Result<()> { let appservice = appservice(None).await?; let registration = appservice.registration(); assert!(appservice.hs_token_matches(&registration.hs_token)); Ok(()) } mod registration { use super::*; #[test] fn test_registration() -> Result<()> { let registration: Registration = serde_yaml::from_str(&registration_string())?; let registration: AppserviceRegistration = registration.into(); assert_eq!(registration.id, "appservice"); Ok(()) } #[test] fn test_registration_from_yaml_file() -> Result<()> { let registration = AppserviceRegistration::try_from_yaml_file("./tests/registration.yaml")?; assert_eq!(registration.id, "appservice"); Ok(()) } #[test] fn test_registration_from_yaml_str() -> Result<()> { let registration = AppserviceRegistration::try_from_yaml_str(registration_string())?; assert_eq!(registration.id, "appservice"); Ok(()) } }
use std::env; use matrix_sdk::{ api_appservice, api_appservice::Registration, async_trait, events::{room::member::MemberEventContent, AnyEvent, AnyStateEvent, SyncStateEvent}, room::Room, EventHandler, Raw, }; use matrix_sdk_appservice::*; use matrix_sdk_test::async_test; use serde_json::json; fn registration_string() -> String { include_str!("../tests/registration.yaml").to_owned() } async fn appservice(registration: Option<Registration>) -> Result<Appservice> { env::set_var("RUST_LOG", "mockito=debug,matrix_sdk=debug"); let _ = tracing_subscriber::fmt::try_init(); let registration =
; let homeserver_url = mockito::server_url(); let server_name = "localhost"; Ok(Appservice::new(homeserver_url.as_ref(), server_name, registration).await?) } fn member_json() -> serde_json::Value { json!({ "content": { "avatar_url": null, "displayname": "example", "membership": "join" }, "event_id": "$151800140517rfvjc:localhost", "membership": "join", "origin_server_ts": 151800140, "room_id": "!ahpSDaDUPCCqktjUEF:localhost", "sender": "@example:localhost", "state_key": "@example:localhost", "type": "m.room.member", "prev_content": { "avatar_url": null, "displayname": "example", "membership": "invite" }, "unsigned": { "age": 297036, "replaces_state": "$151800111315tsynI:localhost" } }) } #[async_test] async fn test_event_handler() -> Result<()> { let appservice = appservice(None).await?; struct Example {} impl Example { pub fn new() -> Self { Self {} } } #[async_trait] impl EventHandler for Example { async fn on_state_member(&self, room: Room, event: &SyncStateEvent<MemberEventContent>) { dbg!(room, event); } } appservice .client() .set_event_handler(Box::new(Example::new())) .await; let event = serde_json::from_value::<AnyStateEvent>(member_json()).unwrap(); let event: Raw<AnyEvent> = AnyEvent::State(event).into(); let events = vec![event]; let incoming = api_appservice::event::push_events::v1::IncomingRequest::new( "any_txn_id".to_owned(), events, ); appservice.client().receive_transaction(incoming).await?; Ok(()) } #[async_test] async fn test_transaction() -> Result<()> { let appservice = appservice(None).await?; let event = serde_json::from_value::<AnyStateEvent>(member_json()).unwrap(); let event: Raw<AnyEvent> = AnyEvent::State(event).into(); let events = vec![event]; let incoming = api_appservice::event::push_events::v1::IncomingRequest::new( "any_txn_id".to_owned(), events, ); appservice.client().receive_transaction(incoming).await?; Ok(()) } #[async_test] async fn test_verify_hs_token() -> Result<()> { let appservice = appservice(None).await?; let 
registration = appservice.registration(); assert!(appservice.hs_token_matches(&registration.hs_token)); Ok(()) } mod registration { use super::*; #[test] fn test_registration() -> Result<()> { let registration: Registration = serde_yaml::from_str(&registration_string())?; let registration: AppserviceRegistration = registration.into(); assert_eq!(registration.id, "appservice"); Ok(()) } #[test] fn test_registration_from_yaml_file() -> Result<()> { let registration = AppserviceRegistration::try_from_yaml_file("./tests/registration.yaml")?; assert_eq!(registration.id, "appservice"); Ok(()) } #[test] fn test_registration_from_yaml_str() -> Result<()> { let registration = AppserviceRegistration::try_from_yaml_str(registration_string())?; assert_eq!(registration.id, "appservice"); Ok(()) } }
match registration { Some(registration) => registration.into(), None => AppserviceRegistration::try_from_yaml_str(registration_string()).unwrap(), }
if_condition
[ { "content": "fn encode_key_info(info: &RequestedKeyInfo) -> String {\n\n format!(\n\n \"{}{}{}{}\",\n\n info.room_id, info.sender_key, info.algorithm, info.session_id\n\n )\n\n}\n\n\n\n/// An in-memory only store that will forget all the E2EE key once it's dropped.\n\n#[derive(Debug, Clone)...
Rust
old/server/src/main.rs
icefoxen/WorldDocCode
45cb146ebdceca077bb910ca2af40c16232848a4
#[macro_use] extern crate rouille; extern crate lazy_static; extern crate serde; extern crate rustc_serialize; extern crate ring; extern crate untrusted; extern crate base64; use std::collections::HashMap; use std::sync::RwLock; use rouille::Response; extern crate protocol; use protocol::*; use ring::{signature, rand}; #[derive(Debug, Default, Clone)] struct ServerData { names: HashMap<String, UpdateMessage>, keys: HashMap<String, Vec<u8>>, } impl ServerData { fn get_name(&self, name: &str) -> Option<&UpdateMessage> { self.names.get(name) } fn get_id_key(&self, id: &str) -> Option<&[u8]> { self.keys.get(id).map(|x| x.as_ref()) } fn add_id(&mut self, id: &str, key: &[u8]) { self.keys.insert(id.into(), key.into()); } fn validate_update(&self, msg: &UpdateMessage) -> Result<(), ValidationError> { match self.keys.get(&msg.user) { Some(key) => msg.verify_signature(key), None => Err(ValidationError::UnknownUser(msg.user.clone())) } } fn update_name(&mut self, name: &str, contents: &UpdateMessage) { self.names.insert(name.to_string(), contents.clone()); } fn apply_update_if_valid(&mut self, dest: &str, msg: &UpdateMessage) -> Result<(), ValidationError> { let _ = self.validate_update(msg)?; self.update_name(dest, &msg); Ok(()) } fn add_user(&mut self, username: &str) { let rng = rand::SystemRandom::new(); let pkcs8_bytes = signature::Ed25519KeyPair::generate_pkcs8(&rng).unwrap(); let keypair = signature::Ed25519KeyPair::from_pkcs8( untrusted::Input::from(&pkcs8_bytes) ).unwrap(); let encoded_privkey = base64::encode(&pkcs8_bytes[..]); println!("Private key for {} is: {}", username, encoded_privkey); let pubkey_bytes = keypair.public_key_bytes(); self.add_id(username, pubkey_bytes); } fn run(server: ServerData, addr: &str) { let server = RwLock::new(server); server.write().unwrap().add_user("icefox"); rouille::start_server(addr, move |request| { router!( request, (GET) (/id/{name:String}) => { if let Some(n) = server.read().unwrap().get_id_key(&name) { 
Response::text(base64::encode(n)) } else { Response::empty_404() } }, (GET) (/name/{name:String}) => { println!("Got get to {}", &name); if let Some(n) = server.read().unwrap().get_name(&name) { Response::json(n) } else { Response::empty_404() } }, (POST) (/name/{name:String}) => { println!("Got post to {}", &name); let rename_request: UpdateMessage = try_or_400!(rouille::input::json_input(request)); println!("Got post to {}: {:?}", &name, rename_request); match server.write().unwrap().apply_update_if_valid(&name, &rename_request) { Ok(_) => Response::text("ok"), Err(v) => Response::text(format!("{:?}", v)).with_status_code(403), } }, _ => Response::text("hello world") ) }); } } fn main() { let s = ServerData::default(); ServerData::run(s, "127.0.0.1:8888"); } #[cfg(test)] mod tests { extern crate reqwest; use lazy_static; use std::thread; use std::io::Read; use serde::Serialize; use ring::{rand, signature}; use untrusted; use base64; const UNITTEST_USER: &str = "unittest_user"; const UNITTEST_NAME: &str = "unittest_name"; const UNITTEST_NAME_VALUE: &str = "unittest_name_value"; fn start_test_server() { use super::ServerData; let mut s = ServerData::default(); let pubkey_bytes = KEYPAIR.public_key_bytes(); s.add_id(UNITTEST_USER, pubkey_bytes); s.update_name(UNITTEST_NAME, UNITTEST_NAME_VALUE); ServerData::run(s, "127.0.0.1:8888"); } fn generate_keypair() -> signature::Ed25519KeyPair { let rng = rand::SystemRandom::new(); let pkcs8_bytes = signature::Ed25519KeyPair::generate_pkcs8(&rng).unwrap(); let keypair = signature::Ed25519KeyPair::from_pkcs8( untrusted::Input::from(&pkcs8_bytes) ).unwrap(); keypair } lazy_static! 
{ static ref SERVER_THREAD: thread::JoinHandle<()> = thread::spawn(start_test_server); static ref KEYPAIR: signature::Ed25519KeyPair = generate_keypair(); } fn spawn_server_and_get(path: &str) -> reqwest::Response { lazy_static::initialize(&SERVER_THREAD); let new_path = String::from("http://localhost:8888") + path; reqwest::get(&new_path).unwrap() } fn spawn_server_and_post<T: Serialize>(path: &str, json: &T) -> reqwest::Response { lazy_static::initialize(&SERVER_THREAD); let client = reqwest::Client::new().unwrap(); let new_path = String::from("http://localhost:8888") + path; client.post(&new_path).unwrap() .json(json).unwrap() .send().unwrap() } #[test] fn test_basic() { let mut resp = spawn_server_and_get("/"); assert!(resp.status().is_success()); let mut content = String::new(); resp.read_to_string(&mut content).unwrap(); assert_eq!(content, "hello world"); } #[test] fn test_id() { let mut resp = spawn_server_and_get((String::from("/id/") + UNITTEST_USER).as_str()); assert!(resp.status().is_success()); let mut content = String::new(); resp.read_to_string(&mut content).unwrap(); let pubkey_bytes = KEYPAIR.public_key_bytes(); let pubkey_string = base64::encode(pubkey_bytes); assert_eq!(content, pubkey_string); } #[test] fn test_get_name() { let resp = spawn_server_and_get("/name/test_no_name"); assert_eq!(resp.status(), reqwest::StatusCode::NotFound); let mut resp = spawn_server_and_get((String::from("/name/") + UNITTEST_NAME).as_str()); assert!(resp.status().is_success()); let mut content = String::new(); resp.read_to_string(&mut content).unwrap(); assert_eq!(content, UNITTEST_NAME_VALUE); } #[test] fn test_post_name() { const NEWNAME: &str = "/name/test_post_name"; let resp = spawn_server_and_get(NEWNAME); assert!(!resp.status().is_success()); let changed_name = "foo!"; let data = super::UpdateMessage::signed_message(&KEYPAIR, UNITTEST_USER, changed_name); let mut resp = spawn_server_and_post(NEWNAME, &data); assert!(resp.status().is_success()); let mut 
content = String::new(); resp.read_to_string(&mut content).unwrap(); assert_eq!(content, "ok"); let mut resp = spawn_server_and_get(NEWNAME); assert!(resp.status().is_success()); let mut content = String::new(); resp.read_to_string(&mut content).unwrap(); assert_eq!(content, changed_name); let baddata = super::UpdateMessage { user: UNITTEST_USER.into(), signature: "".into(), new_contents: "aieeee!".into(), }; let resp = spawn_server_and_post(NEWNAME, &baddata); assert!(!resp.status().is_success()); let mut resp = spawn_server_and_get(NEWNAME); assert!(resp.status().is_success()); let mut content = String::new(); resp.read_to_string(&mut content).unwrap(); assert_eq!(content, changed_name); } }
#[macro_use] extern crate rouille; extern crate lazy_static; extern crate serde; extern crate rustc_serialize; extern crate ring; extern crate untrusted; extern crate base64; use std::collections::HashMap; use std::sync::RwLock; use rouille::Response; extern crate protocol; use protocol::*; use ring::{signature, rand}; #[derive(Debug, Default, Clone)] struct ServerData { names: HashMap<String, UpdateMessage>, keys: HashMap<String, Vec<u8>>, } impl ServerData { fn get_name(&self, name: &str) -> Option<&UpdateMessage> { self.names.get(name) } fn get_id_key(&self, id: &str) -> Option<&[u8]> { self.keys.get(id).map(|x| x.as_ref()) } fn add_id(&mut self, id: &str, key: &[u8]) { self.keys.insert(id.into(), key.into()); } fn validate_update(&self, msg: &UpdateMessage) -> Result<(), ValidationError> { match self.keys.get(&msg.user) { Some(key) => msg.verify_signature(key), None => Err(ValidationError::UnknownUser(msg.user.clone())) } } fn update_name(&mut self, name: &str, contents: &UpdateMessage) { self.names.insert(name.to_string(), contents.clone()); } fn apply_update_if_valid(&mut self, dest: &str, msg: &UpdateMessage) -> Result<(), ValidationError> { let _ = self.validate_update(msg)?; self.update_name(dest, &msg); Ok(()) } fn add_user(&mut self, username: &str) { let rng = rand::SystemRandom::new(); let pkcs8_bytes = signature::Ed25519KeyPair::generate_pkcs8(&rng).unwrap(); let keypair = signature::Ed25519KeyPair::from_pkcs8( untrusted::Input::from(&pkcs8_bytes) ).unwrap(); let encoded_privkey = base64::encode(&pkcs8_bytes[..]); println!("Private key for {} is: {}", username, encoded_privkey); let pubkey_bytes = keypair.public_key_bytes(); self.add_id(username, pubkey_bytes); } fn run(server: ServerData, addr: &str) { let server = RwLock::new(server); server.write().unwrap().add_user("icefox"); rouille::start_server(addr, move |request| { router!( request, (GET) (/id/{name:String}) => { if let Some(n) = server.read().unwrap().get_id_key(&name) { 
Response::text(base64::encode(n)) } else { Response::empty_404() } }, (GET) (/name/{name:String}) => { println!("Got get to {}", &name); if let Some(n) = server.read().unwrap().get_name(&name) { Response::json(n) } else { Response::empty_404() } }, (POST) (/name/{name:String}) => { println!("Got post to {}", &name); let rename_request: UpdateMessage = try_or_400!(rouille::input::json_input(request)); println!("Got post to {}: {:?}", &name, rename_request); match server.write().unwrap().apply_update_if_valid(&name, &rename_request) { Ok(_) => Response::text("ok"), Err(v) => Response::text(format!("{:?}", v)).with_status_code(403), } }, _ => Response::text("hello world") ) }); } } fn main() { let s = ServerData::default(); ServerData::run(s, "127.0.0.1:8888"); } #[cfg(test)] mod tests { extern crate reqwest; use lazy_static; use std::thread; use std::io::Read; use serde::Serialize; use ring::{rand, signature}; use untrusted; use base64; const UNITTEST_USER: &str = "unittest_user"; const UNITTEST_NAME: &str = "unittest_name"; const UNITTEST_NAME_VALUE: &str = "unittest_name_value"; fn start_test_server() { use super::ServerData; let mut s = ServerData::default(); let pubkey_bytes = KEYPAIR.public_key_bytes(); s.add_id(UNITTEST_USER, pubkey_bytes); s.update_name(UNITTEST_NAME, UNITTEST_NAME_VALUE); ServerData::run(s, "127.0.0.1:8888"); }
lazy_static! { static ref SERVER_THREAD: thread::JoinHandle<()> = thread::spawn(start_test_server); static ref KEYPAIR: signature::Ed25519KeyPair = generate_keypair(); } fn spawn_server_and_get(path: &str) -> reqwest::Response { lazy_static::initialize(&SERVER_THREAD); let new_path = String::from("http://localhost:8888") + path; reqwest::get(&new_path).unwrap() } fn spawn_server_and_post<T: Serialize>(path: &str, json: &T) -> reqwest::Response { lazy_static::initialize(&SERVER_THREAD); let client = reqwest::Client::new().unwrap(); let new_path = String::from("http://localhost:8888") + path; client.post(&new_path).unwrap() .json(json).unwrap() .send().unwrap() } #[test] fn test_basic() { let mut resp = spawn_server_and_get("/"); assert!(resp.status().is_success()); let mut content = String::new(); resp.read_to_string(&mut content).unwrap(); assert_eq!(content, "hello world"); } #[test] fn test_id() { let mut resp = spawn_server_and_get((String::from("/id/") + UNITTEST_USER).as_str()); assert!(resp.status().is_success()); let mut content = String::new(); resp.read_to_string(&mut content).unwrap(); let pubkey_bytes = KEYPAIR.public_key_bytes(); let pubkey_string = base64::encode(pubkey_bytes); assert_eq!(content, pubkey_string); } #[test] fn test_get_name() { let resp = spawn_server_and_get("/name/test_no_name"); assert_eq!(resp.status(), reqwest::StatusCode::NotFound); let mut resp = spawn_server_and_get((String::from("/name/") + UNITTEST_NAME).as_str()); assert!(resp.status().is_success()); let mut content = String::new(); resp.read_to_string(&mut content).unwrap(); assert_eq!(content, UNITTEST_NAME_VALUE); } #[test] fn test_post_name() { const NEWNAME: &str = "/name/test_post_name"; let resp = spawn_server_and_get(NEWNAME); assert!(!resp.status().is_success()); let changed_name = "foo!"; let data = super::UpdateMessage::signed_message(&KEYPAIR, UNITTEST_USER, changed_name); let mut resp = spawn_server_and_post(NEWNAME, &data); assert!(resp.status().is_success()); 
let mut content = String::new(); resp.read_to_string(&mut content).unwrap(); assert_eq!(content, "ok"); let mut resp = spawn_server_and_get(NEWNAME); assert!(resp.status().is_success()); let mut content = String::new(); resp.read_to_string(&mut content).unwrap(); assert_eq!(content, changed_name); let baddata = super::UpdateMessage { user: UNITTEST_USER.into(), signature: "".into(), new_contents: "aieeee!".into(), }; let resp = spawn_server_and_post(NEWNAME, &baddata); assert!(!resp.status().is_success()); let mut resp = spawn_server_and_get(NEWNAME); assert!(resp.status().is_success()); let mut content = String::new(); resp.read_to_string(&mut content).unwrap(); assert_eq!(content, changed_name); } }
fn generate_keypair() -> signature::Ed25519KeyPair { let rng = rand::SystemRandom::new(); let pkcs8_bytes = signature::Ed25519KeyPair::generate_pkcs8(&rng).unwrap(); let keypair = signature::Ed25519KeyPair::from_pkcs8( untrusted::Input::from(&pkcs8_bytes) ).unwrap(); keypair }
function_block-full_function
[ { "content": "fn get_ipfs_doc(name: &str) {\n\n // Using 'get' here \n\n let url = format!(\"http://localhost:5001/api/v0/cat?arg={}\", name);\n\n let mut resp = reqwest::get(&url).expect(\"Could not get IPFS doc?\");\n\n let mut content = String::new();\n\n resp.read_to_string(&mut content).unwr...
Rust
src/search/old.rs
KevinWMatthews/mindbase
ab70ffd27c35acec0a0ecedf787234e8980d6e73
use crate::{ allegation::{ Allegation, Body, }, mbql::{ ast, error::{ MBQLError, MBQLErrorKind, }, query::BindResult, Query, }, symbol::Atom, AgentId, AllegationId, Analogy, ArtifactId, MBError, MindBase, Symbol, }; use std::convert::TryInto; use std::rc::Rc; pub struct GSContext<'a> { scan_min: [u8; 64], scan_max: [u8; 64], gs_agents: Vec<AgentId>, mb: &'a MindBase, } impl<'a> GSContext<'a> { pub fn symbolize(&mut self, symbolizable: &Rc<ast::GSymbolizable>, vivify: bool, query: &Query) -> Result<Symbol, MBQLError> { let node = self.symbolize_recurse(symbolizable, vivify, query)?; Ok(node.take_symbol()) } fn symbolize_recurse(&mut self, gsym: &Rc<ast::GSymbolizable>, vivify: bool, query: &Query) -> Result<GSNode, MBQLError> { let symbol = match &**gsym { ast::GSymbolizable::Artifact(a) => GSNode::artifact(self, vivify, query, a)?, ast::GSymbolizable::GroundPair(a) => GSNode::pair(self, vivify, query, a)?, ast::GSymbolizable::SymbolVar(sv) => GSNode::symbolvar(self, vivify, query, sv)?, ast::GSymbolizable::Ground(_) => { unreachable!() }, }; Ok(symbol) } fn find_matching_analogy_symbol(&self, left: &GSNode, right: &GSNode, query: &Query) -> Result<Option<Symbol>, MBError> { let left = left.symbol(); let right = right.symbol(); let comp_merged: Vec<SidedMergeItem<Atom>> = SidedMerge::new(left.atoms.iter(), right.atoms.iter()).map(|a| a.to_owned()) .collect(); let output_left: Vec<Atom> = Vec::new(); let output_right: Vec<Atom> = Vec::new(); let output_analogy: Vec<Atom> = Vec::new(); let iter = query.mb.allegation_iter().filter_map(|allegation| { match allegation { Ok((_, Allegation { body: Body::Analogy(analogy), agent_id, .. 
})) if self.gs_agents.contains(&agent_id) => { Some(Ok(analogy)) }, Ok(_) => None, Err(e) => Some(Err(e)), } }); for analogy in iter { let analogy = analogy?; let analogy_merged = SidedMerge::new(analogy.left.atoms.iter(), analogy.right.atoms.iter()); let si = SortedIntersect::new(analogy_merged, comp_merged.iter()); for item in si { match (item.left.side, item.right.side) { (Left, Left) => ll_hit = true, (Right, Right) => rr_hit = true, (Left, Right) => lr_hit = true, (Right, Left) => rl_hit = true, } } if (ll_hit && rr_hit) || (lr_hit && rl_hit) { output_analogy.push(analogy.id) } } return Ok(Symbol::new_option(output_analogy)); } } use std::{ cmp::Ordering, iter::Peekable, }; struct SidedMerge<L, R> where L: Iterator<Item = R::Item>, R: Iterator { left: Peekable<L>, right: Peekable<R>, } impl<L, R> SidedMerge<L, R> where L: Iterator<Item = R::Item>, R: Iterator { fn new(left: L, right: R) -> Self { SidedMerge { left: left.peekable(), right: right.peekable(), } } } pub struct SidedMergeItem<T> { pub item: T, side: ItemSide, } enum ItemSide { Left, Right, } impl<T: Clone> SidedMergeItem<&T> { pub fn to_owned(self) -> SidedMergeItem<T> { SidedMergeItem { item: self.item.clone(), side: self.side, } } } impl<L, R> Iterator for SidedMerge<L, R> where L: Iterator<Item = R::Item>, R: Iterator, L::Item: Ord { type Item = SidedMergeItem<L::Item>; fn next(&mut self) -> Option<Self::Item> { let which = match (self.left.peek(), self.right.peek()) { (Some(l), Some(r)) => Some(l.cmp(r)), (Some(_), None) => Some(Ordering::Less), (None, Some(_)) => Some(Ordering::Greater), (None, None) => None, }; match which { Some(Ordering::Less) => { Some(SidedMergeItem { item: self.left.next().unwrap(), side: ItemSide::Left, }) }, Some(Ordering::Equal) => { Some(SidedMergeItem { item: self.left.next().unwrap(), side: ItemSide::Left, }) }, Some(Ordering::Greater) => { Some(SidedMergeItem { item: self.right.next().unwrap(), side: ItemSide::Right, }) }, None => None, } } } struct 
SortedIntersect<L, R> where L: Iterator<Item = R::Item>, R: Iterator { left: Peekable<L>, right: Peekable<R>, } impl<L, R> SortedIntersect<L, R> where L: Iterator<Item = R::Item>, R: Iterator { fn new(left: L, right: R) -> Self { SortedIntersect { left: left.peekable(), right: right.peekable(), } } } impl<L, R> Iterator for SortedIntersect<L, R> where L: Iterator<Item = R::Item>, R: Iterator, L::Item: Ord { type Item = L::Item; fn next(&mut self) -> Option<Self::Item> { let mut left = match self.left.next() { None => return None, Some(i) => i, }; let mut right = match self.right.next() { None => return None, Some(i) => i, }; use std::cmp::Ordering::*; loop { match left.cmp(&right) { Less => { left = match self.left.next() { Some(x) => x, None => return None, }; }, Greater => { right = match self.right.next() { Some(x) => x, None => return None, }; }, Equal => return Some(left), } } } } fn analogy_compare(analogy: &Analogy, left: &Symbol, right: &Symbol, atoms: &mut Vec<Atom>) { unimplemented!() } fn intersect_symbols(symbol_a: &Symbol, symbol_b: &Symbol) -> bool { let mut a_iter = symbol_a.atoms.iter(); let mut b_iter = symbol_b.atoms.iter(); let mut a = match a_iter.next() { Some(v) => v, None => { return false; }, }; let mut b = match b_iter.next() { Some(v) => v, None => { return false; }, }; use std::cmp::Ordering::*; loop { match a.cmp(b) { Less => { a = match a_iter.next() { Some(x) => x, None => return false, }; }, Greater => { b = match b_iter.next() { Some(x) => x, None => return false, }; }, Equal => return true, } } }
use crate::{ allegation::{ Allegation, Body, }, mbql::{ ast, error::{ MBQLError, MBQLErrorKind, }, query::BindResult, Query, }, symbol::Atom, AgentId, AllegationId, Analogy, ArtifactId, MBError, MindBase, Symbol, }; use std::convert::TryInto; use std::rc::Rc; pub struct GSContext<'a> { scan_min: [u8; 64], scan_max: [u8; 64], gs_agents: Vec<AgentId>, mb: &'a MindBase, } impl<'a> GSContext<'a> { pub fn symbolize(&mut self, symbolizable: &Rc<ast::GSymbolizable>, vivify: bool, query: &Query) -> Result<Symbol, MBQLError> { let node = self.symbolize_recurse(symbolizable, vivify, query)?; Ok(node.take_symbol()) } fn symbolize_recurse(&mut self, gsym: &Rc<ast::GSymbolizable>, vivify: bool, query: &Query) -> Result<GSNode, MBQLError> { let symbol = match &**gsym { ast::GSymbolizable::Artifact(a) => GSNode::artifact(self, vivify, query, a)?, ast::GSymbolizable::GroundPair(a) => GSNode::pair(self, vivify, query, a)?, ast::GSymbolizable::SymbolVar(sv) => GSNode::symbolvar(self, vivify, query, sv)?, ast::GSymbolizable::Ground(_) => { unreachable!() }, }; Ok(symbol) } fn find_matching_analogy_symbol(&self, left: &GSNode, right: &GSNode, query: &Query) -> Result<Option<Symbol>, MBError> { let left = left.symbol(); let right = right.symbol(); let comp_merged: Vec<SidedMergeItem<Atom>> = SidedMerge::new(left.atoms.iter(), right.atoms.iter()).map(|a| a.to_owned()) .collect(); let output_left: Vec<Atom> = Vec::new(); let output_right: Vec<Atom> = Vec::new(); let output_analogy: Vec<Atom> = Vec::new(); let iter = query.mb.allegation_iter().filter_map(|allegation| { match allegation { Ok((_, Allegation { body: Body::Analogy(analogy), agent_id, .. 
})) if self.gs_agents.contains(&agent_id) => { Some(Ok(analogy)) }, Ok(_) => None, Err(e) => Some(Err(e)), } }); for analogy in iter { let analogy = analogy?; let analogy_merged = SidedMerge::new(analogy.left.atoms.iter(), analogy.right.atoms.iter()); let si = SortedIntersect::new(analogy_merged, comp_merged.iter()); for item in si { match (item.left.side, item.right.side) { (Left, Left) => ll_hit = true, (Right, Right) => rr_hit = true, (Left, Right) => lr_hit = true, (Right, Left) => rl_hit = true, } } if (ll_hit && rr_hit) || (lr_hit && rl_hit) { output_analogy.push(analogy.id) } } return Ok(Symbol::new_option(output_analogy)); } } use std::{ cmp::Ordering, iter::Peekable, }; struct SidedMerge<L, R> where L: Iterator<Item = R::Item>, R: Iterator { left: Peekable<L>, right: Peekable<R>, } impl<L, R> SidedMerge<L, R> where L: Iterator<Item = R::Item>, R: Iterator { fn new(left: L, right: R) -> Self { SidedMerge { left: left.peekable(), right: right.peekable(), } } } pub struct SidedMergeItem<T> { pub item: T, side: ItemSide, } enum ItemSide { Left, Right, } impl<T: Clone> SidedMergeItem<&T> { pub fn to_owned(self) -> SidedMergeItem<T> { SidedMergeItem { item: self.item.clone(), side: self.side, } } } impl<L, R> Iterator for SidedMerge<L, R> where L: Iterator<Item = R::Item>, R: Iterator, L::Item: Ord { type Item = SidedMergeItem<L::Item>; fn next(&mut self) -> Option<Self::Item> { let which = match (self.left.peek(), self.right.peek()) { (Some(l), Some(r)) => Some(l.cmp(r)), (Some(_), None) => Some(Ordering::Less), (None, Some(_)) => Some(Ordering::Greater), (None, None) => None, }; match which { Some(Ordering::Less) => { Some(SidedMergeItem { item: self.left.next().unwrap(), side: ItemSide::Left, }) }, Some(Ordering::Equal) => { Some(SidedMergeItem { item: self.left.next().unwrap(), side: ItemSide::Left, }) }, Some(Ordering::Greater) => { Some(SidedMergeItem { item: self.right.next().unwra
} struct SortedIntersect<L, R> where L: Iterator<Item = R::Item>, R: Iterator { left: Peekable<L>, right: Peekable<R>, } impl<L, R> SortedIntersect<L, R> where L: Iterator<Item = R::Item>, R: Iterator { fn new(left: L, right: R) -> Self { SortedIntersect { left: left.peekable(), right: right.peekable(), } } } impl<L, R> Iterator for SortedIntersect<L, R> where L: Iterator<Item = R::Item>, R: Iterator, L::Item: Ord { type Item = L::Item; fn next(&mut self) -> Option<Self::Item> { let mut left = match self.left.next() { None => return None, Some(i) => i, }; let mut right = match self.right.next() { None => return None, Some(i) => i, }; use std::cmp::Ordering::*; loop { match left.cmp(&right) { Less => { left = match self.left.next() { Some(x) => x, None => return None, }; }, Greater => { right = match self.right.next() { Some(x) => x, None => return None, }; }, Equal => return Some(left), } } } } fn analogy_compare(analogy: &Analogy, left: &Symbol, right: &Symbol, atoms: &mut Vec<Atom>) { unimplemented!() } fn intersect_symbols(symbol_a: &Symbol, symbol_b: &Symbol) -> bool { let mut a_iter = symbol_a.atoms.iter(); let mut b_iter = symbol_b.atoms.iter(); let mut a = match a_iter.next() { Some(v) => v, None => { return false; }, }; let mut b = match b_iter.next() { Some(v) => v, None => { return false; }, }; use std::cmp::Ordering::*; loop { match a.cmp(b) { Less => { a = match a_iter.next() { Some(x) => x, None => return false, }; }, Greater => { b = match b_iter.next() { Some(x) => x, None => return false, }; }, Equal => return true, } } }
p(), side: ItemSide::Right, }) }, None => None, } }
function_block-function_prefixed
[ { "content": "pub fn parse<T: std::io::BufRead>(reader: T, query: &mut super::Query) -> Result<(), MBQLError> {\n\n for (line_number, line) in reader.lines().enumerate() {\n\n let line_str: String = line.map_err(|error| {\n\n MBQLError { position: Position { row: ...
Rust
src/liberty.rs
marlls1989/liberty-parse
efd8bbe621f4a8a612910c47608c0665993a77a8
use std::{ collections::BTreeMap, fmt, ops::{Deref, DerefMut}, }; use crate::ast::{GroupItem, LibertyAst, Value}; #[derive(Debug, PartialEq, Clone)] pub struct Liberty(pub Vec<Library>); impl Liberty { pub fn to_ast(self) -> LibertyAst { LibertyAst( self.0 .into_iter() .map(|g| g.into_group().into_group_item()) .collect(), ) } pub fn from_ast(ast: LibertyAst) -> Self { Liberty( ast.0 .into_iter() .map(|g| Library::from_group(Group::from_group_item(g))) .collect(), ) } } impl Deref for Liberty { type Target = [Library]; fn deref(&self) -> &Self::Target { self.0.deref() } } impl DerefMut for Liberty { fn deref_mut(&mut self) -> &mut Self::Target { self.0.deref_mut() } } impl From<LibertyAst> for Liberty { fn from(ast: LibertyAst) -> Self { Liberty::from_ast(ast) } } impl fmt::Display for Liberty { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.clone().to_ast().fmt(f) } } impl IntoIterator for Liberty { type Item = Library; type IntoIter = ::std::vec::IntoIter<Self::Item>; fn into_iter(self) -> Self::IntoIter { self.0.into_iter() } } #[derive(Debug, PartialEq, Clone)] pub struct Library { pub name: String, pub simple_attributes: BTreeMap<String, Value>, pub complex_attributes: BTreeMap<String, Vec<Value>>, pub groups: Vec<Group>, pub cells: BTreeMap<String, Cell>, } impl Library { pub fn new(name: &str) -> Self { Self { name: name.to_string(), simple_attributes: BTreeMap::new(), complex_attributes: BTreeMap::new(), groups: vec![], cells: BTreeMap::new(), } } } #[derive(Debug, PartialEq, Clone)] pub struct Group { pub type_: String, pub name: String, pub simple_attributes: BTreeMap<String, Value>, pub complex_attributes: BTreeMap<String, Vec<Value>>, pub groups: Vec<Group>, } impl Group { pub fn new(type_: &str, name: &str) -> Self { Self { type_: type_.to_string(), name: name.to_string(), simple_attributes: BTreeMap::new(), complex_attributes: BTreeMap::new(), groups: vec![], } } pub fn from_group_item(group_item: GroupItem) -> Self { let (type_, name, 
items) = group_item.group(); let mut simple_attributes: BTreeMap<String, Value> = BTreeMap::new(); let mut complex_attributes: BTreeMap<String, Vec<Value>> = BTreeMap::new(); let mut groups: Vec<Self> = vec![]; for item in items { match item { GroupItem::SimpleAttr(name, value) => { simple_attributes.insert(name, value); } GroupItem::ComplexAttr(name, value) => { complex_attributes.insert(name, value); } GroupItem::Group(type_, name, items) => { groups.push(Group::from_group_item(GroupItem::Group(type_, name, items))); } _ => {} } } Self { name, type_, simple_attributes, complex_attributes, groups, } } pub fn into_group_item(self) -> GroupItem { let mut items: Vec<GroupItem> = Vec::with_capacity( self.simple_attributes.len() + self.complex_attributes.len() + self.groups.len(), ); items.extend( self.simple_attributes .into_iter() .map(|(name, value)| GroupItem::SimpleAttr(name, value)), ); items.extend( self.complex_attributes .into_iter() .map(|(name, value)| GroupItem::ComplexAttr(name, value)), ); items.extend(self.groups.into_iter().map(|g| g.into_group_item())); GroupItem::Group(self.type_, self.name, items) } } #[derive(Debug, PartialEq, Clone)] pub struct Cell { pub name: String, pub simple_attributes: BTreeMap<String, Value>, pub complex_attributes: BTreeMap<String, Vec<Value>>, pub groups: Vec<Group>, pub pins: BTreeMap<String, Pin>, } impl Cell { pub fn new(name: &str) -> Self { Self { name: name.to_string(), simple_attributes: BTreeMap::new(), complex_attributes: BTreeMap::new(), groups: vec![], pins: BTreeMap::new(), } } } #[derive(Debug, PartialEq, Clone)] pub struct Pin { pub name: String, pub simple_attributes: BTreeMap<String, Value>, pub complex_attributes: BTreeMap<String, Vec<Value>>, pub groups: Vec<Group>, } impl Pin { pub fn new(name: &str) -> Self { Self { name: name.to_string(), simple_attributes: BTreeMap::new(), complex_attributes: BTreeMap::new(), groups: vec![], } } } pub trait FromGroup { type Item; fn from_group(group: Group) -> 
Self::Item; } pub trait ToGroup { type Item; fn into_group(self) -> Group; } impl FromGroup for Library { type Item = Library; fn from_group(group: Group) -> Self::Item { let (cells, groups) = group.groups.into_iter().partition(|g| g.type_ == "cell"); Self { name: group.name, simple_attributes: group.simple_attributes, complex_attributes: group.complex_attributes, groups, cells: cells.into_iter().fold(BTreeMap::new(), |mut acc, cell| { acc.insert(cell.name.clone(), Cell::from_group(cell)); acc }), } } } impl ToGroup for Library { type Item = Library; fn into_group(self) -> Group { let mut groups: Vec<Group> = Vec::with_capacity(self.groups.len() + self.cells.len()); groups.extend(self.groups); groups.extend(self.cells.into_iter().map(|(_, cell)| cell.into_group())); Group { name: self.name, type_: String::from("library"), simple_attributes: self.simple_attributes, complex_attributes: self.complex_attributes, groups, } } } impl FromGroup for Cell { type Item = Cell; fn from_group(group: Group) -> Self::Item { let (pins, groups) = group.groups.into_iter().partition(|g| g.type_ == "pin"); Self { name: group.name, simple_attributes: group.simple_attributes, complex_attributes: group.complex_attributes, groups, pins: pins.into_iter().fold(BTreeMap::new(), |mut acc, pin| { acc.insert(pin.name.clone(), Pin::from_group(pin)); acc }), } } } impl ToGroup for Cell { type Item = Cell; fn into_group(self) -> Group { let mut groups: Vec<Group> = Vec::with_capacity(self.groups.len() + self.pins.len()); groups.extend(self.pins.into_iter().map(|(_, pin)| pin.into_group())); groups.extend(self.groups); Group { name: self.name, type_: String::from("cell"), simple_attributes: self.simple_attributes, complex_attributes: self.complex_attributes, groups, } } } impl FromGroup for Pin { type Item = Pin; fn from_group(group: Group) -> Self::Item { Self { name: group.name, simple_attributes: group.simple_attributes, complex_attributes: group.complex_attributes, groups: group.groups, } } } 
impl ToGroup for Pin { type Item = Pin; fn into_group(self) -> Group { Group { name: self.name, type_: String::from("pin"), simple_attributes: self.simple_attributes, complex_attributes: self.complex_attributes, groups: self.groups, } } } #[cfg(test)] mod test { use super::*; #[test] fn test_iter() { let lib = Liberty(vec![Library::new("mylib")]); let mut iter = lib.into_iter(); assert_eq!(iter.next(), Some(Library::new("mylib"))); assert_eq!(iter.next(), None); } #[test] fn test_pin_into_group() { let mut pin = Pin::new("my_pin"); pin.groups.push(Group::new("gtype", "gname")); let group = pin.into_group(); assert_eq!(group.type_, "pin"); assert_eq!(group.name, "my_pin"); assert_eq!(group.groups.len(), 1); } #[test] fn test_pin_from_group() { let mut group = Group::new("pin", "a"); group.groups.push(Group::new("gtype", "gname")); let pin = Pin::from_group(group); assert_eq!(pin.name, "a"); assert_eq!(pin.groups.len(), 1); } #[test] fn test_cell_into_group() { let mut cell = Cell::new("my_cell"); cell.groups.push(Group::new("gtype", "gname")); cell.pins.insert("a".to_string(), Pin::new("a")); cell.pins.insert("b".to_string(), Pin::new("b")); let group = cell.into_group(); assert_eq!(group.type_, "cell"); assert_eq!(group.name, "my_cell"); assert_eq!(group.groups.len(), 3); } #[test] fn test_cell_from_group() { let mut group = Group::new("cell", "AND2"); group.groups.push(Group::new("gtype", "gname")); group.groups.push(Group::new("pin", "a")); group.groups.push(Group::new("pin", "b")); let cell = Cell::from_group(group); assert_eq!(cell.name, "AND2"); assert_eq!(cell.groups.len(), 1); assert_eq!(cell.pins.len(), 2); } #[test] fn test_library_into_group() { let mut lib = Library::new("my_lib"); lib.groups.push(Group::new("gtype", "gname")); lib.cells.insert("AND2".to_string(), Cell::new("AND2")); lib.cells.insert("NAND2".to_string(), Cell::new("NAND2")); let group = lib.into_group(); assert_eq!(group.type_, "library"); assert_eq!(group.name, "my_lib"); 
assert_eq!(group.groups.len(), 3); } #[test] fn test_lib_from_group() { let mut group = Group::new("library", "mylib"); group.groups.push(Group::new("gtype", "gname")); let mut cell = Group::new("cell", "AND2"); cell.groups.push(Group::new("pin", "a")); cell.groups.push(Group::new("pin", "b")); group.groups.push(cell); let lib = Library::from_group(group); assert_eq!(lib.name, "mylib"); assert_eq!(lib.groups.len(), 1); assert_eq!(lib.cells.len(), 1); let converted_cell = lib.cells.get("AND2").unwrap(); assert_eq!(converted_cell.name, "AND2"); assert_eq!(converted_cell.groups.len(), 0); assert_eq!(converted_cell.pins.len(), 2); } }
use std::{ collections::BTreeMap, fmt, ops::{Deref, DerefMut}, }; use crate::ast::{GroupItem, LibertyAst, Value}; #[derive(Debug, PartialEq, Clone)] pub struct Liberty(pub Vec<Library>); impl Liberty { pub fn to_ast(self) -> LibertyAst { LibertyAst( self.0 .into_iter() .map(|g| g.into_group().into_group_item()) .collect(), ) } pub fn from_ast(ast: LibertyAst) -> Self { Liberty( ast.0 .into_iter() .map(|g| Library::from_group(Group::from_group_item(g))) .collect(), ) } } impl Deref for Liberty { type Target = [Library]; fn deref(&self) -> &Self::Target { self.0.deref() } } impl DerefMut for Liberty { fn deref_mut(&mut self) -> &mut Self::Target { self.0.deref_mut() } } impl From<LibertyAst> for Liberty { fn from(ast: LibertyAst) -> Self { Liberty::from_ast(ast) } } impl fmt::Display for Liberty { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.clone().to_ast().fmt(f) } } impl IntoIterator for Liberty { type Item = Library; type IntoIter = ::std::vec::IntoIter<Self::Item>; fn into_iter(self) -> Self::IntoIter { self.0.into_iter() } } #[derive(Debug, PartialEq, Clone)] pub struct Library { pub name: String, pub simple_attributes: BTreeMap<String, Value>, pub complex_attributes: BTreeMap<String, Vec<Value>>, pub groups: Vec<Group>, pub cells: BTreeMap<String, Cell>, } impl Library { pub fn new(name: &str) -> Self { Self { name: name.to_string(), simple_attributes: BTreeMap::new(), complex_attributes: BTreeMap::new(), groups: vec![], cells: BTreeMap::new(), } } } #[derive(Debug, PartialEq, Clone)] pub struct Group { pub type_: String, pub name: String, pub simple_attributes: BTreeMap<String, Value>, pub complex_attributes: BTreeMap<String, Vec<Value>>, pub groups: Vec<Group>, } impl Group { pub fn new(type_: &str, name: &str) -> Self { Self { type_: type_.to_string(), name: name.to_string(), simple_attributes: BTreeMap::new(), complex_attributes: BTreeMap::new(), groups: vec![], } } pub fn from_group_item(group_item: GroupItem) -> Self { let (type_, name, 
items) = group_item.group(); let mut simple_attributes: BTreeMap<String, Value> = BTreeMap::new(); let mut complex_attributes: BTreeMap<String, Vec<Value>> = BTreeMap::new(); let mut groups: Vec<Self> = vec![]; for item in items {
} Self { name, type_, simple_attributes, complex_attributes, groups, } } pub fn into_group_item(self) -> GroupItem { let mut items: Vec<GroupItem> = Vec::with_capacity( self.simple_attributes.len() + self.complex_attributes.len() + self.groups.len(), ); items.extend( self.simple_attributes .into_iter() .map(|(name, value)| GroupItem::SimpleAttr(name, value)), ); items.extend( self.complex_attributes .into_iter() .map(|(name, value)| GroupItem::ComplexAttr(name, value)), ); items.extend(self.groups.into_iter().map(|g| g.into_group_item())); GroupItem::Group(self.type_, self.name, items) } } #[derive(Debug, PartialEq, Clone)] pub struct Cell { pub name: String, pub simple_attributes: BTreeMap<String, Value>, pub complex_attributes: BTreeMap<String, Vec<Value>>, pub groups: Vec<Group>, pub pins: BTreeMap<String, Pin>, } impl Cell { pub fn new(name: &str) -> Self { Self { name: name.to_string(), simple_attributes: BTreeMap::new(), complex_attributes: BTreeMap::new(), groups: vec![], pins: BTreeMap::new(), } } } #[derive(Debug, PartialEq, Clone)] pub struct Pin { pub name: String, pub simple_attributes: BTreeMap<String, Value>, pub complex_attributes: BTreeMap<String, Vec<Value>>, pub groups: Vec<Group>, } impl Pin { pub fn new(name: &str) -> Self { Self { name: name.to_string(), simple_attributes: BTreeMap::new(), complex_attributes: BTreeMap::new(), groups: vec![], } } } pub trait FromGroup { type Item; fn from_group(group: Group) -> Self::Item; } pub trait ToGroup { type Item; fn into_group(self) -> Group; } impl FromGroup for Library { type Item = Library; fn from_group(group: Group) -> Self::Item { let (cells, groups) = group.groups.into_iter().partition(|g| g.type_ == "cell"); Self { name: group.name, simple_attributes: group.simple_attributes, complex_attributes: group.complex_attributes, groups, cells: cells.into_iter().fold(BTreeMap::new(), |mut acc, cell| { acc.insert(cell.name.clone(), Cell::from_group(cell)); acc }), } } } impl ToGroup for Library { type 
Item = Library; fn into_group(self) -> Group { let mut groups: Vec<Group> = Vec::with_capacity(self.groups.len() + self.cells.len()); groups.extend(self.groups); groups.extend(self.cells.into_iter().map(|(_, cell)| cell.into_group())); Group { name: self.name, type_: String::from("library"), simple_attributes: self.simple_attributes, complex_attributes: self.complex_attributes, groups, } } } impl FromGroup for Cell { type Item = Cell; fn from_group(group: Group) -> Self::Item { let (pins, groups) = group.groups.into_iter().partition(|g| g.type_ == "pin"); Self { name: group.name, simple_attributes: group.simple_attributes, complex_attributes: group.complex_attributes, groups, pins: pins.into_iter().fold(BTreeMap::new(), |mut acc, pin| { acc.insert(pin.name.clone(), Pin::from_group(pin)); acc }), } } } impl ToGroup for Cell { type Item = Cell; fn into_group(self) -> Group { let mut groups: Vec<Group> = Vec::with_capacity(self.groups.len() + self.pins.len()); groups.extend(self.pins.into_iter().map(|(_, pin)| pin.into_group())); groups.extend(self.groups); Group { name: self.name, type_: String::from("cell"), simple_attributes: self.simple_attributes, complex_attributes: self.complex_attributes, groups, } } } impl FromGroup for Pin { type Item = Pin; fn from_group(group: Group) -> Self::Item { Self { name: group.name, simple_attributes: group.simple_attributes, complex_attributes: group.complex_attributes, groups: group.groups, } } } impl ToGroup for Pin { type Item = Pin; fn into_group(self) -> Group { Group { name: self.name, type_: String::from("pin"), simple_attributes: self.simple_attributes, complex_attributes: self.complex_attributes, groups: self.groups, } } } #[cfg(test)] mod test { use super::*; #[test] fn test_iter() { let lib = Liberty(vec![Library::new("mylib")]); let mut iter = lib.into_iter(); assert_eq!(iter.next(), Some(Library::new("mylib"))); assert_eq!(iter.next(), None); } #[test] fn test_pin_into_group() { let mut pin = Pin::new("my_pin"); 
pin.groups.push(Group::new("gtype", "gname")); let group = pin.into_group(); assert_eq!(group.type_, "pin"); assert_eq!(group.name, "my_pin"); assert_eq!(group.groups.len(), 1); } #[test] fn test_pin_from_group() { let mut group = Group::new("pin", "a"); group.groups.push(Group::new("gtype", "gname")); let pin = Pin::from_group(group); assert_eq!(pin.name, "a"); assert_eq!(pin.groups.len(), 1); } #[test] fn test_cell_into_group() { let mut cell = Cell::new("my_cell"); cell.groups.push(Group::new("gtype", "gname")); cell.pins.insert("a".to_string(), Pin::new("a")); cell.pins.insert("b".to_string(), Pin::new("b")); let group = cell.into_group(); assert_eq!(group.type_, "cell"); assert_eq!(group.name, "my_cell"); assert_eq!(group.groups.len(), 3); } #[test] fn test_cell_from_group() { let mut group = Group::new("cell", "AND2"); group.groups.push(Group::new("gtype", "gname")); group.groups.push(Group::new("pin", "a")); group.groups.push(Group::new("pin", "b")); let cell = Cell::from_group(group); assert_eq!(cell.name, "AND2"); assert_eq!(cell.groups.len(), 1); assert_eq!(cell.pins.len(), 2); } #[test] fn test_library_into_group() { let mut lib = Library::new("my_lib"); lib.groups.push(Group::new("gtype", "gname")); lib.cells.insert("AND2".to_string(), Cell::new("AND2")); lib.cells.insert("NAND2".to_string(), Cell::new("NAND2")); let group = lib.into_group(); assert_eq!(group.type_, "library"); assert_eq!(group.name, "my_lib"); assert_eq!(group.groups.len(), 3); } #[test] fn test_lib_from_group() { let mut group = Group::new("library", "mylib"); group.groups.push(Group::new("gtype", "gname")); let mut cell = Group::new("cell", "AND2"); cell.groups.push(Group::new("pin", "a")); cell.groups.push(Group::new("pin", "b")); group.groups.push(cell); let lib = Library::from_group(group); assert_eq!(lib.name, "mylib"); assert_eq!(lib.groups.len(), 1); assert_eq!(lib.cells.len(), 1); let converted_cell = lib.cells.get("AND2").unwrap(); assert_eq!(converted_cell.name, "AND2"); 
assert_eq!(converted_cell.groups.len(), 0); assert_eq!(converted_cell.pins.len(), 2); } }
match item { GroupItem::SimpleAttr(name, value) => { simple_attributes.insert(name, value); } GroupItem::ComplexAttr(name, value) => { complex_attributes.insert(name, value); } GroupItem::Group(type_, name, items) => { groups.push(Group::from_group_item(GroupItem::Group(type_, name, items))); } _ => {} }
if_condition
[ { "content": "pub fn parse_libs<'a, E: ParseError<&'a str>>(input: &'a str) -> IResult<&str, Vec<GroupItem>, E> {\n\n context(\n\n \"parse_libs\",\n\n all_consuming(terminated(\n\n fold_many0(\n\n alt((\n\n context(\n\n \"outer...
Rust
src/libpcp/term/constant.rs
ptal/pcp
5775dd8523a35ff521daf3cfa709794829d75955
use term::ops::*; use model::*; use kernel::*; use propagation::events::*; use gcollections::ops::*; use gcollections::*; use std::fmt::Debug; #[derive(Clone, Debug)] pub struct Constant<V> { value: V } impl<V> Constant<V> { pub fn new(value: V) -> Constant<V> { Constant { value: value } } } impl<V> DisplayStateful<Model> for Constant<V> where V: Debug { fn display(&self, _model: &Model) { print!("{:?}", self.value); } } impl<V, Domain, VStore> StoreMonotonicUpdate<VStore> for Constant<V> where VStore: Collection<Item=Domain>, Domain: Collection<Item=V> + Cardinality + Contains { fn update(&mut self, _store: &mut VStore, value: VStore::Item) -> bool { !value.is_empty() && value.contains(&self.value) } } impl<V, Domain, VStore> StoreRead<VStore> for Constant<V> where VStore: Collection<Item=Domain>, Domain: Collection<Item=V> + Singleton, V: Clone { fn read(&self, _store: &VStore) -> Domain { Domain::singleton(self.value.clone()) } } impl<V> ViewDependencies<FDEvent> for Constant<V> { fn dependencies(&self, _event: FDEvent) -> Vec<(usize, FDEvent)> { vec![] } } #[cfg(test)] mod test { use super::*; use trilean::SKleene; use trilean::SKleene::*; use propagation::*; use propagation::events::FDEvent; use propagation::events::FDEvent::*; use concept::*; use variable::VStoreFD; use propagators::test::*; use propagators::cmp::*; use interval::interval::*; type VStore = VStoreFD; #[test] fn x_less_constant() { let dom0_10 = (0,10).to_interval(); let dom0_4 = (0,4).to_interval(); let mut store = VStore::empty(); let x = Box::new(store.alloc(dom0_10)) as Var<VStore>; let c = Box::new(Constant::new(5 as i32)) as Var<VStore>; let x_less_c = XLessY::new(x.bclone(), c); test_propagation(1, x_less_c, &mut store, Unknown, True, vec![(0, Bound)], true); assert_eq!(x.read(&store), dom0_4); } #[test] fn unary_propagator_test() { let dom0_10 = (0,10).to_interval(); let dom0_0 = (0,0).to_interval(); unary_propagator_test_one(1, dom0_10, 0, XLessY::new, False, False, vec![], false); 
unary_propagator_test_one(2, dom0_10, 11, XLessY::new, True, True, vec![], true); unary_propagator_test_one(3, dom0_10, 10, XLessY::new, Unknown, True, vec![(0, Bound)], true); unary_propagator_test_one(4, dom0_10, -1, x_leq_y, False, False, vec![], false); unary_propagator_test_one(5, dom0_10, 10, x_leq_y, True, True, vec![], true); unary_propagator_test_one(6, dom0_10, 9, x_leq_y, Unknown, True, vec![(0, Bound)], true); unary_propagator_test_one(7, dom0_10, 10, x_greater_y, False, False, vec![], false); unary_propagator_test_one(8, dom0_10, -1, x_greater_y, True, True, vec![], true); unary_propagator_test_one(9, dom0_10, 0, x_greater_y, Unknown, True, vec![(0, Bound)], true); unary_propagator_test_one(10, dom0_10, 11, x_geq_y, False, False, vec![], false); unary_propagator_test_one(11, dom0_10, 0, x_geq_y, True, True, vec![], true); unary_propagator_test_one(12, dom0_10, 1, x_geq_y, Unknown, True, vec![(0, Bound)], true); unary_propagator_test_one(13, dom0_0, 0, XNeqY::new, False, False, vec![], false); unary_propagator_test_one(14, dom0_10, 5, XNeqY::new, Unknown, Unknown, vec![], true); unary_propagator_test_one(15, dom0_10, 0, XNeqY::new, Unknown, True, vec![(0, Bound)], true); unary_propagator_test_one(16, dom0_10, 10, XNeqY::new, Unknown, True, vec![(0, Bound)], true); } fn unary_propagator_test_one<P, R>(id: u32, x: Interval<i32>, c: i32, make_prop: P, before: SKleene, after: SKleene, expected: Vec<(usize, FDEvent)>, propagate_success: bool) where P: FnOnce(FDVar, FDVar) -> R, R: PropagatorConcept<VStoreFD, FDEvent> { let mut store = VStore::empty(); let x = Box::new(store.alloc(x)) as Var<VStore>; let propagator = make_prop(x, Box::new(Constant::new(c)) as Var<VStore>); test_propagation(id, propagator, &mut store, before, after, expected, propagate_success); } }
use term::ops::*; use model::*; use kernel::*; use propagation::events::*; use gcollections::ops::*; use gcollections::*; use std::fmt::Debug; #[derive(Clone, Debug)] pub struct Constant<V> { value: V } impl<V> Constant<V> { pub fn new(value: V) -> Constant<V> { Constant { value: value } } } impl<V> DisplayStateful<Model> for Constant<V> where V: Debug { fn display(&self, _model: &Model) { print!("{:?}", self.value); } } impl<V, Domain, VStore> StoreMonotonicUpdate<VStore> for Constant<V> where VStore: Collection<Item=Domain>, Domain: Collection<Item=V> + Cardinality + Contains { fn update(&mut self, _store: &mut VStore, value: VStore::Item) -> bool { !value.is_empty() && value.contains(&self.value) } } impl<V, Domain, VStore> StoreRead<VStore> for Constant<V> where VStore: Collection<Item=Domain>, Domain: Collection<Item=V> + Singleton, V: Clone { fn read(&self, _store: &VStore) -> Domain { Domain::singleton(self.value.clone()) } } impl<V> ViewDependencies<FDEvent> for Constant<V> { fn dependencies(&self, _event: FDEvent) -> Vec<(usize, FDEvent)> { vec![] } } #[cfg(test)] mod test { use super::*; use trilean::SKleene; use trilean::SKleene::*; use propagation::*; use propagation::events::FDEvent; use propagation::events::FDEvent::*; use concept::*; use variable::VStoreFD; use propagators::test::*; use propagators::cmp::*; use interval::interval::*; type VStore = VStoreFD; #[test] fn x_less_constant() { let dom0_10 = (0,10).to_interval(); let dom0_4 = (0,4).to_interval(); let mut store = VStore::empty(); let x = Box::new(store.alloc(dom0_10)) as Var<VStore>; let c = Box::new(Constant::new(5 as i32)) as Var<VStore>; let x_less_c = XLessY::new(x.bclone(), c); test_propagation(1, x_less_c, &mut store, Unknown, True, vec![(0, Bound)], true); assert_eq!(x.read(&store), dom0_4); } #[test] fn unary_propagator_test() { let dom0_10 = (0,10).to_interval(); let dom0_0 = (0,0).to_interval(); unary_propagator_test_one(1, dom0_10, 0, XLessY::new, False, False, vec![], false); 
unary_propagator_test_one(2, dom0_10, 11, XLessY::new, True, True, vec![], true); unary_propagator_test_one(3, dom0_10, 10, XLessY::new, Unknown, True, vec![(0, Bound)], true); unary_propagator_test_one(4, dom0_10, -1, x_leq_y, False, False, vec![], false); unary_propagator_test_one(5, dom0_10, 10, x_leq_y, True, True, vec![], true); unary_propagator_test_one(6, dom0_10, 9, x_leq_y, Unknown, True, vec![(0, Bound)], true); unary_propagator_test_one(7, dom0_10, 10, x_greater_y, False, False, vec![], false); unary_propagator_test_one(8, dom0_10, -1, x_greater_y, True, True, vec![], true); unary_propagator_test_one(9, dom0_10, 0, x_greater_y, Unknown, True, vec![(0, Bound)], true); unary_propagator_test_one(10, dom0_10, 11, x_geq_y, False, False, vec![], false); unary_propagator_test_one(11, dom0_10, 0, x_geq_y, True, True, vec![], true); unary_propagator_test_one(12, dom0_10, 1, x_geq_y, Unknown, True, vec![(0, Bound)], true); unary_propagator_test_one(13, dom0_0, 0, XNeqY::new, False, False, vec![], false); unary_propagator_test_one(14, dom0_10, 5, XNeqY::new, Unknown, Unknown, vec![], true); unary_propagator_test_one(15, dom0_10, 0, XNeqY::new, Unknown, True, vec![(0, Bound)], true); unary_propagator_test_one(16, dom0_10, 10, XNeqY::new, Unknown, True, vec![(0, Bound)], true); }
}
fn unary_propagator_test_one<P, R>(id: u32, x: Interval<i32>, c: i32, make_prop: P, before: SKleene, after: SKleene, expected: Vec<(usize, FDEvent)>, propagate_success: bool) where P: FnOnce(FDVar, FDVar) -> R, R: PropagatorConcept<VStoreFD, FDEvent> { let mut store = VStore::empty(); let x = Box::new(store.alloc(x)) as Var<VStore>; let propagator = make_prop(x, Box::new(Constant::new(c)) as Var<VStore>); test_propagation(id, propagator, &mut store, before, after, expected, propagate_success); }
function_block-full_function
[ { "content": "pub fn x_leq_y<VStore, Domain, Bound>(x: Var<VStore>, y: Var<VStore>) -> XLessEqY<VStore> where\n\n VStore: VStoreConcept<Item=Domain> + 'static,\n\n Domain: Collection<Item=Bound> + IntDomain,\n\n Bound: IntBound\n\n{\n\n XLessY::new(x, Box::new(Addition::new(y, Bound::one())))\n\n}\n\n\n", ...
Rust
src/state.rs
macfadyen/sailfish
44752a6769a2a7566a90dd9c8df21d4e2c49d720
use crate::cmdline::CommandLine; use crate::error; use crate::{Mesh, Patch, PointMass, Setup}; use std::fs::{create_dir_all, File}; use std::io::prelude::*; use std::io::Write; #[derive(Debug, Clone, Copy)] pub enum Recurrence { Linear(f64), Log(f64), } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct RecurringTask { pub number: u64, pub last_time: Option<f64>, } impl Default for RecurringTask { fn default() -> Self { Self::new() } } impl RecurringTask { pub fn new() -> Self { Self { number: 0, last_time: None, } } pub fn next(&mut self, current_time: f64, recurrence: Recurrence) { self.last_time = Some(self.next_time(current_time, recurrence)); self.number += 1; } pub fn next_time(&self, current_time: f64, recurrence: Recurrence) -> f64 { if let Some(last_time) = self.last_time { match recurrence { Recurrence::Linear(interval) => last_time + interval, Recurrence::Log(multiplier) => last_time * (1.0 + multiplier), } } else { current_time } } pub fn is_due(&self, current_time: f64, recurrence: Recurrence) -> bool { current_time >= self.next_time(current_time, recurrence) } } #[derive(Clone, serde::Serialize, serde::Deserialize)] pub struct State { pub command_line: CommandLine, pub restart_file: Option<String>, pub mesh: Mesh, pub setup_name: String, pub parameters: String, pub primitive: Vec<f64>, pub primitive_patches: Vec<Patch>, pub time: f64, pub iteration: u64, pub checkpoint: RecurringTask, #[serde(default)] pub time_series: RecurringTask, #[serde(default)] pub masses: Vec<PointMass>, #[serde(default)] pub time_series_data: Vec<Vec<f64>>, #[serde(default)] pub version: String, } impl State { pub fn from_checkpoint( filename: &str, new_parameters: &str, command_line: &CommandLine, ) -> Result<State, error::Error> { println!("read {}", filename); let mut f = File::open(filename).map_err(error::Error::IOError)?; let mut bytes = Vec::new(); f.read_to_end(&mut bytes).map_err(error::Error::IOError)?; let mut state: State = 
rmp_serde::from_read_ref(&bytes) .map_err(|e| error::Error::InvalidCheckpoint(format!("{}", e)))?; if !state.parameters.is_empty() && !new_parameters.is_empty() { state.parameters += ":"; } state.parameters += new_parameters; state.restart_file = Some(filename.to_string()); state.command_line.update(&command_line)?; state.version = crate::sailfish_version(); Ok(state) } pub fn set_primitive(&mut self, primitive: Vec<f64>) { assert!( primitive.len() == self.primitive.len(), "new and old primitive array sizes must match" ); self.primitive = primitive; } pub fn write_checkpoint( &mut self, setup: &dyn Setup, outdir: &str, ) -> Result<(), error::Error> { let filename = format!("{}/chkpt.{:04}.sf", outdir, self.checkpoint.number); println!("write {}", filename); self.masses = setup.masses(self.time).to_vec(); self.parameters = setup.model_parameter_string(); self.checkpoint .next(self.time, self.command_line.checkpoint_rule(setup)); create_dir_all(outdir).map_err(error::Error::IOError)?; let bytes = rmp_serde::to_vec_named(self).unwrap(); let mut file = File::create(&filename).map_err(error::Error::IOError)?; file.write_all(&bytes).map_err(error::Error::IOError)?; Ok(()) } pub fn upsample(mut self) -> Self { println!("upsample grid resolution"); let mut mesh = match self.mesh { Mesh::Structured(ref mut mesh) => mesh, _ => panic!("can only upsample structured mesh"), }; for patch in &mut self.primitive_patches { patch.upsample_mut() } mesh.ni *= 2; mesh.nj *= 2; mesh.dx *= 0.5; mesh.dy *= 0.5; self } }
use crate::cmdline::CommandLine; use crate::error; use crate::{Mesh, Patch, PointMass, Setup}; use std::fs::{create_dir_all, File}; use std::io::prelude::*; use std::io::Write; #[derive(Debug, Clone, Copy)] pub enum Recurrence { Linear(f64), Log(f64), } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct RecurringTask { pub number: u64, pub last_time: Option<f64>, } impl Default for RecurringTask { fn default() -> Self { Self::new() } } impl RecurringTask { pub fn new() -> Self { Self { number: 0, last_time: None, } } pub fn next(&mut self, current_time: f64, recurrence: Recurrence) { self.last_time = Some(self.next_time(current_time, recurrence)); self.number += 1; } pub fn next_time(&self, current_time: f64, recurrence: Recurrence) -> f64 { if let Some(last_time) = self.last_time { match recurrence { Recurrence::Linear(interval) => last_time + interval, Recurrence::Log(multiplier) => last_time * (1.0 + multiplier), } } else { current_time } } pub fn is_due(&self, current_time: f64, recurrence: Recurrence) -> bool { current_time >= self.next_time(current_time, recurrence) } } #[derive(Clone, serde::Serialize, serde::Deserialize)] pub struct State { pub command_line: CommandLine, pub restart_file: Option<String>, pub mesh: Mesh, pub setup_name: String, pub parameters: String, pub primitive: Vec<f64>, pub primitive_patches: Vec<Patch>, pub time: f64, pub iteration: u64, pub checkpoint: RecurringTask, #[serde(default)] pub time_series: RecurringTask, #[serde(default)] pub masses: Vec<PointMass>, #[serde(default)] pub time_series_data: Vec<Vec<f64>>, #[serde(default)] pub version: String, } impl State { pub fn from_checkpoint( filename: &str, new_parameters: &str, command_line: &CommandLine, ) -> Result<State, error::Error> { println!("read {}", filename); let mut f = File::open(filename).map_err(error::Error::IOError)?; let mut bytes = Vec::new(); f.read_to_end(&mut bytes).map_err(error::Error::IOErr
pub fn set_primitive(&mut self, primitive: Vec<f64>) { assert!( primitive.len() == self.primitive.len(), "new and old primitive array sizes must match" ); self.primitive = primitive; } pub fn write_checkpoint( &mut self, setup: &dyn Setup, outdir: &str, ) -> Result<(), error::Error> { let filename = format!("{}/chkpt.{:04}.sf", outdir, self.checkpoint.number); println!("write {}", filename); self.masses = setup.masses(self.time).to_vec(); self.parameters = setup.model_parameter_string(); self.checkpoint .next(self.time, self.command_line.checkpoint_rule(setup)); create_dir_all(outdir).map_err(error::Error::IOError)?; let bytes = rmp_serde::to_vec_named(self).unwrap(); let mut file = File::create(&filename).map_err(error::Error::IOError)?; file.write_all(&bytes).map_err(error::Error::IOError)?; Ok(()) } pub fn upsample(mut self) -> Self { println!("upsample grid resolution"); let mut mesh = match self.mesh { Mesh::Structured(ref mut mesh) => mesh, _ => panic!("can only upsample structured mesh"), }; for patch in &mut self.primitive_patches { patch.upsample_mut() } mesh.ni *= 2; mesh.nj *= 2; mesh.dx *= 0.5; mesh.dy *= 0.5; self } }
or)?; let mut state: State = rmp_serde::from_read_ref(&bytes) .map_err(|e| error::Error::InvalidCheckpoint(format!("{}", e)))?; if !state.parameters.is_empty() && !new_parameters.is_empty() { state.parameters += ":"; } state.parameters += new_parameters; state.restart_file = Some(filename.to_string()); state.command_line.update(&command_line)?; state.version = crate::sailfish_version(); Ok(state) }
function_block-function_prefixed
[ { "content": "/// Tries to construct a dynamic setup from a string key and model parameter\n\n/// string.\n\n///\n\n/// The result is put under `Arc` so it can be attached to solver instances\n\n/// and shared safely between threads. If no setup matches the given name, a\n\n/// `PrintUserInformation` error is r...
Rust
tests/mixins.rs
redzic/grass
37c1ada66418fdbb87968ede91efb9be83a80afa
#![cfg(test)] #[macro_use] mod macros; test!( basic_mixin, "@mixin a {\n color: red;\n}\n\nb {\n @include a;\n}\n", "b {\n color: red;\n}\n" ); test!(empty_mixin, "@mixin a {}\n\nb {\n @include a;\n}\n", ""); test!( just_a_comment, "@mixin foo() {\n /* begin foo */\n}\n\na {\n @include foo();\n}\n", "a {\n /* begin foo */\n}\n" ); test!( mixin_two_styles, "@mixin a {\n color: red;\n color: blue;\n}\n\nb {\n @include a;\n}\n", "b {\n color: red;\n color: blue;\n}\n" ); test!( mixin_ruleset, "@mixin a {\n b {\n color: red;\n }\n}\nb {\n @include a;\n}\n", "b b {\n color: red;\n}\n" ); test!( mixin_two_rulesets, "@mixin a {\n b {\n color: red;\n }\n c {\n color: blue;\n }\n}\nd {\n @include a;\n}\n", "d b {\n color: red;\n}\nd c {\n color: blue;\n}\n" ); test!( mixin_ruleset_and_style, "@mixin a {\n b {\n color: red;\n }\n color: blue;\n}\nd {\n @include a;\n}\n", "d {\n color: blue;\n}\nd b {\n color: red;\n}\n" ); test!( mixin_style_and_ruleset, "@mixin a {\n color: blue;\n b {\n color: red;\n}\n}\nd {\n @include a;\n}\n", "d {\n color: blue;\n}\nd b {\n color: red;\n}\n" ); test!( mixin_nested_rulesets, "@mixin a {\n b {\n c {\n color: red;\n}\n}\n}\nd {\n @include a;\n}\n", "d b c {\n color: red;\n}\n" ); test!( mixin_removes_empty_ruleset, "@mixin a {\n color: red; b {\n}\n}\nd {\n @include a;\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_variable_scope_one_ruleset, "@mixin a {\n $a: blue;\nb {\n $a: red;\n} color: $a\n}\nd {\n @include a;\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_no_args, "@mixin a {\n color: red;\n}\nd {\n @include a();\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_single_arg, "@mixin a($b) {\n color: $b;\n}\nd {\n @include a(red);\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_two_args, "@mixin a($b, $c) {\n color: $b;\n color: $c\n}\nd {\n @include a(red, blue);\n}\n", "d {\n color: red;\n color: blue;\n}\n" ); test!( mixin_arg_trailing_comma, "@mixin a($b, $c,) {\n color: $b;\n color: $c\n}\nd {\n @include a(red, blue);\n}\n", "d 
{\n color: red;\n color: blue;\n}\n" ); test!( mixin_property_interpolation, "@mixin a($b) {\n #{$b}: red;\n}\nd {\n @include a(color);\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_style_interpolation, "@mixin a($b) {\n color: #{$b};\n}\nd {\n @include a(red);\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_simple_default_value, "@mixin a($b: red) {\n color: $b;\n}\nd {\n @include a;\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_second_value_default, "@mixin a($a, $b: blue) {\n color: $a $b;\n}\nd {\n @include a(red);\n}\n", "d {\n color: red blue;\n}\n" ); test!( mixin_two_default_values, "@mixin a($a: red, $b: blue) {\n color: $a $b;\n}\nd {\n @include a;\n}\n", "d {\n color: red blue;\n}\n" ); test!( mixin_override_default_value_positionally, "@mixin a($a: red) {\n color: $a;\n}\nd {\n @include a(blue);\n}\n", "d {\n color: blue;\n}\n" ); test!( mixin_keyword_arg, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a($a: blue);\n}\n", "d {\n color: blue;\n}\n" ); test!( mixin_keyword_arg_override_default, "@mixin a($a: red) {\n color: $a;\n}\nd {\n @include a($a: blue);\n}\n", "d {\n color: blue;\n}\n" ); test!( mixin_keyword_applies_to_second_arg, "@mixin a($a: red, $b) {\n color: $a $b;\n}\nd {\n @include a($b: blue);\n}\n", "d {\n color: red blue;\n}\n" ); test!( mixin_two_keywords, "@mixin a($a, $b) {\n color: $a $b;\n}\nd {\n @include a($a: red, $b: blue);\n}\n", "d {\n color: red blue;\n}\n" ); test!( mixin_two_keywords_wrong_direction, "@mixin a($a, $b) {\n color: $a $b;\n}\nd {\n @include a($b: blue, $a: red);\n}\n", "d {\n color: red blue;\n}\n" ); test!( variable_in_call_args, "@mixin a($a) {\n color: $a;\n}\nd {\n $c: red;\n @include a($c);\n}\n", "d {\n color: red;\n}\n" ); test!( comment_before_positional_call_arg, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a(/*foo*/red);\n}\n", "d {\n color: red;\n}\n" ); test!( comment_after_positional_call_arg, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a(red/*foo*/);\n}\n", "d {\n color: red;\n}\n" ); 
test!( comment_before_keyword_call_arg_val, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a($a: /*foo*/red);\n}\n", "d {\n color: red;\n}\n" ); test!( comment_after_keyword_call_arg_val, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a($a: red/*foo*/);\n}\n", "d {\n color: red;\n}\n" ); test!( comment_before_keyword_call_arg_name, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a(/*foo*/$a: red);\n}\n", "d {\n color: red;\n}\n" ); test!( comment_after_keyword_call_arg_name, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a($a/*foo*/: red);\n}\n", "d {\n color: red;\n}\n" ); test!( toplevel_include, "@mixin a {\n a {\n color: red;\n }\n}\n\n@include a;\n", "a {\n color: red;\n}\n" ); test!( include_list, "@mixin foo($x) {\n color: $x;\n}\na {\n @include foo(0px 0px 0px 0px #ef8086 inset !important);\n}\n", "a {\n color: 0px 0px 0px 0px #ef8086 inset !important;\n}\n" ); test!( content_without_variable, "@mixin foo {\n @content;\n}\n\na {\n @include foo {\n color: red;\n }\n}\n", "a {\n color: red;\n}\n" ); test!( content_with_variable, "@mixin foo($a) {\n @content;\n}\n\na {\n @include foo(red) {\n color: red;\n }\n}\n", "a {\n color: red;\n}\n" ); test!( mixin_style_does_not_end_with_semicolon, "@mixin foo {\n color: red\n}\n\na {\n @include foo;\n}\n", "a {\n color: red;\n}\n" ); test!( args_hyphen_arg_allows_underscore, "@mixin foo($a-b) {\n color: $a-b;\n color: $a_b;\n}\na {\n @include foo($a_b: a);\n @include foo($a-b: a);\n}\n", "a {\n color: a;\n color: a;\n color: a;\n color: a;\n}\n" ); test!( args_underscore_arg_allows_hyphen, "@mixin foo($a_b) {\n color: $a-b;\n color: $a_b;\n}\na {\n @include foo($a_b: a);\n @include foo($a-b: a);\n}\n", "a {\n color: a;\n color: a;\n color: a;\n color: a;\n}\n" ); test!( control_flow_in_content, "@mixin foo {\n @content;\n}\n\na {\n @include foo {@if true {color: red;}}\n}\n", "a {\n color: red;\n}\n" ); test!( content_in_control_flow, "@mixin foo() {\n @if true {\n @content;\n }\n}\n\na {\n @include foo {\n color: 
red;\n }\n}\n", "a {\n color: red;\n}\n" ); test!( content_inside_unknown_at_rule, "@mixin foo() {\n @foo (max-width: max) {\n @content;\n }\n}\n\na {\n @include foo {\n color: red;\n }\n}\n", "@foo (max-width: max) {\n a {\n color: red;\n }\n}\n" ); test!( content_inside_media, "@mixin foo() {\n @media (max-width: max) {\n @content;\n }\n}\n\na {\n @include foo {\n color: red;\n }\n}\n", "@media (max-width: max) {\n a {\n color: red;\n }\n}\n" ); error!( function_inside_mixin, "@mixin foo() {\n @function bar() {\n @return foo;\n }\n}\n\na {\n @include foo {\n color: red;\n }\n}\n", "Error: Mixins may not contain function declarations." ); error!( content_inside_control_flow_outside_mixin, "a {\n @if true {\n @content;\n }\n}\n", "Error: @content is only allowed within mixin declarations." ); error!( undefined_mixin, "a {@include foo;}", "Error: Undefined mixin." ); error!( body_missing_closing_curly_brace, "@mixin foo() {", "Error: expected \"}\"." ); test!( include_empty_args_no_semicolon, "@mixin c {}\n\na {\n @include c()\n}\n", "" ); test!( local_variable_declared_before_mixin_is_still_in_scope, "@mixin foo {}\n\na {\n $a: red;\n @include foo;\n color: $a;\n}\n", "a {\n color: red;\n}\n" ); test!( empty_content_args, "@mixin foo { @content() } a { @include foo { color: red; }; }", "a {\n color: red;\n}\n" ); test!( empty_content_args_using_empty_args, "@mixin foo { @content() } a { @include foo using () { color: red; }; }", "a {\n color: red;\n}\n" ); test!( content_using_one_arg, "@mixin foo { @content(red) } a { @include foo using ($a) { color: $a; } }", "a {\n color: red;\n}\n" ); test!( multiple_content_using_different_args, "@mixin foo { @content(1); @content(2); } @mixin bar { @include foo using ($a) { color: $a } } a { @include bar; }", "a {\n color: 1;\n color: 2;\n}\n" ); test!( chained_content, "@mixin foo { @content; } @mixin bar { @include foo { @content; } } a { @include bar { color: red; } }", "a {\n color: red;\n}\n" ); test!( 
content_can_access_local_variables, "@mixin foo { @content; } a { $bar: red; @include foo { color: $bar; } }", "a {\n color: red;\n}\n" ); error!( content_using_too_many_args, "@mixin foo { @content(red, blue) } a { @include foo using ($a) { color: $a; } }", "Error: Only 1 argument allowed, but 2 were passed." ); error!( content_using_too_few_args, "@mixin foo { @content() } a { @include foo using ($a) { color: $a; } }", "Error: Missing argument $a." );
#![cfg(test)] #[macro_use] mod macros; test!( basic_mixin, "@mixin a {\n color: red;\n}\n\nb {\n @include a;\n}\n", "b {\n color: red;\n}\n" ); test!(empty_mixin, "@mixin a {}\n\nb {\n @include a;\n}\n", ""); test!( just_a_comment, "@mixin foo() {\n /* begin
color: red;\n }\n}\n\n@include a;\n", "a {\n color: red;\n}\n" ); test!( include_list, "@mixin foo($x) {\n color: $x;\n}\na {\n @include foo(0px 0px 0px 0px #ef8086 inset !important);\n}\n", "a {\n color: 0px 0px 0px 0px #ef8086 inset !important;\n}\n" ); test!( content_without_variable, "@mixin foo {\n @content;\n}\n\na {\n @include foo {\n color: red;\n }\n}\n", "a {\n color: red;\n}\n" ); test!( content_with_variable, "@mixin foo($a) {\n @content;\n}\n\na {\n @include foo(red) {\n color: red;\n }\n}\n", "a {\n color: red;\n}\n" ); test!( mixin_style_does_not_end_with_semicolon, "@mixin foo {\n color: red\n}\n\na {\n @include foo;\n}\n", "a {\n color: red;\n}\n" ); test!( args_hyphen_arg_allows_underscore, "@mixin foo($a-b) {\n color: $a-b;\n color: $a_b;\n}\na {\n @include foo($a_b: a);\n @include foo($a-b: a);\n}\n", "a {\n color: a;\n color: a;\n color: a;\n color: a;\n}\n" ); test!( args_underscore_arg_allows_hyphen, "@mixin foo($a_b) {\n color: $a-b;\n color: $a_b;\n}\na {\n @include foo($a_b: a);\n @include foo($a-b: a);\n}\n", "a {\n color: a;\n color: a;\n color: a;\n color: a;\n}\n" ); test!( control_flow_in_content, "@mixin foo {\n @content;\n}\n\na {\n @include foo {@if true {color: red;}}\n}\n", "a {\n color: red;\n}\n" ); test!( content_in_control_flow, "@mixin foo() {\n @if true {\n @content;\n }\n}\n\na {\n @include foo {\n color: red;\n }\n}\n", "a {\n color: red;\n}\n" ); test!( content_inside_unknown_at_rule, "@mixin foo() {\n @foo (max-width: max) {\n @content;\n }\n}\n\na {\n @include foo {\n color: red;\n }\n}\n", "@foo (max-width: max) {\n a {\n color: red;\n }\n}\n" ); test!( content_inside_media, "@mixin foo() {\n @media (max-width: max) {\n @content;\n }\n}\n\na {\n @include foo {\n color: red;\n }\n}\n", "@media (max-width: max) {\n a {\n color: red;\n }\n}\n" ); error!( function_inside_mixin, "@mixin foo() {\n @function bar() {\n @return foo;\n }\n}\n\na {\n @include foo {\n color: red;\n }\n}\n", "Error: Mixins may not contain function 
declarations." ); error!( content_inside_control_flow_outside_mixin, "a {\n @if true {\n @content;\n }\n}\n", "Error: @content is only allowed within mixin declarations." ); error!( undefined_mixin, "a {@include foo;}", "Error: Undefined mixin." ); error!( body_missing_closing_curly_brace, "@mixin foo() {", "Error: expected \"}\"." ); test!( include_empty_args_no_semicolon, "@mixin c {}\n\na {\n @include c()\n}\n", "" ); test!( local_variable_declared_before_mixin_is_still_in_scope, "@mixin foo {}\n\na {\n $a: red;\n @include foo;\n color: $a;\n}\n", "a {\n color: red;\n}\n" ); test!( empty_content_args, "@mixin foo { @content() } a { @include foo { color: red; }; }", "a {\n color: red;\n}\n" ); test!( empty_content_args_using_empty_args, "@mixin foo { @content() } a { @include foo using () { color: red; }; }", "a {\n color: red;\n}\n" ); test!( content_using_one_arg, "@mixin foo { @content(red) } a { @include foo using ($a) { color: $a; } }", "a {\n color: red;\n}\n" ); test!( multiple_content_using_different_args, "@mixin foo { @content(1); @content(2); } @mixin bar { @include foo using ($a) { color: $a } } a { @include bar; }", "a {\n color: 1;\n color: 2;\n}\n" ); test!( chained_content, "@mixin foo { @content; } @mixin bar { @include foo { @content; } } a { @include bar { color: red; } }", "a {\n color: red;\n}\n" ); test!( content_can_access_local_variables, "@mixin foo { @content; } a { $bar: red; @include foo { color: $bar; } }", "a {\n color: red;\n}\n" ); error!( content_using_too_many_args, "@mixin foo { @content(red, blue) } a { @include foo using ($a) { color: $a; } }", "Error: Only 1 argument allowed, but 2 were passed." ); error!( content_using_too_few_args, "@mixin foo { @content() } a { @include foo using ($a) { color: $a; } }", "Error: Missing argument $a." );
foo */\n}\n\na {\n @include foo();\n}\n", "a {\n /* begin foo */\n}\n" ); test!( mixin_two_styles, "@mixin a {\n color: red;\n color: blue;\n}\n\nb {\n @include a;\n}\n", "b {\n color: red;\n color: blue;\n}\n" ); test!( mixin_ruleset, "@mixin a {\n b {\n color: red;\n }\n}\nb {\n @include a;\n}\n", "b b {\n color: red;\n}\n" ); test!( mixin_two_rulesets, "@mixin a {\n b {\n color: red;\n }\n c {\n color: blue;\n }\n}\nd {\n @include a;\n}\n", "d b {\n color: red;\n}\nd c {\n color: blue;\n}\n" ); test!( mixin_ruleset_and_style, "@mixin a {\n b {\n color: red;\n }\n color: blue;\n}\nd {\n @include a;\n}\n", "d {\n color: blue;\n}\nd b {\n color: red;\n}\n" ); test!( mixin_style_and_ruleset, "@mixin a {\n color: blue;\n b {\n color: red;\n}\n}\nd {\n @include a;\n}\n", "d {\n color: blue;\n}\nd b {\n color: red;\n}\n" ); test!( mixin_nested_rulesets, "@mixin a {\n b {\n c {\n color: red;\n}\n}\n}\nd {\n @include a;\n}\n", "d b c {\n color: red;\n}\n" ); test!( mixin_removes_empty_ruleset, "@mixin a {\n color: red; b {\n}\n}\nd {\n @include a;\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_variable_scope_one_ruleset, "@mixin a {\n $a: blue;\nb {\n $a: red;\n} color: $a\n}\nd {\n @include a;\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_no_args, "@mixin a {\n color: red;\n}\nd {\n @include a();\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_single_arg, "@mixin a($b) {\n color: $b;\n}\nd {\n @include a(red);\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_two_args, "@mixin a($b, $c) {\n color: $b;\n color: $c\n}\nd {\n @include a(red, blue);\n}\n", "d {\n color: red;\n color: blue;\n}\n" ); test!( mixin_arg_trailing_comma, "@mixin a($b, $c,) {\n color: $b;\n color: $c\n}\nd {\n @include a(red, blue);\n}\n", "d {\n color: red;\n color: blue;\n}\n" ); test!( mixin_property_interpolation, "@mixin a($b) {\n #{$b}: red;\n}\nd {\n @include a(color);\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_style_interpolation, "@mixin a($b) {\n color: #{$b};\n}\nd {\n @include 
a(red);\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_simple_default_value, "@mixin a($b: red) {\n color: $b;\n}\nd {\n @include a;\n}\n", "d {\n color: red;\n}\n" ); test!( mixin_second_value_default, "@mixin a($a, $b: blue) {\n color: $a $b;\n}\nd {\n @include a(red);\n}\n", "d {\n color: red blue;\n}\n" ); test!( mixin_two_default_values, "@mixin a($a: red, $b: blue) {\n color: $a $b;\n}\nd {\n @include a;\n}\n", "d {\n color: red blue;\n}\n" ); test!( mixin_override_default_value_positionally, "@mixin a($a: red) {\n color: $a;\n}\nd {\n @include a(blue);\n}\n", "d {\n color: blue;\n}\n" ); test!( mixin_keyword_arg, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a($a: blue);\n}\n", "d {\n color: blue;\n}\n" ); test!( mixin_keyword_arg_override_default, "@mixin a($a: red) {\n color: $a;\n}\nd {\n @include a($a: blue);\n}\n", "d {\n color: blue;\n}\n" ); test!( mixin_keyword_applies_to_second_arg, "@mixin a($a: red, $b) {\n color: $a $b;\n}\nd {\n @include a($b: blue);\n}\n", "d {\n color: red blue;\n}\n" ); test!( mixin_two_keywords, "@mixin a($a, $b) {\n color: $a $b;\n}\nd {\n @include a($a: red, $b: blue);\n}\n", "d {\n color: red blue;\n}\n" ); test!( mixin_two_keywords_wrong_direction, "@mixin a($a, $b) {\n color: $a $b;\n}\nd {\n @include a($b: blue, $a: red);\n}\n", "d {\n color: red blue;\n}\n" ); test!( variable_in_call_args, "@mixin a($a) {\n color: $a;\n}\nd {\n $c: red;\n @include a($c);\n}\n", "d {\n color: red;\n}\n" ); test!( comment_before_positional_call_arg, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a(/*foo*/red);\n}\n", "d {\n color: red;\n}\n" ); test!( comment_after_positional_call_arg, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a(red/*foo*/);\n}\n", "d {\n color: red;\n}\n" ); test!( comment_before_keyword_call_arg_val, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a($a: /*foo*/red);\n}\n", "d {\n color: red;\n}\n" ); test!( comment_after_keyword_call_arg_val, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a($a: 
red/*foo*/);\n}\n", "d {\n color: red;\n}\n" ); test!( comment_before_keyword_call_arg_name, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a(/*foo*/$a: red);\n}\n", "d {\n color: red;\n}\n" ); test!( comment_after_keyword_call_arg_name, "@mixin a($a) {\n color: $a;\n}\nd {\n @include a($a/*foo*/: red);\n}\n", "d {\n color: red;\n}\n" ); test!( toplevel_include, "@mixin a {\n a {\n
random
[ { "content": "#![cfg(test)]\n\n\n\n#[macro_export]\n\nmacro_rules! test {\n\n ($( #[$attr:meta] ),*$func:ident, $input:expr) => {\n\n $(#[$attr])*\n\n #[test]\n\n #[allow(non_snake_case)]\n\n fn $func() {\n\n let sass = grass::from_string($input.to_string())\n\n ...
Rust
examples/custom-source.rs
Re3Studios/assets_manager
f6a0390aae4072f7d9e7084433f5fb585bc5fe8f
use assets_manager::{ hot_reloading::{DynUpdateSender, EventSender, FsWatcherBuilder}, source::{DirEntry, FileSystem, Source}, AssetCache, BoxedError, }; use std::{ borrow::Cow, io, path::{Path, PathBuf}, }; #[derive(Debug, Clone)] pub struct FsWithOverride { default_dir: FileSystem, override_dir: Option<FileSystem>, } impl FsWithOverride { pub fn new<P: AsRef<Path>>(default_path: P) -> io::Result<Self> { let default_dir = FileSystem::new(default_path)?; let override_dir = std::env::var_os("ASSETS_OVERRIDE").and_then(|path| { FileSystem::new(path) .map_err(|err| { log::error!("Error setting override assets directory: {}", err); }) .ok() }); Ok(Self { default_dir, override_dir, }) } pub fn path_of(&self, specifier: &str, ext: &str) -> PathBuf { self.default_dir.path_of(DirEntry::File(specifier, ext)) } } impl Source for FsWithOverride { fn read(&self, id: &str, ext: &str) -> io::Result<Cow<[u8]>> { if let Some(dir) = &self.override_dir { match dir.read(id, ext) { Ok(content) => return Ok(content), Err(err) => { if err.kind() != io::ErrorKind::NotFound { let path = dir.path_of(DirEntry::File(id, ext)); log::warn!("Error reading \"{}\": {}", path.display(), err); } } } } self.default_dir.read(id, ext) } fn read_dir(&self, id: &str, f: &mut dyn FnMut(DirEntry)) -> io::Result<()> { if let Some(dir) = &self.override_dir { match dir.read_dir(id, f) { Ok(()) => return Ok(()), Err(err) => { if err.kind() != io::ErrorKind::NotFound { let path = dir.path_of(DirEntry::Directory(id)); log::warn!("Error reading \"{}\": {}", path.display(), err); } } } } self.default_dir.read_dir(id, f) } fn exists(&self, entry: DirEntry) -> bool { self.override_dir .as_ref() .map_or(false, |dir| dir.exists(entry)) || self.default_dir.exists(entry) } fn configure_hot_reloading(&self, events: EventSender) -> Result<DynUpdateSender, BoxedError> { let mut builder = FsWatcherBuilder::new()?; if let Some(dir) = &self.override_dir { builder.watch(dir.root().to_owned())?; } 
builder.watch(self.default_dir.root().to_owned())?; Ok(builder.build(events)) } fn make_source(&self) -> Option<Box<dyn Source + Send>> { Some(Box::new(self.clone())) } } fn main() -> Result<(), BoxedError> { env_logger::builder() .filter_level(log::LevelFilter::Info) .init(); let source = FsWithOverride::new("assets")?; let cache = AssetCache::with_source(source); let msg = cache.load::<String>("example.hello")?; loop { #[cfg(feature = "hot-reloading")] cache.hot_reload(); println!("{}", msg.read()); std::thread::sleep(std::time::Duration::from_secs(1)) } }
use assets_manager::{ hot_reloading::{DynUpdateSender, EventSender, FsWatcherBuilder}, source::{DirEntry, FileSystem, Source}, AssetCache, BoxedError, }; use std::{ borrow::Cow, io, path::{Path, PathBuf}, }; #[derive(Debug, Clone)] pub struct FsWithOverride { default_dir: FileSystem, override_dir: Option<FileSystem>, } impl FsWithOverride { pub fn new<P: AsRef<Path>>(default_path: P) -> io::Result<Self> { let default_dir = FileSystem::new(default_path)?; let override_dir = std::env::var_os("ASSETS_OVERRIDE").and_then(|path| { FileSystem::new(path) .map_err(|err| { log::error!("Error setting override assets directory: {}", err); }) .ok() }); Ok(Self { default_dir, override_dir, }) } pub fn path_of(&self, specifier: &str, ext: &str) -> PathBuf { self.default_dir.path_of(DirEntry::File(specifier, ext)) } } impl Source for FsWithOverride { fn read(&self, id: &str, ext: &str) -> io::Result<Cow<[u8]>> { if let Some(dir) = &self.override_dir { match dir.read(id, ext) { Ok(content) => return Ok(content), Err(err) => { if err.kind() != io::ErrorKind::NotFound { let path = dir.path_of(DirEntry::File(id, ext)); log::warn!("Error reading \"{}\": {}", path.display(), err); } } } } self.default_dir.read(id, ext) } fn read_dir(&self, id: &str, f: &mut dyn FnMut(DirEntry)) -> io::Result<()> { if let Some(dir) = &self.override_dir { match dir.read_dir(id, f) { Ok(()) => return Ok(()), Err(err) => { if err.kind() != io::ErrorKind::NotFound { let path = dir.path_of(DirEntry::Directory(id)); log::warn!("Error reading \"{}\": {}", path.display(), err); } } } } self.default_dir.read_dir(id, f) } fn exists(&self, en
|| self.default_dir.exists(entry) } fn configure_hot_reloading(&self, events: EventSender) -> Result<DynUpdateSender, BoxedError> { let mut builder = FsWatcherBuilder::new()?; if let Some(dir) = &self.override_dir { builder.watch(dir.root().to_owned())?; } builder.watch(self.default_dir.root().to_owned())?; Ok(builder.build(events)) } fn make_source(&self) -> Option<Box<dyn Source + Send>> { Some(Box::new(self.clone())) } } fn main() -> Result<(), BoxedError> { env_logger::builder() .filter_level(log::LevelFilter::Info) .init(); let source = FsWithOverride::new("assets")?; let cache = AssetCache::with_source(source); let msg = cache.load::<String>("example.hello")?; loop { #[cfg(feature = "hot-reloading")] cache.hot_reload(); println!("{}", msg.read()); std::thread::sleep(std::time::Duration::from_secs(1)) } }
try: DirEntry) -> bool { self.override_dir .as_ref() .map_or(false, |dir| dir.exists(entry))
function_block-random_span
[ { "content": "fn load<A: Asset>(source: &dyn Source, id: &str) -> Result<Box<dyn AnyAsset>, Error> {\n\n let asset = load_from_source::<A, _>(source, id)?;\n\n Ok(Box::new(asset))\n\n}\n\n\n", "file_path": "src/key.rs", "rank": 0, "score": 240843.7802783121 }, { "content": "fn read_dir...
Rust
src/geom.rs
H2CO3/quirs
44b75e71d5578dd605bd12d88d45eb2763d3e071
use std::fmt; use util::int_to_usize; use info::Info; use quirc_sys::{ quirc_point, quirc_code, quirc_data }; use quirc_sys::{ quirc_decode, quirc_decode_error_t }; use error::{ Error, Result }; use self::quirc_decode_error_t::QUIRC_SUCCESS; #[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash)] pub struct Vec2D { pub x: usize, pub y: usize, } impl Vec2D { pub fn from_raw(p: quirc_point) -> Result<Self> { let (x, y) = (int_to_usize(p.x)?, int_to_usize(p.y)?); Ok(Vec2D { x, y }) } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Image<'a> { data: &'a [u8], size: Vec2D, } impl<'a> Image<'a> { pub fn new(data: &'a [u8], size: Vec2D) -> Result<Self> { if data.len() == size.x * size.y { Ok(Image { data, size }) } else { Err(Error::SizeMismatch) } } pub fn data(&self) -> &[u8] { self.data } pub fn width(&self) -> usize { self.size.x } pub fn height(&self) -> usize { self.size.y } } #[derive(Clone, Copy)] pub struct QrCode(quirc_code); impl QrCode { #[doc(hidden)] pub fn from_raw(raw: quirc_code) -> Result<Self> { let _ = int_to_usize(raw.size)?; for i in 0..4 { let _ = Vec2D::from_raw(raw.corners[i])?; } Ok(QrCode(raw)) } fn corner_at(&self, i: usize) -> Vec2D { Vec2D::from_raw(self.0.corners[i]).expect("invalid corner coordinates") } pub fn top_left_corner(&self) -> Vec2D { self.corner_at(0) } pub fn top_right_corner(&self) -> Vec2D { self.corner_at(1) } pub fn bottom_right_corner(&self) -> Vec2D { self.corner_at(2) } pub fn bottom_left_corner(&self) -> Vec2D { self.corner_at(3) } pub fn size(&self) -> usize { int_to_usize(self.0.size).expect("code size under- or overflows usize") } pub fn bitmap(&self) -> &[u8] { let size = self.size(); let num_bits = size * size; let num_bytes = (num_bits + 7) / 8; &self.0.cell_bitmap[..num_bytes] } pub fn get(&self, coord: Vec2D) -> Option<bool> { let size = self.size(); let Vec2D { x, y } = coord; if x < size && y < size { let i = y * size + x; let bit = self.0.cell_bitmap[i / 8] >> (i % 8) & 1; Some(bit != 0) } 
else { None } } pub fn bit_at(&self, coord: Vec2D) -> bool { self.get(coord).unwrap_or_else( || panic!("{:?} out of bounds for bitmap of size {}", coord, self.size()) ) } pub fn decode(&self) -> Result<Info> { let mut raw = quirc_data::default(); let error_code = unsafe { quirc_decode(&self.0, &mut raw) }; if error_code == QUIRC_SUCCESS { Ok(Info::from_raw(raw)) } else { Err(error_code.into()) } } } impl fmt::Debug for QrCode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("QrCode") .field("top_left_corner", &self.top_left_corner()) .field("top_right_corner", &self.top_right_corner()) .field("bottom_right_corner", &self.bottom_right_corner()) .field("bottom_left_corner", &self.bottom_left_corner()) .field("size", &self.size()) .field("bitmap", &self.bitmap()) .finish() } }
use std::fmt; use util::int_to_usize; use info::Info; use quirc_sys::{ quirc_point, quirc_code, quirc_data }; use quirc_sys::{ quirc_decode, quirc_decode_error_t }; use error::{ Error, Result }; use self::quirc_decode_error_t::QUIRC_SUCCESS; #[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash)] pub struct Vec2D { pub x: usize, pub y: usize, } impl Vec2D { pub fn from_raw(p: quirc_point) -> Result<Self> { let (x, y) = (int_to_usize(p.x)?, int_to_usize(p.y)?); Ok(Vec2D { x, y }) } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Image<'a> {
a } pub fn width(&self) -> usize { self.size.x } pub fn height(&self) -> usize { self.size.y } } #[derive(Clone, Copy)] pub struct QrCode(quirc_code); impl QrCode { #[doc(hidden)] pub fn from_raw(raw: quirc_code) -> Result<Self> { let _ = int_to_usize(raw.size)?; for i in 0..4 { let _ = Vec2D::from_raw(raw.corners[i])?; } Ok(QrCode(raw)) } fn corner_at(&self, i: usize) -> Vec2D { Vec2D::from_raw(self.0.corners[i]).expect("invalid corner coordinates") } pub fn top_left_corner(&self) -> Vec2D { self.corner_at(0) } pub fn top_right_corner(&self) -> Vec2D { self.corner_at(1) } pub fn bottom_right_corner(&self) -> Vec2D { self.corner_at(2) } pub fn bottom_left_corner(&self) -> Vec2D { self.corner_at(3) } pub fn size(&self) -> usize { int_to_usize(self.0.size).expect("code size under- or overflows usize") } pub fn bitmap(&self) -> &[u8] { let size = self.size(); let num_bits = size * size; let num_bytes = (num_bits + 7) / 8; &self.0.cell_bitmap[..num_bytes] } pub fn get(&self, coord: Vec2D) -> Option<bool> { let size = self.size(); let Vec2D { x, y } = coord; if x < size && y < size { let i = y * size + x; let bit = self.0.cell_bitmap[i / 8] >> (i % 8) & 1; Some(bit != 0) } else { None } } pub fn bit_at(&self, coord: Vec2D) -> bool { self.get(coord).unwrap_or_else( || panic!("{:?} out of bounds for bitmap of size {}", coord, self.size()) ) } pub fn decode(&self) -> Result<Info> { let mut raw = quirc_data::default(); let error_code = unsafe { quirc_decode(&self.0, &mut raw) }; if error_code == QUIRC_SUCCESS { Ok(Info::from_raw(raw)) } else { Err(error_code.into()) } } } impl fmt::Debug for QrCode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("QrCode") .field("top_left_corner", &self.top_left_corner()) .field("top_right_corner", &self.top_right_corner()) .field("bottom_right_corner", &self.bottom_right_corner()) .field("bottom_left_corner", &self.bottom_left_corner()) .field("size", &self.size()) .field("bitmap", &self.bitmap()) .finish() } }
data: &'a [u8], size: Vec2D, } impl<'a> Image<'a> { pub fn new(data: &'a [u8], size: Vec2D) -> Result<Self> { if data.len() == size.x * size.y { Ok(Image { data, size }) } else { Err(Error::SizeMismatch) } } pub fn data(&self) -> &[u8] { self.dat
random
[ { "content": "#[cfg_attr(feature = \"cargo-clippy\", allow(if_same_then_else, cast_possible_truncation, cast_possible_wrap))]\n\npub fn usize_to_int(n: usize) -> Result<c_int> {\n\n if size_of::<usize>() < size_of::<c_int>() {\n\n Ok(n as c_int)\n\n } else if n <= INT_MAX as usize {\n\n Ok(n...