lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/elements/component.rs
xarvic/panoramix
db65b4d9cc928baad25aa7bbb9a0a4b7926628ba
use crate::ctx::{CompCtx, ProcessEventCtx, ReconcileCtx}; use crate::element_tree::{Element, VirtualDom}; use crate::elements::ElementBox; use crate::glue::GlobalEventCx; use crate::metadata::{Metadata, NoState}; use derivative::Derivative; use std::fmt::Debug; pub trait Component: Debug + Clone { type Props: Clone + Debug + PartialEq + 'static; type LocalEvent: Clone + Debug + PartialEq + 'static; fn new(props: Self::Props) -> ElementBox<Self::LocalEvent>; fn name() -> &'static str; } #[derive(Derivative, Default, PartialEq, Eq, Hash)] #[derivative(Clone(bound = "Comp::Props: Clone"))] pub struct ComponentHolder< Comp: Component, ReturnedTree: Element<Event = Comp::LocalEvent>, CompFn: Clone + Fn(&CompCtx, Comp::Props) -> ReturnedTree, > { component_fn: CompFn, props: Comp::Props, _marker: std::marker::PhantomData<Comp>, } #[derive(Derivative, Hash)] #[derivative( Debug(bound = ""), Clone(bound = ""), Default(bound = "Child: Default"), PartialEq(bound = "Child: PartialEq"), Eq(bound = "Child: Eq") )] pub struct ComponentOutput< ComponentEvent: Clone + Debug + PartialEq, ComponentState: Clone + Default + Debug + PartialEq, Child: Element, > { pub child: Child, #[derivative(Debug = "ignore")] pub _metadata: Metadata<ComponentEvent, ComponentState>, } #[derive(Derivative, Hash)] #[derivative( Debug(bound = ""), Clone(bound = "Child: Clone"), Default(bound = "Child: Default"), PartialEq(bound = "Child: PartialEq"), Eq(bound = "Child: Eq") )] pub struct ComponentOutputData< ComponentEvent: Clone + Debug + PartialEq, ComponentState: Clone + Default + Debug + PartialEq, Child: VirtualDom, > { pub child: Child, #[derivative(Debug = "ignore")] pub _metadata: Metadata<ComponentEvent, ComponentState>, } impl< ComponentEvent: Clone + Debug + PartialEq, ComponentState: Clone + Default + Debug + PartialEq, Child: Element, > ComponentOutput<ComponentEvent, ComponentState, Child> { pub fn new(md: Metadata<ComponentEvent, ComponentState>, child: Child) -> Self { Self { child, 
_metadata: md, } } } impl< Comp: Component, ReturnedTree: Element<Event = Comp::LocalEvent>, CompFn: Clone + Fn(&CompCtx, Comp::Props) -> ReturnedTree, > ComponentHolder<Comp, ReturnedTree, CompFn> { pub fn new(component_fn: CompFn, props: Comp::Props) -> Self { Self { component_fn, props, _marker: Default::default(), } } } impl< Comp: Component, ReturnedTree: Element<Event = Comp::LocalEvent>, CompFn: Clone + Fn(&CompCtx, Comp::Props) -> ReturnedTree, > std::fmt::Debug for ComponentHolder<Comp, ReturnedTree, CompFn> { #[rustfmt::skip] fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_tuple(Comp::name()) .field(&self.props) .finish() } } impl< Comp: Component + 'static, ReturnedTree: Element<Event = Comp::LocalEvent>, CompFn: Clone + Fn(&CompCtx, Comp::Props) -> ReturnedTree + 'static, > Element for ComponentHolder<Comp, ReturnedTree, CompFn> { type Event = Comp::LocalEvent; type ComponentState = NoState; type AggregateChildrenState = ReturnedTree::AggregateChildrenState; type BuildOutput = ReturnedTree::BuildOutput; fn build( self, prev_state: Self::AggregateChildrenState, ) -> (Self::BuildOutput, Self::AggregateChildrenState) { let default_state = Default::default(); let local_state = ReturnedTree::get_component_state(&prev_state).unwrap_or(&default_state); let ctx = CompCtx { called_use_metadata: std::cell::Cell::new(false), local_state: local_state, }; let element_tree = (self.component_fn)(&ctx, self.props); element_tree.build(prev_state) } } impl< ComponentEvent: Clone + Debug + PartialEq + 'static, ComponentState: Clone + Default + Debug + PartialEq + 'static, Child: Element, > Element for ComponentOutput<ComponentEvent, ComponentState, Child> { type Event = ComponentEvent; type ComponentState = ComponentState; type AggregateChildrenState = ( Vec<ComponentEvent>, ComponentState, Child::AggregateChildrenState, ); type BuildOutput = ComponentOutputData<ComponentEvent, ComponentState, Child::BuildOutput>; fn build( self, prev_state: 
Self::AggregateChildrenState, ) -> (Self::BuildOutput, Self::AggregateChildrenState) { let (_, prev_local_state, children_prev_state) = prev_state; let (child, children_state) = self.child.build(children_prev_state); ( ComponentOutputData { child, _metadata: Default::default(), }, (vec![], prev_local_state, children_state), ) } fn get_component_state(state: &Self::AggregateChildrenState) -> Option<&Self::ComponentState> { Some(&state.1) } } impl< ComponentEvent: Clone + Debug + PartialEq + 'static, ComponentState: Clone + Default + Debug + PartialEq + 'static, Child: VirtualDom, > VirtualDom for ComponentOutputData<ComponentEvent, ComponentState, Child> { type Event = ComponentEvent; type AggregateChildrenState = ( Vec<ComponentEvent>, ComponentState, Child::AggregateChildrenState, ); type TargetWidgetSeq = Child::TargetWidgetSeq; fn init_tree(&self) -> Child::TargetWidgetSeq { self.child.init_tree() } fn reconcile( &self, prev_value: &Self, widget_seq: &mut Child::TargetWidgetSeq, ctx: &mut ReconcileCtx, ) { self.child.reconcile(&prev_value.child, widget_seq, ctx); } fn process_local_event( &self, children_state: &mut Self::AggregateChildrenState, _widget_seq: &mut Child::TargetWidgetSeq, _cx: &mut GlobalEventCx, ) -> Option<Self::Event> { let event_queue = &mut children_state.0; event_queue.pop() } fn process_event( &self, _comp_ctx: &mut ProcessEventCtx, children_state: &mut Self::AggregateChildrenState, widget_seq: &mut Self::TargetWidgetSeq, cx: &mut GlobalEventCx, ) { let mut ctx = ProcessEventCtx { event_queue: &mut children_state.0, state: &mut children_state.1, }; self.child .process_event(&mut ctx, &mut children_state.2, widget_seq, cx) } } #[cfg(test)] mod tests { #![allow(dead_code)] use crate as panoramix; #[derive(Debug, Default, Clone, PartialEq, Hash)] struct MyComponent; type MyPropsType = (); type MyLocalEvent = panoramix::NoEvent; impl MyComponent { fn new(props: MyPropsType) -> impl panoramix::Element<Event = MyLocalEvent> { <Self as 
panoramix::elements::component::Component>::new(props) } fn render( _ctx: &panoramix::CompCtx, _my_props: MyPropsType, ) -> impl panoramix::Element<Event = MyLocalEvent> { panoramix::elements::EmptyElement::new() } } impl panoramix::elements::component::Component for MyComponent { type Props = MyPropsType; type LocalEvent = MyLocalEvent; fn new(props: Self::Props) -> panoramix::elements::ElementBox<MyLocalEvent> { panoramix::elements::ElementBox::new(panoramix::elements::internals::ComponentHolder::< Self, _, _, >::new(&MyComponent::render, props)) } fn name() -> &'static str { "MyComponent" } } use crate::element_tree::Element; use insta::assert_debug_snapshot; use test_env_log::test; #[test] fn call_component() { let my_component = MyComponent::new(()); let (component_result, _state) = my_component.build(Default::default()); assert_debug_snapshot!(component_result); } }
use crate::ctx::{CompCtx, ProcessEventCtx, ReconcileCtx}; use crate::element_tree::{Element, VirtualDom}; use crate::elements::ElementBox; use crate::glue::GlobalEventCx; use crate::metadata::{Metadata, NoState}; use derivative::Derivative; use std::fmt::Debug; pub trait Component: Debug + Clone { type Props: Clone + Debug + PartialEq + 'static; type LocalEvent: Clone + Debug + PartialEq + 'static; fn new(props: Self::Props) -> ElementBox<Self::LocalEvent>; fn name() -> &'static str; } #[derive(Derivative, Default, PartialEq, Eq, Hash)] #[derivative(Clone(bound = "Comp::Props: Clone"))] pub struct ComponentHolder< Comp: Component, ReturnedTree: Element<Event = Comp::LocalEvent>, CompFn: Clone + Fn(&CompCtx, Comp::Props) -> ReturnedTree, > { component_fn: CompFn, props: Comp::Props, _marker: std::marker::PhantomData<Comp>, } #[derive(Derivative, Hash)] #[derivative( Debug(bound = ""), Clone(bound = ""), Default(bound = "Child: Default"), PartialEq(bound = "Child: PartialEq"), Eq(bound = "Child: Eq") )] pub struct ComponentOutput< ComponentEvent: Clone + Debug + PartialEq, ComponentState: Clone + Default + Debug + PartialEq, Child: Element, > { pub child: Child, #[derivative(Debug = "ignore")] pub _metadata: Metadata<ComponentEvent, ComponentState>, } #[derive(Derivative, Hash)] #[derivative( Debug(bound = ""), Clone(bound = "Child: Clone"), Default(bound = "Child: Default"), PartialEq(bound = "Child: PartialEq"), Eq(bound = "Child: Eq") )] pub struct ComponentOutputData< ComponentEvent: Clone + Debug + PartialEq, ComponentState: Clone + Default + Debug + PartialEq, Child: VirtualDom, > { pub child: Child, #[derivative(Debug = "ignore")] pub _metadata: Metadata<ComponentEvent, ComponentState>, } impl< ComponentEvent: Clone + Debug + PartialEq, ComponentState: Clone + Default + Debug + PartialEq, Child: Element, > ComponentOutput<ComponentEvent, ComponentState, Child> { pub fn new(md: Metadata<ComponentEvent, ComponentState>, child: Child) -> Self { Self { child, 
_metadata: md, } } } impl< Comp: Component, ReturnedTree: Element<Event = Comp::LocalEvent>, CompFn: Clone + Fn(&CompCtx, Comp::Props) -> ReturnedTree, > ComponentHolder<Comp, ReturnedTree, CompFn> { pub fn new(component_fn: CompFn, props: Comp::Props) -> Self { Self { component_fn, props, _marker: Default::default(), } } } impl< Comp: Component, ReturnedTree: Element<Event = Comp::LocalEvent>, CompFn: Clone + Fn(&CompCtx, Comp::Props) -> ReturnedTree, > std::fmt::Debug for ComponentHolder<Comp, ReturnedTree, CompFn> { #[rustfmt::skip] fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_tuple(Comp::name()) .field(&self.props) .finish() } } impl< Comp: Component + 'static, ReturnedTree: Element<Event = Comp::LocalEvent>, CompFn: Clone + Fn(&CompCtx, Comp::Props) -> ReturnedTree + 'static, > Element for ComponentHolder<Comp, ReturnedTree, CompFn> { type Event = Comp::LocalEvent; type ComponentState = NoState; type AggregateChildrenState = ReturnedTree::AggregateChildrenState; type BuildOutput = ReturnedTree::BuildOutput; fn build( self, prev_state: Self::AggregateChildrenState, ) -> (Self::BuildOutput, Self::AggregateChildrenState) { let default_state = Default::default(); let local_state = ReturnedTree::get_component_state(&prev_state).unwrap_or(&default_state); let ctx = CompCtx { called_use_metadata: std::cell::Cell::new(false), local_state: local_state, }; let element_tree = (self.component_fn)(&ctx, self.props); element_tree.build(prev_state) } } impl< ComponentEvent: Clone + Debug + PartialEq + 'static, ComponentState: Clone + Default + Debug + PartialEq + 'static, Child: Element, > Element for ComponentOutput<ComponentEvent, ComponentState, Child> { type Event = ComponentEvent; type ComponentState = ComponentState; type AggregateChildrenState = ( Vec<ComponentEvent>, ComponentState, Child::AggregateChildrenState, ); type BuildOutput = ComponentOutputData<ComponentEvent, ComponentState, Child::BuildOutput>; fn build( self, prev_state: 
Self::AggregateChildrenState, ) -> (Self::BuildOutput, Self::AggregateChildrenState) { let (_, prev_local_state, children_prev_state) = prev_state; let (child, children_state) = self.child.build(children_prev_state); ( ComponentOutputData { child, _metadata: Default::default(), }, (vec![], prev_local_state, children_state), ) } fn get_component_state(state: &Self::AggregateChildrenState) -> Option<&Self::ComponentState> { Some(&state.1) } } impl< ComponentEvent: Clone + Debug + PartialEq + 'static, ComponentState: Clone + Default + Debug + PartialEq + 'static, Child: VirtualDom, > VirtualDom for ComponentOutputData<ComponentEvent, ComponentState, Child> { type Event = ComponentEvent; type AggregateChildrenState = ( Vec<ComponentEvent>, ComponentState, Child::AggregateChildrenState, ); type TargetWidgetSeq = Child::TargetWidgetSeq; fn init_tree(&self) -> Child::TargetWidgetSeq { self.child.init_tree() }
fn process_local_event( &self, children_state: &mut Self::AggregateChildrenState, _widget_seq: &mut Child::TargetWidgetSeq, _cx: &mut GlobalEventCx, ) -> Option<Self::Event> { let event_queue = &mut children_state.0; event_queue.pop() } fn process_event( &self, _comp_ctx: &mut ProcessEventCtx, children_state: &mut Self::AggregateChildrenState, widget_seq: &mut Self::TargetWidgetSeq, cx: &mut GlobalEventCx, ) { let mut ctx = ProcessEventCtx { event_queue: &mut children_state.0, state: &mut children_state.1, }; self.child .process_event(&mut ctx, &mut children_state.2, widget_seq, cx) } } #[cfg(test)] mod tests { #![allow(dead_code)] use crate as panoramix; #[derive(Debug, Default, Clone, PartialEq, Hash)] struct MyComponent; type MyPropsType = (); type MyLocalEvent = panoramix::NoEvent; impl MyComponent { fn new(props: MyPropsType) -> impl panoramix::Element<Event = MyLocalEvent> { <Self as panoramix::elements::component::Component>::new(props) } fn render( _ctx: &panoramix::CompCtx, _my_props: MyPropsType, ) -> impl panoramix::Element<Event = MyLocalEvent> { panoramix::elements::EmptyElement::new() } } impl panoramix::elements::component::Component for MyComponent { type Props = MyPropsType; type LocalEvent = MyLocalEvent; fn new(props: Self::Props) -> panoramix::elements::ElementBox<MyLocalEvent> { panoramix::elements::ElementBox::new(panoramix::elements::internals::ComponentHolder::< Self, _, _, >::new(&MyComponent::render, props)) } fn name() -> &'static str { "MyComponent" } } use crate::element_tree::Element; use insta::assert_debug_snapshot; use test_env_log::test; #[test] fn call_component() { let my_component = MyComponent::new(()); let (component_result, _state) = my_component.build(Default::default()); assert_debug_snapshot!(component_result); } }
fn reconcile( &self, prev_value: &Self, widget_seq: &mut Child::TargetWidgetSeq, ctx: &mut ReconcileCtx, ) { self.child.reconcile(&prev_value.child, widget_seq, ctx); }
function_block-full_function
[ { "content": "#[component]\n\nfn NoComponentOutput(ctx: &CompCtx, _props: ()) -> impl Element<Event = NoEvent> {\n\n let md = ctx.use_metadata::<NoEvent, i32>();\n\n let _local_state = ctx.get_local_state(md);\n\n EmptyElement::new()\n\n}\n\n\n", "file_path": "tests/metadata.rs", "rank": 0, ...
Rust
src/bin/s7c53_expandable_messages.rs
dkull/cryptopals
8d6b723b2c7f7905755804631a2bdda4c6b2005e
extern crate cryptopals; use aes::block_cipher_trait::generic_array::GenericArray; use aes::block_cipher_trait::BlockCipher; use aes::Aes128; use cryptopals::bytes_to_hex; use std::collections::{HashMap, HashSet}; use std::convert::TryInto; const BS: usize = 16; const HS: usize = 4; fn md(m: &[u8], h: &[u8; HS]) -> Vec<u8> { /* my Merkle-Damgard uses h as key, m as message, should be flipped */ let mut padded_h = [0u8; BS]; padded_h[..HS].copy_from_slice(h); for block in m.chunks(HS) { let cipher = Aes128::new(GenericArray::from_slice(&padded_h)); let mut container = [0u8; BS]; container[..HS].copy_from_slice(&block); let mut buffer = GenericArray::clone_from_slice(&container); cipher.encrypt_block(&mut buffer); padded_h[..HS].copy_from_slice(&buffer[..HS]); } padded_h[..HS].to_vec() } fn find_collision_pair(long_len: usize, h: &[u8; HS]) -> (Vec<u8>, Vec<u8>, Vec<u8>) { let dummy_block = [0u8; HS]; let mut prefix_h = [0u8; HS]; prefix_h.copy_from_slice(h); let mut prefix: Vec<u8> = Vec::with_capacity((long_len + 1) * HS); for _ in 0..long_len - 1 { let long_hash = &md(&dummy_block, &prefix_h); prefix_h.copy_from_slice(long_hash); prefix.extend(&dummy_block); } let mut long_hashes: HashMap<Vec<u8>, Vec<u8>> = HashMap::with_capacity(2usize.pow(20)); let mut short_hashes: HashMap<Vec<u8>, Vec<u8>> = HashMap::with_capacity(2usize.pow(20)); for block in 0..u32::MAX { let block = block.to_be_bytes(); let short_hash = md(&block, &h); let long_hash = md(&block, &prefix_h); if let Some(coll_long) = long_hashes.get(&short_hash) { prefix.extend(coll_long); return (short_hash.to_vec(), block.to_vec(), prefix.to_vec()); } if let Some(coll_short) = short_hashes.get(&long_hash) { prefix.extend(&block); return (long_hash.to_vec(), coll_short.to_vec(), prefix.to_vec()); } long_hashes.insert(long_hash.to_vec(), block.to_vec()); short_hashes.insert(short_hash.to_vec(), block.to_vec()); } unreachable!(); } fn build_expandable_message(k: usize, h: &[u8; HS]) -> Vec<(Vec<u8>, Vec<u8>, 
Vec<u8>)> { let mut h_tmp = [0u8; HS]; h_tmp.copy_from_slice(h); let mut output = Vec::with_capacity(k); for i in 0..k { let i = k - i - 1; let long_len = 2usize.pow(i.try_into().unwrap()) + 1; let (hash, short, long) = find_collision_pair(long_len, &h_tmp); println!( "collision: in_hash: {} ({}, <long>[blocks:{}]) out_hash: {}", bytes_to_hex(&h_tmp), bytes_to_hex(&short), long_len, bytes_to_hex(&hash) ); h_tmp.copy_from_slice(&hash); output.push((short, long, hash)); } output } fn produce_message(expandable_msg: &[(Vec<u8>, Vec<u8>, Vec<u8>)], k: usize, L: usize) -> Vec<u8> { assert!(L >= k); let mut added_blocks = 0; let mut msg = vec![]; let t = L - k; for i in 0..k { let long_len = expandable_msg[i].1.len() / HS; println!( "k: {} i: {} need: {} added: {} -- new long {}", k, i, L, added_blocks, long_len ); let mask = 0x01 << (k - i - 1); if t & mask >= 1 { msg.extend(expandable_msg[i].1.clone()); added_blocks += long_len; } else { msg.extend(expandable_msg[i].0.clone()); added_blocks += 1; } } assert_eq!(added_blocks, L); assert_eq!(msg.len() / HS, L); msg } fn main() { eprintln!("(s7c53)"); let zero_block = [0u8; HS]; let k = 25; let exp_msg = build_expandable_message(k, &zero_block); let mut lookup = HashMap::new(); let mut last_hash = [0u8; HS]; for (short, long, hash) in &exp_msg { last_hash.copy_from_slice(hash); lookup.insert(hash, (short.clone(), long.clone())); } println!( "expandable msg has {} pairs with final hash {}", exp_msg.len(), bytes_to_hex(&last_hash) ); let target_m = cryptopals::random_key(2usize.pow(k.try_into().unwrap()) * (HS as usize)); let original_hash = md(&target_m, &zero_block); println!( "(generated random target m len: {} hash: {})", target_m.len() / HS, bytes_to_hex(&original_hash) ); println!("storing intermediate hashes of M..."); let mut init_h = [0u8; HS]; let mut intermediate_hashes = HashMap::with_capacity(target_m.len() / HS); for (i, block) in target_m.chunks(HS).enumerate() { intermediate_hashes.insert(init_h.to_vec(), 
i); let hash = md(&block, &init_h); init_h.copy_from_slice(&hash); } println!("searching for linking block..."); let mut injection_at: Option<usize> = None; let mut bridge: Option<Vec<u8>> = None; let mut m_tail: Option<Vec<u8>> = None; for block in 0..u32::MAX { let block = block.to_be_bytes(); let hash = md(&block, &last_hash); if let Some(first_usable) = intermediate_hashes.get(&hash) { println!( "found: last_hash {} + block {} => hash {}, which matches input hash ({}) of a block at M[{}]", bytes_to_hex(&last_hash), bytes_to_hex(&block), bytes_to_hex(&hash), bytes_to_hex(&hash), *first_usable, ); injection_at = Some(*first_usable); bridge = Some(block.to_vec()); m_tail = Some(target_m[(first_usable * HS)..].to_vec()); break; } } let injection_at = injection_at.expect("did not find injection point"); let bridge = bridge.expect("did not find injection point"); let m_tail = m_tail.expect("did not find injection point"); println!("first usable block found at {}", injection_at); let prefix = produce_message(&exp_msg, k, injection_at - 1); println!( "prefix last block: {}", bytes_to_hex(&prefix[prefix.len() - HS..]) ); let final_fake_m = vec![prefix, bridge, m_tail].concat(); let fake_msg_hash = md(&final_fake_m, &zero_block); println!( "final len: {} diff: {} fake hash: {}", final_fake_m.len() / HS, (final_fake_m.len() / HS) as isize - (target_m.len() / HS) as isize, bytes_to_hex(&fake_msg_hash) ); assert_eq!(fake_msg_hash, original_hash); println!("[ injection place ]"); let mut h = [0u8; HS]; h.copy_from_slice(&zero_block); for (i, block) in final_fake_m.chunks(HS).enumerate() { let hash = md(&block, &h); if (injection_at - 5 < i) && (i < injection_at + 5) { println!( "i: {} h: {} block: {} new_h: {}", i, bytes_to_hex(&h), bytes_to_hex(&block), bytes_to_hex(&hash) ); } h.copy_from_slice(&hash); } }
extern crate cryptopals; use aes::block_cipher_trait::generic_array::GenericArray; use aes::block_cipher_trait::BlockCipher; use aes::Aes128; use cryptopals::bytes_to_hex; use std::collections::{HashMap, HashSet}; use std::convert::TryInto; const BS: usize = 16; const HS: usize = 4; fn md(m: &[u8], h: &[u8; HS]) -> Vec<u8> { /* my Merkle-Damgard uses h as key, m as message, should be flipped */ let mut padded_h = [0u8; BS]; padded_h[..HS].copy_from_slice(h); for block in m.chunks(HS) { let cipher = Aes128::new(GenericArray::from_slice(&padded_h)); let mut container = [0u8; BS]; container[..HS].copy_from_slice(&block); let mut buffer = GenericArray::clone_from_slice(&container); cipher.encrypt_block(&mut buffer); padded_h[..HS].copy_from_slice(&buffer[..HS]); } padded_h[..HS].to_vec() } fn find_collision_pair(long_len: usize, h: &[u8; HS]) -> (Vec<u8>, Vec<u8>, Vec<u8>) { let dummy_block = [0u8; HS]; let mut prefix_h = [0u8; HS]; prefix_h.copy_from_slice(h); let mut prefix: Vec<u8> = Vec::with_capacity((long_len + 1) * HS); for _ in 0..long_len - 1 { let long_hash = &md(&dummy_block, &prefix_h); prefix_h.copy_from_slice(long_hash); prefix.extend(&dummy_block); } let mut long_hashes: HashMap<Vec<u8>, Vec<u8>> = HashMap::with_capacity(2usize.pow(20)); let mut short_hashes: HashMap<Vec<u8>, Vec<u8>> = HashMap::with_capacity(2usize.pow(20)); for block in 0..u32::MAX { let block = block.to_be_bytes(); let short_hash = md(&block, &h); let long_hash = md(&block, &prefix_h); if let Some(coll_long) = long_hashes.get(&short_hash) { prefix.extend(coll_long); return (short_hash.to_vec(), block.to_vec(), prefix.to_vec()); } if let Some(coll_short) = short_hashes.get(&long_hash) { prefix.extend(&block); return (long_hash.to_vec(), coll_short.to_vec(), prefix.to_vec()); } long_hashes.insert(long_hash.to_vec(), block.to_vec()); short_hashes.insert(short_hash.to_vec(), block.to_vec()); } unreachable!(); } fn build_expandable_message(k: usize, h: &[u8; HS]) -> Vec<(Vec<u8>, Vec<u8>, 
Vec<u8>)> { let mut h_tmp = [0u8; HS]; h_tmp.copy_from_slice(h); let mut output = Vec::with_capacity(k); for i in 0..k { let i = k - i - 1; let long_len = 2usize.pow(i.try_into().unwrap()) + 1; let (hash, short, long) = find_collision_pair(long_len, &h_tmp); println!( "collision: in_hash: {} ({}, <long>[blocks:{}]) out_hash: {}", bytes_to_hex(&h_tmp), bytes_to_hex(&short), long_len, bytes_to_hex(&hash) ); h_tmp.copy_from_slice(&hash); output.push((short, long, hash)); } output } fn produce_message(expandable_msg: &[(Vec<u8>, Vec<u8>, Vec<u8>)], k: usize, L: usize) -> Vec<u8> { assert
let prefix = produce_message(&exp_msg, k, injection_at - 1); println!( "prefix last block: {}", bytes_to_hex(&prefix[prefix.len() - HS..]) ); let final_fake_m = vec![prefix, bridge, m_tail].concat(); let fake_msg_hash = md(&final_fake_m, &zero_block); println!( "final len: {} diff: {} fake hash: {}", final_fake_m.len() / HS, (final_fake_m.len() / HS) as isize - (target_m.len() / HS) as isize, bytes_to_hex(&fake_msg_hash) ); assert_eq!(fake_msg_hash, original_hash); println!("[ injection place ]"); let mut h = [0u8; HS]; h.copy_from_slice(&zero_block); for (i, block) in final_fake_m.chunks(HS).enumerate() { let hash = md(&block, &h); if (injection_at - 5 < i) && (i < injection_at + 5) { println!( "i: {} h: {} block: {} new_h: {}", i, bytes_to_hex(&h), bytes_to_hex(&block), bytes_to_hex(&hash) ); } h.copy_from_slice(&hash); } }
!(L >= k); let mut added_blocks = 0; let mut msg = vec![]; let t = L - k; for i in 0..k { let long_len = expandable_msg[i].1.len() / HS; println!( "k: {} i: {} need: {} added: {} -- new long {}", k, i, L, added_blocks, long_len ); let mask = 0x01 << (k - i - 1); if t & mask >= 1 { msg.extend(expandable_msg[i].1.clone()); added_blocks += long_len; } else { msg.extend(expandable_msg[i].0.clone()); added_blocks += 1; } } assert_eq!(added_blocks, L); assert_eq!(msg.len() / HS, L); msg } fn main() { eprintln!("(s7c53)"); let zero_block = [0u8; HS]; let k = 25; let exp_msg = build_expandable_message(k, &zero_block); let mut lookup = HashMap::new(); let mut last_hash = [0u8; HS]; for (short, long, hash) in &exp_msg { last_hash.copy_from_slice(hash); lookup.insert(hash, (short.clone(), long.clone())); } println!( "expandable msg has {} pairs with final hash {}", exp_msg.len(), bytes_to_hex(&last_hash) ); let target_m = cryptopals::random_key(2usize.pow(k.try_into().unwrap()) * (HS as usize)); let original_hash = md(&target_m, &zero_block); println!( "(generated random target m len: {} hash: {})", target_m.len() / HS, bytes_to_hex(&original_hash) ); println!("storing intermediate hashes of M..."); let mut init_h = [0u8; HS]; let mut intermediate_hashes = HashMap::with_capacity(target_m.len() / HS); for (i, block) in target_m.chunks(HS).enumerate() { intermediate_hashes.insert(init_h.to_vec(), i); let hash = md(&block, &init_h); init_h.copy_from_slice(&hash); } println!("searching for linking block..."); let mut injection_at: Option<usize> = None; let mut bridge: Option<Vec<u8>> = None; let mut m_tail: Option<Vec<u8>> = None; for block in 0..u32::MAX { let block = block.to_be_bytes(); let hash = md(&block, &last_hash); if let Some(first_usable) = intermediate_hashes.get(&hash) { println!( "found: last_hash {} + block {} => hash {}, which matches input hash ({}) of a block at M[{}]", bytes_to_hex(&last_hash), bytes_to_hex(&block), bytes_to_hex(&hash), bytes_to_hex(&hash), 
*first_usable, ); injection_at = Some(*first_usable); bridge = Some(block.to_vec()); m_tail = Some(target_m[(first_usable * HS)..].to_vec()); break; } } let injection_at = injection_at.expect("did not find injection point"); let bridge = bridge.expect("did not find injection point"); let m_tail = m_tail.expect("did not find injection point"); println!("first usable block found at {}", injection_at);
random
[ { "content": "// s2c9\n\npub fn pkcs7_padding(data: &mut Vec<u8>, block_size: usize) {\n\n let missing_bytes = block_size - (data.len() % block_size);\n\n (0..missing_bytes).for_each(|_| data.push(missing_bytes as u8));\n\n}\n", "file_path": "src/block_ciphers.rs", "rank": 0, "score": 150182.5...
Rust
src/command/mod.rs
Shuenhoy/gluon_language-server
d7322580afcd55f1c124178d126aea83704a3bf7
use std::fmt; use crate::completion::{CompletionSymbol, CompletionSymbolContent}; use crate::either; use gluon::{ self, base::{ ast::{Expr, SpannedExpr}, filename_to_module, kind::ArcKind, pos::{BytePos, Spanned}, symbol::Symbol, types::{ArcType, BuiltinType, Type}, }, import::Import, RootedThread, Thread, ThreadExt, }; use { codespan_lsp::{byte_span_to_range, position_to_byte_index}, futures::prelude::*, jsonrpc_core::IoHandler, languageserver_types::{ CompletionItemKind, DocumentSymbol, Documentation, Location, MarkupContent, MarkupKind, Position, SymbolInformation, SymbolKind, }, url::Url, }; use crate::{ check_importer::{CheckImporter, Module}, name::strip_file_prefix_with_thread, rpc::ServerError, server::Handler, }; pub mod completion; pub mod definition; pub mod document_highlight; pub mod document_symbols; pub mod formatting; pub mod hover; pub mod initialize; pub mod signature_help; pub mod symbol; fn type_to_completion_item_kind(typ: &ArcType) -> CompletionItemKind { match **typ { _ if typ.as_function().is_some() => CompletionItemKind::Function, Type::Alias(ref alias) => type_to_completion_item_kind(alias.unresolved_type()), Type::App(ref f, _) => type_to_completion_item_kind(f), Type::Variant(_) => CompletionItemKind::Enum, Type::Record(_) => CompletionItemKind::Module, _ => CompletionItemKind::Variable, } } fn ident_to_completion_item_kind( id: &str, typ_or_kind: either::Either<&ArcKind, &ArcType>, ) -> CompletionItemKind { match typ_or_kind { either::Either::Left(_) => CompletionItemKind::Class, either::Either::Right(typ) => { if id.starts_with(char::is_uppercase) { CompletionItemKind::Constructor } else { type_to_completion_item_kind(typ) } } } } fn make_documentation<T>(typ: Option<T>, comment: &str) -> Documentation where T: fmt::Display, { use std::fmt::Write; let mut value = String::new(); if let Some(typ) = typ { write!(value, "```gluon\n{}\n```\n", typ).unwrap(); } value.push_str(comment); Documentation::MarkupContent(MarkupContent { kind: 
MarkupKind::Markdown, value, }) } fn completion_symbol_kind(symbol: &CompletionSymbol<'_, '_>) -> SymbolKind { match symbol.content { CompletionSymbolContent::Type { .. } => SymbolKind::Class, CompletionSymbolContent::Value { typ, expr } => expr_to_kind(expr, typ), } } fn completion_symbol_to_document_symbol( source: &codespan::FileMap, symbol: &Spanned<CompletionSymbol<'_, '_>, BytePos>, ) -> Result<DocumentSymbol, ServerError<()>> { let kind = completion_symbol_kind(&symbol.value); let range = byte_span_to_range(source, symbol.span)?; Ok(DocumentSymbol { kind, range, selection_range: range, name: symbol.value.name.declared_name().to_string(), detail: Some(match &symbol.value.content { CompletionSymbolContent::Type { alias } => alias.unresolved_type().to_string(), CompletionSymbolContent::Value { typ, .. } => typ.to_string(), }), deprecated: Default::default(), children: if symbol.value.children.is_empty() { None } else { Some( symbol .value .children .iter() .map(|child| completion_symbol_to_document_symbol(source, child)) .collect::<Result<_, _>>()?, ) }, }) } fn completion_symbol_to_symbol_information( source: &codespan::FileMap, symbol: Spanned<CompletionSymbol<'_, '_>, BytePos>, uri: Url, ) -> Result<SymbolInformation, ServerError<()>> { let kind = completion_symbol_kind(&symbol.value); Ok(SymbolInformation { kind, location: Location { uri, range: byte_span_to_range(source, symbol.span)?, }, name: symbol.value.name.declared_name().to_string(), container_name: None, deprecated: Default::default(), }) } fn expr_to_kind(expr: &SpannedExpr<Symbol>, typ: &ArcType) -> SymbolKind { match expr.value { Expr::Ident(ref id) if id.name.declared_name().contains('.') => SymbolKind::Module, _ => type_to_kind(typ), } } fn type_to_kind(typ: &ArcType) -> SymbolKind { match **typ { _ if typ.as_function().is_some() => SymbolKind::Function, Type::Ident(ref id) if id.name.declared_name() == "Bool" => SymbolKind::Boolean, Type::Alias(ref alias) if alias.name.declared_name() == 
"Bool" => SymbolKind::Boolean, Type::Builtin(builtin) => match builtin { BuiltinType::Char | BuiltinType::String => SymbolKind::String, BuiltinType::Byte | BuiltinType::Int | BuiltinType::Float => SymbolKind::Number, BuiltinType::Array => SymbolKind::Array, BuiltinType::Function => SymbolKind::Function, }, _ => SymbolKind::Variable, } } async fn retrieve_expr<F, R>( thread: &Thread, text_document_uri: &Url, f: F, ) -> Result<R, ServerError<()>> where F: FnOnce(&Module) -> Result<R, ServerError<()>>, { let filename = strip_file_prefix_with_thread(&thread, &text_document_uri); let module = filename_to_module(&filename); let import = thread.get_macros().get("import").expect("Import macro"); let import = import .downcast_ref::<Import<CheckImporter>>() .expect("Check importer"); match import.importer.module(&thread, &module).await { Some(ref source_module) => return f(source_module), None => (), } Err(ServerError { message: { let m = import.importer.0.lock().await; format!( "Module `{}` is not defined\n{:?}", module, m.keys().collect::<Vec<_>>() ) }, data: None, }) } async fn retrieve_expr_with_pos<F, R>( thread: &Thread, text_document_uri: &Url, position: &Position, f: F, ) -> Result<R, ServerError<()>> where F: FnOnce(&Module, BytePos) -> Result<R, ServerError<()>>, { retrieve_expr(thread, text_document_uri, move |module| { let byte_index = position_to_byte_index(&module.source, position)?; f(module, byte_index) }) .await }
use std::fmt; use crate::completion::{CompletionSymbol, CompletionSymbolContent}; use crate::either; use gluon::{ self, base::{ ast::{Expr, SpannedExpr}, filename_to_module, kind::ArcKind, pos::{BytePos, Spanned}, symbol::Symbol, types::{ArcType, BuiltinType, Type}, }, import::Import, RootedThread, Thread, ThreadExt, }; use { codespan_lsp::{byte_span_to_range, position_to_byte_index}, futures::prelude::*, jsonrpc_core::IoHandler, languageserver_types::{ CompletionItemKind, DocumentSymbol, Documentation, Location, MarkupContent, MarkupKind, Position, SymbolInformation, SymbolKind, }, url::Url, }; use crate::{ check_importer::{CheckImporter, Module}, name::strip_file_prefix_with_thread, rpc::ServerError, server::Handler, }; pub mod completion; pub mod definition; pub mod document_highlight; pub mod document_symbols; pub mod formatting; pub mod hover; pub mod initialize; pub mod signature_help; pub mod symbol; fn type_to_completion_item_kind(typ: &ArcType) -> CompletionItemKind { match **typ { _ if typ.as_function().is_some() => CompletionItemKind::Function, Type::Alias(ref alias) => type_to_completion_item_kind(alias.unresolved_type()), Type::App(ref f, _) => type_to_completion_item_kind(f), Type::Variant(_) => CompletionItemKind::Enum, Type::Record(_) => CompletionItemKind::Module, _ => CompletionItemKind::Variable, } } fn ident_to_completion_item_kind( id: &str, typ_or_kind: either::Either<&ArcKind, &ArcType>, ) -> CompletionItemKind { match typ_or_kind { either::Either::Left(_) => CompletionItemKind::Class, either::Either::Right(typ) => { if id.starts_with(char::is_uppercase) { CompletionItemKind::Constructor } else { type_to_completion_item_kind(typ) } } } } fn make_documentation<T>(typ: Option<T>, comment: &str) -> Documentation where T: fmt::Display, { use std::fmt::Write; let mut value = String::new(); if let Some(typ) = typ { write!(value, "```gluon\n{}\n```\n", typ).unwrap(); } value.push_str(comment); Documentation::MarkupContent(MarkupContent { kind: 
MarkupKind::Markdown, value, }) } fn completion_symbol_kind(symbol: &CompletionSymbol<'_, '_>) -> SymbolKind { match symbol.content { CompletionSymbolContent::Type { .. } => SymbolKind::Class, CompletionSymbolContent::Value { typ, expr } => expr_to_kind(expr, typ), } } fn completion_symbol_to_document_symbol( source: &codespan::FileMap, symbol: &Spanned<CompletionSymbol<'_, '_>, BytePos>, ) -> Result<DocumentSymbol, ServerError<()>> { let kind = completion_symbol_kind(&symbol.value); let range = byte_span_to_range(source, symbol.span)?; Ok(DocumentSymbol { kind, range, selection_range: range, name: symbol.value.name.declared_name().to_string(), detail: Some(match &symbol.value.content { CompletionSymbolContent::Type { alias } => alias.unresolved_type().to_string(), CompletionSymbolContent::Value { typ, .. } => typ.to_string(), }), deprecated: Default::default(), children: if symbol.value.children.is_empty() { None } else { Some( symbol .value .children .iter() .map(|child| completion_symbol_to_document_symbol(source, child)) .collect::<Result<_, _>>()?, ) }, }) } fn completion_symbol_to_symbol_information( source: &codespan::FileMap, symbol: Spanned<CompletionSymbol<'_, '_>, BytePos>, uri: Url, ) -> Result<SymbolInformation, ServerError<()>> { let kind = completion_symbol_kind(&symbol.value); Ok(SymbolInformation { kind, location: Location { uri, range: byte_span_to_range(source, symbol.span)?, }, name: symbol.value.name.declared_name().to_string(), container_name: None, deprecated: Default::default(), }) } fn expr_to_kind(expr: &SpannedExpr<Symbol>, typ: &ArcType) -> SymbolKind { match expr.value { Expr::Ident(ref id) if id.name.declared_name().contains('.') => SymbolKind::Module, _ => type_to_kind(typ), } } fn type_to_kind(typ: &ArcType) -> SymbolKind { match **typ { _ if typ.as_function().is_some() => SymbolKind::Function, Type::Ident(ref id) if id.name.declared_name() == "Bool" => SymbolKind::Boolean, Type::Alias(ref alias) if alias.name.declared_name() == 
"Bool" => SymbolKind::Boolean, Type::Builtin(builtin) =>
, _ => SymbolKind::Variable, } } async fn retrieve_expr<F, R>( thread: &Thread, text_document_uri: &Url, f: F, ) -> Result<R, ServerError<()>> where F: FnOnce(&Module) -> Result<R, ServerError<()>>, { let filename = strip_file_prefix_with_thread(&thread, &text_document_uri); let module = filename_to_module(&filename); let import = thread.get_macros().get("import").expect("Import macro"); let import = import .downcast_ref::<Import<CheckImporter>>() .expect("Check importer"); match import.importer.module(&thread, &module).await { Some(ref source_module) => return f(source_module), None => (), } Err(ServerError { message: { let m = import.importer.0.lock().await; format!( "Module `{}` is not defined\n{:?}", module, m.keys().collect::<Vec<_>>() ) }, data: None, }) } async fn retrieve_expr_with_pos<F, R>( thread: &Thread, text_document_uri: &Url, position: &Position, f: F, ) -> Result<R, ServerError<()>> where F: FnOnce(&Module, BytePos) -> Result<R, ServerError<()>>, { retrieve_expr(thread, text_document_uri, move |module| { let byte_index = position_to_byte_index(&module.source, position)?; f(module, byte_index) }) .await }
match builtin { BuiltinType::Char | BuiltinType::String => SymbolKind::String, BuiltinType::Byte | BuiltinType::Int | BuiltinType::Float => SymbolKind::Number, BuiltinType::Array => SymbolKind::Array, BuiltinType::Function => SymbolKind::Function, }
if_condition
[ { "content": "pub fn test_url(uri: &str) -> Url {\n\n Url::from_file_path(&env::current_dir().unwrap().join(uri)).unwrap()\n\n}\n\n\n\npub async fn write_message<W, V>(writer: &mut W, value: V) -> io::Result<()>\n\nwhere\n\n W: ?Sized + AsyncWrite + Unpin,\n\n V: Serialize,\n\n{\n\n let mut vec = Ve...
Rust
src/filesystem.rs
minijackson/playfs
df28aae610d67d9eaec78df0e4f096bcb4394944
use fuse_mt::{self, DirectoryEntry, FileAttr, FileType, RequestInfo, ResultEmpty, ResultEntry, ResultOpen, ResultReaddir}; use rusqlite::{types::ToSql, Connection}; use time::Timespec; use libc::{c_int, ENOENT}; use std::ffi::OsString; use std::path::{self, Path}; use std::sync::Mutex; const DEFAULT_FILE_ATTR: FileAttr = FileAttr { size: 0, blocks: 0, atime: Timespec { sec: 1, nsec: 0 }, mtime: Timespec { sec: 1, nsec: 0 }, ctime: Timespec { sec: 1, nsec: 0 }, crtime: Timespec { sec: 1, nsec: 0 }, kind: FileType::RegularFile, perm: 0o755, nlink: 0, uid: 1000, gid: 1000, rdev: 0, flags: 0, }; pub struct Filesystem { conn: Mutex<Connection>, } pub enum PlayPath { Root, Artist(String), Album { artist: String, album: String, }, Song { artist: String, album: String, song: String, }, } fn component_to_string(component: &path::Component) -> String { String::from( match component { path::Component::Normal(comp) => comp, _ => panic!("Wut"), }.to_str() .unwrap(), ) } fn decompose_path(path: &Path) -> Result<PlayPath, ()> { let components = path.components().collect::<Vec<_>>(); match components.len() { 1 => Ok(PlayPath::Root), 2 => Ok(PlayPath::Artist(component_to_string(&components[1]))), 3 => Ok(PlayPath::Album { artist: component_to_string(&components[1]), album: component_to_string(&components[2]), }), 4 => Ok(PlayPath::Song { artist: component_to_string(&components[1]), album: component_to_string(&components[2]), song: component_to_string(&components[3]), }), _ => Err(()), } } impl Filesystem { pub fn new(db: &Path) -> Self { info!("Opening database: {}", db.display()); Filesystem { conn: Mutex::new(Connection::open(db).expect("Could not open database")), } } fn has_thing(&self, request: &'static str, params: &[&ToSql]) -> bool { let conn = self.conn.lock().unwrap(); let mut req = conn.prepare(request).unwrap(); req.query_row(params, |row| row.get(0): bool).unwrap() } fn has_artist(&self, artist: &str) -> bool { self.has_thing( "SELECT EXISTS(SELECT 1 FROM albums WHERE 
albumartist = ?);", &[&artist], ) } fn has_album(&self, artist: &str, album: &str) -> bool { self.has_thing( "SELECT EXISTS(SELECT 1 FROM albums WHERE albumartist = ? AND album = ?);", &[&artist, &album], ) } fn get_things( &self, request: &'static str, params: &[&ToSql], kind: FileType, ) -> Result<Vec<DirectoryEntry>, c_int> { let conn = self.conn.lock().unwrap(); let mut req = conn.prepare(request).unwrap(); let results = req.query_map(params, |row| DirectoryEntry { name: OsString::from((row.get(0): String).replace("/", "_")), kind, }).unwrap() .collect::<Result<Vec<_>, _>>() .map_err(|_| 0); results } fn get_artists(&self) -> Result<Vec<DirectoryEntry>, c_int> { self.get_things( "SELECT DISTINCT albumartist FROM albums", &[], FileType::Directory, ) } fn get_albums(&self, artist: &str) -> Result<Vec<DirectoryEntry>, c_int> { self.get_things( "SELECT DISTINCT album FROM albums WHERE albumartist = ?", &[&artist], FileType::Directory, ) } fn get_songs(&self, artist: &str, album: &str) -> Result<Vec<DirectoryEntry>, c_int> { self.get_things( "SELECT DISTINCT title FROM items WHERE albumartist = ? 
AND album = ?", &[&artist, &album], FileType::RegularFile, ) } } impl fuse_mt::FilesystemMT for Filesystem { fn init(&self, req: RequestInfo) -> ResultEmpty { info!("Init: {:?}", req); Ok(()) } fn opendir(&self, _req: RequestInfo, path: &Path, _flags: u32) -> ResultOpen { match decompose_path(path) { Err(_) => Err(ENOENT), Ok(PlayPath::Root) => Ok((0, 0)), Ok(PlayPath::Artist(artist)) => { if self.has_artist(&artist) { Ok((0, 0)) } else { Err(ENOENT) } } Ok(PlayPath::Album { artist, album }) => { if self.has_album(&artist, &album) { Ok((0, 0)) } else { Err(ENOENT) } } _ => Err(1), } } fn readdir(&self, _req: RequestInfo, path: &Path, _fh: u64) -> ResultReaddir { match decompose_path(path) { Err(_) => Err(ENOENT), Ok(PlayPath::Root) => self.get_artists(), Ok(PlayPath::Artist(artist)) => self.get_albums(&artist), Ok(PlayPath::Album { artist, album }) => self.get_songs(&artist, &album), _ => Err(1), } } fn getattr(&self, _req: RequestInfo, path: &Path, _fh: Option<u64>) -> ResultEntry { match decompose_path(path) { Err(_) => Err(ENOENT), Ok(PlayPath::Root) | Ok(PlayPath::Artist(_)) | Ok(PlayPath::Album { .. }) => Ok(( Timespec::new(1, 0), FileAttr { kind: FileType::Directory, ..DEFAULT_FILE_ATTR }, )), Ok(PlayPath::Song { .. }) => Ok(( Timespec::new(1, 0), DEFAULT_FILE_ATTR , )), } } }
use fuse_mt::{self, DirectoryEntry, FileAttr, FileType, RequestInfo, ResultEmpty, ResultEntry, ResultOpen, ResultReaddir}; use rusqlite::{types::ToSql, Connection}; use time::Timespec; use libc::{c_int, ENOENT}; use std::ffi::OsString; use std::path::{self, Path}; use std::sync::Mutex; const DEFAULT_FILE_ATTR: FileAttr = FileAttr { size: 0, blocks: 0, atime: Timespec { sec: 1, nsec: 0 }, mtime: Timespec { sec: 1, nsec: 0 }, ctime: Timespec { sec: 1, nsec: 0 }, crtime: Timespec { sec: 1, nsec: 0 }, kind: FileType::RegularFile, perm: 0o755, nlink: 0, uid: 1000, gid: 1000, rdev: 0, flags: 0, }; pub struct Filesystem { conn: Mutex<Connection>, } pub enum PlayPath { Root, Artist(String), Album { artist: String, album: String, }, Song { artist: String, album: String, song: String, }, } fn component_to_string(component: &path::Component) -> String { String::from( match component { path::Component::Normal(comp) => comp, _ => panic!("Wut"), }.to_str() .unwrap(), ) } fn decompose_path(path: &Path) -> Result<PlayPath, ()> { let components = path.components().collect::<Vec<_>>(); match components.len() { 1 => Ok(PlayPath::Root), 2 => Ok(PlayPath::Artist(component_to_string(&components[1]))), 3 => Ok(PlayPath::Album { artist: component_to_string(&components[1]), album: component_to_string(&components[2]), }), 4 =>
, _ => Err(()), } } impl Filesystem { pub fn new(db: &Path) -> Self { info!("Opening database: {}", db.display()); Filesystem { conn: Mutex::new(Connection::open(db).expect("Could not open database")), } } fn has_thing(&self, request: &'static str, params: &[&ToSql]) -> bool { let conn = self.conn.lock().unwrap(); let mut req = conn.prepare(request).unwrap(); req.query_row(params, |row| row.get(0): bool).unwrap() } fn has_artist(&self, artist: &str) -> bool { self.has_thing( "SELECT EXISTS(SELECT 1 FROM albums WHERE albumartist = ?);", &[&artist], ) } fn has_album(&self, artist: &str, album: &str) -> bool { self.has_thing( "SELECT EXISTS(SELECT 1 FROM albums WHERE albumartist = ? AND album = ?);", &[&artist, &album], ) } fn get_things( &self, request: &'static str, params: &[&ToSql], kind: FileType, ) -> Result<Vec<DirectoryEntry>, c_int> { let conn = self.conn.lock().unwrap(); let mut req = conn.prepare(request).unwrap(); let results = req.query_map(params, |row| DirectoryEntry { name: OsString::from((row.get(0): String).replace("/", "_")), kind, }).unwrap() .collect::<Result<Vec<_>, _>>() .map_err(|_| 0); results } fn get_artists(&self) -> Result<Vec<DirectoryEntry>, c_int> { self.get_things( "SELECT DISTINCT albumartist FROM albums", &[], FileType::Directory, ) } fn get_albums(&self, artist: &str) -> Result<Vec<DirectoryEntry>, c_int> { self.get_things( "SELECT DISTINCT album FROM albums WHERE albumartist = ?", &[&artist], FileType::Directory, ) } fn get_songs(&self, artist: &str, album: &str) -> Result<Vec<DirectoryEntry>, c_int> { self.get_things( "SELECT DISTINCT title FROM items WHERE albumartist = ? 
AND album = ?", &[&artist, &album], FileType::RegularFile, ) } } impl fuse_mt::FilesystemMT for Filesystem { fn init(&self, req: RequestInfo) -> ResultEmpty { info!("Init: {:?}", req); Ok(()) } fn opendir(&self, _req: RequestInfo, path: &Path, _flags: u32) -> ResultOpen { match decompose_path(path) { Err(_) => Err(ENOENT), Ok(PlayPath::Root) => Ok((0, 0)), Ok(PlayPath::Artist(artist)) => { if self.has_artist(&artist) { Ok((0, 0)) } else { Err(ENOENT) } } Ok(PlayPath::Album { artist, album }) => { if self.has_album(&artist, &album) { Ok((0, 0)) } else { Err(ENOENT) } } _ => Err(1), } } fn readdir(&self, _req: RequestInfo, path: &Path, _fh: u64) -> ResultReaddir { match decompose_path(path) { Err(_) => Err(ENOENT), Ok(PlayPath::Root) => self.get_artists(), Ok(PlayPath::Artist(artist)) => self.get_albums(&artist), Ok(PlayPath::Album { artist, album }) => self.get_songs(&artist, &album), _ => Err(1), } } fn getattr(&self, _req: RequestInfo, path: &Path, _fh: Option<u64>) -> ResultEntry { match decompose_path(path) { Err(_) => Err(ENOENT), Ok(PlayPath::Root) | Ok(PlayPath::Artist(_)) | Ok(PlayPath::Album { .. }) => Ok(( Timespec::new(1, 0), FileAttr { kind: FileType::Directory, ..DEFAULT_FILE_ATTR }, )), Ok(PlayPath::Song { .. }) => Ok(( Timespec::new(1, 0), DEFAULT_FILE_ATTR , )), } } }
Ok(PlayPath::Song { artist: component_to_string(&components[1]), album: component_to_string(&components[2]), song: component_to_string(&components[3]), })
call_expression
[ { "content": "fn main() {\n\n let matches = App::new(\"PlayFS\")\n\n .about(\"A fast Fuse Filesystem for Beets\")\n\n .version(\"0.1\")\n\n .author(\"Rémi NICOLE <minijackson@riseup.net>\")\n\n .arg(Arg::with_name(\"mountpoint\")\n\n .help(\"Where to mount the filesyst...
Rust
src/helpers.rs
nexxeln/license-generator
7fad4a3af9d25f8a51ac752c55672b782ac4b01e
use std::{process::Command, fs, io}; use dialoguer::{console::Style, theme::ColorfulTheme, Input, Select}; use license::LicenseContent; use crate::license; pub fn fill_content(license: &LicenseContent) { let name = get_name(); let year = get_current_year(); let body = license .body .replace("[year]", &year) .replace("[yyyy]", &year) .replace("[fullname]", &name) .replace("[name of copyright owner]", &name) .replace("<year>", &year) .replace("<name of author>", &name); match write_file("LICENSE", &body) { Ok(_) => println!( "{}", Style::new() .for_stderr() .green() .apply_to("✔ License created successfully\n Please take a look at it and make changes if necessary") ), Err(error) => println!( "{} {}", Style::new() .for_stderr() .red() .apply_to("✘ An error occured"), error ), }; } pub fn select(selections: &Vec<String>) -> String { let selection = Select::with_theme(&ColorfulTheme::default()) .with_prompt("Choose a license") .default(0) .items(&selections[..]) .interact() .unwrap(); selections[selection].clone() } fn get_git_username() -> Option<String> { let cmd = Command::new("git") .arg("config") .arg("--global") .arg("--get") .arg("user.name") .output() .expect("fail"); let res: Option<String> = match cmd.status.success() { true => Option::from(String::from_utf8_lossy(&cmd.stdout).to_string()), false => Option::from(None), }; res } fn get_name() -> String { let name: String = match get_git_username() { Some(mut name) => { if name.ends_with("\n") { name.pop(); if name.ends_with("\r") { name.pop(); } } let name: String = Input::with_theme(&ColorfulTheme::default()) .with_prompt("Enter your name") .default(name) .interact_text() .unwrap(); name } None => { let input: String = Input::with_theme(&ColorfulTheme::default()) .with_prompt("Name") .interact_text() .unwrap(); input } }; name } fn get_current_year() -> String { let year: String = Input::with_theme(&ColorfulTheme::default()) .with_prompt("Enter year") .default("2022".to_string()) .interact_text() .unwrap(); 
year } fn write_file(path: &str, content: &str) -> Result<(), io::Error> { let result = match !fs::metadata(path).is_ok() { false => { let path: String = Input::with_theme(&ColorfulTheme::default()) .with_prompt("LICENSE already exists, enter a new name else the content will be overridden!") .default(path.to_string()) .interact_text() .unwrap(); fs::write(path, content) } true => fs::write(path, content), }; result }
use std::{process::Command, fs, io}; use dialoguer::{console::Style, theme::ColorfulTheme, Input, Select}; use license::LicenseContent; use crate::license; pub fn fill_content(license: &LicenseContent) { let name = get_name(); let year = get_current_year(); let body =
ne() } fn get_git_username() -> Option<String> { let cmd = Command::new("git") .arg("config") .arg("--global") .arg("--get") .arg("user.name") .output() .expect("fail"); let res: Option<String> = match cmd.status.success() { true => Option::from(String::from_utf8_lossy(&cmd.stdout).to_string()), false => Option::from(None), }; res } fn get_name() -> String { let name: String = match get_git_username() { Some(mut name) => { if name.ends_with("\n") { name.pop(); if name.ends_with("\r") { name.pop(); } } let name: String = Input::with_theme(&ColorfulTheme::default()) .with_prompt("Enter your name") .default(name) .interact_text() .unwrap(); name } None => { let input: String = Input::with_theme(&ColorfulTheme::default()) .with_prompt("Name") .interact_text() .unwrap(); input } }; name } fn get_current_year() -> String { let year: String = Input::with_theme(&ColorfulTheme::default()) .with_prompt("Enter year") .default("2022".to_string()) .interact_text() .unwrap(); year } fn write_file(path: &str, content: &str) -> Result<(), io::Error> { let result = match !fs::metadata(path).is_ok() { false => { let path: String = Input::with_theme(&ColorfulTheme::default()) .with_prompt("LICENSE already exists, enter a new name else the content will be overridden!") .default(path.to_string()) .interact_text() .unwrap(); fs::write(path, content) } true => fs::write(path, content), }; result }
license .body .replace("[year]", &year) .replace("[yyyy]", &year) .replace("[fullname]", &name) .replace("[name of copyright owner]", &name) .replace("<year>", &year) .replace("<name of author>", &name); match write_file("LICENSE", &body) { Ok(_) => println!( "{}", Style::new() .for_stderr() .green() .apply_to("✔ License created successfully\n Please take a look at it and make changes if necessary") ), Err(error) => println!( "{} {}", Style::new() .for_stderr() .red() .apply_to("✘ An error occured"), error ), }; } pub fn select(selections: &Vec<String>) -> String { let selection = Select::with_theme(&ColorfulTheme::default()) .with_prompt("Choose a license") .default(0) .items(&selections[..]) .interact() .unwrap(); selections[selection].clo
random
[ { "content": "fn main() {\n\n // fetch licenses\n\n let licenses = license::Licenses::fetch_licenses();\n\n\n\n // select menu\n\n let license = helpers::select(&licenses.get_license_names());\n\n\n\n // get license content\n\n let license_content = &licenses.get_license_from_name(&license);\...
Rust
relayer/src/supervisor/spawn.rs
interchainio/ibc-rs
194a1714cf9dc807dd7b4e33726319293848dc58
use tracing::{error, info}; use ibc::core::{ ics02_client::client_state::{ClientState, IdentifiedAnyClientState}, ics03_connection::connection::IdentifiedConnectionEnd, ics04_channel::channel::State as ChannelState, ics24_host::identifier::ChainId, }; use crate::{ chain::{counterparty::connection_state_on_destination, handle::ChainHandle}, config::Config, object::{Channel, Client, Connection, Object, Packet, Wallet}, registry::Registry, supervisor::error::Error as SupervisorError, telemetry, worker::WorkerMap, }; use super::{ scan::{ChainScan, ChainsScan, ChannelScan, ClientScan, ConnectionScan}, Error, }; pub struct SpawnContext<'a, Chain: ChainHandle> { config: &'a Config, registry: &'a mut Registry<Chain>, workers: &'a mut WorkerMap, } impl<'a, Chain: ChainHandle> SpawnContext<'a, Chain> { pub fn new( config: &'a Config, registry: &'a mut Registry<Chain>, workers: &'a mut WorkerMap, ) -> Self { Self { config, registry, workers, } } pub fn spawn_workers(&mut self, scan: ChainsScan) { for chain_scan in scan.chains { match chain_scan { Ok(chain_scan) => self.spawn_workers_for_chain(chain_scan), Err(e) => error!("failed to spawn worker for a chain, reason: {}", e), } } } pub fn spawn_workers_for_chain(&mut self, scan: ChainScan) { let chain = match self.registry.get_or_spawn(&scan.chain_id) { Ok(chain_handle) => chain_handle, Err(e) => { error!( chain = %scan.chain_id, "skipping workers , reason: failed to spawn chain runtime with error: {}", e ); return; } }; for (_, client_scan) in scan.clients { self.spawn_workers_for_client(chain.clone(), client_scan); } telemetry!(self.spawn_wallet_worker(chain)); } pub fn spawn_wallet_worker(&mut self, chain: Chain) { let wallet_object = Object::Wallet(Wallet { chain_id: chain.id(), }); self.workers .spawn(chain.clone(), chain, &wallet_object, self.config) .then(|| { info!("spawning Wallet worker: {}", wallet_object.short_name()); }); } pub fn spawn_workers_for_client(&mut self, chain: Chain, client_scan: ClientScan) { for (_, 
connection_scan) in client_scan.connections { self.spawn_workers_for_connection(chain.clone(), &client_scan.client, connection_scan); } } pub fn spawn_workers_for_connection( &mut self, chain: Chain, client: &IdentifiedAnyClientState, connection_scan: ConnectionScan, ) { let connection_id = connection_scan.id().clone(); match self.spawn_connection_workers( chain.clone(), client.clone(), connection_scan.connection, ) { Ok(true) => info!( chain = %chain.id(), connection = %connection_id, "done spawning connection workers", ), Ok(false) => info!( chain = %chain.id(), connection = %connection_id, "no connection workers were spawn", ), Err(e) => error!( chain = %chain.id(), connection = %connection_id, "skipped connection workers, reason: {}", e ), } for (channel_id, channel_scan) in connection_scan.channels { match self.spawn_workers_for_channel(chain.clone(), client, channel_scan) { Ok(true) => info!( chain = %chain.id(), channel = %channel_id, "done spawning channel workers", ), Ok(false) => info!( chain = %chain.id(), channel = %channel_id, "no channel workers were spawned", ), Err(e) => error!( chain = %chain.id(), channel = %channel_id, "skipped channel workers, reason: {}", e ), } } } fn spawn_connection_workers( &mut self, chain: Chain, client: IdentifiedAnyClientState, connection: IdentifiedConnectionEnd, ) -> Result<bool, Error> { let config_conn_enabled = self.config.mode.connections.enabled; let counterparty_chain = self .registry .get_or_spawn(&client.client_state.chain_id()) .map_err(Error::spawn)?; let conn_state_src = connection.connection_end.state; let conn_state_dst = connection_state_on_destination(&connection, &counterparty_chain)?; info!( chain = %chain.id(), connection = %connection.connection_id, counterparty_chain = %counterparty_chain.id(), "connection is {:?}, state on destination chain is {:?}", conn_state_src, conn_state_dst ); if conn_state_src.is_open() && conn_state_dst.is_open() { info!( chain = %chain.id(), connection = 
%connection.connection_id, "connection is already open, not spawning Connection worker", ); Ok(false) } else if config_conn_enabled && !conn_state_dst.is_open() && conn_state_dst.less_or_equal_progress(conn_state_src) { let connection_object = Object::Connection(Connection { dst_chain_id: client.client_state.chain_id(), src_chain_id: chain.id(), src_connection_id: connection.connection_id, }); self.workers .spawn(chain, counterparty_chain, &connection_object, self.config) .then(|| { info!( "spawning Connection worker: {}", connection_object.short_name() ); }); Ok(true) } else { Ok(false) } } pub fn spawn_workers_for_channel( &mut self, chain: Chain, client: &IdentifiedAnyClientState, channel_scan: ChannelScan, ) -> Result<bool, Error> { let mode = &self.config.mode; let counterparty_chain = self .registry .get_or_spawn(&client.client_state.chain_id()) .map_err(SupervisorError::spawn)?; let chan_state_src = channel_scan.channel.channel_end.state; let chan_state_dst = channel_scan .counterparty .as_ref() .map_or(ChannelState::Uninitialized, |c| c.channel_end.state); info!( chain = %chain.id(), counterparty_chain = %counterparty_chain.id(), channel = %channel_scan.id(), "channel is {}, state on destination chain is {}", chan_state_src, chan_state_dst ); if (mode.clients.enabled || mode.packets.enabled) && chan_state_src.is_open() && chan_state_dst.is_open() { if mode.clients.enabled { let client_object = Object::Client(Client { dst_client_id: client.client_id.clone(), dst_chain_id: chain.id(), src_chain_id: client.client_state.chain_id(), }); self.workers .spawn( counterparty_chain.clone(), chain.clone(), &client_object, self.config, ) .then(|| info!("spawned client worker: {}", client_object.short_name())); } if mode.packets.enabled { let has_packets = || { !channel_scan .unreceived_packets_on_counterparty(&chain, &counterparty_chain) .unwrap_or_default() .is_empty() }; let has_acks = || { !channel_scan .unreceived_acknowledgements_on_counterparty(&chain, 
&counterparty_chain) .unwrap_or_default() .is_empty() }; if has_packets() || has_acks() { let path_object = Object::Packet(Packet { dst_chain_id: counterparty_chain.id(), src_chain_id: chain.id(), src_channel_id: *channel_scan.id(), src_port_id: channel_scan.channel.port_id.clone(), }); self.workers .spawn( chain.clone(), counterparty_chain.clone(), &path_object, self.config, ) .then(|| info!("spawned packet worker: {}", path_object.short_name())); } } Ok(mode.clients.enabled) } else if mode.channels.enabled && !chan_state_dst.is_open() && chan_state_dst.less_or_equal_progress(chan_state_src) { let channel_object = Object::Channel(Channel { dst_chain_id: counterparty_chain.id(), src_chain_id: chain.id(), src_channel_id: *channel_scan.id(), src_port_id: channel_scan.channel.port_id, }); self.workers .spawn(chain, counterparty_chain, &channel_object, self.config) .then(|| info!("spawned channel worker: {}", channel_object.short_name())); Ok(true) } else { Ok(false) } } pub fn shutdown_workers_for_chain(&mut self, chain_id: &ChainId) { let affected_workers = self.workers.objects_for_chain(chain_id); for object in affected_workers { self.workers.shutdown_worker(&object); } } }
use tracing::{error, info}; use ibc::core::{ ics02_client::client_state::{ClientState, IdentifiedAnyClientState}, ics03_connection::connection::IdentifiedConnectionEnd, ics04_channel::channel::State as ChannelState, ics24_host::identifier::ChainId, }; use crate::{ chain::{counterparty::connection_state_on_destination, handle::ChainHandle}, config::Config, object::{Channel, Client, Connection, Object, Packet, Wallet}, registry::Registry, supervisor::error::Error as SupervisorError, telemetry, worker::WorkerMap, }; use super::{ scan::{ChainScan, ChainsScan, ChannelScan, ClientScan, ConnectionScan}, Error, }; pub struct SpawnContext<'a, Chain: ChainHandle> { config: &'a Config, registry: &'a mut Registry<Chain>, workers: &'a mut WorkerMap, } impl<'a, Chain: ChainHandle> SpawnContext<'a, Chain> { pub fn new( config: &'a Config, registry: &'a mut Registry<Chain>, workers: &'a mut WorkerMap, ) -> Self { Self { config, registry, workers, } } pub fn spawn_workers(&mut self, scan: ChainsScan) { for chain_scan in scan.chains { match chain_scan { Ok(chain_scan) => self.spawn_workers_for_chain(chain_scan), Err(e) => error!("failed to spawn worker for a chain, reason: {}", e), } } } pub fn spawn_workers_for_chain(&mut self, scan: ChainScan) { let chain = match self.registry.get_or_spawn(&scan.chain_id) { Ok(chain_handle) => chain_handle, Err(e) => { error!( chain = %scan.chain_id, "skipping workers , reason: failed to spawn chain runtime with error: {}", e ); return; } }; for (_, client_scan) in scan.clients { self.spawn_workers_for_client(chain.clone(), client_scan); } telemetry!(self.spawn_wallet_worker(chain)); } pub fn spawn_wallet_worker(&mut self, chain: Chain) { let wallet_object = Object::Wallet(Wallet { chain_id: chain.id(), }); self.workers .spawn(chain.clone(), chain, &wallet_object, self.config) .then(|| { info!("spawning Wallet worker: {}", wallet_object.short_name()); }); } pub fn spawn_workers_for_client(&mut self, chain: Chain, client_scan: ClientScan) { for (_, 
connection_scan) in client_scan.connections { self.spawn_workers_for_connection(chain.clone(), &client_scan.client, connection_scan); } } pub fn spawn_workers_for_connection( &mut self, chain: Chain, client: &IdentifiedAnyClientState, connection_scan: ConnectionScan, ) { let connection_id = connection_scan.id().clone(); match self.spawn_connection_workers( chain.clone(), client.clone(), connection_scan.connection, ) { Ok(true) => info!( chain = %chain.id(), connection = %connection_id, "done spawning connection workers", ), Ok(false) => info!( chain = %chain.id(), connection = %connection_id, "no connection workers were spawn", ), Err(e) => error!( chain = %chain.id(), connection = %connection_id, "skipped connection workers, reason: {}", e ), } for (channel_id, channel_scan) in connection_scan.channels { match self.spawn_workers_for_channel(chain.clone(), client, channel_scan) { Ok(true) => info!( chain = %chain.id(), channel = %channel_id, "done spawning channel workers", ), Ok(false) => info!( chain = %chain.id(), channel = %channel_id, "no channel workers were spawned", ), Err(e) => error!( chain = %chain.id(), channel = %channel_id, "skipped channel workers, reason: {}", e ), } } } fn spawn_connection_workers( &mut self, chain: Chain, client: IdentifiedAnyClientState, connection: IdentifiedConnectionEnd, ) -> Result<bool, Error> { let config_conn_enabled = self.config.mode.connections.enabled; let counterparty_chain = self .registry .get_or_spawn(&client.client_state.chain_id()) .map_err(Error::spawn)?; let conn_state_src = connection.connection_end.state; let conn_state_dst = connection_state_on_destination(&connection, &counterparty_chain)?; info!( chain = %chain.id(), connection = %connection.connection_id, counterparty_chain = %counterparty_chain.id(), "connection is {:?}, state on destination chain is {:?}", conn_state_src, conn_state_dst ); if conn_state_src.is_open() && conn_state_dst.is_open() { info!( chain = %chain.id(), connection = 
%connection.connection_id, "connection is already open, not spawning Connection worker", ); Ok(false) } else if config_conn_enabled && !conn_state_dst.is_open() && conn_state_dst.less_or_equal_progress(conn_state_src) {
self.workers .spawn(chain, counterparty_chain, &connection_object, self.config) .then(|| { info!( "spawning Connection worker: {}", connection_object.short_name() ); }); Ok(true) } else { Ok(false) } } pub fn spawn_workers_for_channel( &mut self, chain: Chain, client: &IdentifiedAnyClientState, channel_scan: ChannelScan, ) -> Result<bool, Error> { let mode = &self.config.mode; let counterparty_chain = self .registry .get_or_spawn(&client.client_state.chain_id()) .map_err(SupervisorError::spawn)?; let chan_state_src = channel_scan.channel.channel_end.state; let chan_state_dst = channel_scan .counterparty .as_ref() .map_or(ChannelState::Uninitialized, |c| c.channel_end.state); info!( chain = %chain.id(), counterparty_chain = %counterparty_chain.id(), channel = %channel_scan.id(), "channel is {}, state on destination chain is {}", chan_state_src, chan_state_dst ); if (mode.clients.enabled || mode.packets.enabled) && chan_state_src.is_open() && chan_state_dst.is_open() { if mode.clients.enabled { let client_object = Object::Client(Client { dst_client_id: client.client_id.clone(), dst_chain_id: chain.id(), src_chain_id: client.client_state.chain_id(), }); self.workers .spawn( counterparty_chain.clone(), chain.clone(), &client_object, self.config, ) .then(|| info!("spawned client worker: {}", client_object.short_name())); } if mode.packets.enabled { let has_packets = || { !channel_scan .unreceived_packets_on_counterparty(&chain, &counterparty_chain) .unwrap_or_default() .is_empty() }; let has_acks = || { !channel_scan .unreceived_acknowledgements_on_counterparty(&chain, &counterparty_chain) .unwrap_or_default() .is_empty() }; if has_packets() || has_acks() { let path_object = Object::Packet(Packet { dst_chain_id: counterparty_chain.id(), src_chain_id: chain.id(), src_channel_id: *channel_scan.id(), src_port_id: channel_scan.channel.port_id.clone(), }); self.workers .spawn( chain.clone(), counterparty_chain.clone(), &path_object, self.config, ) .then(|| info!("spawned 
packet worker: {}", path_object.short_name())); } } Ok(mode.clients.enabled) } else if mode.channels.enabled && !chan_state_dst.is_open() && chan_state_dst.less_or_equal_progress(chan_state_src) { let channel_object = Object::Channel(Channel { dst_chain_id: counterparty_chain.id(), src_chain_id: chain.id(), src_channel_id: *channel_scan.id(), src_port_id: channel_scan.channel.port_id, }); self.workers .spawn(chain, counterparty_chain, &channel_object, self.config) .then(|| info!("spawned channel worker: {}", channel_object.short_name())); Ok(true) } else { Ok(false) } } pub fn shutdown_workers_for_chain(&mut self, chain_id: &ChainId) { let affected_workers = self.workers.objects_for_chain(chain_id); for object in affected_workers { self.workers.shutdown_worker(&object); } } }
let connection_object = Object::Connection(Connection { dst_chain_id: client.client_state.chain_id(), src_chain_id: chain.id(), src_connection_id: connection.connection_id, });
assignment_statement
[]
Rust
src/level.rs
djeedai/libracity
1060f0c0ebefe8b58571f61c56857e348f46e1cb
use bevy::{app::CoreStage, asset::AssetStage, prelude::*}; use crate::{ inventory::{Inventory, Slot}, serialize::{Buildables, Levels}, AppState, Cursor, Grid, RegenerateInventoryUiEvent, ResetPlateEvent, }; #[derive(Debug, Clone, PartialEq, Eq)] pub enum LoadLevel { Next, ByName(String), ByIndex(usize), } #[derive(Debug)] pub struct LoadLevelEvent(pub LoadLevel); #[derive(Debug, Component)] pub struct LevelNameText; #[derive(Debug)] pub struct Level { index: usize, name: String, } impl Level { pub fn new() -> Self { Level { index: 0, name: String::new(), } } pub fn index(&self) -> usize { self.index } pub fn name(&self) -> &str { &self.name } } fn change_level_system( mut level: ResMut<Level>, mut inventory: ResMut<Inventory>, levels: Res<Levels>, buildables: Res<Buildables>, grid: Res<Grid>, mut ev_load_level: EventReader<LoadLevelEvent>, mut query_level_name_text: Query<&mut Text, With<LevelNameText>>, mut query_cursor: Query<(&Cursor, &mut Visibility, &mut Transform)>, mut state: ResMut<State<AppState>>, mut ev_regen_ui: EventWriter<RegenerateInventoryUiEvent>, mut ev_reset_plate: EventWriter<ResetPlateEvent>, ) { if let Some(load_level_event) = ev_load_level.iter().last() { let (level_index, level_desc) = match &load_level_event.0 { LoadLevel::Next => { info!("Load level: Next"); let next_level_index = level.index() + 1; let levels = levels.levels(); if next_level_index < levels.len() { let level_desc = &levels[next_level_index]; info!("=> Next level: #{} '{}'", next_level_index, level_desc.name); (next_level_index, level_desc) } else { info!("=== THE END ==="); state.set(AppState::TheEnd).unwrap(); return; } } LoadLevel::ByName(level_name) => { info!("Load level: {}", level_name); if let Some((level_index, level_desc)) = levels .levels() .iter() .enumerate() .find(|(_, l)| l.name == *level_name) { info!("=> Level '{}': #{}", level_name, level_index); (level_index, level_desc) } else { error!( "Failed to handle LoadLevelEvent: Cannot find level '{}'.", 
level_name ); return; } } LoadLevel::ByIndex(level_index) => { info!("Load level: #{}", level_index); let level_index = *level_index; if level_index < levels.levels().len() { let level_desc = &levels.levels()[level_index]; info!("=> Level #{}: '{}'", level_index, level_desc.name); (level_index, level_desc) } else { error!( "Failed to handle LoadLevelEvent: Cannot find level #{}.", level_index ); return; } } }; *level = Level { index: level_index, name: level_desc.name.clone(), }; inventory.set_slots( level_desc .inventory .iter() .map(|(bref, &count)| Slot::new(bref.clone(), count)), ); let mut text = query_level_name_text.single_mut(); text.sections[0].value = level_desc.name.clone(); let (cursor, mut visibility, mut transform) = query_cursor.single_mut(); visibility.is_visible = true; let cursor_fpos = grid.fpos(&cursor.pos); *transform = Transform::from_translation(Vec3::new(cursor_fpos.x, 0.1, -cursor_fpos.y)) * Transform::from_scale(Vec3::new(1.0, 0.3, 1.0)); ev_regen_ui.send(RegenerateInventoryUiEvent); ev_reset_plate.send(ResetPlateEvent); } } #[derive(Debug, Hash, PartialEq, Eq, Clone, StageLabel)] pub enum LevelStage { ChangeLevel, } pub struct LevelPlugin; impl Plugin for LevelPlugin { fn build(&self, app: &mut App) { app.insert_resource(Level::new()) .add_event::<LoadLevelEvent>(); app.add_stage_before( AssetStage::LoadAssets, LevelStage::ChangeLevel, SystemStage::single_threaded(), ) .add_system_to_stage(LevelStage::ChangeLevel, change_level_system); } }
use bevy::{app::CoreStage, asset::AssetStage, prelude::*}; use crate::{ inventory::{Inventory, Slot}, serialize::{Buildables, Levels}, AppState, Cursor, Grid, RegenerateInventoryUiEvent, ResetPlateEvent, }; #[derive(Debug, Clone, PartialEq, Eq)] pub enum LoadLevel { Next, ByName(String), ByIndex(usize), } #[derive(Debug)] pub struct LoadLevelEvent(pub LoadLevel); #[derive(Debug, Component)] pub struct LevelNameText; #[derive(Debug)] pub struct Level { index: usize, name: String, } impl Level { pub fn new() -> Self { Level { index: 0, name: String::new(), } } pub fn index(&self) -> usize { self.index } pub fn name(&self) -> &str { &self.name } } fn change_level_system( mut level: ResMut<Level>, mut inventory: ResMut<Inventory>, levels: Res<Levels>, buildables: Res<Buildables>, grid: Res<Grid>, mut ev_load_level: EventReader<LoadLevelEvent>, mut query_level_name_text: Query<&mut Text, With<LevelNameText>>, mut query_cursor: Query<(&Cursor, &mut Visibility, &mut Transform)>, mut state: ResMut<State<AppState>>, mut ev_regen_ui: EventWriter<RegenerateInventoryUiEvent>, mut ev_reset_plate: EventWriter<ResetPlateEvent>, ) { if let Some(load_level_event) = ev_load_level.iter().last() { let (level_index, level_desc) = match &load_level_event.0 { LoadLevel::Next => { info!("Load level: Next"); let next_level_index = level.index() + 1; let levels = levels.levels(); if next_level_index < levels.len() { let level_desc = &levels[next_level_index]; info!("=> Next level: #{} '{}'", next_level_index, level_desc.name); (next_level_index, level_desc) } else { info!("=== THE END ==="); state.set(AppState::TheEnd).unwrap(); return; } } LoadLevel::ByName(level_name) => { info!("Load level: {}", level_name);
} LoadLevel::ByIndex(level_index) => { info!("Load level: #{}", level_index); let level_index = *level_index; if level_index < levels.levels().len() { let level_desc = &levels.levels()[level_index]; info!("=> Level #{}: '{}'", level_index, level_desc.name); (level_index, level_desc) } else { error!( "Failed to handle LoadLevelEvent: Cannot find level #{}.", level_index ); return; } } }; *level = Level { index: level_index, name: level_desc.name.clone(), }; inventory.set_slots( level_desc .inventory .iter() .map(|(bref, &count)| Slot::new(bref.clone(), count)), ); let mut text = query_level_name_text.single_mut(); text.sections[0].value = level_desc.name.clone(); let (cursor, mut visibility, mut transform) = query_cursor.single_mut(); visibility.is_visible = true; let cursor_fpos = grid.fpos(&cursor.pos); *transform = Transform::from_translation(Vec3::new(cursor_fpos.x, 0.1, -cursor_fpos.y)) * Transform::from_scale(Vec3::new(1.0, 0.3, 1.0)); ev_regen_ui.send(RegenerateInventoryUiEvent); ev_reset_plate.send(ResetPlateEvent); } } #[derive(Debug, Hash, PartialEq, Eq, Clone, StageLabel)] pub enum LevelStage { ChangeLevel, } pub struct LevelPlugin; impl Plugin for LevelPlugin { fn build(&self, app: &mut App) { app.insert_resource(Level::new()) .add_event::<LoadLevelEvent>(); app.add_stage_before( AssetStage::LoadAssets, LevelStage::ChangeLevel, SystemStage::single_threaded(), ) .add_system_to_stage(LevelStage::ChangeLevel, change_level_system); } }
if let Some((level_index, level_desc)) = levels .levels() .iter() .enumerate() .find(|(_, l)| l.name == *level_name) { info!("=> Level '{}': #{}", level_name, level_index); (level_index, level_desc) } else { error!( "Failed to handle LoadLevelEvent: Cannot find level '{}'.", level_name ); return; }
if_condition
[ { "content": "#[derive(Component)]\n\nstruct InventorySlot {\n\n /// Index of the slot in the [`Inventory`.\n\n index: u32,\n\n /// Number of items in the slot.\n\n count: u32,\n\n /// Entity owning the text with the number of items.\n\n text: Entity,\n\n}\n\n\n\nimpl InventorySlot {\n\n pu...
Rust
rust/src/statistics.rs
MindFlavor/nas_gallery
2b4515099f3f940674798544c3ee7c1dfc2330a2
use crate::file_type::FileType; use crate::options::Options; use prometheus_exporter_base::prelude::*; use rocket::State; use std::collections::HashMap; use std::sync::{Arc, RwLock}; #[inline] pub(crate) fn track_authorized_first_level_folders( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().authorized_first_level_folders += 1; } } #[inline] pub(crate) fn track_unauthorized_first_level_folders( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().unauthorized_first_level_folders += 1; } } #[inline] pub(crate) fn track_authorized_list_files( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, file_tye: FileType, ) { if options.prometheus_metrics_enabled { statistics .write() .unwrap() .inc_authorized_list_files(file_tye); } } #[inline] pub(crate) fn track_unauthorized_list_files( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, file_tye: FileType, ) { if options.prometheus_metrics_enabled { statistics .write() .unwrap() .inc_unauthorized_list_files(file_tye); } } #[inline] pub(crate) fn track_authorized_static( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, path: &str, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().inc_authorized_static(path); } } #[inline] pub(crate) fn track_authorized_dynamic( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().authorized_dynamic += 1; } } #[inline] pub(crate) fn track_unauthorized_dynamic( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().unauthorized_dynamic += 1; } } #[inline] pub(crate) fn track_picture_thumb_access( 
options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().picture_thumb_access += 1; } } #[inline] pub(crate) fn track_picture_thumb_generation( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().picture_thumb_generation += 1; } } #[inline] pub(crate) fn track_video_thumb_access( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().video_thumb_access += 1; } } #[inline] pub(crate) fn track_video_thumb_generation( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().video_thumb_generation += 1; } } #[inline] pub(crate) fn track_unauthorized_static( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, path: &str, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().inc_unathorized_static(path); } } #[inline] pub(crate) fn track_authorized_not_found( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().authorized_not_found += 1; } } #[inline] pub(crate) fn track_unauthorized_thumb( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().unauthorized_thumb += 1; } } #[inline] pub(crate) fn track_authorized_thumb( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().authorized_thumb += 1; } } #[derive(Debug)] pub struct Statistics { pub authorized_static: HashMap<String, u64>, pub unathorized_static: HashMap<String, u64>, pub unauthorized_dynamic: u64, pub 
authorized_dynamic: u64, pub authorized_not_found: u64, pub authorized_thumb: u64, pub unauthorized_thumb: u64, pub picture_thumb_access: u64, pub picture_thumb_generation: u64, pub video_thumb_access: u64, pub video_thumb_generation: u64, pub authorized_list_files: HashMap<FileType, u64>, pub unauthorized_list_files: HashMap<FileType, u64>, pub authorized_first_level_folders: u64, pub unauthorized_first_level_folders: u64, } impl Default for Statistics { fn default() -> Self { let mut authorized_list_files = HashMap::new(); authorized_list_files.insert(FileType::Preview, 0); authorized_list_files.insert(FileType::Extra, 0); authorized_list_files.insert(FileType::Folder, 0); let mut unauthorized_list_files = HashMap::new(); unauthorized_list_files.insert(FileType::Preview, 0); unauthorized_list_files.insert(FileType::Extra, 0); unauthorized_list_files.insert(FileType::Folder, 0); Self { authorized_static: HashMap::new(), unathorized_static: HashMap::new(), authorized_dynamic: 0, unauthorized_dynamic: 0, authorized_not_found: 0, authorized_thumb: 0, unauthorized_thumb: 0, picture_thumb_access: 0, picture_thumb_generation: 0, video_thumb_access: 0, video_thumb_generation: 0, authorized_list_files, unauthorized_list_files, authorized_first_level_folders: 0, unauthorized_first_level_folders: 0, } } } impl Statistics { pub(crate) fn inc_authorized_list_files(&mut self, file_type: FileType) { if let Some(original_value) = self.authorized_list_files.get_mut(&file_type) { *original_value += 1; } else { self.authorized_list_files.insert(file_type, 1); } } pub(crate) fn inc_unauthorized_list_files(&mut self, file_type: FileType) { if let Some(original_value) = self.unauthorized_list_files.get_mut(&file_type) { *original_value += 1; } else { self.unauthorized_list_files.insert(file_type, 1); } } pub(crate) fn inc_authorized_static(&mut self, page: &str) { if let Some(original_value) = self.authorized_static.get_mut(page) { *original_value += 1; } else { 
self.authorized_static.insert(page.to_owned(), 1); } } pub(crate) fn inc_unathorized_static(&mut self, page: &str) { if let Some(original_value) = self.unathorized_static.get_mut(page) { *original_value += 1; } else { self.unathorized_static.insert(page.to_owned(), 1); } } pub(crate) fn render_to_prometheus(&self) -> String { let mut s = String::new(); let mut pc = PrometheusMetric::build() .with_name("nas_gallery_authorized_access_to_static_content") .with_metric_type(MetricType::Counter) .with_help("Authorized access to static content") .build(); self.authorized_static.iter().for_each(|(key, val)| { pc.render_and_append_instance( &PrometheusInstance::new() .with_label("path", key.as_ref()) .with_value(*val), ); }); s.push_str(&pc.render()); let mut pc = PrometheusMetric::build() .with_name("nas_gallery_unauthorized_access_to_static_content") .with_metric_type(MetricType::Counter) .with_help("Unauthorized access to static content") .build(); self.unathorized_static.iter().for_each(|(key, val)| { pc.render_and_append_instance( &PrometheusInstance::new() .with_label("path", key.as_ref()) .with_value(*val), ); }); s.push_str(&pc.render()); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_authorized_access_to_dynamic_content") .with_metric_type(MetricType::Counter) .with_help("Authorized access to dynamic content") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.authorized_dynamic), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_authorized_not_found") .with_metric_type(MetricType::Counter) .with_help("Authorized access to not found content") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.authorized_not_found), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_authorized_thumb") .with_metric_type(MetricType::Counter) .with_help("Authorized access to thumbnail") .build() .render_and_append_instance( 
&PrometheusInstance::new().with_value(self.authorized_thumb), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_unauthorized_thumb") .with_metric_type(MetricType::Counter) .with_help("Authorized unaccess to thumbnail") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.unauthorized_thumb), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_picture_thumb_access") .with_metric_type(MetricType::Counter) .with_help("Authorized unaccess picute thumb") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.picture_thumb_access), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_picture_thumb_generation") .with_metric_type(MetricType::Counter) .with_help("Picture thumb generation (cache miss)") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.picture_thumb_generation), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_video_thumb_access") .with_metric_type(MetricType::Counter) .with_help("Authorized unaccess picute thumb") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.video_thumb_access), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_video_thumb_generation") .with_metric_type(MetricType::Counter) .with_help("Video thumb generation (cache miss)") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.video_thumb_generation), ) .render(), ); let mut pc = PrometheusMetric::build() .with_name("nas_gallery_authorized_list_files") .with_metric_type(MetricType::Counter) .with_help("Authorized list files") .build(); self.authorized_list_files.iter().for_each(|(key, val)| { pc.render_and_append_instance( &PrometheusInstance::new() .with_label("file_type", key.as_str()) .with_value(*val), ); }); s.push_str(&pc.render()); let mut pc = PrometheusMetric::build() 
.with_name("nas_gallery_unauthorized_list_files") .with_metric_type(MetricType::Counter) .with_help("Unauthorized list files") .build(); self.unauthorized_list_files.iter().for_each(|(key, val)| { pc.render_and_append_instance( &PrometheusInstance::new() .with_label("file_type", key.as_str()) .with_value(*val), ); }); s.push_str(&pc.render()); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_authorized_first_level_folders") .with_metric_type(MetricType::Counter) .with_help("Authorized enumeration of first level folders") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.authorized_first_level_folders), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_unauthorized_first_level_folders") .with_metric_type(MetricType::Counter) .with_help("Unauthorized enumeration of first level folders") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.unauthorized_first_level_folders), ) .render(), ); s } }
use crate::file_type::FileType; use crate::options::Options; use prometheus_exporter_base::prelude::*; use rocket::State; use std::collections::HashMap; use std::sync::{Arc, RwLock}; #[inline] pub(crate) fn track_authorized_first_level_folders( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().authorized_first_level_folders += 1; } } #[inline] pub(crate) fn track_unauthorized_first_level_folders( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().unauthorized_first_level_folders += 1; } } #[inline] pub(crate) fn track_authorized_list_files( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, file_tye: FileType, ) { if options.prometheus_metrics_enabled { statistics .write() .unwrap() .inc_authorized_list_files(file_tye); } } #[inline] pub(crate) fn track_unauthorized_list_files( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, file_tye: FileType, ) { if options.prometheus_metrics_enabled { statistics .write() .unwrap() .inc_unauthorized_list_files(file_tye); } } #[inline] pub(crate) fn track_authorized_static( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, path: &str, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().inc_authorized_static(path); } } #[inline] pub(crate) fn track_authorized_dynamic( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().authorized_dynamic += 1; } } #[inline] pub(crate) fn track_unauthorized_dynamic( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().unauthorized_dynamic += 1; } } #[inline] pub(crate) fn track_picture_thumb_access( 
options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().picture_thumb_access += 1; } } #[inline] pub(crate) fn track_picture_thumb_generation( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().picture_thumb_generation += 1; } } #[inline] pub(crate) fn track_video_thumb_access( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().video_thumb_access += 1; } } #[inline] pub(crate) fn track_video_thumb_generation( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().video_thumb_generation += 1; } } #[inline] pub(crate) fn track_unauthorized_static( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, path: &str, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().inc_unathorized_static(path); } } #[inline] pub(crate) fn track_authorized_not_found( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().authorized_not_found += 1; } } #[inline] pub(crate) fn track_unauthorized_thumb( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().unauthorized_thumb += 1; } } #[inline] pub(crate) fn track_authorized_thumb( options: &State<'_, Options>, statistics: &State<'_, Arc<RwLock<Statistics>>>, ) { if options.prometheus_metrics_enabled { statistics.write().unwrap().authorized_thumb += 1; } } #[derive(Debug)] pub struct Statistics { pub authorized_static: HashMap<String, u64>, pub unathorized_static: HashMap<String, u64>, pub unauthorized_dynamic: u64, pub 
authorized_dynamic: u64, pub authorized_not_found: u64, pub authorized_thumb: u64, pub unauthorized_thumb: u64, pub picture_thumb_access: u64, pub picture_thumb_generation: u64, pub video_thumb_access: u64, pub video_thumb_generation: u64, pub authorized_list_files: HashMap<FileType, u64>, pub unauthorized_list_files: HashMap<FileType, u64>, pub authorized_first_level_folders: u64, pub unauthorized_first_level_folders: u64, } impl Default for Statistics { fn default() -> Self { let mut authorized_list_files = HashMap::new(); authorized_list_files.insert(FileType::Preview, 0); authorized_list_files.insert(FileType::Extra, 0); authorized_list_files.insert(FileType::Folder, 0); let mut unauthorized_list_files = HashMap::new(); unauthorized_list_files.insert(FileType::Preview, 0); unauthorized_list_files.insert(FileType::Extra, 0); unauthorized_list_files.insert(FileType::Folder, 0); Self { authorized_static: HashMap::new(), unathorized_static: HashMap::new(), authorized_dynamic: 0, unauthorized_dynamic: 0, authorized_not_found: 0, authorized_thumb: 0, unauthorized_thumb: 0, picture_thumb_access: 0, picture_thumb_generation: 0, video_thumb_access: 0, video_thumb_generation: 0, authorized_list_files, unauthorized_list_files, authorized_first_level_folders: 0, unauthorized_first_level_folders: 0, } } } impl Statistics { pub(crate) fn inc_authorized_list_files(&mut self, file_type: FileType) { if let Some(original_value) = self.authorized_list_files.get_mut(&file_type) { *original_value += 1; } else { self.authorized_list_files.insert(file_type, 1); } } pub(crate) fn inc_unauthorized_list_files(&mut self, file_type: FileType) { if let Some(original_value) = self.unauthorized_list_files.get_mut(&file_type) { *original_value += 1; } else { self.unauthorized_list_files.insert(file_type, 1); } } pub(crate) fn inc_authorized_static(&mut self, page: &str) { if let Some(original_value) = self.authorized_static.get_mut(page) { *original_value += 1; } else { 
self.authorized_static.insert(page.to_owned(), 1); } } pub(crate) fn inc_unathorized_static(&mut self, page: &str) { if let Some(original_value) = self.unathorized_static.get_mut(page) { *original_value += 1; } else { self.unathorized_static.insert(page.to_owned(), 1); } } pub(crate) fn render_to_prometheus(&self) -> String { let mut s = String::new(); let mut pc = PrometheusMetric::build() .with_name("nas_gallery_authorized_access_to_static_content") .with_metric_type(MetricType::Counter) .with_help("Authorized access to static content") .build(); self.authorized_static.iter().for_each(|(key, val)| { pc.render_and_append_instance( &PrometheusInstance::new() .with_label("path", key.as_ref()) .with_value(*val), ); }); s.push_str(&pc.render());
self.unathorized_static.iter().for_each(|(key, val)| { pc.render_and_append_instance( &PrometheusInstance::new() .with_label("path", key.as_ref()) .with_value(*val), ); }); s.push_str(&pc.render()); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_authorized_access_to_dynamic_content") .with_metric_type(MetricType::Counter) .with_help("Authorized access to dynamic content") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.authorized_dynamic), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_authorized_not_found") .with_metric_type(MetricType::Counter) .with_help("Authorized access to not found content") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.authorized_not_found), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_authorized_thumb") .with_metric_type(MetricType::Counter) .with_help("Authorized access to thumbnail") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.authorized_thumb), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_unauthorized_thumb") .with_metric_type(MetricType::Counter) .with_help("Authorized unaccess to thumbnail") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.unauthorized_thumb), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_picture_thumb_access") .with_metric_type(MetricType::Counter) .with_help("Authorized unaccess picute thumb") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.picture_thumb_access), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_picture_thumb_generation") .with_metric_type(MetricType::Counter) .with_help("Picture thumb generation (cache miss)") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.picture_thumb_generation), ) .render(), ); s.push_str( 
&PrometheusMetric::build() .with_name("nas_gallery_video_thumb_access") .with_metric_type(MetricType::Counter) .with_help("Authorized unaccess picute thumb") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.video_thumb_access), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_video_thumb_generation") .with_metric_type(MetricType::Counter) .with_help("Video thumb generation (cache miss)") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.video_thumb_generation), ) .render(), ); let mut pc = PrometheusMetric::build() .with_name("nas_gallery_authorized_list_files") .with_metric_type(MetricType::Counter) .with_help("Authorized list files") .build(); self.authorized_list_files.iter().for_each(|(key, val)| { pc.render_and_append_instance( &PrometheusInstance::new() .with_label("file_type", key.as_str()) .with_value(*val), ); }); s.push_str(&pc.render()); let mut pc = PrometheusMetric::build() .with_name("nas_gallery_unauthorized_list_files") .with_metric_type(MetricType::Counter) .with_help("Unauthorized list files") .build(); self.unauthorized_list_files.iter().for_each(|(key, val)| { pc.render_and_append_instance( &PrometheusInstance::new() .with_label("file_type", key.as_str()) .with_value(*val), ); }); s.push_str(&pc.render()); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_authorized_first_level_folders") .with_metric_type(MetricType::Counter) .with_help("Authorized enumeration of first level folders") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.authorized_first_level_folders), ) .render(), ); s.push_str( &PrometheusMetric::build() .with_name("nas_gallery_unauthorized_first_level_folders") .with_metric_type(MetricType::Counter) .with_help("Unauthorized enumeration of first level folders") .build() .render_and_append_instance( &PrometheusInstance::new().with_value(self.unauthorized_first_level_folders), ) .render(), ); s } }
let mut pc = PrometheusMetric::build() .with_name("nas_gallery_unauthorized_access_to_static_content") .with_metric_type(MetricType::Counter) .with_help("Unauthorized access to static content") .build();
assignment_statement
[ { "content": "fn generate_thumb_folder_path(options: &Options, size: u64, original_path: &PathBuf) -> PathBuf {\n\n trace!(\"original_path == {:?}\", &original_path);\n\n let path = Path::new(&options.thumb_folder_path).join(format!(\"{}x{}\", size, size));\n\n trace!(\"generate_thumb_folder_path == {:...
Rust
diesel/src/macros/queryable.rs
robertmaloney/diesel
332ba12617ff05e5077fc1879caf83fe2e7fd8ff
#[macro_export] macro_rules! Queryable { (() $($body:tt)*) => { Queryable! { $($body)* } }; ( $(#[$ignore:meta])* $(pub)* struct $($body:tt)* ) => { Queryable! { $($body)* } }; ( ( struct_name = $struct_name:ident, $($headers:tt)* ), fields = [$({ field_name: $field_name:ident, column_name: $column_name:ident, field_ty: $field_ty:ty, field_kind: $field_kind:ident, })+], ) => { Queryable! { $($headers)* row_ty = ($($field_ty,)+), row_pat = ($($field_name,)+), build_expr = $struct_name { $($field_name: $field_name),+ }, } }; ( $headers:tt, fields = [$({ column_name: $column_name:ident, field_ty: $field_ty:ty, field_kind: $field_kind:ident, })+], ) => { Queryable! { $headers, fields = [$({ field_ty: $field_ty, field_kind: $field_kind, })+], } }; ( ( struct_name = $struct_name:ident, $($headers:tt)* ), fields = [$({ field_ty: $field_ty:ty, field_kind: $field_kind:ident, })+], ) => { Queryable! { $($headers)* row_ty = ($($field_ty,)+), row_pat = ($($field_kind,)+), build_expr = $struct_name($($field_kind),+), } }; ( struct_ty = $struct_ty:ty, generics = ($($generics:ident),*), row_ty = $row_ty:ty, row_pat = $row_pat:pat, build_expr = $build_expr:expr, ) => { impl<$($generics,)* __DB, __ST> $crate::Queryable<__ST, __DB> for $struct_ty where __DB: $crate::backend::Backend + $crate::types::HasSqlType<__ST>, $row_ty: $crate::types::FromSqlRow<__ST, __DB>, { type Row = $row_ty; fn build(row: Self::Row) -> Self { let $row_pat = row; $build_expr } } }; ( $struct_name:ident <$($generics:ident),*> $body:tt $(;)* ) => { __diesel_parse_struct_body! { ( struct_name = $struct_name, struct_ty = $struct_name<$($generics),*>, generics = ($($generics),*), ), callback = Queryable, body = $body, } }; ( $struct_name:ident $body:tt $(;)* ) => { __diesel_parse_struct_body! 
{ ( struct_name = $struct_name, struct_ty = $struct_name, generics = (), ), callback = Queryable, body = $body, } }; } #[cfg(test)] mod tests { use expression::dsl::sql; use prelude::*; use test_helpers::connection; use types::Integer; #[test] fn named_struct_definition() { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct { foo: i32, bar: i32, } Queryable! { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct { foo: i32, bar: i32, } } let conn = connection(); let data = ::select(sql::<(Integer, Integer)>("1, 2")).get_result(&conn); assert_eq!(Ok(MyStruct { foo: 1, bar: 2 }), data); } #[test] fn tuple_struct() { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct(i32, i32); Queryable! { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct(#[column_name(foo)] i32, #[column_name(bar)] i32); } let conn = connection(); let data = ::select(sql::<(Integer, Integer)>("1, 2")).get_result(&conn); assert_eq!(Ok(MyStruct(1, 2)), data); } #[test] fn tuple_struct_without_column_name_annotations() { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct(i32, i32); Queryable! { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct(i32, i32); } let conn = connection(); let data = ::select(sql::<(Integer, Integer)>("1, 2")).get_result(&conn); assert_eq!(Ok(MyStruct(1, 2)), data); } }
#[macro_export] macro_rules! Queryable { (() $($body:tt)*) => { Queryable! { $($body)* } }; ( $(#[$ignore:meta])* $(pub)* struct $($body:tt)* ) => { Queryable! { $($body)* } }; ( ( struct_name = $struct_name:ident, $($headers:tt)* ), fields = [$({ field_name: $field_name:ident, column_name: $column_name:ident, field_ty: $field_ty:ty, field_kind: $field_kind:ident, })+], ) => { Queryable! { $($headers)* row_ty = ($($field_ty,)+), row_pat = ($($field_name,)+), build_expr = $struct_name { $($field_name: $field_name),+ }, } }; ( $headers:tt, fields = [$({ column_name: $column_name:ident, field_ty: $field_ty:ty, field_kind: $field_kind:ident, })+], ) => { Queryable! { $headers, fields = [$({ field_ty: $field_ty, field_kind: $field_kind, })+], } }; ( ( struct_name = $struct_name:ident, $($headers:tt)* ), fields = [$({ field_ty: $field_ty:ty, field_kind: $field_kind:ident, })+], ) => { Queryable! { $($headers)* row_ty = ($($field_ty,)+), row_pat = ($($field_kind,)+), build_expr = $struct_name($($field_kind),+), } }; ( struct_ty = $struct_ty:ty, generics = ($($generics:ident),*), row_ty = $row_ty:ty, row_pat = $row_pat:pat, build_expr = $build_expr:expr, ) => { impl<$($generics,)* __DB, __ST> $crate::Queryable<__ST, __DB> for $struct_ty where __DB: $crate::backend::Backend + $crate::types::HasSqlType<__ST>, $row_ty: $crate::types::FromSqlRow<__ST, __DB>, { type Row = $row_ty; fn build(row: Self::Row) -> Self { let $row_pat = row; $build_expr } } }; ( $struct_name:ident <$($generics:ident),*> $body:tt $(;)* ) => { __diesel_parse_struct_body! { ( struct_name = $struct_name, struct_ty = $struct_name<$($generics),*>, generics = ($($generics),*), ), callback = Queryable, body = $body, } }; ( $struct_name:ident $body:tt $(;)* ) => { __diesel_parse_struct_body! 
{ ( struct_name = $struct_name, struct_ty = $struct_name, generics = (), ), callback = Queryable, body = $body, } }; } #[cfg(test)] mod tests { use expression::dsl::sql; use prelude::*; use test_helpers::connection; use types::Integer; #[test] fn named_struct_definition() { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct { foo: i32, bar: i32, } Queryable! { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct { foo: i32, bar: i32, } } let conn = connection(); let data = ::select(sql::<(Integer, Integer)>("1, 2")).get_result(&conn); assert_eq!(Ok(MyStruct { foo: 1, bar: 2 }), data); } #[test] fn tuple_struct() { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct(i32, i32); Queryable! { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct(#[colum
#[test] fn tuple_struct_without_column_name_annotations() { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct(i32, i32); Queryable! { #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct MyStruct(i32, i32); } let conn = connection(); let data = ::select(sql::<(Integer, Integer)>("1, 2")).get_result(&conn); assert_eq!(Ok(MyStruct(1, 2)), data); } }
n_name(foo)] i32, #[column_name(bar)] i32); } let conn = connection(); let data = ::select(sql::<(Integer, Integer)>("1, 2")).get_result(&conn); assert_eq!(Ok(MyStruct(1, 2)), data); }
function_block-function_prefixed
[ { "content": "#[doc(hidden)]\n\npub fn setup_database<Conn: Connection>(conn: &Conn) -> QueryResult<usize> {\n\n conn.setup_helper_functions();\n\n create_schema_migrations_table_if_needed(conn)\n\n}\n\n\n", "file_path": "diesel/src/migrations/mod.rs", "rank": 0, "score": 262751.3958453731 }...
Rust
src/options.rs
avar/hyperfine
4b419c6ab48a2018a5418fb4b70e25dcc401eba1
use std::fmt; use std::process::Command; use crate::error::OptionsError; use crate::units::{Second, Unit}; #[cfg(not(windows))] pub const DEFAULT_SHELL: &str = "sh"; #[cfg(windows)] pub const DEFAULT_SHELL: &str = "cmd.exe"; #[derive(Debug)] pub enum Shell { Default(&'static str), Custom(Vec<String>), } impl Default for Shell { fn default() -> Self { Shell::Default(DEFAULT_SHELL) } } impl fmt::Display for Shell { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Shell::Default(cmd) => write!(f, "{}", cmd), Shell::Custom(cmdline) => write!(f, "{}", shell_words::join(cmdline)), } } } impl Shell { pub fn parse<'a>(s: &str) -> Result<Self, OptionsError<'a>> { let v = shell_words::split(s).map_err(OptionsError::ShellParseError)?; if v.is_empty() || v[0].is_empty() { return Err(OptionsError::EmptyShell); } Ok(Shell::Custom(v)) } pub fn command(&self) -> Command { match self { Shell::Default(cmd) => Command::new(cmd), Shell::Custom(cmdline) => { let mut c = Command::new(&cmdline[0]); c.args(&cmdline[1..]); c } } } } #[derive(Debug, Clone, Copy, PartialEq)] pub enum CmdFailureAction { RaiseError, Ignore, } #[derive(Debug, Clone, Copy, PartialEq)] pub enum OutputStyleOption { Basic, Full, NoColor, Color, Disabled, } pub struct Runs { pub min: u64, pub max: Option<u64>, } impl Default for Runs { fn default() -> Runs { Runs { min: 10, max: None } } } pub struct HyperfineOptions { pub warmup_count: u64, pub runs: Runs, pub min_time_sec: Second, pub failure_action: CmdFailureAction, pub preparation_command: Option<Vec<String>>, pub cleanup_command: Option<String>, pub output_style: OutputStyleOption, pub shell: Shell, pub show_output: bool, pub time_unit: Option<Unit>, pub names: Option<Vec<String>>, } impl Default for HyperfineOptions { fn default() -> HyperfineOptions { HyperfineOptions { names: None, warmup_count: 0, runs: Runs::default(), min_time_sec: 3.0, failure_action: CmdFailureAction::RaiseError, preparation_command: None, cleanup_command: None, 
output_style: OutputStyleOption::Full, shell: Shell::default(), show_output: false, time_unit: None, } } } #[test] fn test_shell_default_command() { let shell = Shell::default(); let s = format!("{}", shell); assert_eq!(&s, DEFAULT_SHELL); let cmd = shell.command(); let s = format!("{:?}", cmd); assert_eq!(s, format!("\"{}\"", DEFAULT_SHELL)); } #[test] fn test_shell_parse_command() { let shell = Shell::parse("shell -x 'aaa bbb'").unwrap(); let s = format!("{}", shell); assert_eq!(&s, "shell -x 'aaa bbb'"); let cmd = shell.command(); let s = format!("{:?}", cmd); assert_eq!(&s, r#""shell" "-x" "aaa bbb""#); match Shell::parse("shell 'foo").unwrap_err() { OptionsError::ShellParseError(_) => { /* ok */ } e => assert!(false, "Unexpected error: {}", e), } match Shell::parse("").unwrap_err() { OptionsError::EmptyShell => { /* ok */ } e => assert!(false, "Unexpected error: {}", e), } match Shell::parse("''").unwrap_err() { OptionsError::EmptyShell => { /* ok */ } e => assert!(false, "Unexpected error: {}", e), } }
use std::fmt; use std::process::Command; use crate::error::OptionsError; use crate::units::{Second, Unit}; #[cfg(not(windows))] pub const DEFAULT_SHELL: &str = "sh"; #[cfg(windows)] pub const DEFAULT_SHELL: &str = "cmd.exe"; #[derive(Debug)] pub enum Shell { Default(&'static str), Custom(Vec<String>), } impl Default for Shell { fn default() -> Self { Shell::Default(DEFAULT_SHELL) } } impl fmt::Display for Shell { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self {
} impl Shell { pub fn parse<'a>(s: &str) -> Result<Self, OptionsError<'a>> { let v = shell_words::split(s).map_err(OptionsError::ShellParseError)?; if v.is_empty() || v[0].is_empty() { return Err(OptionsError::EmptyShell); } Ok(Shell::Custom(v)) } pub fn command(&self) -> Command { match self { Shell::Default(cmd) => Command::new(cmd), Shell::Custom(cmdline) => { let mut c = Command::new(&cmdline[0]); c.args(&cmdline[1..]); c } } } } #[derive(Debug, Clone, Copy, PartialEq)] pub enum CmdFailureAction { RaiseError, Ignore, } #[derive(Debug, Clone, Copy, PartialEq)] pub enum OutputStyleOption { Basic, Full, NoColor, Color, Disabled, } pub struct Runs { pub min: u64, pub max: Option<u64>, } impl Default for Runs { fn default() -> Runs { Runs { min: 10, max: None } } } pub struct HyperfineOptions { pub warmup_count: u64, pub runs: Runs, pub min_time_sec: Second, pub failure_action: CmdFailureAction, pub preparation_command: Option<Vec<String>>, pub cleanup_command: Option<String>, pub output_style: OutputStyleOption, pub shell: Shell, pub show_output: bool, pub time_unit: Option<Unit>, pub names: Option<Vec<String>>, } impl Default for HyperfineOptions { fn default() -> HyperfineOptions { HyperfineOptions { names: None, warmup_count: 0, runs: Runs::default(), min_time_sec: 3.0, failure_action: CmdFailureAction::RaiseError, preparation_command: None, cleanup_command: None, output_style: OutputStyleOption::Full, shell: Shell::default(), show_output: false, time_unit: None, } } } #[test] fn test_shell_default_command() { let shell = Shell::default(); let s = format!("{}", shell); assert_eq!(&s, DEFAULT_SHELL); let cmd = shell.command(); let s = format!("{:?}", cmd); assert_eq!(s, format!("\"{}\"", DEFAULT_SHELL)); } #[test] fn test_shell_parse_command() { let shell = Shell::parse("shell -x 'aaa bbb'").unwrap(); let s = format!("{}", shell); assert_eq!(&s, "shell -x 'aaa bbb'"); let cmd = shell.command(); let s = format!("{:?}", cmd); assert_eq!(&s, r#""shell" "-x" "aaa 
bbb""#); match Shell::parse("shell 'foo").unwrap_err() { OptionsError::ShellParseError(_) => { /* ok */ } e => assert!(false, "Unexpected error: {}", e), } match Shell::parse("").unwrap_err() { OptionsError::EmptyShell => { /* ok */ } e => assert!(false, "Unexpected error: {}", e), } match Shell::parse("''").unwrap_err() { OptionsError::EmptyShell => { /* ok */ } e => assert!(false, "Unexpected error: {}", e), } }
Shell::Default(cmd) => write!(f, "{}", cmd), Shell::Custom(cmdline) => write!(f, "{}", shell_words::join(cmdline)), } }
function_block-function_prefixed
[ { "content": "/// Print error message to stderr and terminate\n\npub fn error(message: &str) -> ! {\n\n eprintln!(\"{} {}\", \"Error:\".red(), message);\n\n std::process::exit(1);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 0, "score": 143262.61697711638 }, { "content": "/// Like ...
Rust
hal/src/common/thumbv7em/qspi.rs
tomoyuki-nakabayashi/atsamd
3cc824a779f3790ec606411b2a338dc884adf70a
use crate::{ gpio::{Floating, Input, Pa10, Pa11, Pa8, Pa9, Pb10, Pb11, PfH, Port}, target_device::qspi::instrframe, target_device::{MCLK, QSPI}, }; #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub enum Error { CommandFunctionMismatch, } pub struct Qspi { qspi: QSPI, _sck: Pb10<PfH>, _cs: Pb11<PfH>, _io0: Pa8<PfH>, _io1: Pa9<PfH>, _io2: Pa10<PfH>, _io3: Pa11<PfH>, } impl Qspi { pub fn new( mclk: &mut MCLK, port: &mut Port, qspi: QSPI, _sck: Pb10<Input<Floating>>, _cs: Pb11<Input<Floating>>, _io0: Pa8<Input<Floating>>, _io1: Pa9<Input<Floating>>, _io2: Pa10<Input<Floating>>, _io3: Pa11<Input<Floating>>, ) -> Qspi { mclk.apbcmask.modify(|_, w| w.qspi_().set_bit()); mclk.ahbmask.modify(|_, w| { w.qspi_().set_bit(); w.qspi_2x_().clear_bit() }); let _sck = _sck.into_function_h(port); let _cs = _cs.into_function_h(port); let _io0 = _io0.into_function_h(port); let _io1 = _io1.into_function_h(port); let _io2 = _io2.into_function_h(port); let _io3 = _io3.into_function_h(port); qspi.ctrla.write(|w| w.swrst().set_bit()); qspi.baud.write(|w| unsafe { w.baud().bits((120_000_000u32 / 4_000_000u32) as u8); w.cpol().clear_bit(); w.cpha().clear_bit() }); qspi.ctrlb.write(|w| { w.mode().memory(); w.csmode().noreload(); w.csmode().lastxfer(); w.datalen()._8bits() }); qspi.ctrla.modify(|_, w| w.enable().set_bit()); Qspi { qspi, _sck, _cs, _io0, _io1, _io2, _io3, } } unsafe fn run_write_instruction( &self, command: Command, tfm: TransferMode, addr: u32, buf: &[u8], ) { if command == Command::EraseSector || command == Command::EraseBlock { self.qspi.instraddr.write(|w| w.addr().bits(addr)); } self.qspi .instrctrl .modify(|_, w| w.instr().bits(command.bits())); self.qspi.instrframe.write(|w| { tfm.instrframe( w, if command == Command::QuadPageProgram { instrframe::TFRTYPE_A::WRITEMEMORY } else { instrframe::TFRTYPE_A::WRITE }, ) }); self.qspi.instrframe.read().bits(); if buf.len() > 0 { core::ptr::copy(buf.as_ptr(), (QSPI_AHB + addr) as *mut u8, buf.len()); } self.qspi.ctrla.write(|w| { 
w.enable().set_bit(); w.lastxfer().set_bit() }); while self.qspi.intflag.read().instrend().bit_is_clear() {} self.qspi.intflag.write(|w| w.instrend().set_bit()); } unsafe fn run_read_instruction( &self, command: Command, tfm: TransferMode, addr: u32, buf: &mut [u8], ) { self.qspi .instrctrl .modify(|_, w| w.instr().bits(command.bits())); self.qspi.instrframe.write(|w| { tfm.instrframe( w, if command == Command::QuadRead { instrframe::TFRTYPE_A::READMEMORY } else { instrframe::TFRTYPE_A::READ }, ) }); self.qspi.instrframe.read().bits(); if buf.len() > 0 { core::ptr::copy((QSPI_AHB + addr) as *mut u8, buf.as_mut_ptr(), buf.len()); } self.qspi.ctrla.write(|w| { w.enable().set_bit(); w.lastxfer().set_bit() }); while self.qspi.intflag.read().instrend().bit_is_clear() {} self.qspi.intflag.write(|w| w.instrend().set_bit()); } pub fn run_command(&self, command: Command) -> Result<(), Error> { match command { Command::WriteEnable | Command::WriteDisable | Command::Reset | Command::EnableReset => (), _ => return Err(Error::CommandFunctionMismatch), } let tfm = TransferMode { instruction_enable: true, ..TransferMode::default() }; unsafe { self.run_read_instruction(command, tfm, 0, &mut []); } Ok(()) } pub fn read_command(&self, command: Command, response: &mut [u8]) -> Result<(), Error> { match command { Command::Read | Command::QuadRead | Command::ReadId | Command::ReadStatus | Command::ReadStatus2 => (), _ => return Err(Error::CommandFunctionMismatch), } let tfm = TransferMode { data_enable: true, instruction_enable: true, ..TransferMode::default() }; unsafe { self.run_read_instruction(command, tfm, 0, response); } Ok(()) } pub fn write_command(&self, command: Command, data: &[u8]) -> Result<(), Error> { match command { Command::PageProgram | Command::QuadPageProgram | Command::WriteStatus | Command::WriteStatus2 => (), _ => return Err(Error::CommandFunctionMismatch), } let tfm = TransferMode { data_enable: data.len() > 0, instruction_enable: true, ..TransferMode::default() 
}; unsafe { self.run_write_instruction(command, tfm, 0, data); } Ok(()) } pub fn erase_command(&self, command: Command, address: u32) -> Result<(), Error> { match command { Command::EraseSector | Command::EraseBlock | Command::EraseChip => (), _ => return Err(Error::CommandFunctionMismatch), } let tfm = TransferMode { address_enable: true, instruction_enable: true, ..TransferMode::default() }; unsafe { self.run_write_instruction(command, tfm, address, &[]); } Ok(()) } pub fn read_memory(&mut self, addr: u32, buf: &mut [u8]) { let tfm = TransferMode { quad_width: true, address_enable: true, data_enable: true, instruction_enable: true, dummy_cycles: 8, ..TransferMode::default() }; unsafe { self.run_read_instruction(Command::QuadRead, tfm, addr, buf) }; } pub fn write_memory(&mut self, addr: u32, buf: &[u8]) { self.qspi.instrframe.write(|w| { w.width().quad_output(); w.addrlen()._24bits(); w.tfrtype().writememory(); w.instren().set_bit(); w.dataen().set_bit(); w.addren().set_bit() }); let tfm = TransferMode { quad_width: true, address_enable: true, data_enable: true, instruction_enable: true, ..TransferMode::default() }; unsafe { self.run_write_instruction(Command::QuadPageProgram, tfm, addr, buf) }; } pub fn set_clk_divider(&mut self, value: u8) { self.qspi .baud .write(|w| unsafe { w.baud().bits(value.saturating_sub(1)) }); } } #[derive(Default, Debug, Copy, Clone)] struct TransferMode { quad_width: bool, data_enable: bool, opcode_enable: bool, address_enable: bool, instruction_enable: bool, dummy_cycles: u8, } impl TransferMode { unsafe fn instrframe( self, instrframe: &mut instrframe::W, tfrtype: instrframe::TFRTYPE_A, ) -> &mut instrframe::W { if self.quad_width { instrframe.width().quad_output(); } else { instrframe.width().single_bit_spi(); } if self.data_enable { instrframe.dataen().set_bit(); } if self.opcode_enable { instrframe.dataen().set_bit(); } if self.address_enable { instrframe.addren().set_bit(); } if self.instruction_enable { 
instrframe.instren().set_bit(); } if self.dummy_cycles > 0 { instrframe.dummylen().bits(self.dummy_cycles); } instrframe.addrlen()._24bits(); instrframe.optcodeen().clear_bit(); instrframe.tfrtype().variant(tfrtype); instrframe } } #[repr(u8)] #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub enum Command { Read = 0x03, QuadRead = 0x6B, ReadId = 0x9F, PageProgram = 0x02, QuadPageProgram = 0x32, ReadStatus = 0x05, ReadStatus2 = 0x35, WriteStatus = 0x01, WriteStatus2 = 0x31, EnableReset = 0x66, Reset = 0x99, WriteEnable = 0x06, WriteDisable = 0x04, EraseSector = 0x20, EraseBlock = 0xD8, EraseChip = 0xC7, } impl Command { fn bits(self) -> u8 { self as u8 } } const QSPI_AHB: u32 = 0x04000000;
use crate::{ gpio::{Floating, Input, Pa10, Pa11, Pa8, Pa9, Pb10, Pb11, PfH, Port}, target_device::qspi::instrframe, target_device::{MCLK, QSPI}, }; #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub enum Error { CommandFunctionMismatch, } pub struct Qspi { qspi: QSPI, _sck: Pb10<PfH>, _cs: Pb11<PfH>, _io0: Pa8<PfH>, _io1: Pa9<PfH>, _io2: Pa10<PfH>, _io3: Pa11<PfH>, } impl Qspi { pub fn new( mclk: &mut MCLK, port: &mut Port, qspi: QSPI, _sck: Pb10<Input<Floating>>, _cs: Pb11<Input<Floating>>, _io0: Pa8<Input<Floating>>, _io1: Pa9<Input<Floating>>, _io2: Pa10<Input<Floating>>, _io3: Pa11<Input<Floating>>, ) -> Qspi { mclk.apbcmask.modify(|_, w| w.qspi_().set_bit()); mclk.ahbmask.modify(|_, w| { w.qspi_().set_bit(); w.qspi_2x_().clear_bit() }); let _sck = _sck.into_function_h(port); let _cs = _cs.into_function_h(port); let _io0 = _io0.into_function_h(port); let _io1 = _io1.into_function_h(port); let _io2 = _io2.into_function_h(port); let _io3 = _io3.into_function_h(port); qspi.ctrla.write(|w| w.swrst().set_bit()); qspi.baud.write(|w| unsafe { w.baud().bits((120_000_000u32 / 4_000_000u32) as u8); w.cpol().clear_bit(); w.cpha().clear_bit() }); qspi.ctrlb.write(|w| { w.mode().memory(); w.csmode().noreload(); w.csmode().lastxfer(); w.datalen()._8bits() }); qspi.ctrla.modify(|_, w| w.enable().set_bit()); Qspi { qspi, _sck, _cs, _io0, _io1, _io2, _io3, } } unsafe fn run_write_instruction( &self, command: Command, tfm: TransferMode, addr: u32, buf: &[u8], ) { if command == Command::EraseSector || command == Command::EraseBlock { self.qspi.instraddr.write(|w| w.addr().bits(addr)); } self.qspi .instrctrl .modify(|_, w| w.instr().bits(command.bits())); self.qspi.instrframe.write(|w| { tfm.instrframe( w, if command == Command::QuadPageProgram { instrframe::TFRTYPE_A::WRITEMEMORY } else { instrframe::TFRTYPE_A::WRITE }, ) }); self.qspi.instrframe.read().bits(); if buf.len() > 0 { core::ptr::copy(buf.as_ptr(), (QSPI_AHB + addr) as *mut u8, buf.len()); } self.qspi.ctrla.write(|w| { 
w.enable().set_bit(); w.lastxfer().set_bit() }); while self.qspi.intflag.read().instrend().bit_is_clear() {} self.qspi.intflag.write(|w| w.instrend().set_bit()); } unsafe fn run_read_instruction( &self, command: Command, tfm: TransferMode, addr: u32, buf: &mut [u8], ) { self.qspi .instrctrl .modify(|_, w| w.instr().bits(command.bits())); self.qspi.instrframe.write(|w| { tfm.instrframe( w, if command == Command::QuadRead { instrframe::TFRTYPE_A::READMEMORY } else { instrframe::TFRTYPE_A::READ }, ) }); self.qspi.instrframe.read().bits(); if buf.len() > 0 { core::ptr::copy((QSPI_AHB + addr) as *mut u8, buf.as_mut_ptr(), buf.len()); } self.qspi.ctrla.write(|w| { w.enable().set_bit(); w.lastxfer().set_bit() }); while self.qspi.intflag.read().instrend().bit_is_clear() {} self.qspi.intflag.write(|w| w.instrend().set_bit()); } pub fn run_command(&self, command: Command) -> Result<(), Error> { match command { Command::WriteEnable | Command::WriteDisable | Command::Reset | Command::EnableReset => (), _ => return Err(Error::CommandFunctionMismatch), } let tfm = TransferMode { instruction_enable: true, ..TransferMode::default() }; unsafe { self.run_read_instruction(command, tfm, 0, &mut []); } Ok(()) } pub fn read_command(&self, command: Command, response: &mut [u8]) -> Result<(), Error> { match command { Command::Read | Command::QuadRead | Command::ReadId | Command::ReadStatus | Command::ReadStatus2 => (), _ => return Err(Error::CommandFunctionMismatch), } let tfm = TransferMode { data_enable: true, instruction_enable: true, ..TransferMode::default() }; unsafe { self.run_read_instruction(command, tfm, 0, response); } Ok(()) } pub fn write_command(&self, command: Command, data: &[u8]) -> Result<(), Error> { match command { Command::PageProgram | Command::QuadPageProgram | Command::WriteStatus | Command::WriteStatus2 => (), _ => return Err(Error::CommandFunctionMismatch), } let tfm = TransferMode { data_enable: data.len() > 0, instruction_enable: true, ..TransferMode::default() 
}; unsafe { self.run_write_instruction(command, tfm, 0, data); } Ok(()) } pub fn erase_command(&self, command: Command, address: u32) -> Result<(), Error> { match command { Command::EraseSector | Command::EraseBlock | Command::EraseChip => (), _ => return Err(Error::CommandFunctionMismatch), } let tfm = TransferMode { address_enable: true, instruction_enable: true, ..TransferMode::default() }; unsafe { self.run_write_instruction(command, tfm, address, &[]); } Ok(()) } pub fn read_memory(&mut self, addr: u32, buf: &mut [u8]) { let tfm = TransferMode { quad_width: true, address_enable: true, data_enable: true, instruction_enable: true, dummy_cycles: 8, ..TransferMode::default() }; unsafe { self.run_read_instruction(Command::QuadRead, tfm, addr, buf) }; }
pub fn set_clk_divider(&mut self, value: u8) { self.qspi .baud .write(|w| unsafe { w.baud().bits(value.saturating_sub(1)) }); } } #[derive(Default, Debug, Copy, Clone)] struct TransferMode { quad_width: bool, data_enable: bool, opcode_enable: bool, address_enable: bool, instruction_enable: bool, dummy_cycles: u8, } impl TransferMode { unsafe fn instrframe( self, instrframe: &mut instrframe::W, tfrtype: instrframe::TFRTYPE_A, ) -> &mut instrframe::W { if self.quad_width { instrframe.width().quad_output(); } else { instrframe.width().single_bit_spi(); } if self.data_enable { instrframe.dataen().set_bit(); } if self.opcode_enable { instrframe.dataen().set_bit(); } if self.address_enable { instrframe.addren().set_bit(); } if self.instruction_enable { instrframe.instren().set_bit(); } if self.dummy_cycles > 0 { instrframe.dummylen().bits(self.dummy_cycles); } instrframe.addrlen()._24bits(); instrframe.optcodeen().clear_bit(); instrframe.tfrtype().variant(tfrtype); instrframe } } #[repr(u8)] #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub enum Command { Read = 0x03, QuadRead = 0x6B, ReadId = 0x9F, PageProgram = 0x02, QuadPageProgram = 0x32, ReadStatus = 0x05, ReadStatus2 = 0x35, WriteStatus = 0x01, WriteStatus2 = 0x31, EnableReset = 0x66, Reset = 0x99, WriteEnable = 0x06, WriteDisable = 0x04, EraseSector = 0x20, EraseBlock = 0xD8, EraseChip = 0xC7, } impl Command { fn bits(self) -> u8 { self as u8 } } const QSPI_AHB: u32 = 0x04000000;
pub fn write_memory(&mut self, addr: u32, buf: &[u8]) { self.qspi.instrframe.write(|w| { w.width().quad_output(); w.addrlen()._24bits(); w.tfrtype().writememory(); w.instren().set_bit(); w.dataen().set_bit(); w.addren().set_bit() }); let tfm = TransferMode { quad_width: true, address_enable: true, data_enable: true, instruction_enable: true, ..TransferMode::default() }; unsafe { self.run_write_instruction(Command::QuadPageProgram, tfm, addr, buf) }; }
function_block-full_function
[]
Rust
drivers/src/net/e1000.rs
YdrMaster/zCore
a35032b540120351b802890ec77603dda8f784b4
use alloc::collections::BTreeMap; use alloc::string::String; use alloc::sync::Arc; use alloc::vec::Vec; use smoltcp::iface::*; use smoltcp::phy::{self, DeviceCapabilities}; use smoltcp::time::Instant; use smoltcp::wire::*; use smoltcp::Result; use super::ProviderImpl; use super::PAGE_SIZE; use crate::net::get_sockets; use crate::scheme::{NetScheme, Scheme}; use crate::{DeviceError, DeviceResult}; use isomorphic_drivers::net::ethernet::intel::e1000::E1000; use isomorphic_drivers::net::ethernet::structs::EthernetAddress as DriverEthernetAddress; use lock::Mutex; #[derive(Clone)] pub struct E1000Driver(Arc<Mutex<E1000<ProviderImpl>>>); #[derive(Clone)] pub struct E1000Interface { iface: Arc<Mutex<Interface<'static, E1000Driver>>>, driver: E1000Driver, name: String, irq: usize, } impl Scheme for E1000Interface { fn name(&self) -> &str { "e1000" } fn handle_irq(&self, irq: usize) { if irq != self.irq { return; } let data = self.driver.0.lock().handle_interrupt(); if data { let timestamp = Instant::from_millis(0); let sockets = get_sockets(); let mut sockets = sockets.lock(); match self.iface.lock().poll(&mut sockets, timestamp) { Ok(p) => { info!("e1000 try_handle_interrupt poll: {:?}", p); } Err(err) => { warn!("poll got err {}", err); } } } } } impl NetScheme for E1000Interface { fn get_mac(&self) -> EthernetAddress { self.iface.lock().ethernet_addr() } fn get_ifname(&self) -> String { self.name.clone() } fn get_ip_address(&self) -> Vec<IpCidr> { Vec::from(self.iface.lock().ip_addrs()) } fn poll(&self) -> DeviceResult { let timestamp = Instant::from_millis(0); let sockets = get_sockets(); let mut sockets = sockets.lock(); match self.iface.lock().poll(&mut sockets, timestamp) { Ok(p) => { info!("e1000 NetScheme poll: {:?}", p); Ok(()) } Err(err) => { warn!("poll got err {}", err); Err(DeviceError::IoError) } } } fn recv(&self, buf: &mut [u8]) -> DeviceResult<usize> { if let Some(vec_recv) = self.driver.0.lock().receive() { buf.copy_from_slice(&vec_recv); 
Ok(vec_recv.len()) } else { Err(DeviceError::NotReady) } } fn send(&self, data: &[u8]) -> DeviceResult<usize> { if self.driver.0.lock().can_send() { let mut driver = self.driver.0.lock(); driver.send(data); Ok(data.len()) } else { Err(DeviceError::NotReady) } } } pub struct E1000RxToken(Vec<u8>); pub struct E1000TxToken(E1000Driver); impl phy::Device<'_> for E1000Driver { type RxToken = E1000RxToken; type TxToken = E1000TxToken; fn receive(&mut self) -> Option<(Self::RxToken, Self::TxToken)> { self.0 .lock() .receive() .map(|vec| (E1000RxToken(vec), E1000TxToken(self.clone()))) } fn transmit(&mut self) -> Option<Self::TxToken> { if self.0.lock().can_send() { Some(E1000TxToken(self.clone())) } else { None } } fn capabilities(&self) -> DeviceCapabilities { let mut caps = DeviceCapabilities::default(); caps.max_transmission_unit = 1536; caps.max_burst_size = Some(64); caps } } impl phy::RxToken for E1000RxToken { fn consume<R, F>(mut self, _timestamp: Instant, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { f(&mut self.0) } } impl phy::TxToken for E1000TxToken { fn consume<R, F>(self, _timestamp: Instant, len: usize, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { let mut buffer = [0u8; PAGE_SIZE]; let result = f(&mut buffer[..len]); let mut driver = (self.0).0.lock(); driver.send(&buffer); result } } pub fn init( name: String, irq: usize, header: usize, size: usize, index: usize, ) -> DeviceResult<E1000Interface> { info!("Probing e1000 {}", name); let mac: [u8; 6] = [0x54, 0x51, 0x9F, 0x71, 0xC0, index as u8]; let e1000 = E1000::new(header, size, DriverEthernetAddress::from_bytes(&mac)); let net_driver = E1000Driver(Arc::new(Mutex::new(e1000))); let ethernet_addr = EthernetAddress::from_bytes(&mac); let ip_addrs = [IpCidr::new(IpAddress::v4(10, 0, 2, (15 + index) as u8), 24)]; let default_v4_gw = Ipv4Address::new(10, 0, 2, 2); static mut ROUTES_STORAGE: [Option<(IpCidr, Route)>; 1] = [None; 1]; let mut routes = unsafe { Routes::new(&mut 
ROUTES_STORAGE[..]) }; routes.add_default_ipv4_route(default_v4_gw).unwrap(); let neighbor_cache = NeighborCache::new(BTreeMap::new()); let iface = InterfaceBuilder::new(net_driver.clone()) .ethernet_addr(ethernet_addr) .neighbor_cache(neighbor_cache) .ip_addrs(ip_addrs) .routes(routes) .finalize(); info!( "e1000 interface {} up with addr 10.0.2.{}/24", name, 15 + index ); let e1000_iface = E1000Interface { iface: Arc::new(Mutex::new(iface)), driver: net_driver, name, irq, }; Ok(e1000_iface) }
use alloc::collections::BTreeMap; use alloc::string::String; use alloc::sync::Arc; use alloc::vec::Vec; use smoltcp::iface::*; use smoltcp::phy::{self, DeviceCapabilities}; use smoltcp::time::Instant; use smoltcp::wire::*; use smoltcp::Result; use super::ProviderImpl; use super::PAGE_SIZE; use crate::net::get_sockets; use crate::scheme::{NetScheme, Scheme}; use crate::{DeviceError, DeviceResult}; use isomorphic_drivers::net::ethernet::intel::e1000::E1000; use isomorphic_drivers::net::ethernet::structs::EthernetAddress as DriverEthernetAddress; use lock::Mutex; #[derive(Clone)] pub struct E1000Driver(Arc<Mutex<E1000<ProviderImpl>>>); #[derive(Clone)] pub struct E1000Interface { iface: Arc<Mutex<Interface<'static, E1000Driver>>>, driver: E1000Driver, name: String, irq: usize, } impl Scheme for E1000Interface { fn name(&self) -> &str { "e1000" } fn handle_irq(&self, irq: usize) { if irq != self.irq { return; } let data = self.driver.0.lock().handle_interrupt(); if data { let timestamp = Instant::from_millis(0); let sockets = get_sockets(); let mut sockets = sockets.lock(); match self.iface.lock().poll(&mut sockets, times
poll got err {}", err); } } } } } impl NetScheme for E1000Interface { fn get_mac(&self) -> EthernetAddress { self.iface.lock().ethernet_addr() } fn get_ifname(&self) -> String { self.name.clone() } fn get_ip_address(&self) -> Vec<IpCidr> { Vec::from(self.iface.lock().ip_addrs()) } fn poll(&self) -> DeviceResult { let timestamp = Instant::from_millis(0); let sockets = get_sockets(); let mut sockets = sockets.lock(); match self.iface.lock().poll(&mut sockets, timestamp) { Ok(p) => { info!("e1000 NetScheme poll: {:?}", p); Ok(()) } Err(err) => { warn!("poll got err {}", err); Err(DeviceError::IoError) } } } fn recv(&self, buf: &mut [u8]) -> DeviceResult<usize> { if let Some(vec_recv) = self.driver.0.lock().receive() { buf.copy_from_slice(&vec_recv); Ok(vec_recv.len()) } else { Err(DeviceError::NotReady) } } fn send(&self, data: &[u8]) -> DeviceResult<usize> { if self.driver.0.lock().can_send() { let mut driver = self.driver.0.lock(); driver.send(data); Ok(data.len()) } else { Err(DeviceError::NotReady) } } } pub struct E1000RxToken(Vec<u8>); pub struct E1000TxToken(E1000Driver); impl phy::Device<'_> for E1000Driver { type RxToken = E1000RxToken; type TxToken = E1000TxToken; fn receive(&mut self) -> Option<(Self::RxToken, Self::TxToken)> { self.0 .lock() .receive() .map(|vec| (E1000RxToken(vec), E1000TxToken(self.clone()))) } fn transmit(&mut self) -> Option<Self::TxToken> { if self.0.lock().can_send() { Some(E1000TxToken(self.clone())) } else { None } } fn capabilities(&self) -> DeviceCapabilities { let mut caps = DeviceCapabilities::default(); caps.max_transmission_unit = 1536; caps.max_burst_size = Some(64); caps } } impl phy::RxToken for E1000RxToken { fn consume<R, F>(mut self, _timestamp: Instant, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { f(&mut self.0) } } impl phy::TxToken for E1000TxToken { fn consume<R, F>(self, _timestamp: Instant, len: usize, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { let mut buffer = [0u8; PAGE_SIZE]; 
let result = f(&mut buffer[..len]); let mut driver = (self.0).0.lock(); driver.send(&buffer); result } } pub fn init( name: String, irq: usize, header: usize, size: usize, index: usize, ) -> DeviceResult<E1000Interface> { info!("Probing e1000 {}", name); let mac: [u8; 6] = [0x54, 0x51, 0x9F, 0x71, 0xC0, index as u8]; let e1000 = E1000::new(header, size, DriverEthernetAddress::from_bytes(&mac)); let net_driver = E1000Driver(Arc::new(Mutex::new(e1000))); let ethernet_addr = EthernetAddress::from_bytes(&mac); let ip_addrs = [IpCidr::new(IpAddress::v4(10, 0, 2, (15 + index) as u8), 24)]; let default_v4_gw = Ipv4Address::new(10, 0, 2, 2); static mut ROUTES_STORAGE: [Option<(IpCidr, Route)>; 1] = [None; 1]; let mut routes = unsafe { Routes::new(&mut ROUTES_STORAGE[..]) }; routes.add_default_ipv4_route(default_v4_gw).unwrap(); let neighbor_cache = NeighborCache::new(BTreeMap::new()); let iface = InterfaceBuilder::new(net_driver.clone()) .ethernet_addr(ethernet_addr) .neighbor_cache(neighbor_cache) .ip_addrs(ip_addrs) .routes(routes) .finalize(); info!( "e1000 interface {} up with addr 10.0.2.{}/24", name, 15 + index ); let e1000_iface = E1000Interface { iface: Arc::new(Mutex::new(iface)), driver: net_driver, name, irq, }; Ok(e1000_iface) }
tamp) { Ok(p) => { info!("e1000 try_handle_interrupt poll: {:?}", p); } Err(err) => { warn!("
random
[ { "content": "pub trait IrqScheme: Scheme {\n\n /// Is a valid IRQ number.\n\n fn is_valid_irq(&self, irq_num: usize) -> bool;\n\n\n\n /// Disable IRQ.\n\n fn mask(&self, irq_num: usize) -> DeviceResult;\n\n\n\n /// Enable IRQ.\n\n fn unmask(&self, irq_num: usize) -> DeviceResult;\n\n\n\n /...
Rust
palette/src/rgb/channels.rs
alexfertel/palette
6765af7d69124e54b458fbfa10412c443ea32390
use crate::{cast::ComponentOrder, rgb}; #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Abgr; impl<S, T> ComponentOrder<rgb::Rgba<S, T>, [T; 4]> for Abgr { #[inline] fn pack(color: rgb::Rgba<S, T>) -> [T; 4] { let [red, green, blue, alpha]: [T; 4] = color.into(); [alpha, blue, green, red] } #[inline] fn unpack(packed: [T; 4]) -> rgb::Rgba<S, T> { let [alpha, blue, green, red] = packed; rgb::Rgba::new(red, green, blue, alpha) } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Argb; impl<S, T> ComponentOrder<rgb::Rgba<S, T>, [T; 4]> for Argb { #[inline] fn pack(color: rgb::Rgba<S, T>) -> [T; 4] { let [red, green, blue, alpha]: [T; 4] = color.into(); [alpha, red, green, blue] } #[inline] fn unpack(packed: [T; 4]) -> rgb::Rgba<S, T> { let [alpha, red, green, blue] = packed; rgb::Rgba::new(red, green, blue, alpha) } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Bgra; impl<S, T> ComponentOrder<rgb::Rgba<S, T>, [T; 4]> for Bgra { #[inline] fn pack(color: rgb::Rgba<S, T>) -> [T; 4] { let [red, green, blue, alpha]: [T; 4] = color.into(); [blue, green, red, alpha] } #[inline] fn unpack(packed: [T; 4]) -> rgb::Rgba<S, T> { let [blue, green, red, alpha] = packed; rgb::Rgba::new(red, green, blue, alpha) } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Rgba; impl<S, T> ComponentOrder<rgb::Rgba<S, T>, [T; 4]> for Rgba { #[inline] fn pack(color: rgb::Rgba<S, T>) -> [T; 4] { let [red, green, blue, alpha]: [T; 4] = color.into(); [red, green, blue, alpha] } #[inline] fn unpack(packed: [T; 4]) -> rgb::Rgba<S, T> { let [red, green, blue, alpha] = packed; rgb::Rgba::new(red, green, blue, alpha) } } #[cfg(test)] mod test { use super::{Abgr, Argb, Bgra, Rgba}; use crate::{cast::Packed, Srgb, Srgba}; #[test] fn rgba() { let a1: Packed<Rgba, u32> = Srgb::new(0.5, 0.0, 0.0).into_format().into(); let a2: Packed<Rgba, u32> = Srgb::new(0.0, 1.0, 0.0).into_format().into(); let a3: Packed<Rgba, u32> = Srgb::new(0.0, 0.0, 0.5).into_format().into(); let x1: 
u32 = 0x8000_00FF; let x2: u32 = 0x00FF_00FF; let x3: u32 = 0x0000_80FF; assert_eq!(a1.color, x1); assert_eq!(a2.color, x2); assert_eq!(a3.color, x3); let unpacked: Srgb<u8> = Packed::<Rgba, u32>::from(0x80FF_80FF).into(); assert_relative_eq!( Srgb::new(0.5, 1.0, 0.5), unpacked.into_format(), epsilon = 0.01 ); let b1: Packed<Rgba, u32> = Srgba::new(0.5, 0.0, 0.0, 0.0).into_format().into(); let b2: Packed<Rgba, u32> = Srgba::new(0.0, 1.0, 0.0, 0.0).into_format().into(); let b3: Packed<Rgba, u32> = Srgba::new(0.0, 0.0, 0.5, 0.0).into_format().into(); let b4: Packed<Rgba, u32> = Srgba::new(0.0, 0.0, 0.0, 1.0).into_format().into(); let y1: u32 = 0x8000_0000; let y2: u32 = 0x00FF_0000; let y3: u32 = 0x0000_8000; let y4: u32 = 0x0000_00FF; assert_eq!(b1.color, y1); assert_eq!(b2.color, y2); assert_eq!(b3.color, y3); assert_eq!(b4.color, y4); let unpacked: Srgba<u8> = Packed::<Rgba, u32>::from(0x80FF_80FF).into(); assert_relative_eq!( Srgba::new(0.5, 1.0, 0.5, 1.0), unpacked.into_format(), epsilon = 0.01 ); } #[test] fn argb() { let a1: Packed<Argb, u32> = Srgb::new(0.5, 0.0, 0.0).into_format().into(); let a2: Packed<Argb, u32> = Srgb::new(0.0, 1.0, 0.0).into_format().into(); let a3: Packed<Argb, u32> = Srgb::new(0.0, 0.0, 0.5).into_format().into(); let x1: u32 = 0xFF80_0000; let x2: u32 = 0xFF00_FF00; let x3: u32 = 0xFF00_0080; assert_eq!(a1.color, x1); assert_eq!(a2.color, x2); assert_eq!(a3.color, x3); let unpacked: Srgb<u8> = Packed::<Argb, u32>::from(0x80FF_80FF).into(); assert_relative_eq!( Srgb::new(1.0, 0.5, 1.0), unpacked.into_format(), epsilon = 0.01 ); let b1: Packed<Argb, u32> = Srgba::new(0.5, 0.0, 0.0, 0.0).into_format().into(); let b2: Packed<Argb, u32> = Srgba::new(0.0, 1.0, 0.0, 0.0).into_format().into(); let b3: Packed<Argb, u32> = Srgba::new(0.0, 0.0, 0.5, 0.0).into_format().into(); let b4: Packed<Argb, u32> = Srgba::new(0.0, 0.0, 0.0, 1.0).into_format().into(); let y1: u32 = 0x0080_0000; let y2: u32 = 0x0000_FF00; let y3: u32 = 0x0000_0080; let y4: u32 
= 0xFF00_0000; assert_eq!(b1.color, y1); assert_eq!(b2.color, y2); assert_eq!(b3.color, y3); assert_eq!(b4.color, y4); let unpacked: Srgba<u8> = Packed::<Argb, u32>::from(0x80FF_80FF).into(); assert_relative_eq!( Srgba::new(1.0, 0.5, 1.0, 0.5), unpacked.into_format(), epsilon = 0.01 ); } #[test] fn bgra() { let a1: Packed<Bgra, u32> = Srgb::new(0.5, 0.0, 0.0).into_format().into(); let a2: Packed<Bgra, u32> = Srgb::new(0.0, 1.0, 0.0).into_format().into(); let a3: Packed<Bgra, u32> = Srgb::new(0.0, 0.0, 0.5).into_format().into(); let x1: u32 = 0x0000_80FF; let x2: u32 = 0x00FF_00FF; let x3: u32 = 0x8000_00FF; assert_eq!(a1.color, x1); assert_eq!(a2.color, x2); assert_eq!(a3.color, x3); let unpacked: Srgb<u8> = Packed::<Bgra, u32>::from(0x80FF_FF80).into(); assert_relative_eq!( Srgb::new(1.0, 1.0, 0.5), unpacked.into_format(), epsilon = 0.01 ); let b1: Packed<Bgra, u32> = Srgba::new(0.5, 0.0, 0.0, 0.0).into_format().into(); let b2: Packed<Bgra, u32> = Srgba::new(0.0, 1.0, 0.0, 0.0).into_format().into(); let b3: Packed<Bgra, u32> = Srgba::new(0.0, 0.0, 0.5, 0.0).into_format().into(); let b4: Packed<Bgra, u32> = Srgba::new(0.0, 0.0, 0.0, 1.0).into_format().into(); let y1: u32 = 0x0000_8000; let y2: u32 = 0x00FF_0000; let y3: u32 = 0x8000_0000; let y4: u32 = 0x0000_00FF; assert_eq!(b1.color, y1); assert_eq!(b2.color, y2); assert_eq!(b3.color, y3); assert_eq!(b4.color, y4); let unpacked: Srgba<u8> = Packed::<Bgra, u32>::from(0x80FF_FF80).into(); assert_relative_eq!( Srgba::new(1.0, 1.0, 0.5, 0.5), unpacked.into_format(), epsilon = 0.01 ); } #[test] fn abgr() { let a1: Packed<Abgr, u32> = Srgb::new(0.5, 0.0, 0.0).into_format().into(); let a2: Packed<Abgr, u32> = Srgb::new(0.0, 1.0, 0.0).into_format().into(); let a3: Packed<Abgr, u32> = Srgb::new(0.0, 0.0, 0.5).into_format().into(); let x1: u32 = 0xFF00_0080; let x2: u32 = 0xFF00_FF00; let x3: u32 = 0xFF80_0000; assert_eq!(a1.color, x1); assert_eq!(a2.color, x2); assert_eq!(a3.color, x3); let unpacked: Srgb<u8> = 
Packed::<Abgr, u32>::from(0x80FF_FF80).into(); assert_relative_eq!( Srgb::new(0.5, 1.0, 1.0), unpacked.into_format(), epsilon = 0.01 ); let b1: Packed<Abgr, u32> = Srgba::new(0.5, 0.0, 0.0, 0.0).into_format().into(); let b2: Packed<Abgr, u32> = Srgba::new(0.0, 1.0, 0.0, 0.0).into_format().into(); let b3: Packed<Abgr, u32> = Srgba::new(0.0, 0.0, 0.5, 0.0).into_format().into(); let b4: Packed<Abgr, u32> = Srgba::new(0.0, 0.0, 0.0, 1.0).into_format().into(); let y1: u32 = 0x0000_0080; let y2: u32 = 0x0000_FF00; let y3: u32 = 0x0080_0000; let y4: u32 = 0xFF00_0000; assert_eq!(b1.color, y1); assert_eq!(b2.color, y2); assert_eq!(b3.color, y3); assert_eq!(b4.color, y4); let unpacked: Srgba<u8> = Packed::<Abgr, u32>::from(0x80FF_FF80).into(); assert_relative_eq!( Srgba::new(0.5, 1.0, 1.0, 0.5), unpacked.into_format(), epsilon = 0.01 ); } #[test] fn u32_to_color() { assert_eq!(0xFFFF_FF80, u32::from(Srgb::new(255u8, 255, 128))); assert_eq!(0x7FFF_FF80, u32::from(Srgba::new(127u8, 255u8, 255, 128))); } }
use crate::{cast::ComponentOrder, rgb}; #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Abgr; impl<S, T> ComponentOrder<rgb::Rgba<S, T>, [T; 4]> for Abgr { #[inline] fn pack(color: rgb::Rgba<S, T>) -> [T; 4] { let [red, green, blue, alpha]: [T; 4] = color.into(); [alpha, blue, green, red] } #[inline] fn unpack(packed: [T; 4]) -> rgb::Rgba<S, T> { let [alpha, blue, green, red] = packed; rgb::Rgba::new(red, green, blue, alpha) } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Argb; impl<S, T> ComponentOrder<rgb::Rgba<S, T>, [T; 4]> for Argb { #[inline] fn pack(color: rgb::Rgba<S, T>) -> [T; 4] { let [red, green, blue, alpha]: [T; 4] = color.into(); [alpha, red, green, blue] } #[inline] fn unpack(packed: [T; 4]) -> rgb::Rgba<S, T> { let [alpha, red, green, blue] = packed; rgb::Rgba::new(red, green, blue, alpha) } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Bgra; impl<S, T> ComponentOrder<rgb::Rgba<S, T>, [T; 4]> for Bgra { #[inline] fn pack(color: rgb::Rgba<S, T>) -> [T; 4] { let [red, green, blue, alpha]: [T; 4] = color.into(); [blue, green, red, alpha] } #[inline] fn unpack(packed: [T; 4]) -> rgb::Rgba<S, T> { let [blue, green, red, alpha] = packed; rgb::Rgba::new(red, green, blue, alpha) } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Rgba; impl<S, T> ComponentOrder<rgb::Rgba<S, T>, [T; 4]> for Rgba { #[inline] fn pack(color: rgb::Rgba<S, T>) -> [T; 4] { let [red, green, blue, alpha]: [T; 4] = color.into(); [red, green, blue, alpha] } #[inline] fn unpack(packed: [T; 4]) -> rgb::Rgba<S, T> { let [red, green, blue, alpha] = packed; rgb::Rgba::new(red, green, blue, alpha) } } #[cfg(test)] mod test { use super::{Abgr, Argb, Bgra, Rgba}; use crate::{cast::Packed, Srgb, Srgba}; #[test] fn rgba() { let a1: Packed<Rgba, u32> = Srgb::new(0.5, 0.0, 0.0).into_format().into(); let a2: Packed<Rgba, u32> = Srgb::new(0.0, 1.0, 0.0).into_format().into(); let a3: Packed<Rgba, u32> = Srgb::new(0.0, 0.0, 0.5).into_format().into(); let x1: 
u32 = 0x8000_00FF; let x2: u32 = 0x00FF_00FF; let x3: u32 = 0x0000_80FF; assert_eq!(a1.color, x1); assert_eq!(a2.color, x2); assert_eq!(a3.color, x3); let unpacked: Srgb<u8> = Packed::<Rgba, u32>::from(0x80FF_80FF).into(); assert_relative_eq!( Srgb::new(0.5, 1.0, 0.5), unpacked.into_format(), epsilon = 0.01 ); let b1: Packed<Rgba, u32> = Srgba::new(0.5, 0.0, 0.0, 0.0).into_format().into(); let b2: Packed<Rgba, u32> = Srgba::new(0.0, 1.0, 0.0, 0.0).into_format().into(); let b3: Packed<Rgba, u32> = Srgba::new(0.0, 0.0, 0.5, 0.0).into_format().into(); let b4: Packed<Rgba, u32> = Srgba::new(0.0, 0.0, 0.0, 1.0).into_format().into(); let y1: u32 = 0x8000_0000; let y2: u32 = 0x00FF_0000; let y3: u32 = 0x0000_8000; let y4: u32 = 0x0000_00FF; assert_eq!(b1.color, y1); assert_eq!(b2.color, y2); assert_eq!(b3.color, y3); assert_eq!(b4.color, y4); let unpacked: Srgba<u8> = Packed::<Rgba, u32>::from(0x80FF_80FF).into(); assert_relative_eq!( Srgba::new(0.5, 1.0, 0.5, 1.0), unpacked.into_format(), epsilon = 0.01 ); } #[test]
#[test] fn bgra() { let a1: Packed<Bgra, u32> = Srgb::new(0.5, 0.0, 0.0).into_format().into(); let a2: Packed<Bgra, u32> = Srgb::new(0.0, 1.0, 0.0).into_format().into(); let a3: Packed<Bgra, u32> = Srgb::new(0.0, 0.0, 0.5).into_format().into(); let x1: u32 = 0x0000_80FF; let x2: u32 = 0x00FF_00FF; let x3: u32 = 0x8000_00FF; assert_eq!(a1.color, x1); assert_eq!(a2.color, x2); assert_eq!(a3.color, x3); let unpacked: Srgb<u8> = Packed::<Bgra, u32>::from(0x80FF_FF80).into(); assert_relative_eq!( Srgb::new(1.0, 1.0, 0.5), unpacked.into_format(), epsilon = 0.01 ); let b1: Packed<Bgra, u32> = Srgba::new(0.5, 0.0, 0.0, 0.0).into_format().into(); let b2: Packed<Bgra, u32> = Srgba::new(0.0, 1.0, 0.0, 0.0).into_format().into(); let b3: Packed<Bgra, u32> = Srgba::new(0.0, 0.0, 0.5, 0.0).into_format().into(); let b4: Packed<Bgra, u32> = Srgba::new(0.0, 0.0, 0.0, 1.0).into_format().into(); let y1: u32 = 0x0000_8000; let y2: u32 = 0x00FF_0000; let y3: u32 = 0x8000_0000; let y4: u32 = 0x0000_00FF; assert_eq!(b1.color, y1); assert_eq!(b2.color, y2); assert_eq!(b3.color, y3); assert_eq!(b4.color, y4); let unpacked: Srgba<u8> = Packed::<Bgra, u32>::from(0x80FF_FF80).into(); assert_relative_eq!( Srgba::new(1.0, 1.0, 0.5, 0.5), unpacked.into_format(), epsilon = 0.01 ); } #[test] fn abgr() { let a1: Packed<Abgr, u32> = Srgb::new(0.5, 0.0, 0.0).into_format().into(); let a2: Packed<Abgr, u32> = Srgb::new(0.0, 1.0, 0.0).into_format().into(); let a3: Packed<Abgr, u32> = Srgb::new(0.0, 0.0, 0.5).into_format().into(); let x1: u32 = 0xFF00_0080; let x2: u32 = 0xFF00_FF00; let x3: u32 = 0xFF80_0000; assert_eq!(a1.color, x1); assert_eq!(a2.color, x2); assert_eq!(a3.color, x3); let unpacked: Srgb<u8> = Packed::<Abgr, u32>::from(0x80FF_FF80).into(); assert_relative_eq!( Srgb::new(0.5, 1.0, 1.0), unpacked.into_format(), epsilon = 0.01 ); let b1: Packed<Abgr, u32> = Srgba::new(0.5, 0.0, 0.0, 0.0).into_format().into(); let b2: Packed<Abgr, u32> = Srgba::new(0.0, 1.0, 0.0, 0.0).into_format().into(); 
let b3: Packed<Abgr, u32> = Srgba::new(0.0, 0.0, 0.5, 0.0).into_format().into(); let b4: Packed<Abgr, u32> = Srgba::new(0.0, 0.0, 0.0, 1.0).into_format().into(); let y1: u32 = 0x0000_0080; let y2: u32 = 0x0000_FF00; let y3: u32 = 0x0080_0000; let y4: u32 = 0xFF00_0000; assert_eq!(b1.color, y1); assert_eq!(b2.color, y2); assert_eq!(b3.color, y3); assert_eq!(b4.color, y4); let unpacked: Srgba<u8> = Packed::<Abgr, u32>::from(0x80FF_FF80).into(); assert_relative_eq!( Srgba::new(0.5, 1.0, 1.0, 0.5), unpacked.into_format(), epsilon = 0.01 ); } #[test] fn u32_to_color() { assert_eq!(0xFFFF_FF80, u32::from(Srgb::new(255u8, 255, 128))); assert_eq!(0x7FFF_FF80, u32::from(Srgba::new(127u8, 255u8, 255, 128))); } }
fn argb() { let a1: Packed<Argb, u32> = Srgb::new(0.5, 0.0, 0.0).into_format().into(); let a2: Packed<Argb, u32> = Srgb::new(0.0, 1.0, 0.0).into_format().into(); let a3: Packed<Argb, u32> = Srgb::new(0.0, 0.0, 0.5).into_format().into(); let x1: u32 = 0xFF80_0000; let x2: u32 = 0xFF00_FF00; let x3: u32 = 0xFF00_0080; assert_eq!(a1.color, x1); assert_eq!(a2.color, x2); assert_eq!(a3.color, x3); let unpacked: Srgb<u8> = Packed::<Argb, u32>::from(0x80FF_80FF).into(); assert_relative_eq!( Srgb::new(1.0, 0.5, 1.0), unpacked.into_format(), epsilon = 0.01 ); let b1: Packed<Argb, u32> = Srgba::new(0.5, 0.0, 0.0, 0.0).into_format().into(); let b2: Packed<Argb, u32> = Srgba::new(0.0, 1.0, 0.0, 0.0).into_format().into(); let b3: Packed<Argb, u32> = Srgba::new(0.0, 0.0, 0.5, 0.0).into_format().into(); let b4: Packed<Argb, u32> = Srgba::new(0.0, 0.0, 0.0, 1.0).into_format().into(); let y1: u32 = 0x0080_0000; let y2: u32 = 0x0000_FF00; let y3: u32 = 0x0000_0080; let y4: u32 = 0xFF00_0000; assert_eq!(b1.color, y1); assert_eq!(b2.color, y2); assert_eq!(b3.color, y3); assert_eq!(b4.color, y4); let unpacked: Srgba<u8> = Packed::<Argb, u32>::from(0x80FF_80FF).into(); assert_relative_eq!( Srgba::new(1.0, 0.5, 1.0, 0.5), unpacked.into_format(), epsilon = 0.01 ); }
function_block-full_function
[ { "content": "#[test]\n\npub fn color_mine_from_rgb() {\n\n data_color_mine::run_from_rgb_tests();\n\n}\n", "file_path": "palette/tests/convert/mod.rs", "rank": 0, "score": 258515.81500016944 }, { "content": "#[test]\n\npub fn color_mine_from_linear_rgb() {\n\n data_color_mine::run_fro...
Rust
src/tests/default_connector_test.rs
aschaeffer/inexor-rgf-plugin-connector
97f904278f847e672869c6941c1dae3438f42081
use crate::behaviour::relation::connector::Connector; use crate::behaviour::relation::connector::CONNECTORS; use crate::model::{PropertyInstanceGetter, PropertyInstanceSetter}; use crate::tests::utils::create_connector; use crate::tests::utils::create_random_entity_instance; use crate::tests::utils::r_string; use serde_json::json; use std::sync::Arc; #[test] fn test_type_name_construction() { let type_name = "default_connector"; let outbound_property_name = "lhs"; let inbound_property_name = "result"; let full_type_name = Connector::type_name(type_name, outbound_property_name, inbound_property_name); assert_eq!("default_connector--lhs--result", full_type_name); } #[test] fn default_connector_test() { let outbound_property_name = r_string(); let inbound_property_name = r_string(); let outbound_entity = Arc::new(create_random_entity_instance( outbound_property_name.clone(), )); let inbound_entity = Arc::new(create_random_entity_instance(inbound_property_name.clone())); let type_name = "default_connector"; let r = Arc::new(create_connector( outbound_entity.clone(), type_name, inbound_entity.clone(), outbound_property_name.as_str(), inbound_property_name.as_str(), )); let propagation_function = CONNECTORS.get(type_name); let mut connector = Connector::from_relation(r.clone(), *propagation_function.unwrap()); connector .relation .outbound .set(outbound_property_name.clone(), json!(true)); assert!(connector .relation .inbound .as_bool(inbound_property_name.clone()) .unwrap()); connector .relation .outbound .set(outbound_property_name.clone(), json!(false)); assert!(!connector .relation .inbound .as_bool(inbound_property_name.clone()) .unwrap()); connector .relation .outbound .set(outbound_property_name.clone(), json!(123)); assert_eq!( 123, connector .relation .inbound .as_u64(inbound_property_name.clone()) .unwrap() ); connector .relation .outbound .set(outbound_property_name.clone(), json!(-123)); assert_eq!( -123, connector .relation .inbound 
.as_i64(inbound_property_name.clone()) .unwrap() ); connector .relation .outbound .set(outbound_property_name.clone(), json!(1.23)); assert_eq!( 1.23, connector .relation .inbound .as_f64(inbound_property_name.clone()) .unwrap() ); let s = r_string(); connector .relation .outbound .set(outbound_property_name.clone(), json!(s.clone())); assert_eq!( s, connector .relation .inbound .as_string(inbound_property_name.clone()) .unwrap() ); connector.disconnect(); connector.relation.outbound.set( outbound_property_name.clone(), json!("MUST NOT PROPAGATED ANYMORE"), ); assert_eq!( s, connector .relation .inbound .as_string(inbound_property_name.clone()) .unwrap() ); connector.connect(); let s2 = r_string(); connector .relation .outbound .set(outbound_property_name.clone(), json!(s2.clone())); assert_eq!( s2, connector .relation .inbound .as_string(inbound_property_name.clone()) .unwrap() ); }
use crate::behaviour::relation::connector::Connector; use crate::behaviour::relation::connector::CONNECTORS; use crate::model::{PropertyInstanceGetter, PropertyInstanceSetter}; use crate::tests::utils::create_connector; use crate::tests::utils::create_random_entity_instance; use crate::tests::utils::r_string; use serde_json::json; use std::sync::Arc; #[test] fn test_type_name_construction() { let type_name = "default_connector";
#[test] fn default_connector_test() { let outbound_property_name = r_string(); let inbound_property_name = r_string(); let outbound_entity = Arc::new(create_random_entity_instance( outbound_property_name.clone(), )); let inbound_entity = Arc::new(create_random_entity_instance(inbound_property_name.clone())); let type_name = "default_connector"; let r = Arc::new(create_connector( outbound_entity.clone(), type_name, inbound_entity.clone(), outbound_property_name.as_str(), inbound_property_name.as_str(), )); let propagation_function = CONNECTORS.get(type_name); let mut connector = Connector::from_relation(r.clone(), *propagation_function.unwrap()); connector .relation .outbound .set(outbound_property_name.clone(), json!(true)); assert!(connector .relation .inbound .as_bool(inbound_property_name.clone()) .unwrap()); connector .relation .outbound .set(outbound_property_name.clone(), json!(false)); assert!(!connector .relation .inbound .as_bool(inbound_property_name.clone()) .unwrap()); connector .relation .outbound .set(outbound_property_name.clone(), json!(123)); assert_eq!( 123, connector .relation .inbound .as_u64(inbound_property_name.clone()) .unwrap() ); connector .relation .outbound .set(outbound_property_name.clone(), json!(-123)); assert_eq!( -123, connector .relation .inbound .as_i64(inbound_property_name.clone()) .unwrap() ); connector .relation .outbound .set(outbound_property_name.clone(), json!(1.23)); assert_eq!( 1.23, connector .relation .inbound .as_f64(inbound_property_name.clone()) .unwrap() ); let s = r_string(); connector .relation .outbound .set(outbound_property_name.clone(), json!(s.clone())); assert_eq!( s, connector .relation .inbound .as_string(inbound_property_name.clone()) .unwrap() ); connector.disconnect(); connector.relation.outbound.set( outbound_property_name.clone(), json!("MUST NOT PROPAGATED ANYMORE"), ); assert_eq!( s, connector .relation .inbound .as_string(inbound_property_name.clone()) .unwrap() ); connector.connect(); let s2 = 
r_string(); connector .relation .outbound .set(outbound_property_name.clone(), json!(s2.clone())); assert_eq!( s2, connector .relation .inbound .as_string(inbound_property_name.clone()) .unwrap() ); }
let outbound_property_name = "lhs"; let inbound_property_name = "result"; let full_type_name = Connector::type_name(type_name, outbound_property_name, inbound_property_name); assert_eq!("default_connector--lhs--result", full_type_name); }
function_block-function_prefix_line
[ { "content": "#[test]\n\nfn propagation_function_test() {\n\n let expected_propagation_functions = vec![\n\n \"debug_connector\",\n\n \"default_connector\",\n\n \"parse_float_connector\",\n\n \"parse_int_connector\",\n\n \"to_string_connector\",\n\n \"trace_connector...
Rust
services/common/src/wrapper/v0/node_traits.rs
tiagolobocastro/Mayastor
c9cf777e7776f529a2433d29b8513d1c601684a6
use super::*; pub struct GrpcContext { pub client: MayaClient, } pub type MayaClient = MayastorClient<Channel>; impl GrpcContext { pub async fn new(endpoint: String) -> Result<GrpcContext, SvcError> { let uri = format!("http://{}", endpoint); let uri = http::uri::Uri::from_str(&uri).unwrap(); let endpoint = tonic::transport::Endpoint::from(uri) .timeout(std::time::Duration::from_secs(1)); let client = MayaClient::connect(endpoint) .await .context(GrpcConnect {})?; Ok(Self { client, }) } } #[async_trait] #[clonable] pub trait NodeReplicaTrait: Send + Sync + Debug + Clone { async fn fetch_replicas(&self) -> Result<Vec<Replica>, SvcError>; async fn create_replica( &self, request: &CreateReplica, ) -> Result<Replica, SvcError>; async fn share_replica( &self, request: &ShareReplica, ) -> Result<String, SvcError>; async fn unshare_replica( &self, request: &UnshareReplica, ) -> Result<(), SvcError>; async fn destroy_replica( &self, request: &DestroyReplica, ) -> Result<(), SvcError>; fn on_create_replica(&mut self, replica: &Replica); fn on_destroy_replica(&mut self, pool: &PoolId, replica: &ReplicaId); fn on_update_replica( &mut self, pool: &PoolId, replica: &ReplicaId, share: &Protocol, uri: &str, ); } #[async_trait] #[clonable] pub trait NodePoolTrait: Send + Sync + Debug + Clone { async fn fetch_pools(&self) -> Result<Vec<Pool>, SvcError>; async fn create_pool(&self, request: &CreatePool) -> Result<Pool, SvcError>; async fn destroy_pool(&self, request: &DestroyPool) -> Result<(), SvcError>; async fn on_create_pool(&mut self, pool: &Pool, replicas: &[Replica]); fn on_destroy_pool(&mut self, pool: &PoolId); } #[async_trait] #[clonable] #[allow(unused_variables)] pub trait NodeNexusTrait: Send + Sync + Debug + Clone { fn nexuses(&self) -> Vec<Nexus> { vec![] } async fn fetch_nexuses(&self) -> Result<Vec<Nexus>, SvcError> { Err(SvcError::NotImplemented {}) } async fn create_nexus( &self, request: &CreateNexus, ) -> Result<Nexus, SvcError> { Err(SvcError::NotImplemented 
{}) } async fn destroy_nexus( &self, request: &DestroyNexus, ) -> Result<(), SvcError> { Err(SvcError::NotImplemented {}) } async fn share_nexus( &self, request: &ShareNexus, ) -> Result<String, SvcError> { Err(SvcError::NotImplemented {}) } async fn unshare_nexus( &self, request: &UnshareNexus, ) -> Result<(), SvcError> { Err(SvcError::NotImplemented {}) } fn on_create_nexus(&mut self, nexus: &Nexus) {} fn on_update_nexus(&mut self, nexus: &NexusId, uri: &str) {} fn on_destroy_nexus(&mut self, nexus: &NexusId) {} } #[async_trait] #[clonable] #[allow(unused_variables)] pub trait NodeNexusChildTrait: Send + Sync + Debug + Clone { async fn fetch_children(&self) -> Result<Vec<Child>, SvcError> { Err(SvcError::NotImplemented {}) } async fn add_child( &self, request: &AddNexusChild, ) -> Result<Child, SvcError> { Err(SvcError::NotImplemented {}) } async fn remove_child( &self, request: &RemoveNexusChild, ) -> Result<(), SvcError> { Err(SvcError::NotImplemented {}) } fn on_add_child(&mut self, nexus: &NexusId, child: &Child) {} fn on_remove_child(&mut self, request: &RemoveNexusChild) {} } #[async_trait] #[clonable] pub trait NodeWrapperTrait: Send + Sync + Debug + Clone + NodeReplicaTrait + NodePoolTrait + NodeNexusTrait + NodeNexusChildTrait { #[allow(clippy::new_ret_no_self)] async fn new(node: &NodeId) -> Result<NodeWrapper, SvcError> where Self: Sized; async fn fetch_nodes() -> Result<Vec<Node>, SvcError> where Self: Sized, { MessageBus::get_nodes().await.context(BusGetNodes {}) } fn id(&self) -> NodeId; fn node(&self) -> Node; fn pools(&self) -> Vec<Pool>; fn pools_wrapper(&self) -> Vec<PoolWrapper>; fn replicas(&self) -> Vec<Replica>; fn is_online(&self) -> bool; fn online_only(&self) -> Result<(), SvcError> { if !self.is_online() { Err(SvcError::NodeNotOnline { node: self.node().id, }) } else { Ok(()) } } async fn update(&mut self); fn set_state(&mut self, state: NodeState); async fn grpc_client(&self) -> Result<GrpcContext, SvcError> { self.online_only()?; 
GrpcContext::new(self.node().grpc_endpoint.clone()).await } } pub type NodeWrapper = Box<dyn NodeWrapperTrait>; #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct PoolWrapper { pool: Pool, replicas: Vec<Replica>, } impl PoolWrapper { pub fn new_from(pool: &Pool, replicas: &[Replica]) -> Self { Self { pool: pool.clone(), replicas: replicas.into(), } } pub fn pool(&self) -> Pool { self.pool.clone() } pub fn uuid(&self) -> PoolId { self.pool.id.clone() } pub fn node(&self) -> NodeId { self.pool.node.clone() } pub fn state(&self) -> PoolState { self.pool.state.clone() } pub fn free_space(&self) -> u64 { if self.pool.capacity >= self.pool.used { self.pool.capacity - self.pool.used } else { tracing::error!( "Pool '{}' has a capacity of '{} B' but is using '{} B'", self.pool.id, self.pool.capacity, self.pool.used ); 0 } } pub fn set_unknown(&mut self) { self.pool.state = PoolState::Unknown; } pub fn replicas(&self) -> Vec<Replica> { self.replicas.clone() } pub fn added_replica(&mut self, replica: &Replica) { self.replicas.push(replica.clone()) } pub fn removed_replica(&mut self, uuid: &ReplicaId) { self.replicas.retain(|replica| &replica.uuid != uuid) } pub fn updated_replica( &mut self, uuid: &ReplicaId, share: &Protocol, uri: &str, ) { if let Some(replica) = self .replicas .iter_mut() .find(|replica| &replica.uuid == uuid) { replica.share = share.clone(); replica.uri = uri.to_string(); } } } impl PartialOrd for PoolWrapper { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { match self.pool.state.partial_cmp(&other.pool.state) { Some(Ordering::Greater) => Some(Ordering::Greater), Some(Ordering::Less) => Some(Ordering::Less), Some(Ordering::Equal) => { match self.replicas.len().cmp(&other.replicas.len()) { Ordering::Greater => Some(Ordering::Greater), Ordering::Less => Some(Ordering::Less), Ordering::Equal => { Some(self.free_space().cmp(&other.free_space())) } } } None => None, } } } impl Ord for PoolWrapper { fn cmp(&self, other: &Self) -> Ordering { 
match self.pool.state.partial_cmp(&other.pool.state) { Some(Ordering::Greater) => Ordering::Greater, Some(Ordering::Less) => Ordering::Less, Some(Ordering::Equal) => { match self.replicas.len().cmp(&other.replicas.len()) { Ordering::Greater => Ordering::Greater, Ordering::Less => Ordering::Less, Ordering::Equal => { self.free_space().cmp(&other.free_space()) } } } None => Ordering::Equal, } } }
use super::*; pub struct GrpcContext { pub client: MayaClient, } pub type MayaClient = MayastorClient<Channel>; impl GrpcContext { pub async fn new(endpoint: String) -> Result<GrpcContext, SvcError> { let uri = format!("http://{}", endpoint); let uri = http::uri::Uri::from_str(&uri).unwrap(); let endpoint = tonic::transport::Endpoint::from(uri) .timeout(std::time::Duration::from_secs(1)); let client = MayaClient::connect(endpoint) .await .context(GrpcConnect {})?; Ok(Self { client, }) } } #[async_trait] #[clonable] pub trait NodeReplicaTrait: Send + Sync + Debug + Clone { async fn fetch_replicas(&self) -> Result<Vec<Replica>, SvcError>; async fn create_replica( &self, request: &CreateReplica, ) -> Result<Replica, SvcError>; async fn share_replica( &self, request: &ShareReplica, ) -> Result<String, SvcError>; async fn unshare_replica( &self, request: &UnshareReplica, ) -> Result<(), SvcError>; async fn destroy_replica( &self, request: &DestroyReplica, ) -> Result<(), SvcError>; fn on_create_replica(&mut self, replica: &Replica); fn on_destroy_replica(&mut self, pool: &PoolId, replica: &ReplicaId); fn on_update_replica( &mut self, pool: &PoolId, replica: &ReplicaId, share: &Protocol, uri: &str, ); } #[async_trait] #[clonable] pub trait NodePoolTrait: Send + Sync + Debug + Clone { async fn fetch_pools(&self) -> Result<Vec<Pool>, SvcError>; async fn create_pool(&self, request: &CreatePool) -> Result<Pool, SvcError>; async fn destroy_pool(&self, request: &DestroyPool) -> Result<(), SvcError>; async fn on_create_pool(&mut self, pool: &Pool, replicas: &[Replica]); fn on_destroy_pool(&mut self, pool: &PoolId); } #[async_trait] #[clonable] #[allow(unused_variables)] pub trait NodeNexusTrait: Send + Sync + Debug + Clone { fn nexuses(&self) -> Vec<Nexus> { vec![] } async fn fetch_nexuses(&self) -> Result<Vec<Nexus>, SvcError> { Err(SvcError::NotImplemented {}) } async fn create_nexus( &self, request: &CreateNexus, ) -> Result<Nexus, SvcError> { Err(SvcError::NotImplemented 
{}) }
async fn share_nexus( &self, request: &ShareNexus, ) -> Result<String, SvcError> { Err(SvcError::NotImplemented {}) } async fn unshare_nexus( &self, request: &UnshareNexus, ) -> Result<(), SvcError> { Err(SvcError::NotImplemented {}) } fn on_create_nexus(&mut self, nexus: &Nexus) {} fn on_update_nexus(&mut self, nexus: &NexusId, uri: &str) {} fn on_destroy_nexus(&mut self, nexus: &NexusId) {} } #[async_trait] #[clonable] #[allow(unused_variables)] pub trait NodeNexusChildTrait: Send + Sync + Debug + Clone { async fn fetch_children(&self) -> Result<Vec<Child>, SvcError> { Err(SvcError::NotImplemented {}) } async fn add_child( &self, request: &AddNexusChild, ) -> Result<Child, SvcError> { Err(SvcError::NotImplemented {}) } async fn remove_child( &self, request: &RemoveNexusChild, ) -> Result<(), SvcError> { Err(SvcError::NotImplemented {}) } fn on_add_child(&mut self, nexus: &NexusId, child: &Child) {} fn on_remove_child(&mut self, request: &RemoveNexusChild) {} } #[async_trait] #[clonable] pub trait NodeWrapperTrait: Send + Sync + Debug + Clone + NodeReplicaTrait + NodePoolTrait + NodeNexusTrait + NodeNexusChildTrait { #[allow(clippy::new_ret_no_self)] async fn new(node: &NodeId) -> Result<NodeWrapper, SvcError> where Self: Sized; async fn fetch_nodes() -> Result<Vec<Node>, SvcError> where Self: Sized, { MessageBus::get_nodes().await.context(BusGetNodes {}) } fn id(&self) -> NodeId; fn node(&self) -> Node; fn pools(&self) -> Vec<Pool>; fn pools_wrapper(&self) -> Vec<PoolWrapper>; fn replicas(&self) -> Vec<Replica>; fn is_online(&self) -> bool; fn online_only(&self) -> Result<(), SvcError> { if !self.is_online() { Err(SvcError::NodeNotOnline { node: self.node().id, }) } else { Ok(()) } } async fn update(&mut self); fn set_state(&mut self, state: NodeState); async fn grpc_client(&self) -> Result<GrpcContext, SvcError> { self.online_only()?; GrpcContext::new(self.node().grpc_endpoint.clone()).await } } pub type NodeWrapper = Box<dyn NodeWrapperTrait>; #[derive(Clone, 
Debug, Default, Eq, PartialEq)] pub struct PoolWrapper { pool: Pool, replicas: Vec<Replica>, } impl PoolWrapper { pub fn new_from(pool: &Pool, replicas: &[Replica]) -> Self { Self { pool: pool.clone(), replicas: replicas.into(), } } pub fn pool(&self) -> Pool { self.pool.clone() } pub fn uuid(&self) -> PoolId { self.pool.id.clone() } pub fn node(&self) -> NodeId { self.pool.node.clone() } pub fn state(&self) -> PoolState { self.pool.state.clone() } pub fn free_space(&self) -> u64 { if self.pool.capacity >= self.pool.used { self.pool.capacity - self.pool.used } else { tracing::error!( "Pool '{}' has a capacity of '{} B' but is using '{} B'", self.pool.id, self.pool.capacity, self.pool.used ); 0 } } pub fn set_unknown(&mut self) { self.pool.state = PoolState::Unknown; } pub fn replicas(&self) -> Vec<Replica> { self.replicas.clone() } pub fn added_replica(&mut self, replica: &Replica) { self.replicas.push(replica.clone()) } pub fn removed_replica(&mut self, uuid: &ReplicaId) { self.replicas.retain(|replica| &replica.uuid != uuid) } pub fn updated_replica( &mut self, uuid: &ReplicaId, share: &Protocol, uri: &str, ) { if let Some(replica) = self .replicas .iter_mut() .find(|replica| &replica.uuid == uuid) { replica.share = share.clone(); replica.uri = uri.to_string(); } } } impl PartialOrd for PoolWrapper { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { match self.pool.state.partial_cmp(&other.pool.state) { Some(Ordering::Greater) => Some(Ordering::Greater), Some(Ordering::Less) => Some(Ordering::Less), Some(Ordering::Equal) => { match self.replicas.len().cmp(&other.replicas.len()) { Ordering::Greater => Some(Ordering::Greater), Ordering::Less => Some(Ordering::Less), Ordering::Equal => { Some(self.free_space().cmp(&other.free_space())) } } } None => None, } } } impl Ord for PoolWrapper { fn cmp(&self, other: &Self) -> Ordering { match self.pool.state.partial_cmp(&other.pool.state) { Some(Ordering::Greater) => Ordering::Greater, Some(Ordering::Less) => 
Ordering::Less, Some(Ordering::Equal) => { match self.replicas.len().cmp(&other.replicas.len()) { Ordering::Greater => Ordering::Greater, Ordering::Less => Ordering::Less, Ordering::Equal => { self.free_space().cmp(&other.free_space()) } } } None => Ordering::Equal, } } }
async fn destroy_nexus( &self, request: &DestroyNexus, ) -> Result<(), SvcError> { Err(SvcError::NotImplemented {}) }
function_block-full_function
[ { "content": "pub fn bdev_get_name(uri: &str) -> Result<String, NexusBdevError> {\n\n Ok(Uri::parse(uri)?.get_name())\n\n}\n\n\n\nimpl std::cmp::PartialEq<url::Url> for &Bdev {\n\n fn eq(&self, uri: &url::Url) -> bool {\n\n match Uri::parse(&uri.to_string()) {\n\n Ok(device) if device.ge...
Rust
tio-rust/tio/src/slip.rs
twinleaf/twinleaf-web-ui
69b57c080c1e72a79c6a9a8a0111c60309e5048b
/* SLIP protocol module Thomas Kornack Twinleaf LLC 2016 NOTE: this implementation was mostly copy/paste from old code. Did run `cargo fmt. */ use anyhow::{anyhow, Result}; use std::convert::TryInto; const SLIP_END: u8 = 0xC0; const SLIP_ESC: u8 = 0xDB; const SLIP_ESC_END: u8 = 0xDC; const SLIP_ESC_ESC: u8 = 0xDD; const SLIP_MAX_LEN: usize = 2048; pub fn tio_slip_decode(rx_slip: &[u8]) -> Result<Vec<u8>> { let mut rx_esc_next = false; let mut rx_buf = vec![]; if rx_slip.len() >= SLIP_MAX_LEN { return Err(anyhow!("TIO SLIP message too long: {}", rx_slip.len())); } for byte in rx_slip { if rx_esc_next { rx_esc_next = false; match *byte { SLIP_ESC_END => rx_buf.push(SLIP_END), SLIP_ESC_ESC => rx_buf.push(SLIP_ESC), _ => { return Err(anyhow!("Corrupt SLIP escape character {:x}.", byte)); } } } else { match *byte { SLIP_END => { if rx_buf.len() > 4 { let rx_crc = u32::from_le_bytes(rx_buf[rx_buf.len() - 4..].try_into()?); rx_buf.pop(); rx_buf.pop(); rx_buf.pop(); rx_buf.pop(); if rx_crc == crc32fast::hash(&rx_buf) { return Ok(rx_buf); } else { return Err(anyhow!("CRC-32 checksum failed")); } } } SLIP_ESC => { rx_esc_next = true; } _ => { rx_buf.push(*byte); } } } } Err(anyhow!("partial message received (not ending in END)")) } pub fn tio_slip_encode(msg: &[u8]) -> Vec<u8> { let mut slip_msg = Vec::with_capacity(msg.len() * 2); slip_msg.push(SLIP_END); for byte in msg { match *byte { SLIP_END => { slip_msg.push(SLIP_ESC); slip_msg.push(SLIP_ESC_END); } SLIP_ESC => { slip_msg.push(SLIP_ESC); slip_msg.push(SLIP_ESC_ESC); } _ => { slip_msg.push(*byte); } } } let tx_crc = crc32fast::hash(msg); for byte in tx_crc.to_le_bytes() { match byte { SLIP_END => { slip_msg.push(SLIP_ESC); slip_msg.push(SLIP_ESC_END); } SLIP_ESC => { slip_msg.push(SLIP_ESC); slip_msg.push(SLIP_ESC_ESC); } _ => { slip_msg.push(byte); } } } slip_msg.push(SLIP_END); slip_msg } #[cfg(test)] mod test { use super::*; #[test] fn encode_decode() { let mut msg: Vec<u8> = Vec::with_capacity(256); for i in 0..255 
{ msg.push(i); } let slip_msg = tio_slip_encode(&msg); let msg2 = tio_slip_decode(&slip_msg).unwrap(); assert_eq!(&msg[..], &msg2[..]) } }
/* SLIP protocol module Thomas Kornack Twinleaf LLC 2016 NOTE: this implementation was mostly copy/paste from old code. Did run `cargo fmt. */ use anyhow::{anyhow, Result}; use std::convert::TryInto; const SLIP_END: u8 = 0xC0; const SLIP_ESC: u8 = 0xDB; const SLIP_ESC_END: u8 = 0xDC; const SLIP_ESC_ESC: u8 = 0xDD; const SLIP_MAX_LEN: usize = 2048; pub fn tio_slip_decode(rx_slip: &[u8]) -> Result<Vec<u8>> { let mut rx_esc_next = false; let mut rx_buf = vec![]; if rx_slip.len() >= SLIP_MAX_LEN { return Err(anyhow!("TIO SLIP message too long: {}", rx_slip.len())); } for byte in rx_slip { if rx_esc_next { rx_esc_next = false; match *byte { SLIP_ESC_END => rx_buf.push(SLIP_END), SLIP_ESC_ESC => rx_buf.push(SLIP_ESC), _ => { return Err(anyhow!("Corrupt SLIP escape character {:x}.", byte)); } } } else { match *byte { SLIP_END => { if rx_buf.len() > 4 { let rx_crc = u32::from_le_bytes(rx_buf[rx_buf.len() - 4..].try_into()?); rx_buf.pop(); rx_buf.pop(); rx_buf.pop(); rx_buf.pop(); if rx_crc == crc32fast::hash(&rx_buf) { return Ok(rx_buf); } else { return Err(anyhow!("CRC-32 checksum failed")); } } } SLIP_ESC => { rx_esc_next = true; } _ => { rx_buf.push(*byte); } } } } Err(anyhow!("partial message received (not ending in END)")) } pub fn tio_slip_encode(msg: &[u8]) -> Vec<u8> { let mut slip_msg = Vec::with_capacity(msg.len() * 2); slip_msg.push(SLIP_END); for byte in msg { match *byte { SLIP_END => { slip_msg.push(SLIP_ESC); slip_msg.push(SLIP_ESC_END); } SLIP_ESC => { slip_msg.push(SLIP_ESC);
#[cfg(test)] mod test { use super::*; #[test] fn encode_decode() { let mut msg: Vec<u8> = Vec::with_capacity(256); for i in 0..255 { msg.push(i); } let slip_msg = tio_slip_encode(&msg); let msg2 = tio_slip_decode(&slip_msg).unwrap(); assert_eq!(&msg[..], &msg2[..]) } }
slip_msg.push(SLIP_ESC_ESC); } _ => { slip_msg.push(*byte); } } } let tx_crc = crc32fast::hash(msg); for byte in tx_crc.to_le_bytes() { match byte { SLIP_END => { slip_msg.push(SLIP_ESC); slip_msg.push(SLIP_ESC_END); } SLIP_ESC => { slip_msg.push(SLIP_ESC); slip_msg.push(SLIP_ESC_ESC); } _ => { slip_msg.push(byte); } } } slip_msg.push(SLIP_END); slip_msg }
function_block-function_prefix_line
[ { "content": "/// Takes a pointer to some bytes, returns a RawPacket\n\n///\n\n/// Could probably take a pointer to an array of bytes or something like that instead.\n\npub fn raw_packet_from_bytes(bytes: &[u8]) -> Result<JsValue, JsValue> {\n\n match RawPacket::from_bytes(bytes) {\n\n Ok(raw_packet) ...
Rust
rcalc/src/lib.rs
smackem/rust-calc
0a7ccdc5a81fd763ba6fbf807ea9e408f30985e5
extern crate regex; #[macro_use] extern crate lazy_static; extern crate num; #[macro_use] extern crate log; mod lexer; mod parser; mod value; mod interpreter; mod util; use std::collections::HashMap; use std::thread; use std::io::{ BufWriter, Write }; use interpreter::Context; use lexer::Lexer; use util::Boxable; pub use value::Value; pub use interpreter::RuntimeItem; pub static IT_IDENT: &'static str = "it"; pub struct Calculator { ctx: Box<Context>, lexer: Lexer, } impl Calculator { pub fn new() -> Calculator { let ctx = { let mut map: HashMap<String, RuntimeItem> = HashMap::new(); map.insert(IT_IDENT.to_string(), RuntimeItem::Value(Value::Integer(0))); map.insert("pi".to_string(), RuntimeItem::Value(Value::Float(std::f64::consts::PI))); map.insert("e".to_string(), RuntimeItem::Value(Value::Float(std::f64::consts::E))); interpreter::context_from_hashmap(map) }; Calculator { ctx: ctx, lexer: Lexer::new() } } pub fn calc(&mut self, src: &str) -> Result<&RuntimeItem, String> { let input = try!(self.lexer.lex(&src)); info!("Tokens: {:?}", input); let stmt = try!(parser::parse(&input)); info!("Ast: {:?}", stmt); let item = try!(interpreter::interpret(&stmt, &mut *self.ctx)); self.ctx.put(IT_IDENT, item); Result::Ok(self.ctx.get(IT_IDENT).unwrap()) } pub fn calc_parallel(&mut self, srcs: Vec<String>) -> &RuntimeItem { let mut threads = vec![]; for src in srcs { let context_map = self.get_context(); threads.push(thread::spawn(move || { let mut local_calc = Calculator::with_context(context_map); let _ = local_calc.calc(&src).unwrap(); local_calc.get_context() })); }; let mut its = vec![]; for thread in threads { match thread.join() { Result::Ok(map) => { for (ident, item) in map.iter() { self.ctx.put(ident, item.clone()); }; if let &RuntimeItem::Value(ref val) = map.get(IT_IDENT).unwrap() { its.push(val.clone()); }; }, Result::Err(x) => error!("{:?}", x), } } let it = RuntimeItem::Value(Value::Vector(its.arc())); self.ctx.put(IT_IDENT, it); 
self.ctx.get(IT_IDENT).unwrap() } pub fn write_json<T: Write>(&self, writer: &mut BufWriter<T>) -> ::std::io::Result<()> { let item_tuples = { let mut items = (*self.ctx).list(); items.sort_by_key(|&(ident, _)| ident); items }; try!(writer.write_all(b"{\n")); for (ident, item) in item_tuples { if let &RuntimeItem::Value(ref val) = item { try!(write!(writer, " \"{}\": ", &ident)); try!(val.write_json(writer)); try!(writer.write_all(b",\n")); } } writer.write_all(b"}") } } impl Calculator { fn with_context(map: HashMap<String, RuntimeItem>) -> Calculator { let ctx = interpreter::context_from_hashmap(map); Calculator { ctx: ctx, lexer: Lexer::new() } } fn get_context(&self) -> HashMap<String, RuntimeItem> { let mut map = HashMap::new(); for (ident, item) in self.ctx.list() { map.insert(ident.clone(), item.clone()); } map } }
extern crate regex; #[macro_use] extern crate lazy_static; extern crate num; #[macro_use] extern crate log; mod lexer; mod parser; mod value; mod interpreter; mod util; use std::collections::HashMap; use std::thread; use std::io::{ BufWriter, Write }; use interpreter::Context; use lexer::Lexer; use util::Boxable; pub use value::Value; pub use interpreter::RuntimeItem; pub static IT_IDENT: &'static str = "it"; pub struct Calculator { ctx: Box<Context>, lexer: Lexer, } impl Calculator { pub fn new() -> Calculator { let ctx = { let mut map: HashMap<String, RuntimeItem> = HashMap::new(); map.insert(IT_IDENT.to_string(), RuntimeItem::Value(Value::Integer(0))); map.insert("pi".to_string(), RuntimeItem::Value(Value::Float(std::f64::consts::PI))); map.insert("e".to_string(), RuntimeItem::Value(Value::Float(std::f64::consts::E))); interpreter::context_from_hashmap(map) }; Calculator { ctx: ctx, lexer: Lexer::new() } }
pub fn calc_parallel(&mut self, srcs: Vec<String>) -> &RuntimeItem { let mut threads = vec![]; for src in srcs { let context_map = self.get_context(); threads.push(thread::spawn(move || { let mut local_calc = Calculator::with_context(context_map); let _ = local_calc.calc(&src).unwrap(); local_calc.get_context() })); }; let mut its = vec![]; for thread in threads { match thread.join() { Result::Ok(map) => { for (ident, item) in map.iter() { self.ctx.put(ident, item.clone()); }; if let &RuntimeItem::Value(ref val) = map.get(IT_IDENT).unwrap() { its.push(val.clone()); }; }, Result::Err(x) => error!("{:?}", x), } } let it = RuntimeItem::Value(Value::Vector(its.arc())); self.ctx.put(IT_IDENT, it); self.ctx.get(IT_IDENT).unwrap() } pub fn write_json<T: Write>(&self, writer: &mut BufWriter<T>) -> ::std::io::Result<()> { let item_tuples = { let mut items = (*self.ctx).list(); items.sort_by_key(|&(ident, _)| ident); items }; try!(writer.write_all(b"{\n")); for (ident, item) in item_tuples { if let &RuntimeItem::Value(ref val) = item { try!(write!(writer, " \"{}\": ", &ident)); try!(val.write_json(writer)); try!(writer.write_all(b",\n")); } } writer.write_all(b"}") } } impl Calculator { fn with_context(map: HashMap<String, RuntimeItem>) -> Calculator { let ctx = interpreter::context_from_hashmap(map); Calculator { ctx: ctx, lexer: Lexer::new() } } fn get_context(&self) -> HashMap<String, RuntimeItem> { let mut map = HashMap::new(); for (ident, item) in self.ctx.list() { map.insert(ident.clone(), item.clone()); } map } }
pub fn calc(&mut self, src: &str) -> Result<&RuntimeItem, String> { let input = try!(self.lexer.lex(&src)); info!("Tokens: {:?}", input); let stmt = try!(parser::parse(&input)); info!("Ast: {:?}", stmt); let item = try!(interpreter::interpret(&stmt, &mut *self.ctx)); self.ctx.put(IT_IDENT, item); Result::Ok(self.ctx.get(IT_IDENT).unwrap()) }
function_block-full_function
[ { "content": "/// Interprets the given `Stmt`, using the specified `Context` for binding\n\n/// lookup and storage. Returns either the resulting `RuntimeItem` if successful\n\n/// or an error message.\n\npub fn interpret(stmt: &Stmt, ctx: &mut Context) -> Result<RuntimeItem, String> {\n\n let item = match *s...
Rust
ogu-lang/src/backend/modules/symbols/exprs/arithmetics.rs
lnds/Ogu
881ec221d7cc735f43753c8cd91b9b88e093a091
use anyhow::Result; use crate::backend::modules::symbols::exprs::comparable_trait::resolve_comparable; use crate::backend::modules::types::basic_type::{FLOAT_TYPE, INT_TYPE}; use crate::backend::modules::types::trait_type::TRAIT_NUM; use crate::backend::scopes::symbol::Symbol; use crate::backend::scopes::types::Type; use crate::backend::scopes::types::TypeClone; use crate::backend::scopes::Scope; #[derive(Clone, Debug)] pub(crate) struct ArithmeticExpr { op: Op, ty: Option<Box<dyn Type>>, } #[derive(Clone, Debug)] enum Op { Add(Box<dyn Symbol>, Box<dyn Symbol>), Sub(Box<dyn Symbol>, Box<dyn Symbol>), Mul(Box<dyn Symbol>, Box<dyn Symbol>), Div(Box<dyn Symbol>, Box<dyn Symbol>), IntDiv(Box<dyn Symbol>, Box<dyn Symbol>), Mod(Box<dyn Symbol>, Box<dyn Symbol>), Pow(Box<dyn Symbol>, Box<dyn Symbol>), } impl ArithmeticExpr { pub(crate) fn new_add(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Add(l, r), ty: None, }) } pub(crate) fn new_sub(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Sub(l, r), ty: None, }) } pub(crate) fn new_mul(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Mul(l, r), ty: None, }) } pub(crate) fn new_div(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Div(l, r), ty: None, }) } pub(crate) fn new_int_div(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::IntDiv(l, r), ty: None, }) } pub(crate) fn new_mod(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Mod(l, r), ty: None, }) } pub(crate) fn new_pow(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Pow(l, r), ty: None, }) } } impl Symbol for ArithmeticExpr { fn get_name(&self) -> &str { "arithmetic operation" } fn get_type(&self) -> Option<Box<dyn Type>> { self.ty.clone() } fn 
set_type(&mut self, ty: Option<Box<dyn Type>>) { self.ty = ty.clone(); if self.ty.is_some() { match &mut self.op { Op::Add(l, r) | Op::Sub(l, r) | Op::Mul(l, r) | Op::IntDiv(l, r) | Op::Div(l, r) | Op::Mod(l, r) | Op::Pow(l, r) => { l.set_type(ty.clone()); r.set_type(ty.clone()); } } } } fn resolve_type(&mut self, scope: &mut dyn Scope) -> Result<Option<Box<dyn Type>>> { let r = match &mut self.op { Op::Div(l, r) => { self.ty = resolve_comparable(l, r, scope, TRAIT_NUM)?; if l.get_type() == Some(INT_TYPE.clone_box()) || r.get_type() == Some(INT_TYPE.clone_box()) { self.ty = Some(FLOAT_TYPE.clone_box()); } Ok(self.get_type()) } Op::IntDiv(l, r) => { self.ty = resolve_comparable(l, r, scope, TRAIT_NUM)?; if l.get_type() == Some(FLOAT_TYPE.clone_box()) || r.get_type() == Some(FLOAT_TYPE.clone_box()) { self.ty = Some(INT_TYPE.clone_box()); } Ok(self.get_type()) } Op::Add(l, r) | Op::Sub(l, r) | Op::Mul(l, r) | Op::Mod(l, r) | Op::Pow(l, r) => { self.ty = resolve_comparable(l, r, scope, TRAIT_NUM)?; Ok(self.get_type()) } }; r } fn define_into(&self, scope: &mut dyn Scope) -> Option<Box<dyn Symbol>> { match &self.op { Op::Add(l, r) | Op::Sub(l, r) | Op::Mul(l, r) | Op::IntDiv(l, r) | Op::Div(l, r) | Op::Mod(l, r) | Op::Pow(l, r) => { l.define_into(scope); r.define_into(scope); } } None } }
use anyhow::Result; use crate::backend::modules::symbols::exprs::comparable_trait::resolve_comparable; use crate::backend::modules::types::basic_type::{FLOAT_TYPE, INT_TYPE}; use crate::backend::modules::types::trait_type::TRAIT_NUM; use crate::backend::scopes::symbol::Symbol; use crate::backend::scopes::types::Type; use crate::backend::scopes::types::TypeClone; use crate::backend::scopes::Scope; #[derive(Clone, Debug)] pub(crate) struct ArithmeticExpr { op: Op, ty: Option<Box<dyn Type>>, } #[derive(Clone, Debug)] enum Op { Add(Box<dyn Symbol>, Box<dyn Symbol>), Sub(Box<dyn Symbol>, Box<dyn Symbol>), Mul(Box<dyn Symbol>, Box<dyn Symbol>), Div(Box<dyn Symbol>, Box<dyn Symbol>), IntDiv(Box<dyn Symbol>, Box<dyn Symbol>), Mod(Box<dyn Symbol>, Box<dyn Symbol>), Pow(Box<dyn Symbol>, Box<dyn Symbol>), } impl ArithmeticExpr { pub(crate) fn new_add(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Add(l, r), ty: None, }) } pub(crate) fn new_sub(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Sub(l, r), ty: None, }) } pub(crate) fn new_mul(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Mul(l, r), ty: None, }) } pub(crate) fn new_div(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Div(l, r), ty: None, }) }
pub(crate) fn new_mod(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Mod(l, r), ty: None, }) } pub(crate) fn new_pow(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::Pow(l, r), ty: None, }) } } impl Symbol for ArithmeticExpr { fn get_name(&self) -> &str { "arithmetic operation" } fn get_type(&self) -> Option<Box<dyn Type>> { self.ty.clone() } fn set_type(&mut self, ty: Option<Box<dyn Type>>) { self.ty = ty.clone(); if self.ty.is_some() { match &mut self.op { Op::Add(l, r) | Op::Sub(l, r) | Op::Mul(l, r) | Op::IntDiv(l, r) | Op::Div(l, r) | Op::Mod(l, r) | Op::Pow(l, r) => { l.set_type(ty.clone()); r.set_type(ty.clone()); } } } } fn resolve_type(&mut self, scope: &mut dyn Scope) -> Result<Option<Box<dyn Type>>> { let r = match &mut self.op { Op::Div(l, r) => { self.ty = resolve_comparable(l, r, scope, TRAIT_NUM)?; if l.get_type() == Some(INT_TYPE.clone_box()) || r.get_type() == Some(INT_TYPE.clone_box()) { self.ty = Some(FLOAT_TYPE.clone_box()); } Ok(self.get_type()) } Op::IntDiv(l, r) => { self.ty = resolve_comparable(l, r, scope, TRAIT_NUM)?; if l.get_type() == Some(FLOAT_TYPE.clone_box()) || r.get_type() == Some(FLOAT_TYPE.clone_box()) { self.ty = Some(INT_TYPE.clone_box()); } Ok(self.get_type()) } Op::Add(l, r) | Op::Sub(l, r) | Op::Mul(l, r) | Op::Mod(l, r) | Op::Pow(l, r) => { self.ty = resolve_comparable(l, r, scope, TRAIT_NUM)?; Ok(self.get_type()) } }; r } fn define_into(&self, scope: &mut dyn Scope) -> Option<Box<dyn Symbol>> { match &self.op { Op::Add(l, r) | Op::Sub(l, r) | Op::Mul(l, r) | Op::IntDiv(l, r) | Op::Div(l, r) | Op::Mod(l, r) | Op::Pow(l, r) => { l.define_into(scope); r.define_into(scope); } } None } }
pub(crate) fn new_int_div(l: Box<dyn Symbol>, r: Box<dyn Symbol>) -> Box<dyn Symbol> { Box::new(ArithmeticExpr { op: Op::IntDiv(l, r), ty: None, }) }
function_block-full_function
[ { "content": "type NewFn = fn(Box<dyn Symbol>, Box<dyn Symbol>) -> Box<dyn Symbol>;\n\n\n\nimpl UnaryOpExpr {\n\n pub(crate) fn new_add(expr: Option<Box<dyn Symbol>>) -> Box<dyn Symbol> {\n\n Self::make_lambda(expr, TRAIT_NUM, ArithmeticExpr::new_add)\n\n }\n\n\n\n pub(crate) fn new_mul(expr: Op...
Rust
src/ui/ui_section.rs
raybritton/weather-examiner
3ce2f6209e591bc039dc47081cb03c5e39dba39a
use crate::Error; use std::io::{stdout, stdin, Write}; use crossterm::style::{Print, Color}; use crossterm::{ExecutableCommand, QueueableCommand, cursor}; use crossterm::event::KeyCode; use crossterm::event::Event::Key; use crate::app::WeatherApp; use log::error; use crossterm::cursor::MoveTo; use crossterm::terminal::{Clear, ClearType}; use crate::models::SimpleDate; use crate::ui::utils::{consume_all_input, print_styled, print_styled_list}; use crate::extensions::MapToUnit; use std::any::Any; pub trait UiSection { fn run(&mut self, app: &mut WeatherApp) -> Result<(), Error>; fn read_input(&self, message: &str) -> Result<String, Error> { stdout() .execute(cursor::Show)? .execute(Print(message))?; let mut input = String::new(); while input.trim().is_empty() { if let Err(err) = stdin().read_line(&mut input) { error!("Can not read input: {}", err); std::process::exit(1); } } stdout() .execute(cursor::Hide)?; return Ok(input.trim().to_owned()); } fn wait_for_char(&self, message: &str) -> Result<KeyCode, Error> { consume_all_input()?; stdout() .execute(Print(message))?; crossterm::terminal::enable_raw_mode()?; loop { let result = crossterm::event::read()?; if let Key(key) = result { crossterm::terminal::disable_raw_mode()?; return Ok(key.code); } } } fn wait_for_char_no_delay(&self) -> Result<KeyCode, Error> { consume_all_input()?; crossterm::terminal::enable_raw_mode()?; loop { let result = crossterm::event::read()?; if let Key(key) = result { crossterm::terminal::disable_raw_mode()?; return Ok(key.code); } } } fn reset(&self, reset_pos: (u16, u16)) -> Result<(), Error> { stdout() .execute(MoveTo(reset_pos.0, reset_pos.1))? 
.execute(Clear(ClearType::FromCursorDown))?; Ok(()) } fn input_year_day_hour(&mut self) -> Result<SimpleDate, Error> { consume_all_input()?; stdout() .execute(cursor::Show)?; let year = self.read_input("\n\nEnter year\n")?.parse()?; let day = self.read_input("\n\nEnter day\n")?.parse()?; let hour = self.read_input("\n\nEnter hour\n")?.parse()?; stdout() .execute(cursor::Hide)?; Ok(SimpleDate::new(year, day, hour)) } fn input_year_day(&mut self) -> Result<SimpleDate, Error> { consume_all_input()?; stdout() .execute(cursor::Show)?; let year = self.read_input("\n\nEnter year\n")?.parse()?; let day = self.read_input("\n\nEnter day\n")?.parse()?; stdout() .execute(cursor::Hide)?; Ok(SimpleDate::new(year, day, 0)) } fn input_year_month(&mut self) -> Result<(u16, u8), Error> { consume_all_input()?; stdout() .execute(cursor::Show)?; let year = self.read_input("\n\nEnter year\n")?.parse()?; let month = self.read_input("\n\nEnter month\n")?.parse()?; stdout() .execute(cursor::Hide)?; Ok((year, month)) } fn menu(&mut self, options: Vec<&str>, exit: bool) -> Result<usize, Error> { options.iter() .enumerate() .try_for_each(|(i, option)| stdout() .queue(Print(format!("{}) {}\n", i + 1, option))) .map_to_unit() )?; if exit { stdout().queue(Print("\nesc) Exit\n"))?; } stdout().flush()?; loop { let input = self.wait_for_char("")?; if input == KeyCode::Esc { return Ok(0); } else if let KeyCode::Char(chr) = input { if let Some(num) = chr.to_digit(10).map(|num| num as usize) { if num <= options.len() { return Ok(num); } } } } } fn print_row<D, F, S>(&self, title: &str, header_color: Color, data: Vec<D>, formatter: F, styler: S) -> Result<(), Error> where D: Any, F: Fn(D) -> String, S: Fn(&D) -> Result<(), Error> { print_styled(&format!("\n{}", title), header_color, false)?; print_styled_list(data, formatter, styler)?; Ok(()) } }
use crate::Error; use std::io::{stdout, stdin, Write}; use crossterm::style::{Print, Color}; use crossterm::{ExecutableCommand, QueueableCommand, cursor}; use crossterm::event::KeyCode; use crossterm::event::Event::Key; use crate::app::WeatherApp; use log::error; use crossterm::cursor::MoveTo; use crossterm::terminal::{Clear, ClearType}; use crate::models::SimpleDate; use crate::ui::utils::{consume_all_input, print_styled, print_styled_list}; use crate::extensions::MapToUnit; use std::any::Any; pub trait UiSection { fn run(&mut self, app: &mut WeatherApp) -> Result<(), Error>; fn read_input(&self, message: &str) -> Result<String, Error> { stdout() .execute(cursor::Show)? .execute(Print(message))?; let mut input = String::new();
fn wait_for_char(&self, message: &str) -> Result<KeyCode, Error> { consume_all_input()?; stdout() .execute(Print(message))?; crossterm::terminal::enable_raw_mode()?; loop { let result = crossterm::event::read()?; if let Key(key) = result { crossterm::terminal::disable_raw_mode()?; return Ok(key.code); } } } fn wait_for_char_no_delay(&self) -> Result<KeyCode, Error> { consume_all_input()?; crossterm::terminal::enable_raw_mode()?; loop { let result = crossterm::event::read()?; if let Key(key) = result { crossterm::terminal::disable_raw_mode()?; return Ok(key.code); } } } fn reset(&self, reset_pos: (u16, u16)) -> Result<(), Error> { stdout() .execute(MoveTo(reset_pos.0, reset_pos.1))? .execute(Clear(ClearType::FromCursorDown))?; Ok(()) } fn input_year_day_hour(&mut self) -> Result<SimpleDate, Error> { consume_all_input()?; stdout() .execute(cursor::Show)?; let year = self.read_input("\n\nEnter year\n")?.parse()?; let day = self.read_input("\n\nEnter day\n")?.parse()?; let hour = self.read_input("\n\nEnter hour\n")?.parse()?; stdout() .execute(cursor::Hide)?; Ok(SimpleDate::new(year, day, hour)) } fn input_year_day(&mut self) -> Result<SimpleDate, Error> { consume_all_input()?; stdout() .execute(cursor::Show)?; let year = self.read_input("\n\nEnter year\n")?.parse()?; let day = self.read_input("\n\nEnter day\n")?.parse()?; stdout() .execute(cursor::Hide)?; Ok(SimpleDate::new(year, day, 0)) } fn input_year_month(&mut self) -> Result<(u16, u8), Error> { consume_all_input()?; stdout() .execute(cursor::Show)?; let year = self.read_input("\n\nEnter year\n")?.parse()?; let month = self.read_input("\n\nEnter month\n")?.parse()?; stdout() .execute(cursor::Hide)?; Ok((year, month)) } fn menu(&mut self, options: Vec<&str>, exit: bool) -> Result<usize, Error> { options.iter() .enumerate() .try_for_each(|(i, option)| stdout() .queue(Print(format!("{}) {}\n", i + 1, option))) .map_to_unit() )?; if exit { stdout().queue(Print("\nesc) Exit\n"))?; } stdout().flush()?; loop { let input 
= self.wait_for_char("")?; if input == KeyCode::Esc { return Ok(0); } else if let KeyCode::Char(chr) = input { if let Some(num) = chr.to_digit(10).map(|num| num as usize) { if num <= options.len() { return Ok(num); } } } } } fn print_row<D, F, S>(&self, title: &str, header_color: Color, data: Vec<D>, formatter: F, styler: S) -> Result<(), Error> where D: Any, F: Fn(D) -> String, S: Fn(&D) -> Result<(), Error> { print_styled(&format!("\n{}", title), header_color, false)?; print_styled_list(data, formatter, styler)?; Ok(()) } }
while input.trim().is_empty() { if let Err(err) = stdin().read_line(&mut input) { error!("Can not read input: {}", err); std::process::exit(1); } } stdout() .execute(cursor::Hide)?; return Ok(input.trim().to_owned()); }
function_block-function_prefix_line
[]
Rust
prae/src/core.rs
teenjuna/prae
fdaf48da574c2cac7bdacd8d1af9918837c18a25
use std::hash::Hash; use std::ops::{Deref, Index}; use std::{error, fmt}; pub trait Bound { type Target: fmt::Debug; type Error: fmt::Debug; fn apply(v: &mut Self::Target) -> Result<(), Self::Error>; } pub trait Guard where Self: Sized, { type Bound: Bound; fn new<V: Into<<Self::Bound as Bound>::Target>>(v: V) -> Result<Self, ConstructionError<Self>>; fn get(&self) -> &<Self::Bound as Bound>::Target; fn mutate(&mut self, f: impl FnOnce(&mut <Self::Bound as Bound>::Target)); fn try_mutate( &mut self, f: impl FnOnce(&mut <Self::Bound as Bound>::Target), ) -> Result<(), MutationError<Self>> where <Self::Bound as Bound>::Target: Clone; fn into_inner(self) -> <Self::Bound as Bound>::Target; #[cfg(feature = "unchecked")] fn new_unchecked<V: Into<<Self::Bound as Bound>::Target>>(v: V) -> Self; #[cfg(feature = "unchecked")] fn mutate_unchecked(&mut self, f: impl FnOnce(&mut <Self::Bound as Bound>::Target)); #[cfg(feature = "unchecked")] fn get_mut(&mut self) -> &mut <Self::Bound as Bound>::Target; } #[derive(Debug)] pub struct Guarded<B: Bound>(B::Target); impl<T, E, B> Guard for Guarded<B> where B: Bound<Target = T, Error = E>, E: fmt::Debug, { type Bound = B; fn new<V: Into<T>>(v: V) -> Result<Self, ConstructionError<Self>> { let mut v = v.into(); match B::apply(&mut v) { Ok(_) => Ok(Self(v)), Err(e) => Err(ConstructionError { inner: e, value: v }), } } fn get(&self) -> &T { &self.0 } fn mutate(&mut self, f: impl FnOnce(&mut T)) { f(&mut self.0); if let Err(e) = B::apply(&mut self.0) { panic!("mutation failed: {:?}", e); } } fn try_mutate(&mut self, f: impl FnOnce(&mut T)) -> Result<(), MutationError<Self>> where T: Clone, { let mut tmp = self.0.clone(); f(&mut tmp); match B::apply(&mut tmp) { Ok(_) => { self.0 = tmp; Ok(()) } Err(e) => Err(MutationError { inner: e, old_value: self.0.clone(), new_value: tmp, }), } } fn into_inner(self) -> T { self.0 } #[cfg(feature = "unchecked")] fn new_unchecked<V: Into<T>>(v: V) -> Self { Self(v.into()) } #[cfg(feature = "unchecked")] 
fn mutate_unchecked(&mut self, f: impl FnOnce(&mut T)) { f(&mut self.0); } #[cfg(feature = "unchecked")] fn get_mut(&mut self) -> &mut T { &mut self.0 } } #[derive(Debug)] pub struct ConstructionError<G: Guard> { pub inner: <G::Bound as Bound>::Error, pub value: <G::Bound as Bound>::Target, } impl<G: Guard> ConstructionError<G> { pub fn into_inner(self) -> <G::Bound as Bound>::Error { self.inner } } impl<G> fmt::Display for ConstructionError<G> where G: Guard, <G::Bound as Bound>::Error: fmt::Display, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "failed to create {} from value {:?}: {}", std::any::type_name::<G>(), self.value, self.inner, ) } } impl<G> error::Error for ConstructionError<G> where G: Guard + fmt::Debug, G::Bound: fmt::Debug, <G::Bound as Bound>::Error: fmt::Display, { } #[derive(Debug)] pub struct MutationError<G: Guard> { pub inner: <G::Bound as Bound>::Error, pub old_value: <G::Bound as Bound>::Target, pub new_value: <G::Bound as Bound>::Target, } impl<G: Guard> MutationError<G> { pub fn into_inner(self) -> <G::Bound as Bound>::Error { self.inner } } impl<G> fmt::Display for MutationError<G> where G: Guard, <G::Bound as Bound>::Error: fmt::Display, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "failed to mutate {} from value {:?} to {:?}: {}", std::any::type_name::<G>(), self.old_value, self.new_value, self.inner, ) } } impl<G> error::Error for MutationError<G> where G: Guard + fmt::Debug, G::Bound: fmt::Debug, <G::Bound as Bound>::Error: fmt::Display, { } impl<B> Clone for Guarded<B> where B: Bound, B::Target: Clone, { fn clone(&self) -> Self { todo!() } } impl<B: Bound> AsRef<B::Target> for Guarded<B> { fn as_ref(&self) -> &B::Target { &self.0 } } impl<B: Bound> Deref for Guarded<B> { type Target = B::Target; fn deref(&self) -> &B::Target { &self.0 } } impl<B> PartialEq for Guarded<B> where B: Bound, B::Target: PartialEq, { fn eq(&self, other: &Self) -> bool { self.0.eq(&other.0) } } impl<B: 
Bound> Eq for Guarded<B> where B::Target: Eq {} impl<B> PartialOrd for Guarded<B> where B: Bound, B::Target: PartialOrd, { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { self.0.partial_cmp(&other.0) } } impl<B> Ord for Guarded<B> where B: Bound, B::Target: Ord, { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.0.cmp(&other.0) } } impl<B: Bound> Copy for Guarded<B> where B::Target: Copy {} impl<B> Hash for Guarded<B> where B: Bound, B::Target: Hash, { fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.0.hash(state) } } impl<U, B> Index<U> for Guarded<B> where B: Bound, B::Target: Index<U>, { type Output = <B::Target as Index<U>>::Output; fn index(&self, index: U) -> &Self::Output { self.0.index(index) } } #[cfg(feature = "serde")] impl<'de, B> serde::Deserialize<'de> for Guarded<B> where B: Bound + fmt::Debug, B::Target: serde::Deserialize<'de> + std::fmt::Debug, B::Error: std::fmt::Display + std::fmt::Debug, { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { Self::new(B::Target::deserialize(deserializer)?) .map_err(|e| serde::de::Error::custom(e.inner)) } } #[cfg(feature = "serde")] impl<B> serde::Serialize for Guarded<B> where B: Bound, B::Target: serde::Serialize, B::Error: std::fmt::Display + std::fmt::Debug, { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { B::Target::serialize(self.get(), serializer) } }
use std::hash::Hash; use std::ops::{Deref, Index}; use std::{error, fmt}; pub trait Bound { type Target: fmt::Debug; type Error: fmt::Debug; fn apply(v: &mut Self::Target) -> Result<(), Self::Error>; } pub trait Guard where Self: Sized, { type Bound: Bound; fn new<V: Into<<Self::Bound as Bound>::Target>>(v: V) -> Result<Self, ConstructionError<Self>>; fn get(&self) -> &<Self::Bound as Bound>::Target; fn mutate(&mut self, f: impl FnOnce(&mut <Self::Bound as Bound>::Target)); fn try_mutate( &mut self, f: impl FnOnce(&mut <Self::Bound as Bound>::Target), ) -> Result<(), MutationError<Self>> where <Self::Bound as Bound>::Target: Clone; fn into_inner(self) -> <Self::Bound as Bound>::Target; #[cfg(feature = "unchecked")] fn new_unchecked<V: Into<<Self::Bound as Bound>::Target>>(v: V) -> Self; #[cfg(feature = "unchecked")] fn mutate_unchecked(&mut self, f: impl FnOnce(&mut <Self::Bound as Bound>::Target)); #[cfg(feature = "unchecked")] fn get_mut(&mut self) -> &mut <Self::Bound as Bound>::Target; } #[derive(Debug)] pub struct Guarded<B: Bound>(B::Target); impl<T, E, B> Guard for Guarded<B> where B: Bound<Target = T, Error = E>, E: fmt::Debug, { type Bound = B; fn new<V: Into<T>>(v: V) -> Result<Self, ConstructionError<Self>> { let mut v = v.into(); match B::apply(&mut v) { Ok(_) => Ok(Self(v)), Err(e) => Err(ConstructionError { inner: e, value: v }), } } fn get(&self) -> &T { &self.0 } fn mutate(&mut self, f: impl FnOnce(&mut T)) { f(&mut self.0); if let Err(e) = B::apply(&mut self.0) { panic!("mutation failed: {:?}", e); } } fn try_mutate(&mut self, f: impl FnOnce(&mut T)) -> Result<(), MutationError<Self>> where T: Clone, { let mut tmp = self.0.clone(); f(&mut tmp); match B::apply(&mut tmp) { Ok(_) => { self.0 = tmp; Ok(()) } Err(e) => Err(MutationError { inner: e, old_value: self.0.clone(), new_value: tmp, }), } } fn into_inner(self) -> T { self.0 } #[cfg(feature = "unchecked")] fn new_unchecked<V: Into<T>>(v: V) -> Self { Self(v.into()) } #[cfg(feature = "unchecked")] 
fn mutate_unchecked(&mut self, f: impl FnOnce(&mut T)) { f(&mut self.0); } #[cfg(feature = "unchecked")] fn get_mut(&mut self) -> &mut T { &mut self.0 } } #[derive(Debug)] pub struct ConstructionError<G: Guard> { pub inner: <G::Bound as Bound>::Error, pub value: <G::Bound as Bound>::Target, } impl<G: Guard> ConstructionError<G> { pub fn into_inner(self) -> <G::Bound as Bound>::Error { self.inner } } impl<G> fmt::Display for ConstructionError<G> where G: Guard, <G::Bound as Bound>::Error: fmt::Display, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "failed to create {} from value {:?}: {}", std::any::type_name::<G>(), self.value, self.inner, ) } } impl<G> error::Error for ConstructionError<G> where G: Guard + fmt::Debug, G::Bound: fmt::Debug, <G::Bound as Bound>::Error: fmt::Display, { } #[derive(Debug)] pub struct MutationError<G: Guard> { pub inner: <G::Bound as Bound>::Error, pub old_value: <G::Bound as Bound>::Target, pub new_value: <G::Bound as Bound>::Target, } impl<G: Guard> MutationError<G> { pub fn into_inner(self) -> <G::Bound as Bound>::Error { self.inner } } impl<G> fmt::Display for MutationError<G> where G: Guard, <G::Bound as Bound>::Error: fmt::Display, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "failed to mutate {} from value {:?} to {:?}: {}", std::any::type_name::<G>(), self.old_value, self.new_value, self.inner, ) } } impl<G> error::Error for MutationError<G> where G: Guard + fmt::Debug, G::Bound: fmt::Debug, <G::Bound as Bound>::Error: fmt::Display, { } impl<B> Clone for Guarded<B> where B: Bound, B::Target: Clone, { fn clone(&self) -> Self { todo!() } } impl<B: Bound> AsRef<B::Target> for Guarded<B> { fn as_ref(&self) -> &B::Target { &self.0 } } impl<B: Bound> Deref for Guarded<B> { type Target = B::Target; fn deref(&self) -> &B::Target { &self.0 } } impl<B> PartialEq for Guarded<B> where B: Bound, B::Target: PartialEq, { fn eq(&self, other: &Self) -> bool { self.0.eq(&other.0) } } impl<B: 
Bound> Eq for Guarded<B> where B::Target: Eq {} impl<B> PartialOrd for Guarded<B> where B: Bound, B::Target: PartialOrd, { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { self.0.partial_cmp(&other.0) } } impl<B> Ord for Guarded<B> where B: Bound, B::Target: Ord, { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.0.cmp(&other.0) } } impl<B: Bound> Copy for Guarded<B> where B::Target: Copy {} impl<B> Hash for Guarded<B> where B: Bound, B::Target: Hash, { fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.0.hash(state) } } impl<U, B> Index<U> for Guarded<B> where B: Bound, B::Target: Index<U>, { type Output = <B::Target as Index<U>>::Output; fn index(&self, index: U) -> &Self::Output { self.0.index(index) } } #[cfg(feature = "serde")] impl<'de, B> serde::Deserialize<'de> for Guarded<B> where B: Bound + fmt::Debug, B::Target: serde::Deserialize<'de> + std::fmt::Debug, B::Error: std::fmt::Display + std::fmt::Debug, {
} #[cfg(feature = "serde")] impl<B> serde::Serialize for Guarded<B> where B: Bound, B::Target: serde::Serialize, B::Error: std::fmt::Display + std::fmt::Debug, { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { B::Target::serialize(self.get(), serializer) } }
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { Self::new(B::Target::deserialize(deserializer)?) .map_err(|e| serde::de::Error::custom(e.inner)) }
function_block-full_function
[ { "content": "#[test]\n\nfn mutation_error_can_be_transormed_into_inner() {\n\n let _err = || -> Result<(), UsernameError> {\n\n let mut un = Username::new(\"user\").unwrap();\n\n un.try_mutate(|u| *u = \"\".to_owned())\n\n .map_err(|e| e.into_inner())?;\n\n Ok(())\n\n }();...
Rust
src/lib.rs
mthiesen/pcx
cc19d6e6504a4a969b1ec6a04ea5de7e619f62de
extern crate byteorder; #[cfg(test)] extern crate walkdir; #[cfg(test)] extern crate image; use std::io; pub use reader::Reader; pub use writer::{WriterRgb, WriterPaletted}; pub mod low_level; mod reader; mod writer; #[cfg(test)] mod test_samples; fn user_error<T>(error: &str) -> io::Result<T> { Err(io::Error::new(io::ErrorKind::InvalidInput, error)) } #[cfg(test)] mod tests { use std::iter; use {Reader, WriterRgb, WriterPaletted}; fn round_trip_rgb_separate(width: u16, height: u16) { let mut pcx = Vec::new(); { let mut writer = WriterRgb::new(&mut pcx, (width, height), (300, 300)).unwrap(); let r: Vec<u8> = iter::repeat(88).take(width as usize).collect(); let g: Vec<u8> = (0..width).map(|v| (v & 0xFF) as u8).collect(); let mut b: Vec<u8> = iter::repeat(88).take(width as usize).collect(); for y in 0..height { for x in 0..width { b[x as usize] = (y & 0xFF) as u8; } writer.write_row_from_separate(&r, &g, &b).unwrap(); } writer.finish().unwrap(); } let mut reader = Reader::new(&pcx[..]).unwrap(); assert_eq!(reader.dimensions(), (width, height)); assert_eq!(reader.is_paletted(), false); assert_eq!(reader.palette_length(), None); let mut r: Vec<u8> = iter::repeat(0).take(width as usize).collect(); let mut g: Vec<u8> = iter::repeat(0).take(width as usize).collect(); let mut b: Vec<u8> = iter::repeat(0).take(width as usize).collect(); for y in 0..height { reader.next_row_rgb_separate(&mut r, &mut g, &mut b).unwrap(); for x in 0..width { assert_eq!(r[x as usize], 88); assert_eq!(g[x as usize], (x & 0xFF) as u8); assert_eq!(b[x as usize], (y & 0xFF) as u8); } } } fn round_trip_rgb_interleaved(width: u16, height: u16) { let mut pcx = Vec::new(); let written_rgb: Vec<u8> = (0..(width as usize) * 3).map(|v| (v & 0xFF) as u8).collect(); { let mut writer = WriterRgb::new(&mut pcx, (width, height), (300, 300)).unwrap(); for _ in 0..height { writer.write_row(&written_rgb).unwrap(); } writer.finish().unwrap(); } let mut reader = Reader::new(&pcx[..]).unwrap(); 
assert_eq!(reader.dimensions(), (width, height)); assert_eq!(reader.is_paletted(), false); assert_eq!(reader.palette_length(), None); let mut read_rgb: Vec<u8> = iter::repeat(0).take((width as usize) * 3).collect(); for _ in 0..height { reader.next_row_rgb(&mut read_rgb).unwrap(); assert_eq!(written_rgb, read_rgb); } } fn round_trip_paletted(width: u16, height: u16) { let mut pcx = Vec::new(); let palette: Vec<u8> = (0..256 * 3).map(|v| (v % 0xFF) as u8).collect(); { let mut writer = WriterPaletted::new(&mut pcx, (width, height), (300, 300)).unwrap(); let mut p: Vec<u8> = iter::repeat(88).take(width as usize).collect(); for y in 0..height { for x in 0..width { p[x as usize] = (y & 0xFF) as u8; } writer.write_row(&p).unwrap(); } writer.write_palette(&palette).unwrap(); } let mut reader = Reader::new(&pcx[..]).unwrap(); assert_eq!(reader.dimensions(), (width, height)); assert!(reader.is_paletted()); assert_eq!(reader.palette_length(), Some(256)); let mut p: Vec<u8> = iter::repeat(0).take(width as usize).collect(); for y in 0..height { reader.next_row_paletted(&mut p).unwrap(); for x in 0..width { assert_eq!(p[x as usize], (y & 0xFF) as u8); } } let mut palette_read = [0; 3 * 256]; reader.read_palette(&mut palette_read).unwrap(); assert_eq!(&palette[..], &palette_read[..]); } #[test] fn small_round_trip() { for width in 1..40 { for height in 1..40 { round_trip_rgb_separate(width, height); round_trip_rgb_interleaved(width, height); round_trip_paletted(width, height); } } } #[test] fn large_round_trip_rgb() { round_trip_rgb_separate(0xFFFF - 1, 1); round_trip_rgb_separate(1, 0xFFFF); round_trip_rgb_interleaved(0xFFFF - 1, 1); round_trip_rgb_interleaved(1, 0xFFFF); } #[test] fn large_round_trip_paletted() { round_trip_paletted(0xFFFF - 1, 1); round_trip_paletted(1, 0xFFFF); } }
extern crate byteorder; #[cfg(test)] extern crate walkdir; #[cfg(test)] extern crate image; use std::io; pub use reader::Reader; pub use writer::{WriterRgb, WriterPaletted}; pub mod low_level; mod reader; mod writer; #[cfg(test)] mod test_samples; fn user_error<T>(error: &str) -> io::Result<T> { Err(io::Error::new(io::ErrorKind::InvalidInput, error)) } #[cfg(test)] mod tests { use std::iter; use {Reader, WriterRgb, WriterPaletted}; fn round_trip_rgb_separate(width: u16, height: u16) { let mut pcx = Vec::new(); { let mut writer = WriterRgb::new(&mut pcx, (width, height), (300, 300)).unwrap(); let r: Vec<u8> = iter::repeat(88).take(width as usize).collect(); let g: Vec<u8> = (0..width).map(|v| (v & 0xFF) as u8).collect(); let mut b: Vec<u8> = iter::repeat(88).take(width as usize).collect(); for y in 0..height { for x in 0..width { b[x as usize] = (y & 0xFF) as u8; } writer.write_row_from_separate(&r, &g, &b).unwrap(); } writer.finish().unwrap(); } let mut reader = Reader::new(&pcx[..]).unwrap(); assert_eq!(reader.dimensions(), (width, height)); assert_eq!(reader.is_paletted(), false); assert_eq!(reader.palette_length(), None); let mut r: Vec<u8> = iter::repeat(0).take(width as usize).collect(); let mut g: Vec<u8> = iter::repeat(0).take(width as usize).collect(); let mut b: Vec<u8> = iter::repeat(0).take(width as usize).collect(); for y in 0..height { reader.next_row_rgb_separate(&mut r, &mut g, &mut b).unwrap(); for x in 0..width { assert_eq!(r[x as usize], 88); assert_eq!(g[x as usize], (x & 0xFF) as u8); assert_eq!(b[x as usize], (y & 0xFF) as u8); } } } fn round_trip_rgb_interleaved(width: u16, height: u16) { let mut pcx = Vec::ne
fn round_trip_paletted(width: u16, height: u16) { let mut pcx = Vec::new(); let palette: Vec<u8> = (0..256 * 3).map(|v| (v % 0xFF) as u8).collect(); { let mut writer = WriterPaletted::new(&mut pcx, (width, height), (300, 300)).unwrap(); let mut p: Vec<u8> = iter::repeat(88).take(width as usize).collect(); for y in 0..height { for x in 0..width { p[x as usize] = (y & 0xFF) as u8; } writer.write_row(&p).unwrap(); } writer.write_palette(&palette).unwrap(); } let mut reader = Reader::new(&pcx[..]).unwrap(); assert_eq!(reader.dimensions(), (width, height)); assert!(reader.is_paletted()); assert_eq!(reader.palette_length(), Some(256)); let mut p: Vec<u8> = iter::repeat(0).take(width as usize).collect(); for y in 0..height { reader.next_row_paletted(&mut p).unwrap(); for x in 0..width { assert_eq!(p[x as usize], (y & 0xFF) as u8); } } let mut palette_read = [0; 3 * 256]; reader.read_palette(&mut palette_read).unwrap(); assert_eq!(&palette[..], &palette_read[..]); } #[test] fn small_round_trip() { for width in 1..40 { for height in 1..40 { round_trip_rgb_separate(width, height); round_trip_rgb_interleaved(width, height); round_trip_paletted(width, height); } } } #[test] fn large_round_trip_rgb() { round_trip_rgb_separate(0xFFFF - 1, 1); round_trip_rgb_separate(1, 0xFFFF); round_trip_rgb_interleaved(0xFFFF - 1, 1); round_trip_rgb_interleaved(1, 0xFFFF); } #[test] fn large_round_trip_paletted() { round_trip_paletted(0xFFFF - 1, 1); round_trip_paletted(1, 0xFFFF); } }
w(); let written_rgb: Vec<u8> = (0..(width as usize) * 3).map(|v| (v & 0xFF) as u8).collect(); { let mut writer = WriterRgb::new(&mut pcx, (width, height), (300, 300)).unwrap(); for _ in 0..height { writer.write_row(&written_rgb).unwrap(); } writer.finish().unwrap(); } let mut reader = Reader::new(&pcx[..]).unwrap(); assert_eq!(reader.dimensions(), (width, height)); assert_eq!(reader.is_paletted(), false); assert_eq!(reader.palette_length(), None); let mut read_rgb: Vec<u8> = iter::repeat(0).take((width as usize) * 3).collect(); for _ in 0..height { reader.next_row_rgb(&mut read_rgb).unwrap(); assert_eq!(written_rgb, read_rgb); } }
function_block-function_prefixed
[ { "content": "fn lane_proper_length(width: u16, bit_depth: u8) -> u16 {\n\n (((width as u32) * (bit_depth as u32) - 1) / 8 + 1) as u16\n\n}\n\n\n\nimpl Header {\n\n pub fn load<R: io::Read>(stream: &mut R) -> io::Result<Self> {\n\n let magic = stream.read_u8()?;\n\n if magic != MAGIC_BYTE {\...
Rust
usql/src/ast/types/literal.rs
koushiro/usql
ab2ac5e4ccf33223308773afc7e9a6c67944cd88
#[cfg(not(feature = "std"))] use alloc::string::String; use core::fmt; use crate::ast::utils::escape_single_quote_string; #[derive(Clone, Debug, Eq, PartialEq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum Literal { Null, Boolean(bool), Number(String), String(String), NationalString(String), HexString(String), BitString(String), Date(Date), Time(Time), Timestamp(Timestamp), Interval(Interval), } impl fmt::Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Self::Null => f.write_str("NULL"), Self::Boolean(v) => { if *v { f.write_str("TRUE") } else { f.write_str("FALSE") } } Self::Number(v) => v.fmt(f), Self::String(v) => write!(f, "'{}'", escape_single_quote_string(v)), Self::NationalString(v) => write!(f, "N'{}'", v), Self::BitString(v) => write!(f, "B'{}'", v), Self::HexString(v) => write!(f, "X'{}'", v), Self::Date(v) => write!(f, "DATE '{}'", v), Self::Time(v) => write!(f, "TIME '{}'", v), Self::Timestamp(v) => write!(f, "TIMESTAMP '{}'", v), Self::Interval(v) => v.fmt(f), } } } #[derive(Clone, Debug, Eq, PartialEq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Date { pub value: String, } impl fmt::Display for Date { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.value) } } #[derive(Clone, Debug, Eq, PartialEq, Hash, Default)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Time { pub value: String, } impl fmt::Display for Time { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.value) } } #[derive(Clone, Debug, Eq, PartialEq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Timestamp { pub value: String, } impl fmt::Display for Timestamp { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.value) } } #[derive(Clone, Debug, Eq, PartialEq, Hash)] #[cfg_attr(feature = 
"serde", derive(serde::Serialize, serde::Deserialize))] pub struct Interval { pub value: String, pub leading_field: Option<DateTimeField>, pub leading_precision: Option<u64>, pub tailing_field: Option<DateTimeField>, pub fractional_seconds_precision: Option<u64>, } impl fmt::Display for Interval { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match ( self.leading_field, self.leading_precision, self.fractional_seconds_precision, ) { ( Some(DateTimeField::Second), Some(leading_precision), Some(fractional_seconds_precision), ) => { assert!(self.tailing_field.is_none()); write!( f, "INTERVAL '{}' SECOND({}, {})", escape_single_quote_string(&self.value), leading_precision, fractional_seconds_precision )?; } _ => { write!(f, "INTERVAL '{}'", escape_single_quote_string(&self.value))?; if let Some(leading_field) = &self.leading_field { write!(f, " {}", leading_field)?; } if let Some(leading_precision) = &self.leading_precision { write!(f, "({})", leading_precision)?; } if let Some(tailing_field) = &self.tailing_field { write!(f, " TO {}", tailing_field)?; } if let Some(fractional_seconds_precision) = &self.fractional_seconds_precision { write!(f, "({})", fractional_seconds_precision)?; } } } Ok(()) } } #[doc(hidden)] #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum DateTimeField { Year, Month, Day, Hour, Minute, Second, } impl fmt::Display for DateTimeField { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(match self { Self::Year => "YEAR", Self::Month => "MONTH", Self::Day => "DAY", Self::Hour => "HOUR", Self::Minute => "MINUTE", Self::Second => "SECOND", }) } } #[cfg(test)] mod tests { use super::*; #[test] fn string_literal_display() { let string = Literal::String("hello".into()); assert_eq!(string.to_string(), "'hello'"); let national = Literal::NationalString("你好".into()); assert_eq!(national.to_string(), "N'你好'"); let bit = 
Literal::BitString("010101".into()); assert_eq!(bit.to_string(), "B'010101'"); let hex = Literal::HexString("1234567890abcdf".into()); assert_eq!(hex.to_string(), "X'1234567890abcdf'"); } #[test] fn datetime_literal_display() { let date = Date { value: "2021-11-29".into(), }; assert_eq!(Literal::Date(date).to_string(), "DATE '2021-11-29'"); let time = Time { value: "12:34:56".into(), }; assert_eq!(Literal::Time(time).to_string(), "TIME '12:34:56'"); let time = Time { value: "12:34:56.789".into(), }; assert_eq!(Literal::Time(time).to_string(), "TIME '12:34:56.789'"); let time = Time { value: "12:34:56.789+08:30".into(), }; assert_eq!(Literal::Time(time).to_string(), "TIME '12:34:56.789+08:30'"); let timestamp = Timestamp { value: "2021-11-29 12:34:56.789+08:30".into(), }; assert_eq!( Literal::Timestamp(timestamp).to_string(), "TIMESTAMP '2021-11-29 12:34:56.789+08:30'" ); } #[test] fn interval_literal_display() { let interval = Interval { value: "1-1".into(), leading_field: Some(DateTimeField::Year), leading_precision: None, tailing_field: Some(DateTimeField::Month), fractional_seconds_precision: None, }; assert_eq!( Literal::Interval(interval).to_string(), "INTERVAL '1-1' YEAR TO MONTH" ); let interval = Interval { value: "1:1:1.1".into(), leading_field: Some(DateTimeField::Hour), leading_precision: None, tailing_field: Some(DateTimeField::Second), fractional_seconds_precision: Some(5), }; assert_eq!( Literal::Interval(interval).to_string(), "INTERVAL '1:1:1.1' HOUR TO SECOND(5)" ); let interval = Interval { value: "1".into(), leading_field: Some(DateTimeField::Day), leading_precision: None, tailing_field: None, fractional_seconds_precision: None, }; assert_eq!(Literal::Interval(interval).to_string(), "INTERVAL '1' DAY"); let interval = Interval { value: "1.1".into(), leading_field: Some(DateTimeField::Second), leading_precision: Some(2), tailing_field: None, fractional_seconds_precision: Some(2), }; assert_eq!( Literal::Interval(interval).to_string(), "INTERVAL 
'1.1' SECOND(2, 2)" ); let interval = Interval { value: "1.1".into(), leading_field: Some(DateTimeField::Second), leading_precision: Some(2), tailing_field: None, fractional_seconds_precision: None, }; assert_eq!( Literal::Interval(interval).to_string(), "INTERVAL '1.1' SECOND(2)" ); let interval = Interval { value: "1.1".into(), leading_field: Some(DateTimeField::Second), leading_precision: None, tailing_field: None, fractional_seconds_precision: None, }; assert_eq!( Literal::Interval(interval).to_string(), "INTERVAL '1.1' SECOND" ); } }
#[cfg(not(feature = "std"))] use alloc::string::String; use core::fmt; use crate::ast::utils::escape_single_quote_string; #[derive(Clone, Debug, Eq, PartialEq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum Literal { Null, Boolean(bool), Number(String), String(String), NationalString(String), HexString(String), BitString(String), Date(Date), Time(Time), Timestamp(Timestamp), Interval(Interval), } impl fmt::Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Self::Null => f.write_str("NULL"), Self::Boolean(v) => { if *v { f.write_str("TRUE") } else { f.write_str("FALSE") } } Self::Number(v) => v.fmt(f), Self::String(v) => write!(f, "'{}'", escape_single_quote_string(v)), Self::NationalString(v) => write!(f, "N'{}'", v), Self::BitString(v) => write!(f, "B'{}'", v), Self::HexString(v) => write!(f, "X'{}'", v), Self::Date(v) => write!(f, "DATE '{}'", v), Self::Time(v) => write!(f, "TIME '{}'", v), Self::Timestamp(v) => write!(f, "TIMESTAMP '{}'", v), Self::Interval(v) => v.fmt(f), } } } #[derive(Clone, Debug, Eq, PartialEq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Date { pub value: String, } impl fmt::Display for Date { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.value) } } #[derive(Clone, Debug, Eq, PartialEq, Hash, Default)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Time { pub value: String, } impl fmt::Display for Time { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.value) } } #[derive(Clone, Debug, Eq, PartialEq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Timestamp { pub value: String, } impl fmt::Display for Timestamp { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.value) } } #[derive(Clone, Debug, Eq, PartialEq, Hash)] #[cfg_attr(feature = 
"serde", derive(serde::Serialize, serde::Deserialize))] pub struct Interval { pub value: String, pub leading_field: Option<DateTimeField>, pub leading_precision: Option<u64>, pub tailing_field: Option<DateTimeField>, pub fractional_seconds_precision: Option<u64>, } impl fmt::Display for Interval { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match ( self.leading_field, self.leading_precision, self.fractional_seconds_precision, ) { ( Some(DateTimeField::Second), Some(leading_precision), Some(fractional_seconds_precision), ) => { assert!(self.tailing_field.is_none()); write!( f, "INTERVAL '{}' SECOND({}, {})", escape_single_quote_string(&self.value), leading_precision, fractional_seconds_precision )?; } _ => { write!(f, "INTERVAL '{}'", escape_single_quote_string(&self.value))?; if let Some(leading_field) = &self.leading_field { write!(f, " {}", leading_field)?; } if let Some(leading_precision) = &self.leading_precision { write!(f, "({})", leading_precision)?; } if let Some(tailing_field) = &self.tailing_field { write!(f, " TO {}", tailing_field)?; } if let Some(fractional_seconds_precision) = &self.fractional_seconds_precision { write!(f, "({})", fractional_seconds_precision)?; } } } Ok(()) } } #[doc(hidden)] #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum DateTimeField { Year, Month, Day, Hour, Minute, Second, } impl fmt::Display for DateTimeField { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(match self { Self::Year => "YEAR", Self::Month => "MONTH", Self::Day => "DAY", Self::Hour => "HOUR", Self::Minute => "MINUTE", Self::Second => "SECOND", }) } } #[cfg(test)] mod tests { use super::*; #[test] fn string_literal_display() { let string = Literal::String("hello".into()); assert_eq!(string.to_string(), "'hello'"); let national = Literal::NationalString("你好".into()); assert_eq!(national.to_string(), "N'你好'"); let bit = 
Literal::BitString("010101".into()); assert_eq!(bit.to_string(), "B'010101'"); let hex = Literal::HexString("1234567890abcdf".into()); assert_eq!(hex.to_string(), "X'1234567890abcdf'"); } #[test] fn datetime_literal_display() { let date = Date { value: "2021-11-29".into(), }; assert_eq!(Literal::Date(date).to_string(), "DATE '2021-11-29'"); let time = Time { value: "12:34:56".into(), }; assert_eq!(Literal::Time(time).to_string(), "TIME '12:34:56'"); let time = Time { value: "12:34:56.789".into(), }; assert_eq!(Literal::Time(time).to_string(), "TIME '12:34:56.789'"); let time = Time { value: "12:34:56.789+08:30".into(), }; assert_eq!(Literal::Time(time).to_string(), "TIME '12:34:56.789+08:30'"); let timestamp = Timestamp { value: "2021-11-29 12:34:56.789+08:30".into(), }; assert_eq!( Literal::Timestamp(timestamp).to_string(), "TIMESTAMP '2021-11-29 12:34:56.789+08:30'" ); } #[test] fn interval_literal_display() { let interval = Interval { value: "1-1".into(), leading_field: Some(DateTimeField::Year), leading_precision: None, tailing_field: Some(DateTimeField::Month), fractional_seconds_precision: None, }; assert_eq!( Literal::Interval(interval).to_string(), "INTERVAL '1-1' YEAR TO MONTH" ); let interval = Interval { value: "1:1:1.1".into(), leading_field: Some(DateTimeField::Hour), leading_precision: None, tailing_field: Some(DateTimeField::Second), fractional_seconds_precision: Some(5), }; assert_eq!( Literal::Interval(interval).to_string(), "INTERVAL '1:1:1.1' HOUR TO SECOND(5)" ); let interval = Interval { value: "1".into(), leading_field: Some(DateTimeField::Day), leading_precision: None, tailing_field: None, fractional_seconds_precision: None, }; assert_eq!(Literal::Interval(interval).to_string(), "INTERVAL '1' DAY");
assert_eq!( Literal::Interval(interval).to_string(), "INTERVAL '1.1' SECOND(2, 2)" ); let interval = Interval { value: "1.1".into(), leading_field: Some(DateTimeField::Second), leading_precision: Some(2), tailing_field: None, fractional_seconds_precision: None, }; assert_eq!( Literal::Interval(interval).to_string(), "INTERVAL '1.1' SECOND(2)" ); let interval = Interval { value: "1.1".into(), leading_field: Some(DateTimeField::Second), leading_precision: None, tailing_field: None, fractional_seconds_precision: None, }; assert_eq!( Literal::Interval(interval).to_string(), "INTERVAL '1.1' SECOND" ); } }
let interval = Interval { value: "1.1".into(), leading_field: Some(DateTimeField::Second), leading_precision: Some(2), tailing_field: None, fractional_seconds_precision: Some(2), };
assignment_statement
[ { "content": "/// The configuration of the parser part of dialect.\n\npub trait DialectParserConf: Clone + Debug {}\n", "file_path": "usql/src/dialect.rs", "rank": 0, "score": 146335.68048831887 }, { "content": "/// The configuration of the lexer part of dialect.\n\npub trait DialectLexerCon...
Rust
src/types.rs
iJohnnyH/mangadex-rust
97b4cf31f0d2bbcb28981d02acd01c59e50a005f
use serde::{Deserialize, Serialize}; use serde_json::Value; use std::collections::HashMap; #[derive(Serialize, Deserialize, Debug)] pub struct DataObject { pub id: String, pub r#type: String, } #[derive(Serialize, Deserialize, Debug, PartialEq, Default)] #[serde(rename_all = "camelCase")] pub struct Author { #[serde(default = "default_str")] pub id: String, #[serde(default = "default_str")] pub name: String, #[serde(default = "default_str")] pub image_url: String, #[serde(default = "default_str")] pub bio: String, #[serde(default = "default_str")] pub created_at: String, #[serde(default = "default_str")] pub updated_at: String, } #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct MangaTag { pub id: String, pub name: String, } #[derive(Serialize, Deserialize, Debug, PartialEq)] #[serde(rename_all = "camelCase")] pub struct Manga { #[serde(flatten)] pub id: String, pub title: HashMap<String, String>, pub alt_titles: Vec<HashMap<String, String>>, pub description: HashMap<String, String>, pub links: HashMap<String, String>, pub original_language: String, pub last_volume: String, pub last_chapter: String, pub publication_demographic: String, pub status: String, pub year: String, pub content_rating: String, pub tags: Vec<MangaTag>, pub created_at: String, pub updated_at: String, pub author: Author, } fn _to_str_map(map: &serde_json::Map<String, Value>) -> HashMap<String, String> { let mut str_map = HashMap::new(); for (key, val) in map.iter() { str_map.insert(key.to_string(), val.as_str().unwrap().to_string()); } str_map } fn default_str() -> String { "".to_string() } impl From<Value> for Manga { fn from(i: Value) -> Manga { let attr = &i["data"]["attributes"]; let mut alt = Vec::new(); for map in attr["altTitles"].as_array().unwrap().iter() { alt.push(_to_str_map(map.as_object().unwrap())); } let mut tags = Vec::new(); for map in attr["tags"].as_array().unwrap().iter() { let id = map["id"].as_str().unwrap().to_string(); let name = 
map["attributes"]["name"]["en"] .as_str() .unwrap() .to_string(); tags.push(MangaTag { id, name }) } let mut author: Author = Author::default(); for relat in i["relationships"].as_array().unwrap().iter() { if relat["type"].as_str().unwrap_or("") == "author" { author = Author { id: relat["id"].as_str().unwrap_or("").to_string(), name: default_str(), image_url: default_str(), bio: default_str(), created_at: default_str(), updated_at: default_str(), }; } } Manga { id: i["data"]["id"].as_str().unwrap_or("").to_string(), title: _to_str_map(attr["title"].as_object().unwrap()), alt_titles: alt, description: _to_str_map(attr["description"].as_object().unwrap()), links: _to_str_map(attr["links"].as_object().unwrap()), original_language: attr["originalLanguage"].as_str().unwrap_or("").to_string(), last_volume: attr["lastVolume"].as_str().unwrap_or("").to_string(), last_chapter: attr["lastChapter"].as_str().unwrap_or("").to_string(), publication_demographic: attr["publicationDemographic"] .as_str() .unwrap_or("") .to_string(), status: attr["status"].as_str().unwrap_or("").to_string(), year: attr["year"].as_str().unwrap_or("").to_string(), content_rating: attr["contentRating"].as_str().unwrap_or("").to_string(), tags, created_at: attr["createdAt"].as_str().unwrap_or("").to_string(), updated_at: attr["updatedAt"].as_str().unwrap_or("").to_string(), author, } } }
use serde::{Deserialize, Serialize}; use serde_json::Value; use std::collections::HashMap; #[derive(Serialize, Deserialize, Debug)] pub struct DataObject { pub id: String, pub r#type: String, } #[derive(Serialize, Deserialize, Debug, PartialEq, Default)] #[serde(rename_all = "camelCase")] pub struct Author { #[serde(default = "default_str")] pub id: String, #[serde(default = "default_str")] pub name: String, #[serde(default = "default_str")] pub image_url: String, #[serde(default = "default_str")] pub bio: String, #[serde(default = "default_str")] pub created_at: String, #[serde(default = "default_str")] pub updated_at: String, } #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct MangaTag { pub id: String, pub name: String, } #[derive(Serialize, Deserialize, Debug, PartialEq)] #[serde(rename_all = "camelCase")] pub struct Manga { #[serde(flatten)] pub id: String, pub title: HashMap<String, String>, pub alt_titles: Vec<HashMap<String, String>>, pub description: HashMap<String, String>, pub links: HashMap<String, String>, pub original_language: String, pub last_volume: String, pub last_chapter: String, pub publication_demographic: String, pub status: String, pub year: String, pub content_rating: String, pub tags: Vec<MangaTag>, pub created_at: String, pub updated_at: String, pub author: Author, } fn _to_str_map(map: &serde_json::Map<String, Value>) -> HashMap<String, String> { let mut str_map = HashMap::new(); for (key, val) in map.iter() { str_map.insert(key.to_string(), val.as_str().unwrap().to_string()); } str_map } fn default_str() -> String { "".to_string() } impl From<Value> for Manga {
}
fn from(i: Value) -> Manga { let attr = &i["data"]["attributes"]; let mut alt = Vec::new(); for map in attr["altTitles"].as_array().unwrap().iter() { alt.push(_to_str_map(map.as_object().unwrap())); } let mut tags = Vec::new(); for map in attr["tags"].as_array().unwrap().iter() { let id = map["id"].as_str().unwrap().to_string(); let name = map["attributes"]["name"]["en"] .as_str() .unwrap() .to_string(); tags.push(MangaTag { id, name }) } let mut author: Author = Author::default(); for relat in i["relationships"].as_array().unwrap().iter() { if relat["type"].as_str().unwrap_or("") == "author" { author = Author { id: relat["id"].as_str().unwrap_or("").to_string(), name: default_str(), image_url: default_str(), bio: default_str(), created_at: default_str(), updated_at: default_str(), }; } } Manga { id: i["data"]["id"].as_str().unwrap_or("").to_string(), title: _to_str_map(attr["title"].as_object().unwrap()), alt_titles: alt, description: _to_str_map(attr["description"].as_object().unwrap()), links: _to_str_map(attr["links"].as_object().unwrap()), original_language: attr["originalLanguage"].as_str().unwrap_or("").to_string(), last_volume: attr["lastVolume"].as_str().unwrap_or("").to_string(), last_chapter: attr["lastChapter"].as_str().unwrap_or("").to_string(), publication_demographic: attr["publicationDemographic"] .as_str() .unwrap_or("") .to_string(), status: attr["status"].as_str().unwrap_or("").to_string(), year: attr["year"].as_str().unwrap_or("").to_string(), content_rating: attr["contentRating"].as_str().unwrap_or("").to_string(), tags, created_at: attr["createdAt"].as_str().unwrap_or("").to_string(), updated_at: attr["updatedAt"].as_str().unwrap_or("").to_string(), author, } }
function_block-full_function
[ { "content": "#[test]\n\nfn test_mangas() -> serde_json::Result<()> {\n\n let json_str = r#\"\n\n {\"limit\": 2,\"offset\": 0,\"results\": [{\"data\": {\"attributes\": {\"altTitles\": [{\"en\": \"I am the only the one who levels up\"},{\"en\": \"I level up alone\"},{\"en\": \"Na Honjaman Lebel-eob\"},...
Rust
virtio-devices/src/watchdog.rs
mythi/cloud-hypervisor
592babaadd0a38a20d77193bc302209f497dfa02
use super::Error as DeviceError; use super::{ ActivateError, ActivateResult, EpollHelper, EpollHelperError, EpollHelperHandler, Queue, VirtioCommon, VirtioDevice, VirtioDeviceType, EPOLL_HELPER_EVENT_LAST, VIRTIO_F_VERSION_1, }; use crate::seccomp_filters::{get_seccomp_filter, Thread}; use crate::{VirtioInterrupt, VirtioInterruptType}; use anyhow::anyhow; use seccomp::{SeccompAction, SeccompFilter}; use std::fs::File; use std::io::{self, Read}; use std::os::unix::io::{AsRawFd, FromRawFd, RawFd}; use std::result; use std::sync::atomic::AtomicBool; use std::sync::{Arc, Barrier, Mutex}; use std::thread; use std::time::Instant; use vm_memory::{Bytes, GuestAddressSpace, GuestMemoryAtomic, GuestMemoryMmap}; use vm_migration::{ Migratable, MigratableError, Pausable, Snapshot, SnapshotDataSection, Snapshottable, Transportable, }; use vmm_sys_util::eventfd::EventFd; const QUEUE_SIZE: u16 = 8; const QUEUE_SIZES: &[u16] = &[QUEUE_SIZE]; const QUEUE_AVAIL_EVENT: u16 = EPOLL_HELPER_EVENT_LAST + 1; const TIMER_EXPIRED_EVENT: u16 = EPOLL_HELPER_EVENT_LAST + 2; const WATCHDOG_TIMER_INTERVAL: i64 = 15; const WATCHDOG_TIMEOUT: u64 = WATCHDOG_TIMER_INTERVAL as u64 + 5; struct WatchdogEpollHandler { queues: Vec<Queue>, mem: GuestMemoryAtomic<GuestMemoryMmap>, interrupt_cb: Arc<dyn VirtioInterrupt>, queue_evt: EventFd, kill_evt: EventFd, pause_evt: EventFd, timer: File, last_ping_time: Arc<Mutex<Option<Instant>>>, reset_evt: EventFd, } impl WatchdogEpollHandler { fn process_queue(&mut self) -> bool { let queue = &mut self.queues[0]; let mut used_desc_heads = [(0, 0); QUEUE_SIZE as usize]; let mut used_count = 0; let mem = self.mem.memory(); for avail_desc in queue.iter(&mem) { let mut len = 0; if avail_desc.is_write_only() && mem.write_obj(1u8, avail_desc.addr).is_ok() { len = avail_desc.len; if self.last_ping_time.lock().unwrap().is_none() { info!( "First ping received. 
Starting timer (every {} seconds)", WATCHDOG_TIMER_INTERVAL ); if let Err(e) = timerfd_setup(&self.timer, WATCHDOG_TIMER_INTERVAL) { error!("Error programming timer fd: {:?}", e); } } self.last_ping_time.lock().unwrap().replace(Instant::now()); } used_desc_heads[used_count] = (avail_desc.index, len); used_count += 1; } for &(desc_index, len) in &used_desc_heads[..used_count] { queue.add_used(&mem, desc_index, len); } used_count > 0 } fn signal_used_queue(&self) -> result::Result<(), DeviceError> { self.interrupt_cb .trigger(&VirtioInterruptType::Queue, Some(&self.queues[0])) .map_err(|e| { error!("Failed to signal used queue: {:?}", e); DeviceError::FailedSignalingUsedQueue(e) }) } fn run( &mut self, paused: Arc<AtomicBool>, paused_sync: Arc<Barrier>, ) -> result::Result<(), EpollHelperError> { let mut helper = EpollHelper::new(&self.kill_evt, &self.pause_evt)?; helper.add_event(self.queue_evt.as_raw_fd(), QUEUE_AVAIL_EVENT)?; helper.add_event(self.timer.as_raw_fd(), TIMER_EXPIRED_EVENT)?; helper.run(paused, paused_sync, self)?; Ok(()) } } impl EpollHelperHandler for WatchdogEpollHandler { fn handle_event(&mut self, _helper: &mut EpollHelper, event: &epoll::Event) -> bool { let ev_type = event.data as u16; match ev_type { QUEUE_AVAIL_EVENT => { if let Err(e) = self.queue_evt.read() { error!("Failed to get queue event: {:?}", e); return true; } else if self.process_queue() { if let Err(e) = self.signal_used_queue() { error!("Failed to signal used queue: {:?}", e); return true; } } } TIMER_EXPIRED_EVENT => { let mut buf = vec![0; 8]; if let Err(e) = self.timer.read_exact(&mut buf) { error!("Error reading from timer fd: {:}", e); return true; } if let Some(last_ping_time) = self.last_ping_time.lock().unwrap().as_ref() { let now = Instant::now(); let gap = now.duration_since(*last_ping_time).as_secs(); if gap > WATCHDOG_TIMEOUT { error!("Watchdog triggered: {} seconds since last ping", gap); self.reset_evt.write(1).ok(); } } return false; } _ => { error!("Unexpected 
event: {}", ev_type); return true; } } false } } pub struct Watchdog { common: VirtioCommon, id: String, seccomp_action: SeccompAction, reset_evt: EventFd, last_ping_time: Arc<Mutex<Option<Instant>>>, timer: File, } #[derive(Serialize, Deserialize)] pub struct WatchdogState { pub avail_features: u64, pub acked_features: u64, pub enabled: bool, } impl Watchdog { pub fn new( id: String, reset_evt: EventFd, seccomp_action: SeccompAction, ) -> io::Result<Watchdog> { let avail_features = 1u64 << VIRTIO_F_VERSION_1; let timer_fd = timerfd_create().map_err(|e| { error!("Failed to create timer fd {}", e); e })?; let timer = unsafe { File::from_raw_fd(timer_fd) }; Ok(Watchdog { common: VirtioCommon { device_type: VirtioDeviceType::TYPE_WATCHDOG as u32, queue_sizes: QUEUE_SIZES.to_vec(), paused_sync: Some(Arc::new(Barrier::new(2))), avail_features, ..Default::default() }, id, seccomp_action, reset_evt, last_ping_time: Arc::new(Mutex::new(None)), timer, }) } fn state(&self) -> WatchdogState { WatchdogState { avail_features: self.common.avail_features, acked_features: self.common.acked_features, enabled: self.last_ping_time.lock().unwrap().is_some(), } } fn set_state(&mut self, state: &WatchdogState) -> io::Result<()> { self.common.avail_features = state.avail_features; self.common.acked_features = state.acked_features; if state.enabled { self.last_ping_time.lock().unwrap().replace(Instant::now()); } Ok(()) } } impl Drop for Watchdog { fn drop(&mut self) { if let Some(kill_evt) = self.common.kill_evt.take() { let _ = kill_evt.write(1); } } } fn timerfd_create() -> Result<RawFd, io::Error> { let res = unsafe { libc::timerfd_create(libc::CLOCK_MONOTONIC, 0) }; if res < 0 { Err(io::Error::last_os_error()) } else { Ok(res as RawFd) } } fn timerfd_setup(timer: &File, secs: i64) -> Result<(), io::Error> { let periodic = libc::itimerspec { it_interval: libc::timespec { tv_sec: secs, tv_nsec: 0, }, it_value: libc::timespec { tv_sec: secs, tv_nsec: 0, }, }; let res = unsafe { 
libc::timerfd_settime(timer.as_raw_fd(), 0, &periodic, std::ptr::null_mut()) }; if res < 0 { Err(io::Error::last_os_error()) } else { Ok(()) } } impl VirtioDevice for Watchdog { fn device_type(&self) -> u32 { self.common.device_type } fn queue_max_sizes(&self) -> &[u16] { &self.common.queue_sizes } fn features(&self) -> u64 { self.common.avail_features } fn ack_features(&mut self, value: u64) { self.common.ack_features(value) } fn activate( &mut self, mem: GuestMemoryAtomic<GuestMemoryMmap>, interrupt_cb: Arc<dyn VirtioInterrupt>, queues: Vec<Queue>, mut queue_evts: Vec<EventFd>, ) -> ActivateResult { self.common.activate(&queues, &queue_evts, &interrupt_cb)?; let kill_evt = self .common .kill_evt .as_ref() .unwrap() .try_clone() .map_err(|e| { error!("Failed to clone kill_evt eventfd: {}", e); ActivateError::BadActivate })?; let pause_evt = self .common .pause_evt .as_ref() .unwrap() .try_clone() .map_err(|e| { error!("Failed to clone pause_evt eventfd: {}", e); ActivateError::BadActivate })?; let reset_evt = self.reset_evt.try_clone().map_err(|e| { error!("Failed to clone reset_evt eventfd: {}", e); ActivateError::BadActivate })?; let timer = self.timer.try_clone().map_err(|e| { error!("Failed to clone timer fd: {}", e); ActivateError::BadActivate })?; let mut handler = WatchdogEpollHandler { queues, mem, interrupt_cb, queue_evt: queue_evts.remove(0), kill_evt, pause_evt, timer, last_ping_time: self.last_ping_time.clone(), reset_evt, }; let paused = self.common.paused.clone(); let paused_sync = self.common.paused_sync.clone(); let mut epoll_threads = Vec::new(); let virtio_watchdog_seccomp_filter = get_seccomp_filter(&self.seccomp_action, Thread::VirtioWatchdog) .map_err(ActivateError::CreateSeccompFilter)?; thread::Builder::new() .name("virtio_watchdog".to_string()) .spawn(move || { if let Err(e) = SeccompFilter::apply(virtio_watchdog_seccomp_filter) { error!("Error applying seccomp filter: {:?}", e); } else if let Err(e) = handler.run(paused, 
paused_sync.unwrap()) { error!("Error running worker: {:?}", e); } }) .map(|thread| epoll_threads.push(thread)) .map_err(|e| { error!("failed to clone the virtio-watchdog epoll thread: {}", e); ActivateError::BadActivate })?; self.common.epoll_threads = Some(epoll_threads); Ok(()) } fn reset(&mut self) -> Option<(Arc<dyn VirtioInterrupt>, Vec<EventFd>)> { self.common.reset() } } impl Pausable for Watchdog { fn pause(&mut self) -> result::Result<(), MigratableError> { info!("Watchdog paused - disabling timer"); timerfd_setup(&self.timer, 0) .map_err(|e| MigratableError::Pause(anyhow!("Error clearing timer: {:?}", e)))?; self.common.pause() } fn resume(&mut self) -> result::Result<(), MigratableError> { if self.last_ping_time.lock().unwrap().is_some() { info!( "Watchdog resumed - enabling timer (every {} seconds)", WATCHDOG_TIMER_INTERVAL ); self.last_ping_time.lock().unwrap().replace(Instant::now()); timerfd_setup(&self.timer, WATCHDOG_TIMER_INTERVAL) .map_err(|e| MigratableError::Resume(anyhow!("Error setting timer: {:?}", e)))?; } self.common.resume() } } impl Snapshottable for Watchdog { fn id(&self) -> String { self.id.clone() } fn snapshot(&mut self) -> std::result::Result<Snapshot, MigratableError> { let snapshot = serde_json::to_vec(&self.state()).map_err(|e| MigratableError::Snapshot(e.into()))?; let mut watchdog_snapshot = Snapshot::new(self.id.as_str()); watchdog_snapshot.add_data_section(SnapshotDataSection { id: format!("{}-section", self.id), snapshot, }); Ok(watchdog_snapshot) } fn restore(&mut self, snapshot: Snapshot) -> std::result::Result<(), MigratableError> { if let Some(watchdog_section) = snapshot.snapshot_data.get(&format!("{}-section", self.id)) { let watchdog_state = match serde_json::from_slice(&watchdog_section.snapshot) { Ok(state) => state, Err(error) => { return Err(MigratableError::Restore(anyhow!( "Could not deserialize watchdog {}", error ))) } }; return self.set_state(&watchdog_state).map_err(|e| { 
MigratableError::Restore(anyhow!("Could not restore watchdog state {:?}", e)) }); } Err(MigratableError::Restore(anyhow!( "Could not find watchdog snapshot section" ))) } } impl Transportable for Watchdog {} impl Migratable for Watchdog {}
use super::Error as DeviceError; use super::{ ActivateError, ActivateResult, EpollHelper, EpollHelperError, EpollHelperHandler, Queue, VirtioCommon, VirtioDevice, VirtioDeviceType, EPOLL_HELPER_EVENT_LAST, VIRTIO_F_VERSION_1, }; use crate::seccomp_filters::{get_seccomp_filter, Thread}; use crate::{VirtioInterrupt, VirtioInterruptType}; use anyhow::anyhow; use seccomp::{SeccompAction, SeccompFilter}; use std::fs::File; use std::io::{self, Read}; use std::os::unix::io::{AsRawFd, FromRawFd, RawFd}; use std::result; use std::sync::atomic::AtomicBool; use std::sync::{Arc, Barrier, Mutex}; use std::thread; use std::time::Instant; use vm_memory::{Bytes, GuestAddressSpace, GuestMemoryAtomic, GuestMemoryMmap}; use vm_migration::{ Migratable, MigratableError, Pausable, Snapshot, SnapshotDataSection, Snapshottable, Transportable, }; use vmm_sys_util::eventfd::EventFd; const QUEUE_SIZE: u16 = 8; const QUEUE_SIZES: &[u16] = &[QUEUE_SIZE]; const QUEUE_AVAIL_EVENT: u16 = EPOLL_HELPER_EVENT_LAST + 1; const TIMER_EXPIRED_EVENT: u16 = EPOLL_HELPER_EVENT_LAST + 2; const WATCHDOG_TIMER_INTERVAL: i64 = 15; const WATCHDOG_TIMEOUT: u64 = WATCHDOG_TIMER_INTERVAL as u64 + 5; struct WatchdogEpollHandler { queues: Vec<Queue>, mem: GuestMemoryAtomic<GuestMemoryMmap>, interrupt_cb: Arc<dyn VirtioInterrupt>, queue_evt: EventFd, kill_evt: EventFd, pause_evt: EventFd, timer: File, last_ping_time: Arc<Mutex<Option<Instant>>>, reset_evt: EventFd, } impl WatchdogEpollHandler { fn process_queue(&mut self) -> bool { let queue = &mut self.queues[0]; let mut used_desc_heads = [(0, 0); QUEUE_SIZE as usize]; let mut used_count = 0; let mem = self.mem.memory(); for avail_desc in queue.iter(&mem) { let mut len = 0; if avail_desc.is_write_only() && mem.write_obj(1u8, avail_desc.addr).is_ok() { len = avail_desc.len; if self.last_ping_time.lock().unwrap().is_none() { info!( "First ping received. 
Starting timer (every {} seconds)", WATCHDOG_TIMER_INTERVAL ); if let Err(e) = timerfd_setup(&self.timer, WATCHDOG_TIMER_INTERVAL) { error!("Error programming timer fd: {:?}", e); } } self.last_ping_time.lock().unwrap().replace(Instant::now()); } used_desc_heads[used_count] = (avail_desc.index, len); used_count += 1; } for &(desc_index, len) in &used_desc_heads[..used_count] { queue.add_used(&mem, desc_index, len); } used_count > 0 } fn signal_used_queue(&self) -> result::Result<(), DeviceError> { self.i
fn run( &mut self, paused: Arc<AtomicBool>, paused_sync: Arc<Barrier>, ) -> result::Result<(), EpollHelperError> { let mut helper = EpollHelper::new(&self.kill_evt, &self.pause_evt)?; helper.add_event(self.queue_evt.as_raw_fd(), QUEUE_AVAIL_EVENT)?; helper.add_event(self.timer.as_raw_fd(), TIMER_EXPIRED_EVENT)?; helper.run(paused, paused_sync, self)?; Ok(()) } } impl EpollHelperHandler for WatchdogEpollHandler { fn handle_event(&mut self, _helper: &mut EpollHelper, event: &epoll::Event) -> bool { let ev_type = event.data as u16; match ev_type { QUEUE_AVAIL_EVENT => { if let Err(e) = self.queue_evt.read() { error!("Failed to get queue event: {:?}", e); return true; } else if self.process_queue() { if let Err(e) = self.signal_used_queue() { error!("Failed to signal used queue: {:?}", e); return true; } } } TIMER_EXPIRED_EVENT => { let mut buf = vec![0; 8]; if let Err(e) = self.timer.read_exact(&mut buf) { error!("Error reading from timer fd: {:}", e); return true; } if let Some(last_ping_time) = self.last_ping_time.lock().unwrap().as_ref() { let now = Instant::now(); let gap = now.duration_since(*last_ping_time).as_secs(); if gap > WATCHDOG_TIMEOUT { error!("Watchdog triggered: {} seconds since last ping", gap); self.reset_evt.write(1).ok(); } } return false; } _ => { error!("Unexpected event: {}", ev_type); return true; } } false } } pub struct Watchdog { common: VirtioCommon, id: String, seccomp_action: SeccompAction, reset_evt: EventFd, last_ping_time: Arc<Mutex<Option<Instant>>>, timer: File, } #[derive(Serialize, Deserialize)] pub struct WatchdogState { pub avail_features: u64, pub acked_features: u64, pub enabled: bool, } impl Watchdog { pub fn new( id: String, reset_evt: EventFd, seccomp_action: SeccompAction, ) -> io::Result<Watchdog> { let avail_features = 1u64 << VIRTIO_F_VERSION_1; let timer_fd = timerfd_create().map_err(|e| { error!("Failed to create timer fd {}", e); e })?; let timer = unsafe { File::from_raw_fd(timer_fd) }; Ok(Watchdog { common: 
VirtioCommon { device_type: VirtioDeviceType::TYPE_WATCHDOG as u32, queue_sizes: QUEUE_SIZES.to_vec(), paused_sync: Some(Arc::new(Barrier::new(2))), avail_features, ..Default::default() }, id, seccomp_action, reset_evt, last_ping_time: Arc::new(Mutex::new(None)), timer, }) } fn state(&self) -> WatchdogState { WatchdogState { avail_features: self.common.avail_features, acked_features: self.common.acked_features, enabled: self.last_ping_time.lock().unwrap().is_some(), } } fn set_state(&mut self, state: &WatchdogState) -> io::Result<()> { self.common.avail_features = state.avail_features; self.common.acked_features = state.acked_features; if state.enabled { self.last_ping_time.lock().unwrap().replace(Instant::now()); } Ok(()) } } impl Drop for Watchdog { fn drop(&mut self) { if let Some(kill_evt) = self.common.kill_evt.take() { let _ = kill_evt.write(1); } } } fn timerfd_create() -> Result<RawFd, io::Error> { let res = unsafe { libc::timerfd_create(libc::CLOCK_MONOTONIC, 0) }; if res < 0 { Err(io::Error::last_os_error()) } else { Ok(res as RawFd) } } fn timerfd_setup(timer: &File, secs: i64) -> Result<(), io::Error> { let periodic = libc::itimerspec { it_interval: libc::timespec { tv_sec: secs, tv_nsec: 0, }, it_value: libc::timespec { tv_sec: secs, tv_nsec: 0, }, }; let res = unsafe { libc::timerfd_settime(timer.as_raw_fd(), 0, &periodic, std::ptr::null_mut()) }; if res < 0 { Err(io::Error::last_os_error()) } else { Ok(()) } } impl VirtioDevice for Watchdog { fn device_type(&self) -> u32 { self.common.device_type } fn queue_max_sizes(&self) -> &[u16] { &self.common.queue_sizes } fn features(&self) -> u64 { self.common.avail_features } fn ack_features(&mut self, value: u64) { self.common.ack_features(value) } fn activate( &mut self, mem: GuestMemoryAtomic<GuestMemoryMmap>, interrupt_cb: Arc<dyn VirtioInterrupt>, queues: Vec<Queue>, mut queue_evts: Vec<EventFd>, ) -> ActivateResult { self.common.activate(&queues, &queue_evts, &interrupt_cb)?; let kill_evt = self 
.common .kill_evt .as_ref() .unwrap() .try_clone() .map_err(|e| { error!("Failed to clone kill_evt eventfd: {}", e); ActivateError::BadActivate })?; let pause_evt = self .common .pause_evt .as_ref() .unwrap() .try_clone() .map_err(|e| { error!("Failed to clone pause_evt eventfd: {}", e); ActivateError::BadActivate })?; let reset_evt = self.reset_evt.try_clone().map_err(|e| { error!("Failed to clone reset_evt eventfd: {}", e); ActivateError::BadActivate })?; let timer = self.timer.try_clone().map_err(|e| { error!("Failed to clone timer fd: {}", e); ActivateError::BadActivate })?; let mut handler = WatchdogEpollHandler { queues, mem, interrupt_cb, queue_evt: queue_evts.remove(0), kill_evt, pause_evt, timer, last_ping_time: self.last_ping_time.clone(), reset_evt, }; let paused = self.common.paused.clone(); let paused_sync = self.common.paused_sync.clone(); let mut epoll_threads = Vec::new(); let virtio_watchdog_seccomp_filter = get_seccomp_filter(&self.seccomp_action, Thread::VirtioWatchdog) .map_err(ActivateError::CreateSeccompFilter)?; thread::Builder::new() .name("virtio_watchdog".to_string()) .spawn(move || { if let Err(e) = SeccompFilter::apply(virtio_watchdog_seccomp_filter) { error!("Error applying seccomp filter: {:?}", e); } else if let Err(e) = handler.run(paused, paused_sync.unwrap()) { error!("Error running worker: {:?}", e); } }) .map(|thread| epoll_threads.push(thread)) .map_err(|e| { error!("failed to clone the virtio-watchdog epoll thread: {}", e); ActivateError::BadActivate })?; self.common.epoll_threads = Some(epoll_threads); Ok(()) } fn reset(&mut self) -> Option<(Arc<dyn VirtioInterrupt>, Vec<EventFd>)> { self.common.reset() } } impl Pausable for Watchdog { fn pause(&mut self) -> result::Result<(), MigratableError> { info!("Watchdog paused - disabling timer"); timerfd_setup(&self.timer, 0) .map_err(|e| MigratableError::Pause(anyhow!("Error clearing timer: {:?}", e)))?; self.common.pause() } fn resume(&mut self) -> result::Result<(), 
MigratableError> { if self.last_ping_time.lock().unwrap().is_some() { info!( "Watchdog resumed - enabling timer (every {} seconds)", WATCHDOG_TIMER_INTERVAL ); self.last_ping_time.lock().unwrap().replace(Instant::now()); timerfd_setup(&self.timer, WATCHDOG_TIMER_INTERVAL) .map_err(|e| MigratableError::Resume(anyhow!("Error setting timer: {:?}", e)))?; } self.common.resume() } } impl Snapshottable for Watchdog { fn id(&self) -> String { self.id.clone() } fn snapshot(&mut self) -> std::result::Result<Snapshot, MigratableError> { let snapshot = serde_json::to_vec(&self.state()).map_err(|e| MigratableError::Snapshot(e.into()))?; let mut watchdog_snapshot = Snapshot::new(self.id.as_str()); watchdog_snapshot.add_data_section(SnapshotDataSection { id: format!("{}-section", self.id), snapshot, }); Ok(watchdog_snapshot) } fn restore(&mut self, snapshot: Snapshot) -> std::result::Result<(), MigratableError> { if let Some(watchdog_section) = snapshot.snapshot_data.get(&format!("{}-section", self.id)) { let watchdog_state = match serde_json::from_slice(&watchdog_section.snapshot) { Ok(state) => state, Err(error) => { return Err(MigratableError::Restore(anyhow!( "Could not deserialize watchdog {}", error ))) } }; return self.set_state(&watchdog_state).map_err(|e| { MigratableError::Restore(anyhow!("Could not restore watchdog state {:?}", e)) }); } Err(MigratableError::Restore(anyhow!( "Could not find watchdog snapshot section" ))) } } impl Transportable for Watchdog {} impl Migratable for Watchdog {}
nterrupt_cb .trigger(&VirtioInterruptType::Queue, Some(&self.queues[0])) .map_err(|e| { error!("Failed to signal used queue: {:?}", e); DeviceError::FailedSignalingUsedQueue(e) }) }
function_block-function_prefixed
[ { "content": "fn is_valid_alignment(fd: RawFd, alignment: usize) -> bool {\n\n let layout = Layout::from_size_align(alignment, alignment).unwrap();\n\n let ptr = unsafe { alloc_zeroed(layout) };\n\n\n\n let ret = unsafe {\n\n ::libc::pread(\n\n fd,\n\n ptr as *mut c_void,\n...
Rust
intel8080cpu/src/intel8080cpu.rs
AgustinCB/emulators
7d4602cee4f35bd478843bee23315ba099f56526
use alloc::boxed::Box; use alloc::fmt; use alloc::string::{String, ToString}; use alloc::vec::Vec; use super::cpu::{InputDevice, OutputDevice}; use super::CpuError; use helpers::two_bytes_to_word; pub const ROM_MEMORY_LIMIT: usize = 8192; pub(crate) const MAX_INPUT_OUTPUT_DEVICES: usize = 0x100; pub const HERTZ: i64 = 2_000_000; #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] pub enum RegisterType { A, B, C, D, E, H, L, Sp, Psw, } impl fmt::Display for RegisterType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let s = match self { RegisterType::A => String::from("A"), RegisterType::B => String::from("B"), RegisterType::C => String::from("C"), RegisterType::D => String::from("D"), RegisterType::E => String::from("E"), RegisterType::H => String::from("H"), RegisterType::L => String::from("L"), RegisterType::Sp => String::from("SP"), RegisterType::Psw => String::from("PSW"), }; write!(f, "{}", s) } } pub type Address = [u8; 2]; #[derive(Debug, Fail)] #[fail(display = "{} isn't a valid register.", register)] pub struct LocationParsingError { register: String, } #[derive(Clone, Copy, Debug, PartialEq)] pub enum Location { Register { register: RegisterType }, Memory, } impl ToString for Location { fn to_string(&self) -> String { match self { Location::Register { register } => register.to_string(), Location::Memory => String::from("M"), } } } impl Location { pub fn from(location: &str) -> Result<Self, LocationParsingError> { match location { "A" => Ok(Location::Register { register: RegisterType::A, }), "B" => Ok(Location::Register { register: RegisterType::B, }), "C" => Ok(Location::Register { register: RegisterType::C, }), "D" => Ok(Location::Register { register: RegisterType::D, }), "E" => Ok(Location::Register { register: RegisterType::E, }), "H" => Ok(Location::Register { register: RegisterType::H, }), "L" => Ok(Location::Register { register: RegisterType::L, }), "M" => Ok(Location::Memory), "SP" => Ok(Location::Register { register: RegisterType::Sp, }), 
"PSW" => Ok(Location::Register { register: RegisterType::Psw, }), _ => Err(LocationParsingError { register: String::from(location), }), } } } #[derive(Debug)] pub(crate) struct RegisterSet { a: u8, b: u8, c: u8, d: u8, e: u8, h: u8, l: u8, sp: u16, } impl RegisterSet { pub(crate) fn new() -> RegisterSet { RegisterSet { a: 0, b: 0, c: 0, d: 0, e: 0, h: 0, l: 0, sp: 0xffff, } } } #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub(crate) enum State { Running, Stopped, HardStop, Halted, } pub trait Printer { fn print(&mut self, bytes: &[u8]); } #[derive(Debug)] pub(crate) struct Flags { pub(crate) sign: bool, pub(crate) zero: bool, pub(crate) parity: bool, pub(crate) carry: bool, pub(crate) auxiliary_carry: bool, } impl Flags { fn new() -> Flags { Flags { sign: true, zero: true, parity: true, carry: true, auxiliary_carry: true, } } } pub struct Intel8080Cpu<'a> { pub(crate) registers: RegisterSet, pub(crate) pc: u16, pub memory: [u8; ROM_MEMORY_LIMIT * 8], pub(crate) cp_m_compatibility: bool, pub(crate) flags: Flags, pub interruptions_enabled: bool, pub(crate) state: State, pub(crate) prev_state: State, pub(crate) inputs: Vec<Option<Box<dyn InputDevice>>>, pub(crate) outputs: Vec<Option<Box<dyn OutputDevice>>>, pub(crate) printer: Option<&'a mut dyn Printer>, } impl<'a> Intel8080Cpu<'a> { pub fn new_cp_m_compatible( rom_memory: [u8; ROM_MEMORY_LIMIT], screen: &mut dyn Printer, ) -> Intel8080Cpu { let mut cpu = Intel8080Cpu::new(rom_memory); cpu.cp_m_compatibility = true; cpu.printer = Some(screen); cpu } pub fn new<'b>(rom_memory: [u8; ROM_MEMORY_LIMIT]) -> Intel8080Cpu<'b> { let registers = RegisterSet::new(); let mut memory = [0; ROM_MEMORY_LIMIT * 8]; for i in 0..(ROM_MEMORY_LIMIT * 8) { let value = if i < rom_memory.len() { rom_memory[i] } else { 0 }; memory[i] = value; } Intel8080Cpu { registers, pc: 0, memory, flags: Flags::new(), interruptions_enabled: true, state: State::Running, prev_state: State::Running, inputs: Intel8080Cpu::make_inputs_vector(), outputs: 
Intel8080Cpu::make_outputs_vector(), cp_m_compatibility: false, printer: None, } } pub fn get_debug_string(&self) -> String { let registers_string = alloc::format!("{:?}", self.registers) .replace("{", "{\n ") .replace("}", "\n}"); let flags_string = alloc::format!("{:?}", self.flags) .replace("true", "t") .replace("false", "f") .replace(", aux", ",\n aux") .replace("{", "{\n ") .replace("}", "\n}"); alloc::format!("PC: {:?}\n{}\n{}", self.pc, &registers_string, &flags_string) } fn make_inputs_vector() -> Vec<Option<Box<dyn InputDevice>>> { let mut v = Vec::with_capacity(MAX_INPUT_OUTPUT_DEVICES); for _ in 0..MAX_INPUT_OUTPUT_DEVICES { v.push(None); } v } fn make_outputs_vector() -> Vec<Option<Box<dyn OutputDevice>>> { let mut v = Vec::with_capacity(MAX_INPUT_OUTPUT_DEVICES); for _ in 0..MAX_INPUT_OUTPUT_DEVICES { v.push(None); } v } pub fn is_hard_stopped(&self) -> bool { match self.state { State::HardStop => true, _ => false, } } pub fn toggle_hard_stop(&mut self) { match self.state { State::HardStop => { self.state = self.prev_state; } _ => { self.prev_state = self.state; self.state = State::HardStop; } } } #[inline] pub(crate) fn update_flags(&mut self, answer: u16, with_carry: bool) { self.flags.zero = answer.trailing_zeros() >= 8; self.flags.sign = (answer & 0x80) != 0; if with_carry { self.flags.carry = answer > 0xff; } self.flags.parity = (answer as u8).count_ones() % 2 == 0; } #[inline] pub(crate) fn get_current_hl_value(&self) -> u16 { let high_value = self.registers.h; let low_value = self.registers.l; two_bytes_to_word(high_value, low_value) } #[inline] pub(crate) fn get_current_bc_value(&self) -> u16 { let high_value = self.registers.b; let low_value = self.registers.c; two_bytes_to_word(high_value, low_value) } #[inline] pub(crate) fn get_current_de_value(&self) -> u16 { let high_value = self.registers.d; let low_value = self.registers.e; two_bytes_to_word(high_value, low_value) } #[inline] pub(crate) fn get_value_in_memory_at_hl(&self) -> u8 { let 
source_value_address: u16 = self.get_current_hl_value(); self.memory[source_value_address as usize] } #[inline] pub(crate) fn set_value_in_memory_at_hl(&mut self, value: u8) { let source_value_address: u16 = self.get_current_hl_value(); self.memory[source_value_address as usize] = value; } #[inline] pub(crate) fn get_current_a_value(&self) -> Result<u8, CpuError> { self.get_current_single_register_value(RegisterType::A) } #[inline] pub(crate) fn get_current_sp_value(&self) -> u16 { self.registers.sp } #[inline] pub(crate) fn get_current_single_register_value( &self, register: RegisterType, ) -> Result<u8, CpuError> { match register { RegisterType::A => Ok(self.registers.a), RegisterType::B => Ok(self.registers.b), RegisterType::C => Ok(self.registers.c), RegisterType::D => Ok(self.registers.d), RegisterType::E => Ok(self.registers.e), RegisterType::H => Ok(self.registers.h), RegisterType::L => Ok(self.registers.l), _ => Err(CpuError::VirtualRegister { register }), } } #[inline] pub(crate) fn save_to_a(&mut self, new_value: u8) -> Result<(), CpuError> { self.save_to_single_register(new_value, RegisterType::A) } #[inline] pub(crate) fn save_to_sp(&mut self, new_value: u16) { self.registers.sp = new_value; } #[inline] pub(crate) fn save_to_single_register( &mut self, new_value: u8, register: RegisterType, ) -> Result<(), CpuError> { match register { RegisterType::A => { self.registers.a = new_value; Ok(()) } RegisterType::B => { self.registers.b = new_value; Ok(()) } RegisterType::C => { self.registers.c = new_value; Ok(()) } RegisterType::D => { self.registers.d = new_value; Ok(()) } RegisterType::E => { self.registers.e = new_value; Ok(()) } RegisterType::H => { self.registers.h = new_value; Ok(()) } RegisterType::L => { self.registers.l = new_value; Ok(()) } _ => Err(CpuError::VirtualRegister { register }), } } #[inline] pub(crate) fn perform_jump(&mut self, high_byte: u8, low_byte: u8) { let new_pc = two_bytes_to_word(high_byte, low_byte); self.pc = new_pc; } 
pub(crate) fn execute_noop(&self) {} }
use alloc::boxed::Box; use alloc::fmt; use alloc::string::{String, ToString}; use alloc::vec::Vec; use super::cpu::{InputDevice, OutputDevice}; use super::CpuError; use helpers::two_bytes_to_word; pub const ROM_MEMORY_LIMIT: usize = 8192; pub(crate) const MAX_INPUT_OUTPUT_DEVICES: usize = 0x100; pub const HERTZ: i64 = 2_000_000; #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] pub enum RegisterType { A, B, C, D, E, H, L, Sp, Psw, } impl fmt::Display for RegisterType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let s = match self { RegisterType::A => String::from("A"), RegisterType::B => String::from("B"), RegisterType::C => String::from("C"), RegisterType::D => String::from("D"), RegisterType::E => String::from("E"), RegisterType::H => String::from("H"), RegisterType::L => String::from("L"), RegisterType::Sp => String::from("SP"), RegisterType::Psw => String::from("PSW"), }; write!(f, "{}", s) } } pub type Address = [u8; 2]; #[derive(Debug, Fail)] #[fail(display = "{} isn't a valid register.", register)] pub struct LocationParsingError { register: String, } #[derive(Clone, Copy, Debug, PartialEq)] pub enum Location { Register { register: RegisterType }, Memory, } impl ToString for Location { fn to_string(&self) -> String { match self { Location::Register { register } => register.to_string(), Location::Memory => String::from("M"), } } } impl Location { pub fn from(location: &str) -> Result<Self, LocationParsingError> { match location { "A" => Ok(Location::Register { register: RegisterType::A, }), "B" => Ok(Location::Register { register: RegisterType::B, }), "C" => Ok(Location::Register { register: RegisterType::C, }), "D" => Ok(Location::Register { register: RegisterType::D, }), "E" => Ok(Location::Register { register: RegisterType::E, }), "H" => Ok(Location::Register { register: RegisterType::H, }), "L" => Ok(Location::Register { register: RegisterType::L, }), "M" => Ok(Location::Memory), "SP" => Ok(Location::Register { register: RegisterType::Sp, }), 
"PSW" => Ok(Location::Register { register: RegisterType::Psw, }), _ => Err(LocationParsingError { register: String::from(location), }), } } } #[derive(Debug)] pub(crate) struct RegisterSet { a: u8, b: u8, c: u8, d: u8, e: u8, h: u8, l: u8, sp: u16, } impl RegisterSet { pub(crate) fn new() -> RegisterSet { RegisterSet { a: 0, b: 0, c: 0, d: 0, e: 0, h: 0, l: 0, sp: 0xffff, } } } #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub(crate) enum State { Running, Stopped, HardStop, Halted, } pub trait Printer { fn print(&mut self, bytes: &[u8]); } #[derive(Debug)] pub(crate) struct Flags { pub(crate) sign: bool, pub(crate) zero: bool, pub(crate) parity: bool, pub(crate) carry: bool, pub(crate) auxiliary_carry: bool, } impl Flags { fn new() -> Flags { Flags { sign: true, zero: true, parity: true, carry: true, auxiliary_carry: true, } } } pub struct Intel8080Cpu<'a> { pub(crate) registers: RegisterSet, pub(crate) pc: u16, pub memory: [u8; ROM_MEMORY_LIMIT * 8], pub(crate) cp_m_compatibility: bool, pub(crate) flags: Flags, pub interruptions_enabled: bool, pub(crate) state: State, pub(crate) prev_state: State, pub(crate) inputs: Vec<Option<Box<dyn InputDevice>>>, pub(crate) outputs: Vec<Option<Box<dyn OutputDevice>>>, pub(crate) printer: Option<&'a mut dyn Printer>, } impl<'a> Intel8080Cpu<'a> { pub fn new_cp_m_compatible( rom_memory: [u8; ROM_MEMORY_LIMIT], screen: &mut dyn Printer, ) -> Intel8080Cpu { let mut cpu = Intel8080Cpu::new(rom_memory); cpu.cp_m_compatibility = true; cpu.printer = Some(screen); cpu } pub fn new<'b>(rom_memory: [u8; ROM_MEMORY_LIMIT]) -> Intel8080Cpu<'b> { let registers = RegisterSet::new(); let mut memory = [0; ROM_MEMORY_LIMIT * 8]; for i in 0..(ROM_MEMORY_LIMIT * 8) { let value = if i < rom_memory.len() { rom_memory[i] } else { 0 }; memory[i] = value; } Intel8080Cpu { registers, pc: 0, memory, flags: Flags::new(), interruptions_enabled: true, state: State::Running, prev_state: State::Running, inputs: Intel8080Cpu::make_inputs_vector(), outputs: 
Intel8080Cpu::make_outputs_vector(), cp_m_compatibility: false, printer: None, } } pub fn get_debug_string(&self) -> String { let registers_string = alloc::format!("{:?}", self.registers) .replace("{", "{\n ") .replace("}", "\n}"); let flags_string = alloc::format!("{:?}", self.flags) .replace("true", "t") .replace("false", "f") .replace(", aux", ",\n aux") .replace("{", "{\n ") .replace("}", "\n}"); alloc::format!("PC: {:?}\n{}\n{}", self.pc, &registers_string, &flags_string) } fn make_inputs_vector() -> Vec<Option<Box<dyn InputDevice>>> { let mut v = Vec::with_capacity(MAX_INPUT_OUTPUT_DEVICES); for _ in 0..MAX_INPUT_OUTPUT_DEVICES { v.push(None); } v } fn make_outputs_vector() -> Vec<Option<Box<dyn OutputDevice>>> { let mut v = Vec::with_capacity(MAX_INPUT_OUTPUT_DEVICES); for _ in 0..MAX_INPUT_OUTPUT_DEVICES { v.push(None); } v }
pub fn toggle_hard_stop(&mut self) { match self.state { State::HardStop => { self.state = self.prev_state; } _ => { self.prev_state = self.state; self.state = State::HardStop; } } } #[inline] pub(crate) fn update_flags(&mut self, answer: u16, with_carry: bool) { self.flags.zero = answer.trailing_zeros() >= 8; self.flags.sign = (answer & 0x80) != 0; if with_carry { self.flags.carry = answer > 0xff; } self.flags.parity = (answer as u8).count_ones() % 2 == 0; } #[inline] pub(crate) fn get_current_hl_value(&self) -> u16 { let high_value = self.registers.h; let low_value = self.registers.l; two_bytes_to_word(high_value, low_value) } #[inline] pub(crate) fn get_current_bc_value(&self) -> u16 { let high_value = self.registers.b; let low_value = self.registers.c; two_bytes_to_word(high_value, low_value) } #[inline] pub(crate) fn get_current_de_value(&self) -> u16 { let high_value = self.registers.d; let low_value = self.registers.e; two_bytes_to_word(high_value, low_value) } #[inline] pub(crate) fn get_value_in_memory_at_hl(&self) -> u8 { let source_value_address: u16 = self.get_current_hl_value(); self.memory[source_value_address as usize] } #[inline] pub(crate) fn set_value_in_memory_at_hl(&mut self, value: u8) { let source_value_address: u16 = self.get_current_hl_value(); self.memory[source_value_address as usize] = value; } #[inline] pub(crate) fn get_current_a_value(&self) -> Result<u8, CpuError> { self.get_current_single_register_value(RegisterType::A) } #[inline] pub(crate) fn get_current_sp_value(&self) -> u16 { self.registers.sp } #[inline] pub(crate) fn get_current_single_register_value( &self, register: RegisterType, ) -> Result<u8, CpuError> { match register { RegisterType::A => Ok(self.registers.a), RegisterType::B => Ok(self.registers.b), RegisterType::C => Ok(self.registers.c), RegisterType::D => Ok(self.registers.d), RegisterType::E => Ok(self.registers.e), RegisterType::H => Ok(self.registers.h), RegisterType::L => Ok(self.registers.l), _ => 
Err(CpuError::VirtualRegister { register }), } } #[inline] pub(crate) fn save_to_a(&mut self, new_value: u8) -> Result<(), CpuError> { self.save_to_single_register(new_value, RegisterType::A) } #[inline] pub(crate) fn save_to_sp(&mut self, new_value: u16) { self.registers.sp = new_value; } #[inline] pub(crate) fn save_to_single_register( &mut self, new_value: u8, register: RegisterType, ) -> Result<(), CpuError> { match register { RegisterType::A => { self.registers.a = new_value; Ok(()) } RegisterType::B => { self.registers.b = new_value; Ok(()) } RegisterType::C => { self.registers.c = new_value; Ok(()) } RegisterType::D => { self.registers.d = new_value; Ok(()) } RegisterType::E => { self.registers.e = new_value; Ok(()) } RegisterType::H => { self.registers.h = new_value; Ok(()) } RegisterType::L => { self.registers.l = new_value; Ok(()) } _ => Err(CpuError::VirtualRegister { register }), } } #[inline] pub(crate) fn perform_jump(&mut self, high_byte: u8, low_byte: u8) { let new_pc = two_bytes_to_word(high_byte, low_byte); self.pc = new_pc; } pub(crate) fn execute_noop(&self) {} }
pub fn is_hard_stopped(&self) -> bool { match self.state { State::HardStop => true, _ => false, } }
function_block-full_function
[ { "content": "fn get_instructions_for_cpu(cpu: &str, bytes: [u8; ROM_MEMORY_LIMIT]) -> InstructionsResult {\n\n match cpu {\n\n \"mos6502\" => get_instructions::<Mos6502Instruction>(bytes),\n\n \"intel8080\" => get_instructions::<Intel8080Instruction>(bytes),\n\n \"smoked\" => get_instru...
Rust
src/lib.rs
andrewjensen/blockly-parser-rs
b9ecec23b874dd0bf1956627efbb9b664d9c697e
extern crate sxd_document; use std::collections::HashMap; use sxd_document::{ parser, Package, }; use sxd_document::dom::{ Document, ChildOfRoot, Element, ChildOfElement, }; #[derive(Debug)] pub struct Program { pub groups: Vec<StatementBody> } #[derive(PartialEq, Debug)] pub struct StatementBody { pub blocks: Vec<Block> } #[derive(PartialEq, Debug)] pub struct Block { pub block_type: String, pub id: String, pub fields: HashMap<String, FieldValue>, pub statements: HashMap<String, StatementBody>, } #[derive(PartialEq, Debug)] pub enum FieldValue { SimpleField(String), ExpressionField(Block), } impl Program { pub fn new() -> Self { Self { groups: Vec::new() } } } impl StatementBody { fn new(first_block: Option<Element>) -> Self { let mut blocks = Vec::new(); if let Some(el) = first_block { let mut block_el: Element; block_el = el; loop { blocks.push(Block::new(block_el)); if let Some(next_block) = get_next_block_element(&block_el) { block_el = next_block; } else { break; } } } Self { blocks } } } impl Block { fn new(block_el: Element) -> Self { let mut block = Self { block_type: "".to_string(), id: "".to_string(), fields: HashMap::new(), statements: HashMap::new() }; for attribute in block_el.attributes().iter() { let name = attribute.name().local_part(); let value = attribute.value().to_string(); match name { "type" => { block.block_type = value; }, "id" => { block.id = value; }, _ => {} } } for child in block_el.children().iter() { if let &ChildOfElement::Element(child_el) = child { let child_name = child_el.name().local_part(); match child_name { "statement" => { let statement_el = child_el; let statement_name = get_attribute(statement_el, "name").unwrap(); let statement_body = StatementBody::new(get_first_child_element(statement_el)); block.statements.insert(statement_name, statement_body); }, "field" => { let field_el = child_el; let field_name = get_attribute(field_el, "name").unwrap(); let field_value = FieldValue::new(field_el); 
block.fields.insert(field_name, field_value); }, _ => {} } } } block } } impl FieldValue { fn new(field_el: Element) -> Self { for child in field_el.children().iter() { match child { &ChildOfElement::Text(text_node) => { let value = text_node.text().to_string(); return FieldValue::SimpleField(value); }, _ => panic!("TODO: Implement expression fields") } } panic!("Expected child nodes for field"); } } pub fn program_from_xml(xml: &str) -> Program { let mut program = Program::new(); let package: Package = parser::parse(xml).expect("Failed to parse XML!"); let document: Document = package.as_document(); let xml_element = get_xml_element(document).expect("Failed to find XML element!"); for child in xml_element.children().iter() { if let &ChildOfElement::Element(el) = child { let element_name = el.name().local_part(); match element_name { "block" => { program.groups.push(StatementBody::new(Some(el))); }, _ => {} } } } program } fn get_next_block_element<'b>(block_el: &Element<'b>) -> Option<Element<'b>> { let next_el: Option<Element> = block_el.children() .iter() .filter_map(|child| { if let &ChildOfElement::Element(el) = child { if el.name().local_part() == "next" { return Some(el); } } None }) .next(); if let Some(next_el) = next_el { let next_block_el: Option<Element> = next_el.children() .iter() .filter_map(|&child| { if let ChildOfElement::Element(el) = child { if el.name().local_part() == "block" { return Some(el); } } None }) .next(); return next_block_el; } None } fn get_xml_element(document: Document) -> Option<Element> { document.root() .children() .iter() .filter_map(|child| { if let &ChildOfRoot::Element(el) = child { if el.name().local_part() == "xml" { return Some(el); } } None }) .next() } fn get_first_child_element(element: Element) -> Option<Element> { element.children() .iter() .filter_map(|child| { if let &ChildOfElement::Element(el) = child { return Some(el); } None }) .next() } fn get_attribute(element: Element, attribute_name: &str) -> 
Option<String> { element.attributes() .iter() .filter_map(|attribute| { let name = attribute.name().local_part(); if name == attribute_name { let value = attribute.value().to_string(); return Some(value); } None }) .next() } #[cfg(test)] mod test { use super::*; fn get_fragment_root(package: &Package) -> Option<Element> { package.as_document() .root() .children() .iter() .filter_map(|child| { if let &ChildOfRoot::Element(el) = child { return Some(el); } None }) .next() } #[test] fn test_new_block() { let xml: &str = r#" <block type="inner_loop" id="]Lb|t?wfd#;s)[llJx8Y"> <field name="COUNT">3</field> <statement name="BODY"> </statement> </block> "#; let fragment: Package = parser::parse(xml).expect("Failed to parse XML!"); let root_element = get_fragment_root(&fragment).unwrap(); let block = Block::new(root_element); assert_eq!(block.block_type, "inner_loop"); assert_eq!(block.id, "]Lb|t?wfd#;s)[llJx8Y"); let count_field = block.fields.get("COUNT"); assert!(count_field.is_some()); assert_eq!(count_field.unwrap(), &FieldValue::SimpleField("3".to_string())); } #[test] fn test_get_next_block_element() { let xml: &str = r#" <block type="led_on" id="^3xb.m4E9i0;3$R10(=5"> <field name="TIME">300</field> <next> <block type="led_off" id="HX4*sB9=gbJtq$Y{ke6b"> <field name="TIME">100</field> </block> </next> </block> "#; let fragment: Package = parser::parse(xml).expect("Failed to parse XML!"); let root_element = get_fragment_root(&fragment).unwrap(); let next_block = get_next_block_element(&root_element); assert!(next_block.is_some()); let next_block_unwrapped = next_block.unwrap(); assert_eq!(get_attribute(next_block_unwrapped, "type"), Some("led_off".to_string())); assert_eq!(get_attribute(next_block_unwrapped, "id"), Some("HX4*sB9=gbJtq$Y{ke6b".to_string())); } #[test] fn test_program_from_xml_advanced() { let xml: &str = r#" <xml xmlns="http://www.w3.org/1999/xhtml"> <variables></variables> <block type="main_loop" id="[.)/fqUYv92(mzb{?:~u" deletable="false" 
movable="false" x="50" y="50"> <statement name="BODY"> <block type="inner_loop" id="]Lb|t?wfd#;s)[llJx8Y"> <field name="COUNT">3</field> <statement name="BODY"> <block type="led_on" id="^3xb.m4E9i0;3$R10(=5"> <field name="TIME">300</field> <next> <block type="led_off" id="HX4*sB9=gbJtq$Y{ke6b"> <field name="TIME">100</field> </block> </next> </block> </statement> <next> <block type="led_on" id="kB~f~7W`wkGa0i4z3mHw"> <field name="TIME">100</field> <next> <block type="led_off" id="$fdlZB)btzA8YtB/!xz`"> <field name="TIME">100</field> </block> </next> </block> </next> </block> </statement> </block> </xml> "#; let program: Program = program_from_xml(xml); assert_eq!(program.groups.len(), 1); let group = program.groups.get(0).unwrap(); assert_eq!(group.blocks.len(), 1); let main_loop_block = group.blocks.get(0).unwrap(); assert_eq!(main_loop_block.block_type, "main_loop"); assert_eq!(main_loop_block.id, "[.)/fqUYv92(mzb{?:~u"); let main_loop_statements = &main_loop_block.statements; assert_eq!(main_loop_statements.len(), 1); assert!(main_loop_statements.contains_key("BODY")); let main_loop_body = main_loop_statements.get("BODY"); let main_loop_body_statement = main_loop_body.as_ref().unwrap(); assert_eq!(main_loop_body_statement.blocks.len(), 3); let inner_loop_block = main_loop_body_statement.blocks.get(0).unwrap(); assert_eq!(inner_loop_block.block_type, "inner_loop"); assert_eq!(inner_loop_block.id, "]Lb|t?wfd#;s)[llJx8Y"); assert_eq!(inner_loop_block.fields.get("COUNT"), Some(&FieldValue::SimpleField("3".to_string()))); let inner_loop_statement_maybe = inner_loop_block.statements.get("BODY"); assert!(inner_loop_statement_maybe.is_some()); let inner_loop_statement = inner_loop_statement_maybe.unwrap(); assert_eq!(inner_loop_statement.blocks.len(), 2); let led_on_block = inner_loop_statement.blocks.get(0).unwrap(); assert_eq!(led_on_block.block_type, "led_on"); assert_eq!(led_on_block.id, "^3xb.m4E9i0;3$R10(=5"); assert_eq!(led_on_block.fields.get("TIME"), 
Some(&FieldValue::SimpleField("300".to_string()))); let led_off_block = inner_loop_statement.blocks.get(1).unwrap(); assert_eq!(led_off_block.block_type, "led_off"); assert_eq!(led_off_block.id, "HX4*sB9=gbJtq$Y{ke6b"); } }
extern crate sxd_document; use std::collections::HashMap; use sxd_document::{ parser, Package, }; use sxd_document::dom::{ Document, ChildOfRoot, Element, ChildOfElement, }; #[derive(Debug)] pub struct Program { pub groups: Vec<StatementBody> } #[derive(PartialEq, Debug)] pub struct StatementBody { pub blocks: Vec<Block> } #[derive(PartialEq, Debug)] pub struct Block { pub block_type: String, pub id: String, pub fields: HashMap<String, FieldValue>, pub statements: HashMap<String, StatementBody>, } #[derive(PartialEq, Debug)] pub enum FieldValue { SimpleField(String), ExpressionField(Block), } impl Program { pub fn new() -> Self { Self { groups: Vec::new() } } } impl StatementBody { fn new(first_block: Option<Element>) -> Self { let mut blocks = Vec::new(); if let Some(el) = first_block { let mut block_el: Element; block_el = el; loop { blocks.push(Block::new(block_el)); if let Some(next_block) = get_next_block_element(&block_el) { block_el = next_block; } else { break; } } } Self { blocks } } } impl Block { fn new(block_el: Element) -> Self { let mut block = Self { block_type: "".to_string(), id: "".to_string(), fields: HashMap::new(), statements: HashMap::new() }; for attribute in block_el.attributes().iter() { let name = attribute.name().local_part(); let value = attribute.value().to_string(); match name { "type" => { block.block_type = value; }, "id" => { block.id = value; }, _ => {} } } for child in block_el.children().iter() { if let &ChildOfElement::Element(child_el) = child { let child_name = child_el.name().local_part(); match child_name { "statement" => { let statement_el = child_el; let statement_name = get_attribute(statement_el, "name").unwrap(); let statement_body = StatementBody::new(get_first_child_element(statement_el)); block.statements.insert(statement_name, statement_body); }, "field" => { let field_el = child_el; let field_name = get_attribute(field_el, "name").unwrap(); let field_value = FieldValue::new(field_el); 
block.fields.insert(field_name, field_value); }, _ => {} } } } block } } impl FieldValue { fn new(field_el: Element) -> Self { for child in field_el.children().iter() { match child { &ChildOfElement::Text(text_node) => { let value = text_node.text().to_string(); return FieldValue::SimpleField(value); }, _ => panic!("TODO: Implement expression fields") } } panic!("Expected child nodes for field"); } } pub fn program_from_xml(xml: &str) -> Program { let mut program = Program::new(); let package: Package = parser::parse(xml).expect("Failed to parse XML!"); let document: Document = package.as_document(); let xml_element = get_xml_element(document).expect("Failed to find XML element!"); for child in xml_element.children().iter() { if let &ChildOfElement::Element(el) = child { let element_name = el.name().local_part(); match element_name { "block" => { program.groups.push(StatementBody::new(Some(el))); }, _ => {} } } } program } fn get_next_block_element<'b>(block_el: &Element<'b>) -> Option<Element<'b>> { let next_el: Option<Element> = block_el.children() .iter() .filter_map(|child| { if let &ChildOfElement::Element(el) = child { if el.name().local_part() == "next" { return Some(el); } } None }) .next(); if let Some(next_el) = next_el { let next_block_el: Option<Element> = next_el.children() .iter() .filter_map(|&child| { if let ChildOfElement::Element(el) = child { if el.name().local_part() == "block" { return Some(el); } } None }) .next(); return next_block_el; } None } fn get_xml_element(document: Document) -> Option<Element> { document.root() .children() .iter() .filter_map(|child| { if let &ChildOfRoot::Element(el) = child { if el.name().local_part() == "xml" { return Some(el); } } None }) .next() } fn get_first_child_element(element: Element) -> Option<Element> { element.children() .iter() .filter_map(|child| { if let &ChildOfElement::Element(el) = child { return Some(el); } None }) .next() } fn get_attribute(element: Element, attribute_name: &str) -> 
Option<String> { element.attributes() .iter() .filter_map(|attribute| { let name = attribute.name().local_part(); if name == attribute_name { let value = attribute.value().to_string(); return Some(value); } None }) .next() } #[cfg(test)] mod test { use super::*; fn get_fragment_root(package: &Package) -> Option<Element> { package.as_document() .root() .children() .iter() .filter_map(|child| { if let &ChildOfRoot::Element(el) = child { return Some(el); } None }) .next() } #[test] fn test_new_block() { let xml: &str = r#" <block type="inner_loop" id="]Lb|t?wfd#;s)[llJx8Y"> <field name="COUNT">3</field> <statement name="BODY"> </statement> </block> "#; let fragment: Package = parser::parse(xml).expect("Failed to parse XML!"); let root_element = get_fragment_root(&fragment).unwrap(); let block = Block::new(root_element); assert_eq!(block.block_type, "inner_loop"); assert_eq!(block.id, "]Lb|t?wfd#;s)[llJx8Y"); let count_field = block.fields.get("COUNT"); assert!(count_field.is_some()); assert_eq!(count_field.unwrap(), &FieldValue::SimpleField("3".to_string())); } #[test] fn test_get_next_block_element() { let xml: &str = r#" <block type="led_on" id="^3xb.m4E9i0;3$R10(=5"> <field name="TIME">300</field> <next> <block type="led_off" id="HX4*sB9=gbJtq$Y{ke6b"> <field name="TIME">100</field> </block> </next> </block> "#; let fragment: Package = parser::parse(xml).expect("Failed to parse XML!"); let root_element = get_fragment_root(&fragment).unwrap(); let next_block = get_next_block_element(&root_element); assert!(next_block.is_some()); let next_block_unwrapped = next_block.unwrap(); assert_eq!(get_attribute(next_block_unwrapped, "type"), Some("led_off".to_string())); assert_eq!(get_attribute(next_block_unwrapped, "id"), Some("HX4*sB9=gbJtq$Y{ke6b".to_string())); } #[test] fn test_program_from_xml_advanced() { let xml: &str = r#" <xml xmlns="http://www.w3.org/1999/xhtml"> <variables></variables> <block type="main_loop" id="[.)/fqUYv92(mzb{?:~u" deletable="false" 
movable="false" x="50" y="50"> <statement name="BODY"> <block type="inner_loop" id="]Lb|t?wfd#;s)[llJx8Y"> <field name="COUNT">3</field> <statement name="BODY"> <block type="led_on" id="^3xb.m4E9i0;3$R10(=5"> <field name="TIME">300</field> <next> <block type="led_off" id="HX4*sB9=gbJtq$Y{ke6b"> <field name="TIME">100</field> </block> </next> </block> </statement> <next> <block type="led_on" id="kB~f~7W`wkGa0i4z3mHw"> <field name="TIME">100</field> <next> <block type="led_off" id="$fdlZB)btzA8YtB/!xz`"> <
}
field name="TIME">100</field> </block> </next> </block> </next> </block> </statement> </block> </xml> "#; let program: Program = program_from_xml(xml); assert_eq!(program.groups.len(), 1); let group = program.groups.get(0).unwrap(); assert_eq!(group.blocks.len(), 1); let main_loop_block = group.blocks.get(0).unwrap(); assert_eq!(main_loop_block.block_type, "main_loop"); assert_eq!(main_loop_block.id, "[.)/fqUYv92(mzb{?:~u"); let main_loop_statements = &main_loop_block.statements; assert_eq!(main_loop_statements.len(), 1); assert!(main_loop_statements.contains_key("BODY")); let main_loop_body = main_loop_statements.get("BODY"); let main_loop_body_statement = main_loop_body.as_ref().unwrap(); assert_eq!(main_loop_body_statement.blocks.len(), 3); let inner_loop_block = main_loop_body_statement.blocks.get(0).unwrap(); assert_eq!(inner_loop_block.block_type, "inner_loop"); assert_eq!(inner_loop_block.id, "]Lb|t?wfd#;s)[llJx8Y"); assert_eq!(inner_loop_block.fields.get("COUNT"), Some(&FieldValue::SimpleField("3".to_string()))); let inner_loop_statement_maybe = inner_loop_block.statements.get("BODY"); assert!(inner_loop_statement_maybe.is_some()); let inner_loop_statement = inner_loop_statement_maybe.unwrap(); assert_eq!(inner_loop_statement.blocks.len(), 2); let led_on_block = inner_loop_statement.blocks.get(0).unwrap(); assert_eq!(led_on_block.block_type, "led_on"); assert_eq!(led_on_block.id, "^3xb.m4E9i0;3$R10(=5"); assert_eq!(led_on_block.fields.get("TIME"), Some(&FieldValue::SimpleField("300".to_string()))); let led_off_block = inner_loop_statement.blocks.get(1).unwrap(); assert_eq!(led_off_block.block_type, "led_off"); assert_eq!(led_off_block.id, "HX4*sB9=gbJtq$Y{ke6b"); }
function_block-function_prefix_line
[ { "content": "# Blockly Parser\n\n\n\nDeserialize XML generated by the [Google Blockly](https://developers.google.com/blockly/) UI into data structures.\n\n\n\n## Example usage:\n\n\n\n```rust\n\nextern crate blockly_parser;\n\n\n\nuse blockly_parser::{\n\n Program,\n\n StatementBody,\n\n Block,\n\n ...
Rust
src/alpha_bleed.rs
Vorlias/tarmac
dd3e436be29ae5161a1a74321d3f352b65858a71
use std::collections::VecDeque; use image::{DynamicImage, GenericImage, GenericImageView, Rgba}; pub(crate) fn alpha_bleed(img: &mut DynamicImage) { let (w, h) = img.dimensions(); let mut can_be_sampled = Mask2::new(w, h); let mut visited = Mask2::new(w, h); let mut to_visit = VecDeque::new(); let adjacent_positions = |x, y| { DIRECTIONS.iter().filter_map(move |(x_offset, y_offset)| { let x_source = (x as i32) + x_offset; let y_source = (y as i32) + y_offset; if x_source < 0 || y_source < 0 || x_source >= w as i32 || y_source >= h as i32 { return None; } Some((x_source as u32, y_source as u32)) }) }; for y in 0..h { for x in 0..w { let pixel = img.get_pixel(x, y); if pixel[3] != 0 { can_be_sampled.set(x, y); visited.set(x, y); continue; } let borders_opaque = adjacent_positions(x, y).any(|(x_source, y_source)| { let source = img.get_pixel(x_source, y_source); source[3] != 0 }); if borders_opaque { visited.set(x, y); to_visit.push_back((x, y)); } } } while let Some((x, y)) = to_visit.pop_front() { let mut new_color = (0, 0, 0); let mut contributing = 0; for (x_source, y_source) in adjacent_positions(x, y) { if can_be_sampled.get(x_source, y_source) { let source = img.get_pixel(x_source, y_source); contributing += 1; new_color.0 += source[0] as u16; new_color.1 += source[1] as u16; new_color.2 += source[2] as u16; } else if !visited.get(x_source, y_source) { visited.set(x_source, y_source); to_visit.push_back((x_source, y_source)); } } let pixel = Rgba([ (new_color.0 / contributing) as u8, (new_color.1 / contributing) as u8, (new_color.2 / contributing) as u8, 0, ]); img.put_pixel(x, y, pixel); can_be_sampled.set(x, y); } } const DIRECTIONS: &[(i32, i32)] = &[ (1, 0), (1, 1), (0, 1), (-1, 1), (-1, 0), (-1, -1), (0, -1), (1, -1), ]; struct Mask2 { size: (u32, u32), data: Vec<bool>, } impl Mask2 { fn new(w: u32, h: u32) -> Self { Self { size: (w, h), data: vec![false; (w * h) as usize], } } fn get(&self, x: u32, y: u32) -> bool { let index = x + y * self.size.0; 
self.data[index as usize] } fn set(&mut self, x: u32, y: u32) { let index = x + y * self.size.0; self.data[index as usize] = true; } }
use std::collections::VecDeque; use image::{DynamicImage, GenericImage, GenericImageView, Rgba}; pub(crate) fn alpha_bleed(img: &mut DynamicImage) { let (w, h) = img.dimensions(); let mut can_be_sampled = Mask2::new(w, h); let mut visited = Mask2::new(w, h); let mut to_visit = VecDeque::new(); let adjacent_positions = |x, y| { DIRECTIONS.iter().filter_map(move |(x_offset, y_offset)| { let x_source = (x as i32) + x_offset; let y_source = (y as i32) + y_offset; if x_source < 0 || y_source < 0 || x_source >= w as i32 || y_source >= h as i32 { return None; } Some((x_source as u32, y_source as u32)) }) }; for y in 0..h { for x in 0..w { let pixel = img.get_pixel(x, y); if pixel[3] != 0 { can_be_sampled.set(x, y); visited.set(x, y); continue; } let borders_opaque = adjacent_positions(x, y).any(|(x_source, y_source)| { let source = img.get_pixel(x_source, y_source); source[3] != 0 }); if borders_opaque {
data: vec![false; (w * h) as usize], } } fn get(&self, x: u32, y: u32) -> bool { let index = x + y * self.size.0; self.data[index as usize] } fn set(&mut self, x: u32, y: u32) { let index = x + y * self.size.0; self.data[index as usize] = true; } }
visited.set(x, y); to_visit.push_back((x, y)); } } } while let Some((x, y)) = to_visit.pop_front() { let mut new_color = (0, 0, 0); let mut contributing = 0; for (x_source, y_source) in adjacent_positions(x, y) { if can_be_sampled.get(x_source, y_source) { let source = img.get_pixel(x_source, y_source); contributing += 1; new_color.0 += source[0] as u16; new_color.1 += source[1] as u16; new_color.2 += source[2] as u16; } else if !visited.get(x_source, y_source) { visited.set(x_source, y_source); to_visit.push_back((x_source, y_source)); } } let pixel = Rgba([ (new_color.0 / contributing) as u8, (new_color.1 / contributing) as u8, (new_color.2 / contributing) as u8, 0, ]); img.put_pixel(x, y, pixel); can_be_sampled.set(x, y); } } const DIRECTIONS: &[(i32, i32)] = &[ (1, 0), (1, 1), (0, 1), (-1, 1), (-1, 0), (-1, -1), (0, -1), (1, -1), ]; struct Mask2 { size: (u32, u32), data: Vec<bool>, } impl Mask2 { fn new(w: u32, h: u32) -> Self { Self { size: (w, h),
random
[ { "content": "fn default_max_spritesheet_size() -> (u32, u32) {\n\n (1024, 1024)\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\n#[serde(deny_unknown_fields, rename_all = \"kebab-case\")]\n\npub struct InputConfig {\n\n /// A glob that will match all files that should be consider...
Rust
web_ui/src/pages/preview_2d.rs
jacobh/roller
4640ac9e2feec6aa211ba0b47b77f287a439f67b
use im_rc::HashMap; use itertools::Itertools; use yew::prelude::*; use roller_protocol::fixture::{FixtureId, FixtureParams, FixtureState}; use crate::pure::{Pure, PureComponent}; fn sorted_unique<T>(items: impl Iterator<Item = T>) -> Vec<T> where T: PartialOrd + PartialEq, { items.fold(Vec::with_capacity(8), |mut output, a| { for (i, b) in output.iter().enumerate() { if &a == b { return output; } else if b > &a { output.insert(i, a); return output; } } output.push(a); output }) } fn find_index<T>(vec: &Vec<T>, item: &T) -> Option<usize> where T: PartialEq, { vec.iter() .enumerate() .filter_map(|(i, x)| if x == item { Some(i) } else { None }) .nth(0) } struct FixtureRef<'a> { id: &'a FixtureId, params: &'a FixtureParams, state: &'a FixtureState, } impl<'a> From<(&'a FixtureId, &'a FixtureParams, &'a FixtureState)> for FixtureRef<'a> { fn from( (id, params, state): (&'a FixtureId, &'a FixtureParams, &'a FixtureState), ) -> FixtureRef<'a> { FixtureRef { id, params, state } } } pub type Preview2dPage = Pure<PurePreview2dPage>; #[derive(Properties, Clone, PartialEq)] pub struct PurePreview2dPage { pub fixture_states: HashMap<FixtureId, (FixtureParams, Option<FixtureState>)>, } impl PureComponent for PurePreview2dPage { fn render(&self) -> Html { let fixtures: Vec<FixtureRef<'_>> = self .fixture_states .iter() .filter_map(|(fixture_id, (params, state))| match state { Some(state) => Some((fixture_id, params, state)), None => None, }) .map(FixtureRef::from) .collect(); let sorted_rows: Vec<isize> = sorted_unique( fixtures .iter() .filter_map(|fixture| fixture.params.location.as_ref()) .map(|location| location.y), ); let sorted_columns: Vec<isize> = sorted_unique( fixtures .iter() .filter_map(|fixture| fixture.params.location.as_ref()) .map(|location| location.x) .unique(), ); let fixture_grid: Vec<Vec<Vec<FixtureRef<'_>>>> = { let mut grid: Vec<Vec<_>> = (0..sorted_rows.len()) .map(|_row_idx| { (0..sorted_columns.len()) .map(|_col_idx| Vec::with_capacity(1)) .collect() }) 
.collect(); for fixture in fixtures.into_iter() { if let Some(location) = fixture.params.location.as_ref() { let row_idx = find_index(&sorted_rows, &location.y).unwrap(); let col_idx = find_index(&sorted_columns, &location.x).unwrap(); grid[row_idx][col_idx].push(fixture); } } grid }; html! { <div class="page-contents"> <h2>{"Fixtures"}</h2> <div> {fixture_grid.iter().rev().map(|row| html! { <div class="preview__row"> {row.iter().map(|column| if let Some(fixture) = column.first() { html! { <PreviewCell fixture_state={fixture.state.clone()}/> } } else { html! { <div class="preview__cell"></div> } } ).collect::<Html>()} </div> }).collect::<Html>()} </div> </div> } } } pub type PreviewCell = Pure<PurePreviewCell>; #[derive(Properties, Clone, PartialEq)] pub struct PurePreviewCell { pub fixture_state: FixtureState, } impl PureComponent for PurePreviewCell { fn render(&self) -> Html { let beam = &self.fixture_state.beams[0]; let color = beam.color.unwrap_or((0.0, 0.0, 0.0)); let opacity = self.fixture_state.dimmer * beam.dimmer; let fill_style = format!( "background-color: rgb({}, {}, {}); opacity: {};", color.0 * 255.0, color.1 * 255.0, color.2 * 255.0, opacity ); html! { <div class="preview__cell preview__cell--active"> <div class="preview__cell-fill" style={fill_style}/> </div> } } }
use im_rc::HashMap; use itertools::Itertools; use yew::prelude::*; use roller_protocol::fixture::{FixtureId, FixtureParams, FixtureState}; use crate::pure::{Pure, PureComponent}; fn sorted_unique<T>(items: impl Iterator<Item = T>) -> Vec<T> where T: PartialOrd + PartialEq, { items.fold(Vec::with_capacity(8), |mut output, a| { for (i, b) in output.iter().enumerate() { if &a == b { return output; } else if b > &a { output.insert(i, a); return output; } } output.push(a); output }) } fn find_index<T>(vec: &Vec<T>, item: &T) -> Option<usize> where T: PartialEq, { vec.iter() .enumerate() .filter_map(|(i, x)| if x == item { Some(i) } else { None }) .nth(0) } struct FixtureRef<'a> { id: &'a FixtureId, params: &'a FixtureParams, state: &'a FixtureState, } impl<'a> From<(&'a FixtureId, &'a FixtureParams, &'a FixtureState)> for FixtureRef<'a> { fn from( (id, params, state): (&'a FixtureId, &'a FixtureParams, &'a FixtureState), ) -> FixtureRef<'a> { FixtureRef { id, params, state } } } pub type Preview2dPage = Pure<PurePreview2dPage>; #[derive(Properties, Clone, PartialEq)] pub struct PurePreview2dPage { pub fixture_states: HashMap<FixtureId, (FixtureParams, Option<FixtureState>)>, } impl PureComponent for PurePreview2dPage { fn render(&self) -> Html { let fixtures: Vec<FixtureRef<'_>> = self .fixture_states .iter() .filter_map(|(fixture_id, (params, state))| match state { Some(state) => Some((fixture_id, params, state)), None => None, }) .map(FixtureRef::from) .collect(); let sorted_rows: Vec<isize> = sorted_unique( fixtures
}) .collect(); for fixture in fixtures.into_iter() { if let Some(location) = fixture.params.location.as_ref() { let row_idx = find_index(&sorted_rows, &location.y).unwrap(); let col_idx = find_index(&sorted_columns, &location.x).unwrap(); grid[row_idx][col_idx].push(fixture); } } grid }; html! { <div class="page-contents"> <h2>{"Fixtures"}</h2> <div> {fixture_grid.iter().rev().map(|row| html! { <div class="preview__row"> {row.iter().map(|column| if let Some(fixture) = column.first() { html! { <PreviewCell fixture_state={fixture.state.clone()}/> } } else { html! { <div class="preview__cell"></div> } } ).collect::<Html>()} </div> }).collect::<Html>()} </div> </div> } } } pub type PreviewCell = Pure<PurePreviewCell>; #[derive(Properties, Clone, PartialEq)] pub struct PurePreviewCell { pub fixture_state: FixtureState, } impl PureComponent for PurePreviewCell { fn render(&self) -> Html { let beam = &self.fixture_state.beams[0]; let color = beam.color.unwrap_or((0.0, 0.0, 0.0)); let opacity = self.fixture_state.dimmer * beam.dimmer; let fill_style = format!( "background-color: rgb({}, {}, {}); opacity: {};", color.0 * 255.0, color.1 * 255.0, color.2 * 255.0, opacity ); html! { <div class="preview__cell preview__cell--active"> <div class="preview__cell-fill" style={fill_style}/> </div> } } }
.iter() .filter_map(|fixture| fixture.params.location.as_ref()) .map(|location| location.y), ); let sorted_columns: Vec<isize> = sorted_unique( fixtures .iter() .filter_map(|fixture| fixture.params.location.as_ref()) .map(|location| location.x) .unique(), ); let fixture_grid: Vec<Vec<Vec<FixtureRef<'_>>>> = { let mut grid: Vec<Vec<_>> = (0..sorted_rows.len()) .map(|_row_idx| { (0..sorted_columns.len()) .map(|_col_idx| Vec::with_capacity(1)) .collect()
function_block-random_span
[]
Rust
src/interface/cli.rs
TomRegan/roots
7d8ed6675878c6ddcecc18b19cdfca08983b96ef
use { clap::{App, AppSettings, Arg, SubCommand}, database::query::{list_fields, list_titles}, std::path::Path, }; use application::book::Book; use internet::metadata; use crate::application::command::Command; use crate::configuration::Configuration; pub struct Application { cfg: Configuration, } impl Application { pub fn new(cfg: Configuration) -> Self { Application { cfg } } pub fn run(self) -> Result<(), ()> { let cmd: Command = parse_command_line(); handle_command(self.cfg, cmd) } } fn handle_command(cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::Config { .. } => handle_config_command(cfg, cmd), Command::Fields => handle_fields_command(cfg, cmd), Command::Find { .. } => Ok(()), Command::Import { .. } => handle_import_command(cfg, cmd), Command::Info { .. } => handle_info_command(cfg, cmd), Command::List { .. } => handle_list_command(cfg, cmd), Command::Update => handle_update_command(cfg, cmd), } } fn handle_config_command(cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::Config { path: true, default: false, } => { println!("{}", cfg.get_source()); Ok(()) } Command::Config { path: false, default: true, } => { println!("{}", &Configuration::default()); Ok(()) } Command::Config { path: false, default: false, } => { println!("{}", cfg); Ok(()) } _ => Err(()), } } fn handle_fields_command(_cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::Fields => { let available_fields = list_fields(); if available_fields.is_empty() { println!("No available fields, is roots initialised?") } else { for f in available_fields { println!("{}", f); } } Ok(()) } _ => Err(()), } } fn handle_import_command(_cfg: Configuration, _cmd: Command) -> Result<(), ()> { println!("No files found to import."); Ok(()) } fn handle_info_command(_cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::Info { path, fetch } => { let book = Book::new(Path::new(&path)); if fetch { let result = 
metadata::request(&book); let volumes = result.map(|r| r.items).unwrap(); let books = volumes.iter().map(Book::from).collect::<Vec<Book>>(); println!("{:#?}", books); } println!("{:#?}", book); Ok(()) } _ => Err(()), } } fn handle_list_command(_cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::List { .. } => { let available_titles = list_titles(); if available_titles.is_empty() { println!("No titles to list, is roots initialised?"); } else { for t in available_titles { println!("{}", t); } } Ok(()) } _ => Err(()), } } fn handle_update_command(_cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::Update => { let available_titles = list_titles(); if available_titles.is_empty() { println!("No titles found, is roots initialised?"); } Ok(()) } _ => Err(()), } } fn parse_command_line() -> Command { let matches = App::new("root") .bin_name("root") .version(crate_version!()) .version_short("v") .about("roots e-book manager") .setting(AppSettings::VersionlessSubcommands) .setting(AppSettings::SubcommandRequiredElseHelp) .subcommand( SubCommand::with_name("config") .about("Shows the configuration") .arg( Arg::with_name("path") .short("p") .long("path") .conflicts_with("default") .help("Display the configuration file path"), ) .arg( Arg::with_name("default") .short("d") .long("default") .conflicts_with("path") .help("Display configuration defaults"), ), ) .subcommand( SubCommand::with_name("fields") .about("Shows fields that can be used in queries"), ).subcommand( SubCommand::with_name("find") .about("Find metadata online") .arg( Arg::with_name("scores") .short("s") .long("show-scores") .help("Show multiple results ranked by score"), ), ) .subcommand( SubCommand::with_name("import") .about("Imports new e-books") .usage( "root import <path> EXAMPLES: root import ~/Downloads/ -> imports books from ~/Downloads/", ) .arg( Arg::with_name("path") .help("Path to directory containing e-books") .required(true), ), ) .subcommand( 
SubCommand::with_name("info") .about("Display information for a file") .usage( "root info <path> EXAMPLES: root info file.epub -> displays information for 'file.epub'", ) .arg( Arg::with_name("path") .help("Path to e-book file") .required(true), ) .arg( Arg::with_name("fetch") .short("f") .long("fetch") .help("Fetches missing information from the web"), ), ) .subcommand( SubCommand::with_name("list") .about("Queries the library") .usage( "root list [FLAGS] EXAMPLES: root list author:forster -> All titles by Forster root list --author howards end -> All authors of matching titles root list --isbn -> All known titles with ISBNs", ) .arg( Arg::with_name("author") .short("a") .long("author") .help("Show a list of matching authors"), ) .arg( Arg::with_name("isbn") .short("i") .long("isbn") .help("Show the ISBN number of each title"), ) .arg( Arg::with_name("table") .short("t") .long("table") .help("Print the matches in a table"), ), ) .subcommand(SubCommand::with_name("update").about("Updates the library")) .get_matches(); match matches.subcommand() { ("config", Some(config)) => Command::Config { path: config.is_present("path"), default: config.is_present("default"), }, ("fields", _) => Command::Fields, ("find", Some(find)) => Command::Find { show_scores: find.is_present("scores") }, ("import", Some(import)) => Command::Import { path: import.value_of("path").map(|v| String::from(v)).unwrap(), }, ("info", Some(info)) => Command::Info { path: info.value_of("path").unwrap().to_string(), fetch: info.is_present("fetch"), }, ("list", Some(list)) => Command::List { author: list.is_present("author"), isbn: list.is_present("isbn"), table: list.is_present("table"), }, ("update", _) => Command::Update, _ => unreachable!(), } } #[cfg(test)] mod tests { extern crate assert_cmd; use std::process::Command; use interface::cli::tests::assert_cmd::prelude::*; #[test] fn find_returns_successfully() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("find"); let assert = 
cmd.assert(); assert.success().code(0); } #[test] fn default_config_path_is_displayed() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("config").arg("--path"); let assert = cmd.assert(); assert.success().code(0); } #[test] fn info_returns_successfully() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("info").arg("var/cache/pg98.mobi"); let assert = cmd.assert(); assert.success().code(0); } #[test] fn info_fails_missing_path() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("info"); let assert = cmd.assert(); assert.failure().code(1); } #[test] fn default_and_path_flags_conflict() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("config").arg("--path").arg("--default"); let assert = cmd.assert(); assert.failure().code(1); } #[test] fn fields_handles_no_database() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("fields"); let assert = cmd.assert(); assert .success() .stdout("No available fields, is roots initialised?\n") .code(0); } #[test] fn list_handles_no_database() { let assert = Command::cargo_bin("roots").unwrap().arg("list").assert(); assert .success() .stdout("No titles to list, is roots initialised?\n") .code(0); } #[test] fn update_handles_no_database() { let assert = Command::cargo_bin("roots").unwrap().arg("update").assert(); assert .success() .stdout("No titles found, is roots initialised?\n") .code(0); } #[test] fn import_returns_successfully() { let assert = Command::cargo_bin("roots").unwrap().arg("import").arg(".").assert(); assert.success() .stdout("No files found to import.\n") .code(0); } }
use { clap::{App, AppSettings, Arg, SubCommand}, database::query::{list_fields, list_titles}, std::path::Path, }; use application::book::Book; use internet::metadata; use crate::application::command::Command; use crate::configuration::Configuration; pub struct Application { cfg: Configuration, } impl Application { pub fn new(cfg: Configuration) -> Self { Application { cfg } } pub fn run(self) -> Result<(), ()> { let cmd: Command = parse_command_line(); handle_command(self.cfg, cmd) } } fn handle_command(cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::Config { .. } => handle_config_command(cfg, cmd), Command::Fields => handle_fields_command(cfg, cmd), Command::Find { .. } => Ok(()), Command::Import { .. } => handle_import_command(cfg, cmd), Command::Info { .. } => handle_info_command(cfg, cmd), Command::List { .. } => handle_list_command(cfg, cmd), Command::Update => handle_update_command(cfg, cmd), } } fn handle_config_command(cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::Config { path: true, default: false, } => { println!("{}", cfg.get_source()); Ok(()) } Command::Config { path: false, default: true, } => { println!("{}", &Configuration::default()); Ok(()) } Command::Config { path: false, default: false, } => { println!("{}", cfg); Ok(()) } _ => Err(()), } } fn handle_fields_command(_cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::Fields => { let available_fields = list_fields(); if available_fields.is_empty() { println!("No available fields, is roots initialised?") } else { for f in available_fields { println!("{}", f); } } Ok(()) } _ => Err(()), } } fn handle_import_command(_cfg: Configuration, _cmd: Command) -> Result<(), ()> { println!("No files found to import."); Ok(()) } fn handle_info_command(_cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::Info { path, fetch } => { let book = Book::new(Path::new(&path)); if fetch { let result = 
metadata::request(&book); let volumes = result.map(|r| r.items).unwrap(); let books = volumes.iter().map(Book::from).collect::<Vec<Book>>(); println!("{:#?}", books); } println!("{:#?}", book); Ok(()) } _ => Err(()), } } fn handle_list_command(_cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::List { .. } => { let available_titles = list_titles(); if available_titles.is_empty() { println!("No titles to list, is roots initialised?"); } else { for t in available_titles { println!("{}", t); } } Ok(()) } _ => Err(()), } } fn handle_update_command(_cfg: Configuration, cmd: Command) -> Result<(), ()> { match cmd { Command::Update => { let available_titles = list_titles(); if available_titles.is_empty() { println!("No titles found, is roots initialised?"); } Ok(()) } _ => Err(()), } } fn parse_command_line() -> Command { let matches = App::new("root") .bin_name("root") .version(crate_version!()) .version_short("v") .about("roots e-book manager") .setting(AppSettings::VersionlessSubcommands) .setting(AppSettings::SubcommandRequiredElseHelp) .subcommand( SubCommand::with_name("config") .about("Shows the configuration") .arg( Arg::with_name("path") .short("p") .long("path") .conflicts_with("default") .help("Display the configuration file path"), ) .arg( Arg::with_name("default") .short("d") .long("default") .conflicts_with("path") .help("Display configuration defaults"), ), ) .subcommand( SubCommand::with_name("fields") .about("Shows fields that can be used in queries"), ).subcommand( SubCommand::with_name("find") .about("Find metadata online") .arg( Arg::with_name("scores") .short("s") .long("show-scores") .help("Show multiple results ranked by score"), ), ) .subcommand( SubCommand::with_name("import") .about("Imports new e-books") .usage( "root import <path> EXAMPLES: root import ~/Downloads/ -> imports books from ~/Downloads/", ) .arg( Arg::with_name("path") .help("Path to directory containing e-books") .required(true), ), ) .subcommand( 
SubCommand::with_name("info") .about("Display information for a file") .usage( "root info <path> EXAMPLES: root info file.epub -> displays information for 'file.epub'", ) .arg( Arg::with_name("path") .help("Path to e-book file") .required(true), ) .arg( Arg::with_name("fetch") .short("f") .long("fetch") .help("Fetches missing information from the web"), ), ) .subcommand( SubCommand::with_name("list") .about("Queries the library") .usage( "root list [FLAGS] EXAMPLES: root list author:forster -> All titles by Forster root list --author howards end -> All authors of matching titles root list --isbn -> All known titles with ISBNs", ) .arg( Arg::with_name("author") .short("a") .long("author") .help("Show a list of matching authors"), ) .arg( Arg::with_name("isbn") .short("i") .long("isbn") .help("Show the ISBN number of each title"), ) .arg( Arg::with_name("table") .short("t") .long("table") .help("Print the matches in a table"), ), ) .subcommand(SubCommand::with_name("update").about("Updates the library")) .get_matches(); match matches.subcommand() { ("config", Some(config)) => Command::Config { path: config.is_present("path"), default: config.is_present("default"), }, ("fields", _) => Command::Fields, ("find", Some(find)) => Command::Find { show_scores: find.is_present("scores") }, ("import", Some(import)) => Command::Import { path: import.value_of("path").map(|v| String::from(v)).unwrap(), }, ("info", Some(info)) => Command::Info { path: info.value_of("path").unwrap().to_string(), fetch: info.is_present("fetch"), }, ("list", Some(list)) => Command::List { author: list.is_present("author"), isbn: list.is_present("isbn"), table: list.is_present("table"), }, ("update", _) => Command::Update, _ => unreachable!(), } } #[cfg(test)] mod tests { extern crate assert_cmd; use std::process::Command; use interface::cli::tests::assert_cmd::prelude::*; #[test] fn find_returns_successfully() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("find"); let assert = 
cmd.assert(); assert.success().code(0); } #[test] fn default_config_path_is_displayed() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("config").arg("--path"); let assert = cmd.assert(); assert.success().code(0); } #[test] fn info_returns_successfully() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("info").arg("var/cache/pg98.mobi"); let assert = cmd.assert(); assert.success().code(0); } #[test] fn info_fails_missing_path() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("info"); let assert = cmd.assert(); assert.failure().code(1); } #[test] fn default_and_path_flags_conflict() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("config").arg("--path").arg("--default"); let assert = cmd.assert(); assert.failure().code(1); } #[test] fn fields_handles_no_database() { let mut cmd = Command::cargo_bin("roots").unwrap(); cmd.arg("fields"); let assert = cmd.assert(); assert .success() .stdout("No available fields, is roots initialised?\n") .code(0); } #[test] fn list_handles_no_d
#[test] fn update_handles_no_database() { let assert = Command::cargo_bin("roots").unwrap().arg("update").assert(); assert .success() .stdout("No titles found, is roots initialised?\n") .code(0); } #[test] fn import_returns_successfully() { let assert = Command::cargo_bin("roots").unwrap().arg("import").arg(".").assert(); assert.success() .stdout("No files found to import.\n") .code(0); } }
atabase() { let assert = Command::cargo_bin("roots").unwrap().arg("list").assert(); assert .success() .stdout("No titles to list, is roots initialised?\n") .code(0); }
function_block-function_prefixed
[ { "content": "fn defaults() -> Config {\n\n let user_config_path = user_config_path();\n\n Config::default()\n\n .set_default(\"debug\", false).unwrap()\n\n .set_default(\"directory\", \"~/Books\".to_string()).unwrap()\n\n .set_default(\"library\", \"library.db\".to_string()).unwrap()...
Rust
crates/lang/codegen/src/generator/trait_def/trait_registry.rs
LedgerProject/ink
7432565b7e91cbe41f47be57ee0a344429724cc8
use super::TraitDefinition; use crate::{ generator::{self,}, traits::GenerateCode, EnforcedErrors, }; use derive_more::From; use proc_macro2::{ Span, TokenStream as TokenStream2, }; use quote::{ format_ident, quote, quote_spanned, }; use syn::{ parse_quote, spanned::Spanned, }; impl<'a> TraitDefinition<'a> { pub fn generate_trait_registry_impl(&self) -> TokenStream2 { TraitRegistry::from(*self).generate_code() } pub fn trait_info_ident(&self) -> syn::Ident { self.append_trait_suffix("TraitInfo") } } #[derive(From)] struct TraitRegistry<'a> { trait_def: TraitDefinition<'a>, } impl GenerateCode for TraitRegistry<'_> { fn generate_code(&self) -> TokenStream2 { let registry_impl = self.generate_registry_impl(); let trait_info = self.generate_trait_info_object(); quote! { #registry_impl #trait_info } } } impl TraitRegistry<'_> { fn span(&self) -> Span { self.trait_def.span() } fn trait_ident(&self) -> &syn::Ident { self.trait_def.trait_def.item().ident() } fn generate_registry_impl(&self) -> TokenStream2 { let span = self.span(); let name = self.trait_ident(); let trait_info_ident = self.trait_def.trait_info_ident(); let messages = self.generate_registry_messages(); quote_spanned!(span=> impl<E> #name for ::ink_lang::reflect::TraitDefinitionRegistry<E> where E: ::ink_env::Environment, { #[doc(hidden)] #[allow(non_camel_case_types)] type __ink_TraitInfo = #trait_info_ident<E>; #messages } ) } fn generate_registry_messages(&self) -> TokenStream2 { let messages = self.trait_def.trait_def.item().iter_items().filter_map( |(item, selector)| { item.filter_map_message() .map(|message| self.generate_registry_for_message(&message, selector)) }, ); quote! 
{ #( #messages )* } } fn generate_inout_guards_for_message(message: &ir::InkTraitMessage) -> TokenStream2 { let message_span = message.span(); let message_inputs = message.inputs().map(|input| { let input_span = input.span(); let input_type = &*input.ty; quote_spanned!(input_span=> let _: () = ::ink_lang::codegen::utils::consume_type::< ::ink_lang::codegen::DispatchInput<#input_type> >(); ) }); let message_output = message.output().map(|output_type| { let output_span = output_type.span(); quote_spanned!(output_span=> let _: () = ::ink_lang::codegen::utils::consume_type::< ::ink_lang::codegen::DispatchOutput<#output_type> >(); ) }); quote_spanned!(message_span=> #( #message_inputs )* #message_output ) } fn generate_registry_for_message( &self, message: &ir::InkTraitMessage, selector: ir::Selector, ) -> TokenStream2 { let span = message.span(); let ident = message.ident(); let attrs = message.attrs(); let output_ident = generator::output_ident(message.ident()); let output_type = message .output() .cloned() .unwrap_or_else(|| parse_quote! { () }); let mut_token = message.receiver().is_ref_mut().then(|| quote! { mut }); let (input_bindings, input_types) = Self::input_bindings_and_types(message.inputs()); let linker_error_ident = EnforcedErrors::cannot_call_trait_message( self.trait_ident(), message.ident(), selector, message.mutates(), ); let inout_guards = Self::generate_inout_guards_for_message(message); let impl_body = match option_env!("INK_COVERAGE_REPORTING") { Some("true") => { quote! { ::core::unreachable!( "this is an invalid ink! message call which should never be possible." ); } } _ => { quote! 
{ extern { fn #linker_error_ident() -> !; } unsafe { #linker_error_ident() } } } }; quote_spanned!(span=> type #output_ident = #output_type; #( #attrs )* #[cold] #[doc(hidden)] fn #ident( & #mut_token self #( , #input_bindings : #input_types )* ) -> Self::#output_ident { #inout_guards #impl_body } ) } fn input_bindings_and_types( inputs: ir::InputsIter, ) -> (Vec<syn::Ident>, Vec<&syn::Type>) { inputs .enumerate() .map(|(n, pat_type)| { let binding = format_ident!("__ink_binding_{}", n); let ty = &*pat_type.ty; (binding, ty) }) .unzip() } fn generate_trait_info_object(&self) -> TokenStream2 { let span = self.span(); let trait_ident = self.trait_ident(); let trait_info_ident = self.trait_def.trait_info_ident(); let trait_call_forwarder = self.trait_def.call_forwarder_ident(); let trait_message_info = self.generate_info_for_trait_messages(); quote_spanned!(span => #[doc(hidden)] #[allow(non_camel_case_types)] pub struct #trait_info_ident<E> { marker: ::core::marker::PhantomData<fn() -> E>, } #trait_message_info impl<E> ::ink_lang::reflect::TraitModulePath for #trait_info_ident<E> where E: ::ink_env::Environment, { const PATH: &'static ::core::primitive::str = ::core::module_path!(); const NAME: &'static ::core::primitive::str = ::core::stringify!(#trait_ident); } impl<E> ::ink_lang::codegen::TraitCallForwarder for #trait_info_ident<E> where E: ::ink_env::Environment, { type Forwarder = #trait_call_forwarder<E>; } ) } fn generate_info_for_trait_messages(&self) -> TokenStream2 { let span = self.span(); let message_impls = self.trait_def.trait_def.item().iter_items().filter_map( |(trait_item, selector)| { trait_item.filter_map_message().map(|message| { self.generate_info_for_trait_for_message(&message, selector) }) }, ); quote_spanned!(span=> #( #message_impls )* ) } fn generate_info_for_trait_for_message( &self, message: &ir::InkTraitMessage, selector: ir::Selector, ) -> TokenStream2 { let span = message.span(); let trait_info_ident = self.trait_def.trait_info_ident(); 
let local_id = message.local_id(); let selector_bytes = selector.hex_lits(); let is_payable = message.ink_attrs().is_payable(); quote_spanned!(span=> impl<E> ::ink_lang::reflect::TraitMessageInfo<#local_id> for #trait_info_ident<E> { const PAYABLE: ::core::primitive::bool = #is_payable; const SELECTOR: [::core::primitive::u8; 4usize] = [ #( #selector_bytes ),* ]; } ) } }
use super::TraitDefinition; use crate::{ generator::{self,}, traits::GenerateCode, EnforcedErrors, }; use derive_more::From; use proc_macro2::{ Span, TokenStream as TokenStream2, }; use quote::{ format_ident, quote, quote_spanned, }; use syn::{ parse_quote, spanned::Spanned, }; impl<'a> TraitDefinition<'a> { pub fn generate_trait_registry_impl(&self) -> TokenStream2 { TraitRegistry::from(*self).generate_code() } pub fn trait_info_ident(&self) -> syn::Ident { self.append_trait_suffix("TraitInfo") } } #[derive(From)] struct TraitRegistry<'a> { trait_def: TraitDefinition<'a>, } impl GenerateCode for TraitRegistry<'_> { fn generate_code(&self) -> TokenStream2 { let registry_impl = self.generate_registry_impl(); let trait_info = self.generate_trait_info_object(); quote! { #registry_impl #trait_info } } } impl TraitRegistry<'_> { fn span(&self) -> Span { self.trait_def.span() } fn trait_ident(&self) -> &syn::Ident { self.trait_def.trait_def.item().ident() } fn generate_registry_impl(&self) -> TokenStream2 { let span = self.span(); let name = self.trait_ident(); let trait_info_ident = self.trait_def.trait_info_ident(); let messages = self.generate_registry_messages(); quote_spanned!(span=> impl<E> #name for ::ink_lang::reflect::TraitDefinitionRegistry<E> where E: ::ink_env::Environment, { #[doc(hidden)] #[allow(non_camel_case_types)] type __ink_TraitInfo = #trait_info_ident<E>; #messages } ) } fn generate_registry_messages(&self) -> TokenStream2 { let messages = self.trait_def.trait_def.item().iter_items().filter_map( |(item, selector)| { item.filter_map_message() .map(|message| self.generate_registry_for_message(&message, selector)) }, ); quote! 
{ #( #messages )* } } fn generate_inout_guards_for_message(message: &ir::InkTraitMessage) -> TokenStream2 { let message_span = message.span(); let message_inputs = message.inputs().map(|input| { let input_span = input.span(); let input_type = &*input.ty; quote_spanned!(input_span=> let _: () = ::ink_lang::codegen::utils::consume_type::< ::ink_lang::codegen::DispatchInput<#input_type> >(); ) }); let message_output = message.output().map(|output_type| { let output_span = output_type.span(); quote_spanned!(output_span=> let _: () = ::ink_lang::codegen::utils::consume_type::< ::ink_lang::codegen::DispatchOutput<#output_type> >(); ) }); quote_spanned!(message_span=> #( #message_inputs )* #message_output ) } fn generate_registry_for_message( &self, message: &ir::InkTraitMessage, selector: ir::Selector, ) -> TokenStream2 { let span = message.span(); let ident = message.ident(); let attrs = message.at
, message.mutates(), ); let inout_guards = Self::generate_inout_guards_for_message(message); let impl_body = match option_env!("INK_COVERAGE_REPORTING") { Some("true") => { quote! { ::core::unreachable!( "this is an invalid ink! message call which should never be possible." ); } } _ => { quote! { extern { fn #linker_error_ident() -> !; } unsafe { #linker_error_ident() } } } }; quote_spanned!(span=> type #output_ident = #output_type; #( #attrs )* #[cold] #[doc(hidden)] fn #ident( & #mut_token self #( , #input_bindings : #input_types )* ) -> Self::#output_ident { #inout_guards #impl_body } ) } fn input_bindings_and_types( inputs: ir::InputsIter, ) -> (Vec<syn::Ident>, Vec<&syn::Type>) { inputs .enumerate() .map(|(n, pat_type)| { let binding = format_ident!("__ink_binding_{}", n); let ty = &*pat_type.ty; (binding, ty) }) .unzip() } fn generate_trait_info_object(&self) -> TokenStream2 { let span = self.span(); let trait_ident = self.trait_ident(); let trait_info_ident = self.trait_def.trait_info_ident(); let trait_call_forwarder = self.trait_def.call_forwarder_ident(); let trait_message_info = self.generate_info_for_trait_messages(); quote_spanned!(span => #[doc(hidden)] #[allow(non_camel_case_types)] pub struct #trait_info_ident<E> { marker: ::core::marker::PhantomData<fn() -> E>, } #trait_message_info impl<E> ::ink_lang::reflect::TraitModulePath for #trait_info_ident<E> where E: ::ink_env::Environment, { const PATH: &'static ::core::primitive::str = ::core::module_path!(); const NAME: &'static ::core::primitive::str = ::core::stringify!(#trait_ident); } impl<E> ::ink_lang::codegen::TraitCallForwarder for #trait_info_ident<E> where E: ::ink_env::Environment, { type Forwarder = #trait_call_forwarder<E>; } ) } fn generate_info_for_trait_messages(&self) -> TokenStream2 { let span = self.span(); let message_impls = self.trait_def.trait_def.item().iter_items().filter_map( |(trait_item, selector)| { trait_item.filter_map_message().map(|message| { 
self.generate_info_for_trait_for_message(&message, selector) }) }, ); quote_spanned!(span=> #( #message_impls )* ) } fn generate_info_for_trait_for_message( &self, message: &ir::InkTraitMessage, selector: ir::Selector, ) -> TokenStream2 { let span = message.span(); let trait_info_ident = self.trait_def.trait_info_ident(); let local_id = message.local_id(); let selector_bytes = selector.hex_lits(); let is_payable = message.ink_attrs().is_payable(); quote_spanned!(span=> impl<E> ::ink_lang::reflect::TraitMessageInfo<#local_id> for #trait_info_ident<E> { const PAYABLE: ::core::primitive::bool = #is_payable; const SELECTOR: [::core::primitive::u8; 4usize] = [ #( #selector_bytes ),* ]; } ) } }
trs(); let output_ident = generator::output_ident(message.ident()); let output_type = message .output() .cloned() .unwrap_or_else(|| parse_quote! { () }); let mut_token = message.receiver().is_ref_mut().then(|| quote! { mut }); let (input_bindings, input_types) = Self::input_bindings_and_types(message.inputs()); let linker_error_ident = EnforcedErrors::cannot_call_trait_message( self.trait_ident(), message.ident(), selector
function_block-random_span
[ { "content": "/// Returns the associated output type for an ink! trait message.\n\npub fn output_ident(message_name: &syn::Ident) -> syn::Ident {\n\n format_ident!(\"{}Output\", message_name.to_string().to_camel_case())\n\n}\n\n\n", "file_path": "crates/lang/codegen/src/generator/arg_list.rs", "rank"...
Rust
src/operations/mod.rs
AndreVuillemot160/QuickDash
156c15e369a8e5c594430eb56d70727cb47f6114
/* Copyright [2021] [Cerda] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ mod compare; mod write; use std::{ collections::{BTreeMap, BTreeSet}, fs::File, io::{self, BufRead, BufReader, Write}, path::{Path, PathBuf}, }; use futures::{executor, future, task::SpawnExt}; use once_cell::sync::Lazy; use pbr::ProgressBar; use regex::Regex; use tabwriter::TabWriter; use walkdir::WalkDir; pub use self::{compare::*, write::*}; use crate::{ hash_file, utilities::{mul_str, relative_name}, Algorithm, Error, }; pub fn create_hashes<Wo: Write>( path: &Path, ignored_files: BTreeSet<String>, algo: Algorithm, depth: Option<usize>, follow_symlinks: bool, jobs: usize, pb_out: Wo, ) -> BTreeMap<String, String> { let mut walkdir = WalkDir::new(path).follow_links(follow_symlinks); if let Some(depth) = depth { walkdir = walkdir.max_depth(depth + 1); } let mut hashes = BTreeMap::new(); let mut hashes_f: BTreeMap<String, String> = BTreeMap::new(); let pool = executor::ThreadPoolBuilder::new() .pool_size(jobs) .create() .expect("could not create ThreadPool"); let mut walkdir = walkdir.into_iter(); while let Some(entry) = walkdir.next() { match entry { Ok(entry) => { let file_type = entry.file_type(); let filename = relative_name(path, entry.path()); let ignored = ignored_files.contains(&filename); if file_type.is_file() { if ignored { hashes.insert(mul_str("-", algo.hexlen()), filename); } else { let ready = future::ready(hash_file(algo, entry.path())); let future = 
pool.spawn_with_handle(ready).expect("failed to spawn"); hashes_f.insert(filename, executor::block_on(future)); } } else if ignored { walkdir.skip_current_dir(); } } Err(error) => { let err = format!( "Symlink loop detected at {}", relative_name(path, error.path().unwrap()) ); writeln!(io::stderr(), "{}", err).expect("io err"); } } } let mut pb = ProgressBar::on(pb_out, hashes_f.len() as u64); pb.set_width(Some(80)); pb.show_speed = false; pb.show_tick = true; hashes.extend(hashes_f.into_iter().map(|(k, f)| { pb.message(&format!("{} ", k)); pb.inc(); (k, f) })); pb.show_tick = false; pb.tick(); pb.finish(); hashes } pub fn write_hashes( out_file: &(String, PathBuf), algo: Algorithm, mut hashes: BTreeMap<String, String>, ) { let mut out = TabWriter::new(File::create(&out_file.1).unwrap()); hashes.insert(out_file.0.clone(), mul_str("-", algo.hexlen())); for (fname, hash) in hashes { writeln!(&mut out, "{} {}", hash, fname).unwrap(); } out.flush().unwrap(); } pub fn read_hashes(file: &(String, PathBuf)) -> Result<BTreeMap<String, String>, Error> { let mut hashes = BTreeMap::new(); let in_file = BufReader::new(File::open(&file.1).unwrap()); for line in in_file.lines().map(Result::unwrap) { try_contains(&line, &mut hashes)?; } Ok(hashes) } fn try_contains(line: &str, hashes: &mut BTreeMap<String, String>) -> Result<(), Error> { if line.is_empty() { return Err(Error::HashesFileParsingFailure); } static LINE_RGX1: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?i)^([[:xdigit:]-]+)\s{2,}(.+?)$").unwrap()); static LINE_RGX2: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?i)^(.+?)\t{0,}\s{1,}([[:xdigit:]-]+)$").unwrap()); if let Some(captures) = LINE_RGX1.captures(line) { hashes.insert(captures[2].to_string(), captures[1].to_uppercase()); return Ok(()); } if let Some(captures) = LINE_RGX2.captures(line) { hashes.insert(captures[1].to_string(), captures[2].to_uppercase()); return Ok(()); } Err(Error::HashesFileParsingFailure) }
/* Copyright [2021] [Cerda] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://ww
Result::unwrap) { try_contains(&line, &mut hashes)?; } Ok(hashes) } fn try_contains(line: &str, hashes: &mut BTreeMap<String, String>) -> Result<(), Error> { if line.is_empty() { return Err(Error::HashesFileParsingFailure); } static LINE_RGX1: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?i)^([[:xdigit:]-]+)\s{2,}(.+?)$").unwrap()); static LINE_RGX2: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?i)^(.+?)\t{0,}\s{1,}([[:xdigit:]-]+)$").unwrap()); if let Some(captures) = LINE_RGX1.captures(line) { hashes.insert(captures[2].to_string(), captures[1].to_uppercase()); return Ok(()); } if let Some(captures) = LINE_RGX2.captures(line) { hashes.insert(captures[1].to_string(), captures[2].to_uppercase()); return Ok(()); } Err(Error::HashesFileParsingFailure) }
w.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ mod compare; mod write; use std::{ collections::{BTreeMap, BTreeSet}, fs::File, io::{self, BufRead, BufReader, Write}, path::{Path, PathBuf}, }; use futures::{executor, future, task::SpawnExt}; use once_cell::sync::Lazy; use pbr::ProgressBar; use regex::Regex; use tabwriter::TabWriter; use walkdir::WalkDir; pub use self::{compare::*, write::*}; use crate::{ hash_file, utilities::{mul_str, relative_name}, Algorithm, Error, }; pub fn create_hashes<Wo: Write>( path: &Path, ignored_files: BTreeSet<String>, algo: Algorithm, depth: Option<usize>, follow_symlinks: bool, jobs: usize, pb_out: Wo, ) -> BTreeMap<String, String> { let mut walkdir = WalkDir::new(path).follow_links(follow_symlinks); if let Some(depth) = depth { walkdir = walkdir.max_depth(depth + 1); } let mut hashes = BTreeMap::new(); let mut hashes_f: BTreeMap<String, String> = BTreeMap::new(); let pool = executor::ThreadPoolBuilder::new() .pool_size(jobs) .create() .expect("could not create ThreadPool"); let mut walkdir = walkdir.into_iter(); while let Some(entry) = walkdir.next() { match entry { Ok(entry) => { let file_type = entry.file_type(); let filename = relative_name(path, entry.path()); let ignored = ignored_files.contains(&filename); if file_type.is_file() { if ignored { hashes.insert(mul_str("-", algo.hexlen()), filename); } else { let ready = future::ready(hash_file(algo, entry.path())); let future = pool.spawn_with_handle(ready).expect("failed to spawn"); hashes_f.insert(filename, executor::block_on(future)); } } else if ignored { walkdir.skip_current_dir(); } } Err(error) => { let err = format!( "Symlink loop detected at {}", relative_name(path, 
error.path().unwrap()) ); writeln!(io::stderr(), "{}", err).expect("io err"); } } } let mut pb = ProgressBar::on(pb_out, hashes_f.len() as u64); pb.set_width(Some(80)); pb.show_speed = false; pb.show_tick = true; hashes.extend(hashes_f.into_iter().map(|(k, f)| { pb.message(&format!("{} ", k)); pb.inc(); (k, f) })); pb.show_tick = false; pb.tick(); pb.finish(); hashes } pub fn write_hashes( out_file: &(String, PathBuf), algo: Algorithm, mut hashes: BTreeMap<String, String>, ) { let mut out = TabWriter::new(File::create(&out_file.1).unwrap()); hashes.insert(out_file.0.clone(), mul_str("-", algo.hexlen())); for (fname, hash) in hashes { writeln!(&mut out, "{} {}", hash, fname).unwrap(); } out.flush().unwrap(); } pub fn read_hashes(file: &(String, PathBuf)) -> Result<BTreeMap<String, String>, Error> { let mut hashes = BTreeMap::new(); let in_file = BufReader::new(File::open(&file.1).unwrap()); for line in in_file.lines().map(
random
[ { "content": "/// Hash the specified file using the specified hashing algorithm.\n\npub fn hash_file(algo: Algorithm, path: &Path) -> String {\n\n\thash_reader(algo, &mut File::open(path).unwrap())\n\n}\n\n\n", "file_path": "src/hashing/mod.rs", "rank": 1, "score": 18270.098607971686 }, { "c...
Rust
hdf5-sys/src/h5f.rs
p-gerhard/hdf5-rust
27a59c2811b877d4729d50f9ffd5a2f3386f7f9f
//! Raw FFI bindings for the HDF5 `H5F` (file) API.
//!
//! Items are gated on crate features named after the minimum HDF5 library
//! version that introduced them (e.g. `feature = "1.10.0"`).

use std::mem;

// Flatten the C-style enum variants into the module namespace.
pub use self::H5F_close_degree_t::*;
pub use self::H5F_libver_t::*;
pub use self::H5F_mem_t::*;
pub use self::H5F_scope_t::*;

// Unversioned aliases: before 1.10.0 they point at the v1 info API...
#[cfg(not(feature = "1.10.0"))]
pub use {
    H5F_info1_t as H5F_info_t, H5F_info1_t__sohm as H5F_info_t__sohm, H5Fget_info1 as H5Fget_info,
};
// ...and from 1.10.0 onwards at the v2 info API.
#[cfg(feature = "1.10.0")]
pub use {
    H5F_info2_t as H5F_info_t, H5F_info2_t__free as H5F_info_t__free,
    H5F_info2_t__sohm as H5F_info_t__sohm, H5F_info2_t__super as H5F_info_t__super,
    H5Fget_info2 as H5Fget_info,
};

use crate::internal_prelude::*;

use crate::h5ac::H5AC_cache_config_t;

// File-access flags (`H5F_ACC_*`).
#[cfg_attr(feature = "1.10.0", deprecated(note = "deprecated in HDF5 1.10.0"))]
pub const H5F_ACC_DEBUG: c_uint = 0x0000; /* these flags call H5check() in the C library */
pub const H5F_ACC_RDONLY: c_uint = 0x0000;
pub const H5F_ACC_RDWR: c_uint = 0x0001;
pub const H5F_ACC_TRUNC: c_uint = 0x0002;
pub const H5F_ACC_EXCL: c_uint = 0x0004;
pub const H5F_ACC_CREAT: c_uint = 0x0010;
pub const H5F_ACC_DEFAULT: c_uint = 0xffff;

// Object-type bitmask (`H5F_OBJ_*`) used by `H5Fget_obj_count`/`H5Fget_obj_ids`.
pub const H5F_OBJ_FILE: c_uint = 0x0001;
pub const H5F_OBJ_DATASET: c_uint = 0x0002;
pub const H5F_OBJ_GROUP: c_uint = 0x0004;
pub const H5F_OBJ_DATATYPE: c_uint = 0x0008;
pub const H5F_OBJ_ATTR: c_uint = 0x0010;
pub const H5F_OBJ_ALL: c_uint =
    H5F_OBJ_FILE | H5F_OBJ_DATASET | H5F_OBJ_GROUP | H5F_OBJ_DATATYPE | H5F_OBJ_ATTR;
pub const H5F_OBJ_LOCAL: c_uint = 0x0020;

pub const H5F_FAMILY_DEFAULT: hsize_t = 0;

pub const H5F_MPIO_DEBUG_KEY: &str = "H5F_mpio_debug_key";

// All bits set: "unlimited" size sentinel.
pub const H5F_UNLIMITED: hsize_t = !0;

#[repr(C)]
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub enum H5F_scope_t {
    H5F_SCOPE_LOCAL = 0,
    H5F_SCOPE_GLOBAL = 1,
}

#[repr(C)]
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub enum H5F_close_degree_t {
    H5F_CLOSE_DEFAULT = 0,
    H5F_CLOSE_WEAK = 1,
    H5F_CLOSE_SEMI = 2,
    H5F_CLOSE_STRONG = 3,
}

impl Default for H5F_close_degree_t {
    fn default() -> Self {
        Self::H5F_CLOSE_DEFAULT
    }
}

#[cfg_attr(feature = "1.10.0", deprecated(note = "deprecated in HDF5 1.10.0, use H5F_info2_t"))]
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct H5F_info1_t {
    pub super_ext_size: hsize_t,
    pub sohm: H5F_info1_t__sohm,
}

// Plain C struct of integer-like fields; all-zero memory is a valid default.
impl Default for H5F_info1_t {
    fn default() -> Self {
        unsafe { mem::zeroed() }
    }
}

#[cfg_attr(feature = "1.10.0", deprecated(note = "deprecated in HDF5 1.10.0, use H5F_info2_t"))]
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct H5F_info1_t__sohm {
    pub hdr_size: hsize_t,
    pub msgs_info: H5_ih_info_t,
}

impl Default for H5F_info1_t__sohm {
    fn default() -> Self {
        unsafe { mem::zeroed() }
    }
}

#[repr(C)]
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub enum H5F_mem_t {
    H5FD_MEM_NOLIST = -1,
    H5FD_MEM_DEFAULT = 0,
    H5FD_MEM_SUPER = 1,
    H5FD_MEM_BTREE = 2,
    H5FD_MEM_DRAW = 3,
    H5FD_MEM_GHEAP = 4,
    H5FD_MEM_LHEAP = 5,
    H5FD_MEM_OHDR = 6,
    H5FD_MEM_NTYPES = 7,
}

// Library-version bounds enum; its shape changed in HDF5 1.10.2, hence the
// two mutually exclusive definitions below.
#[cfg(not(feature = "1.10.2"))]
#[repr(C)]
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub enum H5F_libver_t {
    H5F_LIBVER_EARLIEST = 0,
    H5F_LIBVER_LATEST = 1,
}

#[cfg(feature = "1.10.2")]
#[repr(C)]
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub enum H5F_libver_t {
    H5F_LIBVER_ERROR = -1,
    H5F_LIBVER_EARLIEST = 0,
    H5F_LIBVER_V18 = 1,
    H5F_LIBVER_V110 = 2,
    #[cfg(feature = "1.12.0")]
    H5F_LIBVER_V112 = 3,
    #[cfg(feature = "1.13.0")]
    H5F_LIBVER_V114 = 4,
    H5F_LIBVER_NBOUNDS,
}

// NOTE(review): with 1.10.2+ this pins "latest" to V110 even when the 1.12 /
// 1.13 variants are compiled in -- confirm this matches the intended mapping.
#[cfg(feature = "1.10.2")]
pub const H5F_LIBVER_LATEST: H5F_libver_t = H5F_LIBVER_V110;

impl Default for H5F_libver_t {
    fn default() -> Self {
        H5F_LIBVER_LATEST
    }
}

extern "C" {
    // NOTE(review): this cfg_attr predicate is the bare cfg flag `hdf5_1_10_2`,
    // unlike the `feature = "1.10.2"` style used everywhere else in this file
    // -- confirm which form the build script actually sets.
    #[cfg_attr(
        hdf5_1_10_2,
        deprecated(note = "deprecated in HDF5 1.10.2, use H5Fset_libver_bounds()")
    )]
    pub fn H5Fset_latest_format(file_id: hid_t, latest_format: hbool_t) -> herr_t;
    pub fn H5Fis_hdf5(filename: *const c_char) -> htri_t;
    #[cfg(feature = "1.12.0")]
    pub fn H5Fis_accessible(container_name: *const c_char, fapl_id: hid_t) -> htri_t;
    pub fn H5Fcreate(
        filename: *const c_char,
        flags: c_uint,
        create_plist: hid_t,
        access_plist: hid_t,
    ) -> hid_t;
    pub fn H5Fopen(filename: *const c_char, flags: c_uint, access_plist: hid_t) -> hid_t;
    pub fn H5Freopen(file_id: hid_t) -> hid_t;
    pub fn H5Fflush(object_id: hid_t, scope: H5F_scope_t) -> herr_t;
    pub fn H5Fclose(file_id: hid_t) -> herr_t;
    #[cfg(feature = "1.12.0")]
    pub fn H5Fdelete(filename: *const c_char, fapl_id: hid_t) -> herr_t;
    pub fn H5Fget_create_plist(file_id: hid_t) -> hid_t;
    pub fn H5Fget_access_plist(file_id: hid_t) -> hid_t;
    pub fn H5Fget_intent(file_id: hid_t, intent: *mut c_uint) -> herr_t;
    #[cfg(feature = "1.12.0")]
    pub fn H5Fget_fileno(file_id: hid_t, fileno: *mut c_ulong) -> herr_t;
    pub fn H5Fget_obj_count(file_id: hid_t, types: c_uint) -> ssize_t;
    pub fn H5Fget_obj_ids(
        file_id: hid_t,
        types: c_uint,
        max_objs: size_t,
        obj_id_list: *mut hid_t,
    ) -> ssize_t;
    pub fn H5Fget_vfd_handle(file_id: hid_t, fapl: hid_t, file_handle: *mut *mut c_void) -> herr_t;
    pub fn H5Fmount(loc: hid_t, name: *const c_char, child: hid_t, plist: hid_t) -> herr_t;
    pub fn H5Funmount(loc: hid_t, name: *const c_char) -> herr_t;
    pub fn H5Fget_freespace(file_id: hid_t) -> hssize_t;
    pub fn H5Fget_filesize(file_id: hid_t, size: *mut hsize_t) -> herr_t;
    pub fn H5Fget_mdc_config(file_id: hid_t, config_ptr: *mut H5AC_cache_config_t) -> herr_t;
    // The config pointer's mutability changed to `*const` in 1.13.0.
    #[cfg(not(feature = "1.13.0"))]
    pub fn H5Fset_mdc_config(file_id: hid_t, config_ptr: *mut H5AC_cache_config_t) -> herr_t;
    #[cfg(feature = "1.13.0")]
    pub fn H5Fset_mdc_config(file_id: hid_t, config_ptr: *const H5AC_cache_config_t) -> herr_t;
    pub fn H5Fget_mdc_hit_rate(file_id: hid_t, hit_rate_ptr: *mut c_double) -> herr_t;
    pub fn H5Fget_mdc_size(
        file_id: hid_t,
        max_size_ptr: *mut size_t,
        min_clean_size_ptr: *mut size_t,
        cur_size_ptr: *mut size_t,
        cur_num_entries_ptr: *mut c_int,
    ) -> herr_t;
    pub fn H5Freset_mdc_hit_rate_stats(file_id: hid_t) -> herr_t;
    pub fn H5Fget_name(obj_id: hid_t, name: *mut c_char, size: size_t) -> ssize_t;
}

#[cfg(feature = "1.8.7")]
extern "C" {
    pub fn H5Fclear_elink_file_cache(file_id: hid_t) -> herr_t;
}

#[cfg(feature = "1.8.9")]
extern "C" {
    pub fn H5Fget_file_image(file_id: hid_t, buf_ptr: *mut c_void, buf_len: size_t) -> ssize_t;
}

// Only available when the HDF5 library was built with MPI support.
#[cfg(all(feature = "1.8.9", feature = "have-parallel"))]
extern "C" {
    pub fn H5Fset_mpi_atomicity(file_id: hid_t, flag: hbool_t) -> herr_t;
    pub fn H5Fget_mpi_atomicity(file_id: hid_t, flag: *mut hbool_t) -> herr_t;
}

// Items introduced in HDF5 1.10.0 (SWMR flags, v2 file info, retry info, ...).
#[cfg(feature = "1.10.0")]
mod hdf5_1_10_0 {
    use super::*;

    pub const H5F_ACC_SWMR_WRITE: c_uint = 0x0020;
    pub const H5F_ACC_SWMR_READ: c_uint = 0x0040;

    #[repr(C)]
    #[derive(Debug, Copy, Clone)]
    pub struct H5F_retry_info_t {
        pub nbins: c_uint,
        pub retries: [*mut u32; 21usize],
    }

    impl Default for H5F_retry_info_t {
        fn default() -> Self {
            unsafe { mem::zeroed() }
        }
    }

    #[repr(C)]
    #[derive(Debug, Copy, Clone)]
    pub struct H5F_sect_info_t {
        pub addr: haddr_t,
        pub size: hsize_t,
    }

    impl Default for H5F_sect_info_t {
        fn default() -> Self {
            unsafe { mem::zeroed() }
        }
    }

    #[repr(C)]
    #[derive(Debug, Copy, Clone)]
    pub struct H5F_info2_t {
        pub super_: H5F_info2_t__super,
        pub free: H5F_info2_t__free,
        pub sohm: H5F_info2_t__sohm,
    }

    impl Default for H5F_info2_t {
        fn default() -> Self {
            unsafe { mem::zeroed() }
        }
    }

    #[repr(C)]
    #[derive(Debug, Copy, Clone)]
    pub struct H5F_info2_t__super {
        pub version: c_uint,
        pub super_size: hsize_t,
        pub super_ext_size: hsize_t,
    }

    impl Default for H5F_info2_t__super {
        fn default() -> Self {
            unsafe { mem::zeroed() }
        }
    }

    #[repr(C)]
    #[derive(Debug, Copy, Clone)]
    pub struct H5F_info2_t__free {
        pub version: c_uint,
        pub meta_size: hsize_t,
        pub tot_space: hsize_t,
    }

    impl Default for H5F_info2_t__free {
        fn default() -> Self {
            unsafe { mem::zeroed() }
        }
    }

    #[repr(C)]
    #[derive(Debug, Copy, Clone)]
    pub struct H5F_info2_t__sohm {
        pub version: c_uint,
        pub hdr_size: hsize_t,
        pub msgs_info: H5_ih_info_t,
    }

    impl Default for H5F_info2_t__sohm {
        fn default() -> Self {
            unsafe { mem::zeroed() }
        }
    }

    pub type H5F_flush_cb_t =
        Option<unsafe extern "C" fn(object_id: hid_t, udata: *mut c_void) -> herr_t>;

    #[repr(C)]
    #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
    pub enum H5F_file_space_type_t {
        H5F_FILE_SPACE_DEFAULT = 0,
        H5F_FILE_SPACE_ALL_PERSIST = 1,
        H5F_FILE_SPACE_ALL = 2,
        H5F_FILE_SPACE_AGGR_VFD = 3,
        H5F_FILE_SPACE_VFD = 4,
        H5F_FILE_SPACE_NTYPES = 5,
    }

    pub use self::H5F_file_space_type_t::*;

    extern "C" {
        pub fn H5Fstart_swmr_write(file_id: hid_t) -> herr_t;
        pub fn H5Fget_metadata_read_retry_info(
            file_id: hid_t,
            info: *mut H5F_retry_info_t,
        ) -> herr_t;
        pub fn H5Fstart_mdc_logging(file_id: hid_t) -> herr_t;
        pub fn H5Fstop_mdc_logging(file_id: hid_t) -> herr_t;
        pub fn H5Fget_free_sections(
            file_id: hid_t,
            type_: H5F_mem_t,
            nsects: size_t,
            sect_info: *mut H5F_sect_info_t,
        ) -> ssize_t;
        pub fn H5Fformat_convert(fid: hid_t) -> herr_t;
        pub fn H5Fget_info2(obj_id: hid_t, finfo: *mut H5F_info2_t) -> herr_t;
    }
}

extern "C" {
    // Before 1.10.0 the C library exports this under the plain name
    // `H5Fget_info`, hence the link_name override.
    #[cfg_attr(
        feature = "1.10.0",
        deprecated(note = "deprecated in HDF5 1.10.0, use H5Fget_info2")
    )]
    #[cfg_attr(not(feature = "1.10.0"), link_name = "H5Fget_info")]
    pub fn H5Fget_info1(obj_id: hid_t, finfo: *mut H5F_info1_t) -> herr_t;
}

#[cfg(feature = "1.10.0")]
pub use self::hdf5_1_10_0::*;

// Items introduced in HDF5 1.10.1 (file-space strategy, page buffering).
#[cfg(feature = "1.10.1")]
mod hdf5_1_10_1 {
    use super::*;

    #[repr(C)]
    #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
    pub enum H5F_fspace_strategy_t {
        H5F_FSPACE_STRATEGY_FSM_AGGR = 0,
        H5F_FSPACE_STRATEGY_PAGE = 1,
        H5F_FSPACE_STRATEGY_AGGR = 2,
        H5F_FSPACE_STRATEGY_NONE = 3,
        H5F_FSPACE_STRATEGY_NTYPES = 4,
    }

    impl Default for H5F_fspace_strategy_t {
        fn default() -> Self {
            H5F_FSPACE_STRATEGY_FSM_AGGR
        }
    }

    pub use self::H5F_fspace_strategy_t::*;

    extern "C" {
        pub fn H5Fget_mdc_image_info(
            file_id: hid_t,
            image_addr: *mut haddr_t,
            image_size: *mut hsize_t,
        ) -> herr_t;
        pub fn H5Freset_page_buffering_stats(file_id: hid_t) -> herr_t;
        pub fn H5Fget_page_buffering_stats(
            file_id: hid_t,
            accesses: *mut c_uint,
            hits: *mut c_uint,
            misses: *mut c_uint,
            evictions: *mut c_uint,
            bypasses: *mut c_uint,
        ) -> herr_t;
    }
}

#[cfg(feature = "1.10.1")]
pub use self::hdf5_1_10_1::*;

#[cfg(feature = "1.10.5")]
extern "C" {
    pub fn H5Fget_dset_no_attrs_hint(file_id: hid_t, minimize: *mut hbool_t) -> herr_t;
    pub fn H5Fset_dset_no_attrs_hint(file_id: hid_t, minimize: hbool_t) -> herr_t;
}

// Async variants introduced in HDF5 1.13.0. The leading app_file/app_func/
// app_line parameters presumably carry the caller's source location for
// diagnostics -- confirm against H5Fpublic.h.
#[cfg(feature = "1.13.0")]
extern "C" {
    pub fn H5Fclose_async(
        app_file: *const c_char,
        app_func: *const c_char,
        app_line: c_uint,
        file_id: hid_t,
        es_id: hid_t,
    ) -> herr_t;
    pub fn H5Fcreate_async(
        app_file: *const c_char,
        app_func: *const c_char,
        app_line: c_uint,
        filename: *const c_char,
        flags: c_uint,
        fcpl_id: hid_t,
        fapl_id: hid_t,
        es_id: hid_t,
    ) -> hid_t;
    pub fn H5Fflush_async(
        app_file: *const c_char,
        app_func: *const c_char,
        app_line: c_uint,
        object_id: hid_t,
        scope: H5F_scope_t,
        es_id: hid_t,
    ) -> herr_t;
    // NOTE(review): `access_plit` looks like a typo for `access_plist`
    // (harmless in an extern declaration, but worth tidying).
    pub fn H5Fopen_async(
        app_file: *const c_char,
        app_func: *const c_char,
        app_line: c_uint,
        filename: *const c_char,
        flags: c_uint,
        access_plit: hid_t,
        es_id: hid_t,
    ) -> hid_t;
    // NOTE(review): `H5reopen_async` looks like a typo for `H5Freopen_async`;
    // if the C library exports the latter, this binding can never link --
    // confirm the exported symbol name in H5Fpublic.h before renaming.
    pub fn H5reopen_async(
        app_file: *const c_char,
        app_func: *const c_char,
        app_line: c_uint,
        file_id: hid_t,
        es_id: hid_t,
    ) -> hid_t;
}
use std::mem; pub use self::H5F_close_degree_t::*; pub use self::H5F_libver_t::*; pub use self::H5F_mem_t::*; pub use self::H5F_scope_t::*; #[cfg(not(feature = "1.10.0"))] pub use { H5F_info1_t as H5F_info_t, H5F_info1_t__sohm as H5F_info_t__sohm, H5Fget_info1 as H5Fget_info, }; #[cfg(feature = "1.10.0")] pub use { H5F_info2_t as H5F_info_t, H5F_info2_t__free as H5F_info_t__free, H5F_info2_t__sohm as H5F_info_t__sohm, H5F_info2_t__super as H5F_info_t__super, H5Fget_info2 as H5Fget_info, }; use crate::internal_prelude::*; use crate::h5ac::H5AC_cache_config_t; #[cfg_attr(feature = "1.10.0", deprecated(note = "deprecated in HDF5 1.10.0"))] pub const H5F_ACC_DEBUG: c_uint = 0x0000; /* these flags call H5check() in the C library */ pub const H5F_ACC_RDONLY: c_uint = 0x0000; pub const H5F_ACC_RDWR: c_uint = 0x0001; pub const H5F_ACC_TRUNC: c_uint = 0x0002; pub const H5F_ACC_EXCL: c_uint = 0x0004; pub const H5F_ACC_CREAT: c_uint = 0x0010; pub const H5F_ACC_DEFAULT: c_uint = 0xffff; pub const H5F_OBJ_FILE: c_uint = 0x0001; pub const H5F_OBJ_DATASET: c_uint = 0x0002; pub const H5F_OBJ_GROUP: c_uint = 0x0004; pub const H5F_OBJ_DATATYPE: c_uint = 0x0008; pub const H5F_OBJ_ATTR: c_uint = 0x0010; pub const H5F_OBJ_ALL: c_uint = H5F_OBJ_FILE | H5F_OBJ_DATASET | H5F_OBJ_GROUP | H5F_OBJ_DATATYPE | H5F_OBJ_ATTR; pub const H5F_OBJ_LOCAL: c_uint = 0x0020; pub const H5F_FAMILY_DEFAULT: hsize_t = 0; pub const H5F_MPIO_DEBUG_KEY: &str = "H5F_mpio_debug_key"; pub const H5F_UNLIMITED: hsize_t = !0; #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5F_scope_t { H5F_SCOPE_LOCAL = 0, H5F_SCOPE_GLOBAL = 1, } #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5F_close_degree_t { H5F_CLOSE_DEFAULT = 0, H5F_CLOSE_WEAK = 1, H5F_CLOSE_SEMI = 2, H5F_CLOSE_STRONG = 3, } impl Default
unsafe { mem::zeroed() } } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5F_info2_t { pub super_: H5F_info2_t__super, pub free: H5F_info2_t__free, pub sohm: H5F_info2_t__sohm, } impl Default for H5F_info2_t { fn default() -> Self { unsafe { mem::zeroed() } } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5F_info2_t__super { pub version: c_uint, pub super_size: hsize_t, pub super_ext_size: hsize_t, } impl Default for H5F_info2_t__super { fn default() -> Self { unsafe { mem::zeroed() } } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5F_info2_t__free { pub version: c_uint, pub meta_size: hsize_t, pub tot_space: hsize_t, } impl Default for H5F_info2_t__free { fn default() -> Self { unsafe { mem::zeroed() } } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5F_info2_t__sohm { pub version: c_uint, pub hdr_size: hsize_t, pub msgs_info: H5_ih_info_t, } impl Default for H5F_info2_t__sohm { fn default() -> Self { unsafe { mem::zeroed() } } } pub type H5F_flush_cb_t = Option<unsafe extern "C" fn(object_id: hid_t, udata: *mut c_void) -> herr_t>; #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5F_file_space_type_t { H5F_FILE_SPACE_DEFAULT = 0, H5F_FILE_SPACE_ALL_PERSIST = 1, H5F_FILE_SPACE_ALL = 2, H5F_FILE_SPACE_AGGR_VFD = 3, H5F_FILE_SPACE_VFD = 4, H5F_FILE_SPACE_NTYPES = 5, } pub use self::H5F_file_space_type_t::*; extern "C" { pub fn H5Fstart_swmr_write(file_id: hid_t) -> herr_t; pub fn H5Fget_metadata_read_retry_info( file_id: hid_t, info: *mut H5F_retry_info_t, ) -> herr_t; pub fn H5Fstart_mdc_logging(file_id: hid_t) -> herr_t; pub fn H5Fstop_mdc_logging(file_id: hid_t) -> herr_t; pub fn H5Fget_free_sections( file_id: hid_t, type_: H5F_mem_t, nsects: size_t, sect_info: *mut H5F_sect_info_t, ) -> ssize_t; pub fn H5Fformat_convert(fid: hid_t) -> herr_t; pub fn H5Fget_info2(obj_id: hid_t, finfo: *mut H5F_info2_t) -> herr_t; } } extern "C" { #[cfg_attr( feature = "1.10.0", deprecated(note = "deprecated in HDF5 1.10.0, 
use H5Fget_info2") )] #[cfg_attr(not(feature = "1.10.0"), link_name = "H5Fget_info")] pub fn H5Fget_info1(obj_id: hid_t, finfo: *mut H5F_info1_t) -> herr_t; } #[cfg(feature = "1.10.0")] pub use self::hdf5_1_10_0::*; #[cfg(feature = "1.10.1")] mod hdf5_1_10_1 { use super::*; #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5F_fspace_strategy_t { H5F_FSPACE_STRATEGY_FSM_AGGR = 0, H5F_FSPACE_STRATEGY_PAGE = 1, H5F_FSPACE_STRATEGY_AGGR = 2, H5F_FSPACE_STRATEGY_NONE = 3, H5F_FSPACE_STRATEGY_NTYPES = 4, } impl Default for H5F_fspace_strategy_t { fn default() -> Self { H5F_FSPACE_STRATEGY_FSM_AGGR } } pub use self::H5F_fspace_strategy_t::*; extern "C" { pub fn H5Fget_mdc_image_info( file_id: hid_t, image_addr: *mut haddr_t, image_size: *mut hsize_t, ) -> herr_t; pub fn H5Freset_page_buffering_stats(file_id: hid_t) -> herr_t; pub fn H5Fget_page_buffering_stats( file_id: hid_t, accesses: *mut c_uint, hits: *mut c_uint, misses: *mut c_uint, evictions: *mut c_uint, bypasses: *mut c_uint, ) -> herr_t; } } #[cfg(feature = "1.10.1")] pub use self::hdf5_1_10_1::*; #[cfg(feature = "1.10.5")] extern "C" { pub fn H5Fget_dset_no_attrs_hint(file_id: hid_t, minimize: *mut hbool_t) -> herr_t; pub fn H5Fset_dset_no_attrs_hint(file_id: hid_t, minimize: hbool_t) -> herr_t; } #[cfg(feature = "1.13.0")] extern "C" { pub fn H5Fclose_async( app_file: *const c_char, app_func: *const c_char, app_line: c_uint, file_id: hid_t, es_id: hid_t, ) -> herr_t; pub fn H5Fcreate_async( app_file: *const c_char, app_func: *const c_char, app_line: c_uint, filename: *const c_char, flags: c_uint, fcpl_id: hid_t, fapl_id: hid_t, es_id: hid_t, ) -> hid_t; pub fn H5Fflush_async( app_file: *const c_char, app_func: *const c_char, app_line: c_uint, object_id: hid_t, scope: H5F_scope_t, es_id: hid_t, ) -> herr_t; pub fn H5Fopen_async( app_file: *const c_char, app_func: *const c_char, app_line: c_uint, filename: *const c_char, flags: c_uint, access_plit: hid_t, es_id: hid_t, ) -> hid_t; pub fn 
H5reopen_async( app_file: *const c_char, app_func: *const c_char, app_line: c_uint, file_id: hid_t, es_id: hid_t, ) -> hid_t; }
for H5F_close_degree_t { fn default() -> Self { Self::H5F_CLOSE_DEFAULT } } #[cfg_attr(feature = "1.10.0", deprecated(note = "deprecated in HDF5 1.10.0, use H5F_info2_t"))] #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5F_info1_t { pub super_ext_size: hsize_t, pub sohm: H5F_info1_t__sohm, } impl Default for H5F_info1_t { fn default() -> Self { unsafe { mem::zeroed() } } } #[cfg_attr(feature = "1.10.0", deprecated(note = "deprecated in HDF5 1.10.0, use H5F_info2_t"))] #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5F_info1_t__sohm { pub hdr_size: hsize_t, pub msgs_info: H5_ih_info_t, } impl Default for H5F_info1_t__sohm { fn default() -> Self { unsafe { mem::zeroed() } } } #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5F_mem_t { H5FD_MEM_NOLIST = -1, H5FD_MEM_DEFAULT = 0, H5FD_MEM_SUPER = 1, H5FD_MEM_BTREE = 2, H5FD_MEM_DRAW = 3, H5FD_MEM_GHEAP = 4, H5FD_MEM_LHEAP = 5, H5FD_MEM_OHDR = 6, H5FD_MEM_NTYPES = 7, } #[cfg(not(feature = "1.10.2"))] #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5F_libver_t { H5F_LIBVER_EARLIEST = 0, H5F_LIBVER_LATEST = 1, } #[cfg(feature = "1.10.2")] #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5F_libver_t { H5F_LIBVER_ERROR = -1, H5F_LIBVER_EARLIEST = 0, H5F_LIBVER_V18 = 1, H5F_LIBVER_V110 = 2, #[cfg(feature = "1.12.0")] H5F_LIBVER_V112 = 3, #[cfg(feature = "1.13.0")] H5F_LIBVER_V114 = 4, H5F_LIBVER_NBOUNDS, } #[cfg(feature = "1.10.2")] pub const H5F_LIBVER_LATEST: H5F_libver_t = H5F_LIBVER_V110; impl Default for H5F_libver_t { fn default() -> Self { H5F_LIBVER_LATEST } } extern "C" { #[cfg_attr( hdf5_1_10_2, deprecated(note = "deprecated in HDF5 1.10.2, use H5Fset_libver_bounds()") )] pub fn H5Fset_latest_format(file_id: hid_t, latest_format: hbool_t) -> herr_t; pub fn H5Fis_hdf5(filename: *const c_char) -> htri_t; #[cfg(feature = "1.12.0")] pub fn H5Fis_accessible(container_name: *const c_char, fapl_id: hid_t) -> htri_t; pub fn H5Fcreate( 
filename: *const c_char, flags: c_uint, create_plist: hid_t, access_plist: hid_t, ) -> hid_t; pub fn H5Fopen(filename: *const c_char, flags: c_uint, access_plist: hid_t) -> hid_t; pub fn H5Freopen(file_id: hid_t) -> hid_t; pub fn H5Fflush(object_id: hid_t, scope: H5F_scope_t) -> herr_t; pub fn H5Fclose(file_id: hid_t) -> herr_t; #[cfg(feature = "1.12.0")] pub fn H5Fdelete(filename: *const c_char, fapl_id: hid_t) -> herr_t; pub fn H5Fget_create_plist(file_id: hid_t) -> hid_t; pub fn H5Fget_access_plist(file_id: hid_t) -> hid_t; pub fn H5Fget_intent(file_id: hid_t, intent: *mut c_uint) -> herr_t; #[cfg(feature = "1.12.0")] pub fn H5Fget_fileno(file_id: hid_t, fileno: *mut c_ulong) -> herr_t; pub fn H5Fget_obj_count(file_id: hid_t, types: c_uint) -> ssize_t; pub fn H5Fget_obj_ids( file_id: hid_t, types: c_uint, max_objs: size_t, obj_id_list: *mut hid_t, ) -> ssize_t; pub fn H5Fget_vfd_handle(file_id: hid_t, fapl: hid_t, file_handle: *mut *mut c_void) -> herr_t; pub fn H5Fmount(loc: hid_t, name: *const c_char, child: hid_t, plist: hid_t) -> herr_t; pub fn H5Funmount(loc: hid_t, name: *const c_char) -> herr_t; pub fn H5Fget_freespace(file_id: hid_t) -> hssize_t; pub fn H5Fget_filesize(file_id: hid_t, size: *mut hsize_t) -> herr_t; pub fn H5Fget_mdc_config(file_id: hid_t, config_ptr: *mut H5AC_cache_config_t) -> herr_t; #[cfg(not(feature = "1.13.0"))] pub fn H5Fset_mdc_config(file_id: hid_t, config_ptr: *mut H5AC_cache_config_t) -> herr_t; #[cfg(feature = "1.13.0")] pub fn H5Fset_mdc_config(file_id: hid_t, config_ptr: *const H5AC_cache_config_t) -> herr_t; pub fn H5Fget_mdc_hit_rate(file_id: hid_t, hit_rate_ptr: *mut c_double) -> herr_t; pub fn H5Fget_mdc_size( file_id: hid_t, max_size_ptr: *mut size_t, min_clean_size_ptr: *mut size_t, cur_size_ptr: *mut size_t, cur_num_entries_ptr: *mut c_int, ) -> herr_t; pub fn H5Freset_mdc_hit_rate_stats(file_id: hid_t) -> herr_t; pub fn H5Fget_name(obj_id: hid_t, name: *mut c_char, size: size_t) -> ssize_t; } #[cfg(feature = 
"1.8.7")] extern "C" { pub fn H5Fclear_elink_file_cache(file_id: hid_t) -> herr_t; } #[cfg(feature = "1.8.9")] extern "C" { pub fn H5Fget_file_image(file_id: hid_t, buf_ptr: *mut c_void, buf_len: size_t) -> ssize_t; } #[cfg(all(feature = "1.8.9", feature = "have-parallel"))] extern "C" { pub fn H5Fset_mpi_atomicity(file_id: hid_t, flag: hbool_t) -> herr_t; pub fn H5Fget_mpi_atomicity(file_id: hid_t, flag: *mut hbool_t) -> herr_t; } #[cfg(feature = "1.10.0")] mod hdf5_1_10_0 { use super::*; pub const H5F_ACC_SWMR_WRITE: c_uint = 0x0020; pub const H5F_ACC_SWMR_READ: c_uint = 0x0040; #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5F_retry_info_t { pub nbins: c_uint, pub retries: [*mut u32; 21usize], } impl Default for H5F_retry_info_t { fn default() -> Self { unsafe { mem::zeroed() } } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5F_sect_info_t { pub addr: haddr_t, pub size: hsize_t, } impl Default for H5F_sect_info_t { fn default() -> Self {
random
[ { "content": "/// Convert a `String` or a `&str` into a zero-terminated string (`const char *`).\n\npub fn to_cstring<S: Borrow<str>>(string: S) -> Result<CString> {\n\n let string = string.borrow();\n\n #[allow(clippy::map_err_ignore)]\n\n CString::new(string).map_err(|_| format!(\"null byte in string...
Rust
src/platform_impl/linux/x11/util/hint.rs
mahkoh/winit
62e917bdf928b0a9d45e5ebb7f0aff9b10994a5c
//! Window-manager hint handling for the X11 backend: EWMH window types,
//! ICCCM `WM_HINTS` / `WM_NORMAL_HINTS`, and Motif `_MOTIF_WM_HINTS`.

use std::sync::Arc;

use super::*;

use std::convert::TryInto;
use thiserror::Error;
use xcb_dl_util::hint::{XcbHints, XcbHintsError, XcbSizeHints, XcbSizeHintsError};
use xcb_dl_util::property::XcbGetPropertyError;

// Operation argument values for window-state change requests.
#[derive(Debug)]
#[allow(dead_code)]
pub enum StateOperation {
    Remove = 0,
    Add = 1,
    Toggle = 2,
}

impl From<bool> for StateOperation {
    // true -> Add, false -> Remove (Toggle is never produced here).
    fn from(op: bool) -> Self {
        if op {
            StateOperation::Add
        } else {
            StateOperation::Remove
        }
    }
}

/// The window type hint, mapped to the `_NET_WM_WINDOW_TYPE_*` atoms below.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum WindowType {
    Desktop,
    Dock,
    Toolbar,
    Menu,
    Utility,
    Splash,
    Dialog,
    DropdownMenu,
    PopupMenu,
    Tooltip,
    Notification,
    Combo,
    Dnd,
    Normal,
}

impl Default for WindowType {
    fn default() -> Self {
        WindowType::Normal
    }
}

impl WindowType {
    /// Intern and return the `_NET_WM_WINDOW_TYPE_*` atom for this variant.
    pub(crate) fn as_atom(&self, xconn: &Arc<XConnection>) -> ffi::xcb_atom_t {
        use self::WindowType::*;
        let atom_name: &str = match *self {
            Desktop => "_NET_WM_WINDOW_TYPE_DESKTOP",
            Dock => "_NET_WM_WINDOW_TYPE_DOCK",
            Toolbar => "_NET_WM_WINDOW_TYPE_TOOLBAR",
            Menu => "_NET_WM_WINDOW_TYPE_MENU",
            Utility => "_NET_WM_WINDOW_TYPE_UTILITY",
            Splash => "_NET_WM_WINDOW_TYPE_SPLASH",
            Dialog => "_NET_WM_WINDOW_TYPE_DIALOG",
            DropdownMenu => "_NET_WM_WINDOW_TYPE_DROPDOWN_MENU",
            PopupMenu => "_NET_WM_WINDOW_TYPE_POPUP_MENU",
            Tooltip => "_NET_WM_WINDOW_TYPE_TOOLTIP",
            Notification => "_NET_WM_WINDOW_TYPE_NOTIFICATION",
            Combo => "_NET_WM_WINDOW_TYPE_COMBO",
            Dnd => "_NET_WM_WINDOW_TYPE_DND",
            Normal => "_NET_WM_WINDOW_TYPE_NORMAL",
        };
        xconn.get_atom(atom_name)
    }
}

/// High-level builder around the raw `_MOTIF_WM_HINTS` payload.
pub struct MotifHints {
    hints: MwmHints,
}

// Raw five-word payload of the `_MOTIF_WM_HINTS` property, in declaration
// order (see MwmHints::as_array).
struct MwmHints {
    flags: u32,
    functions: u32,
    decorations: u32,
    input_mode: u32,
    status: u32,
}

#[allow(dead_code)]
mod mwm {
    // Bits of `MwmHints::flags` marking which fields are meaningful.
    pub const MWM_HINTS_FUNCTIONS: u32 = 1 << 0;
    pub const MWM_HINTS_DECORATIONS: u32 = 1 << 1;

    // Bits of `MwmHints::functions`. When MWM_FUNC_ALL is set, the other
    // function bits are interpreted as exclusions from "all" (see add_func/
    // remove_func below).
    pub const MWM_FUNC_ALL: u32 = 1 << 0;
    pub const MWM_FUNC_RESIZE: u32 = 1 << 1;
    pub const MWM_FUNC_MOVE: u32 = 1 << 2;
    pub const MWM_FUNC_MINIMIZE: u32 = 1 << 3;
    pub const MWM_FUNC_MAXIMIZE: u32 = 1 << 4;
    pub const MWM_FUNC_CLOSE: u32 = 1 << 5;
}

impl MotifHints {
    /// All-zero hints: no flags set, so the window manager changes nothing.
    pub fn new() -> MotifHints {
        MotifHints {
            hints: MwmHints {
                flags: 0,
                functions: 0,
                decorations: 0,
                input_mode: 0,
                status: 0,
            },
        }
    }

    /// Request decorations on (1) or off (0) and mark the field as set.
    pub fn set_decorations(&mut self, decorations: bool) {
        self.hints.flags |= mwm::MWM_HINTS_DECORATIONS;
        self.hints.decorations = decorations as u32;
    }

    pub fn set_maximizable(&mut self, maximizable: bool) {
        if maximizable {
            self.add_func(mwm::MWM_FUNC_MAXIMIZE);
        } else {
            self.remove_func(mwm::MWM_FUNC_MAXIMIZE);
        }
    }

    // Enable `func`. While MWM_FUNC_ALL is on, the individual bits mean
    // "excluded from all", so enabling a function *clears* its bit; otherwise
    // the bits are additive and enabling *sets* it. Deliberately a no-op when
    // the functions field was never initialised (flags bit unset): everything
    // is already allowed by default.
    fn add_func(&mut self, func: u32) {
        if self.hints.flags & mwm::MWM_HINTS_FUNCTIONS != 0 {
            if self.hints.functions & mwm::MWM_FUNC_ALL != 0 {
                self.hints.functions &= !func;
            } else {
                self.hints.functions |= func;
            }
        }
    }

    // Disable `func`. First initialises the field to "all functions" if it
    // was never set, then applies the same inverted-bit convention described
    // on add_func (set the bit to exclude it from "all").
    fn remove_func(&mut self, func: u32) {
        if self.hints.flags & mwm::MWM_HINTS_FUNCTIONS == 0 {
            self.hints.flags |= mwm::MWM_HINTS_FUNCTIONS;
            self.hints.functions = mwm::MWM_FUNC_ALL;
        }
        if self.hints.functions & mwm::MWM_FUNC_ALL != 0 {
            self.hints.functions |= func;
        } else {
            self.hints.functions &= !func;
        }
    }
}

impl MwmHints {
    // The property payload as a five-word array, in field declaration order.
    fn as_array(&self) -> [u32; 5] {
        [
            self.flags,
            self.functions,
            self.decorations,
            self.input_mode,
            self.status,
        ]
    }
}

/// Errors that can occur while reading or decoding hint properties.
#[derive(Debug, Error)]
pub enum HintsError {
    #[error("Could not convert the property contents to XcbHints: {0}")]
    Contents(#[from] XcbHintsError),
    #[error("Could not convert the property contents to XcbSizeHints: {0}")]
    SizeContents(#[from] XcbSizeHintsError),
    #[error("Could not retrieve the property: {0}")]
    Property(#[from] XcbGetPropertyError),
    #[error("An xcb error occurred: {0}")]
    Xcb(#[from] XcbError),
}

impl XConnection {
    /// Read the window's `WM_HINTS`; an unset property yields default hints
    /// rather than an error.
    pub fn get_wm_hints(&self, window: ffi::xcb_window_t) -> Result<XcbHints, HintsError> {
        let prop = self.get_property::<u32>(window, ffi::XCB_ATOM_WM_HINTS, ffi::XCB_ATOM_WM_HINTS);
        let bytes = match prop {
            Ok(b) => b,
            Err(XcbGetPropertyError::Unset) => return Ok(XcbHints::default()),
            Err(e) => return Err(e.into()),
        };
        Ok((&*bytes).try_into()?)
    }

    /// Replace the window's `WM_HINTS` with `wm_hints`.
    pub fn set_wm_hints(&self, window: ffi::xcb_window_t, wm_hints: XcbHints) -> XcbPendingCommand {
        self.change_property(
            window,
            ffi::XCB_ATOM_WM_HINTS,
            ffi::XCB_ATOM_WM_HINTS,
            PropMode::Replace,
            wm_hints.as_bytes(),
        )
    }

    /// Read the window's `WM_NORMAL_HINTS`. Note the asymmetry with
    /// `get_wm_hints`: an unset property here surfaces as an error (the `?`
    /// propagates `XcbGetPropertyError::Unset`) instead of a default value.
    pub fn get_normal_hints(&self, window: ffi::xcb_window_t) -> Result<XcbSizeHints, HintsError> {
        let bytes = self.get_property::<u32>(
            window,
            ffi::XCB_ATOM_WM_NORMAL_HINTS,
            ffi::XCB_ATOM_WM_SIZE_HINTS,
        )?;
        Ok((&*bytes).try_into()?)
    }

    /// Replace the window's `WM_NORMAL_HINTS` with `normal_hints`.
    pub fn set_normal_hints(
        &self,
        window: ffi::xcb_window_t,
        normal_hints: XcbSizeHints,
    ) -> XcbPendingCommand {
        self.change_property(
            window,
            ffi::XCB_ATOM_WM_NORMAL_HINTS,
            ffi::XCB_ATOM_WM_SIZE_HINTS,
            PropMode::Replace,
            normal_hints.as_bytes(),
        )
    }

    /// Read `_MOTIF_WM_HINTS`; a missing property or missing trailing words
    /// read as zero (i.e. "hint not set").
    pub fn get_motif_hints(&self, window: ffi::xcb_window_t) -> MotifHints {
        let motif_hints = self.get_atom("_MOTIF_WM_HINTS");
        let mut hints = MotifHints::new();
        if let Ok(props) = self.get_property::<u32>(window, motif_hints, motif_hints) {
            hints.hints.flags = props.get(0).cloned().unwrap_or(0);
            hints.hints.functions = props.get(1).cloned().unwrap_or(0);
            hints.hints.decorations = props.get(2).cloned().unwrap_or(0);
            hints.hints.input_mode = props.get(3).cloned().unwrap_or(0);
            hints.hints.status = props.get(4).cloned().unwrap_or(0);
        }
        hints
    }

    /// Replace `_MOTIF_WM_HINTS` with the given hints.
    pub fn set_motif_hints(
        &self,
        window: ffi::xcb_window_t,
        hints: &MotifHints,
    ) -> XcbPendingCommand {
        let motif_hints = self.get_atom("_MOTIF_WM_HINTS");
        self.change_property(
            window,
            motif_hints,
            motif_hints,
            PropMode::Replace,
            &hints.hints.as_array(),
        )
        .into()
    }
}
use std::sync::Arc; use super::*; use std::convert::TryInto; use thiserror::Error; use xcb_dl_util::hint::{XcbHints, XcbHintsError, XcbSizeHints, XcbSizeHintsError}; use xcb_dl_util::property::XcbGetPropertyError; #[derive(Debug)] #[allow(dead_code)] pub enum StateOperation { Remove = 0, Add = 1, Toggle = 2, } impl From<bool> for StateOperation { fn from(op: bool) -> Self { if op { StateOperation::Add } else { StateOperation::Remove } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum WindowType { Desktop, Dock, Toolbar, Menu, Utility, Splash, Dialog, DropdownMenu, PopupMenu, Tooltip, Notification, Combo, Dnd, Normal, } impl Default for WindowType { fn default() -> Self { WindowType::Normal } } impl WindowType { pub(crate) fn as_atom(&self, xconn: &Arc<XConnection>) -> ffi::xcb_atom_t { use self::WindowType::*; let atom_name: &str = match *self { Desktop => "_NET_WM_WINDOW_TYPE_DESKTOP", Dock => "_NET_WM_WINDOW_TYPE_DOCK", Toolbar => "_NET_WM_WINDOW_TYPE_TOOLBAR", Menu => "_NET_WM_WINDOW_TYPE_MENU", Utility => "_NET_WM_WINDOW_TYPE_UTILITY", Splash => "_NET_WM_WINDOW_TYPE_SPLASH", Dialog => "_NET_WM_WINDOW_TYPE_DIALOG", DropdownMenu => "_NET_WM_WINDOW_TYPE_DROPDOWN_MENU", PopupMenu => "_NET_WM_WINDOW_TYPE_POPUP_MENU", Tooltip => "_NET_WM_WINDOW_TYPE_TOOLTIP", Notification => "_NET_WM_WINDOW_TYPE_NOTIFICATION", Combo => "_NET_WM_WINDOW_TYPE_COMBO", Dnd => "_NET_WM_WINDOW_TYPE_DND", Normal => "_NET_WM_WINDOW_TYPE_NORMAL", }; xconn.get_atom(atom_name) } } pub struct MotifHints { hints: MwmHints, } struct MwmHints { flags: u32, functions: u32, decorations: u32, input_mode: u32, status: u32, } #[allow(dead_code)] mod mwm { pub const MWM_HINTS_FUNCTIONS: u32 = 1 << 0; pub const MWM_HINTS_DECORATIONS: u32 = 1 << 1; pub const MWM_FUNC_ALL: u32 = 1 << 0; pub const MWM_FUNC_RESIZE: u32 = 1 << 1; pub const MWM_FUNC_MOVE: u32 = 1 << 2; pub const MWM_FUNC_MINIMIZE: u32 = 1 << 3; pub const 
MWM_FUNC_MAXIMIZE: u32 = 1 << 4; pub const MWM_FUNC_CLOSE: u32 = 1 << 5; } impl MotifHints { pub fn new() -> MotifHints { MotifHints { hints: MwmHints { flags: 0, functions: 0, decorations: 0, input_mode: 0, status: 0, }, } } pub fn set_decorations(&mut self, decorations: bool) { self.hints.flags |= mwm::MWM_HINTS_DECORATIONS; self.hints.decorations = decorations as u32; } pub fn set_maximizable(&mut self, maximizable: bool) { if maximizable { self.add_func(mwm::MWM_FUNC_MAXIMIZE); } else { self.remove_func(mwm::MWM_FUNC_MAXIMIZE); } } fn add_func(&mut self, func: u32) { if self.hints.flags & mwm::MWM_HINTS_FUNCTIONS != 0 { if self.hints.functions & mwm::MWM_FUNC_ALL != 0 { self.hints.functions &= !func; } else { self.hints.functions |= func; } } } fn remove_func(&mut self, func: u32) { if self.hints.flags & mwm::MWM_HINTS_FUNCTIONS == 0 { self.hints.flags |= mwm::MWM_HINTS_FUNCTIONS; self.hints.functions = mwm::MWM_FUNC_ALL; } if self.hints.functions & mwm::MWM_FUNC_ALL != 0 { self.hints.functions |= func; } else { self.hints.functions &= !func; } } } impl MwmHints { fn as_array(&self) -> [u32; 5] { [ self.flags, self.functions, self.decorations, self.input_mode, self.status, ] } } #[derive(Debug, Error)] pub enum HintsError { #[error("Could not convert the property contents to XcbHints: {0}")] Contents(#[from] XcbHintsError), #[error("Could not convert the property contents to XcbSizeHints: {0}")] SizeContents(#[from] XcbSizeHintsError), #[error("Could not retrieve the property: {0}")] Property(#[from] XcbGetPropertyError), #[error("An xcb error occurred: {0}")] Xcb(#[from] XcbError), } impl XConnection { pub fn get_wm_hints(&self, window: ffi::xcb_window_t) -> Result<XcbHints, HintsError> { let prop = self.get_property::<u32>(window, ffi::XCB_ATOM_WM_HINTS, ffi::XCB_ATOM_WM_HINTS); let bytes = match prop { Ok(b) => b, Err(XcbGetPropertyError::Unset) => return Ok(XcbHints::default()), Err(e) => return Err(e.into()), }; Ok((&*bytes).try_into()?) 
} pub fn set_wm_hints(&self, window: ffi::xcb_window_t, wm_hints: XcbHints) -> XcbPendingCommand { self.change_property( window, ffi::XCB_ATOM_WM_HINTS, ffi::XCB_ATOM_WM_HINTS, PropMode::Replace, wm_hints.as_bytes(), ) } pub fn get_normal_hints(&self, window: ffi::xcb_window_t) -> Result<XcbSizeHints, HintsError> { let bytes = self.get_property::<u32>( window, ffi::XCB_ATOM_WM_NORMAL_HINTS, ffi::XCB_ATOM_WM_SIZE_HINTS, )?; Ok((&*bytes).try_into()?) } pub fn set_normal_hints( &self, window: ffi::xcb_window_t, normal_hints: XcbSizeHints, ) -> XcbPendingCommand { self.change_property( window, ffi::XCB_ATOM_WM_NORMAL_HINT
pub fn get_motif_hints(&self, window: ffi::xcb_window_t) -> MotifHints { let motif_hints = self.get_atom("_MOTIF_WM_HINTS"); let mut hints = MotifHints::new(); if let Ok(props) = self.get_property::<u32>(window, motif_hints, motif_hints) { hints.hints.flags = props.get(0).cloned().unwrap_or(0); hints.hints.functions = props.get(1).cloned().unwrap_or(0); hints.hints.decorations = props.get(2).cloned().unwrap_or(0); hints.hints.input_mode = props.get(3).cloned().unwrap_or(0); hints.hints.status = props.get(4).cloned().unwrap_or(0); } hints } pub fn set_motif_hints( &self, window: ffi::xcb_window_t, hints: &MotifHints, ) -> XcbPendingCommand { let motif_hints = self.get_atom("_MOTIF_WM_HINTS"); self.change_property( window, motif_hints, motif_hints, PropMode::Replace, &hints.hints.as_array(), ) .into() } }
S, ffi::XCB_ATOM_WM_SIZE_HINTS, PropMode::Replace, normal_hints.as_bytes(), ) }
function_block-function_prefixed
[ { "content": "pub fn has_flag<T>(bitset: T, flag: T) -> bool\n\nwhere\n\n T: Copy + PartialEq + BitAnd<T, Output = T>,\n\n{\n\n bitset & flag == flag\n\n}\n\n\n", "file_path": "src/platform_impl/windows/util.rs", "rank": 0, "score": 377559.9592201411 }, { "content": "pub fn has_flag<T>...
Rust
src/main.rs
uuhan/sonnerie
7ddf1c2c32e50afc6760b60ca0dd382b6c04e887
use sonnerie::formatted; use std::path::Path; use std::fs::File; use sonnerie::*; use std::io::Write; fn main() -> std::io::Result<()> { use clap::{SubCommand,Arg}; let matches = clap::App::new("sonnerie") .version("0.5.8") .author("Charles Samuels <kalle@eventures.vc>") .about("A compressed timeseries database") .arg(Arg::with_name("dir") .long("dir") .short("d") .help("store data here in this directory. Create a \"main\" file here first.") .required(true) .takes_value(true) ) .subcommand( SubCommand::with_name("add") .about("adds records") .arg(Arg::with_name("format") .short("f") .long("format") .takes_value(true) .required(true) ) .arg(Arg::with_name("timestamp-format") .long("timestamp-format") .help("instead of nanoseconds since the epoch, use this strftime format") .takes_value(true) ) .arg(Arg::with_name("unsafe-nocheck") .long("unsafe-nocheck") .help("suppress the format coherency check (makes insertions faster)") ) ) .subcommand( SubCommand::with_name("compact") .about("merge transactions") .arg(Arg::with_name("major") .short("M") .long("major") .help("compact everything into a new main database") ) .arg(Arg::with_name("gegnum") .long("gegnum") .help("Run this command, writing compacted data as if by \"read\" \ into the process's stdin, and reading its stdout as if by \"add\". \ This is useful for removing or modifying data. \ It is recommended to backup the database first \ (or make hard links of the files). 
You probably want to \ use this with --major to get the entire database.") .takes_value(true) ) .arg(Arg::with_name("timestamp-format") .long("timestamp-format") .help("with --gegnum, instead of nanoseconds since the epoch, use this strftime format") .takes_value(true) .requires("gegnum") .takes_value(true) ) .arg(Arg::with_name("unsafe-nocheck") .long("unsafe-nocheck") .help("suppress the format coherency check (makes insertions faster)") .requires("gegnum") ) ) .subcommand( SubCommand::with_name("read") .about("reads records") .arg(Arg::with_name("filter") .help("select the keys to print out, \"%\" is the wildcard") .takes_value(true) .required_unless_one(&["before", "after"]) ) .arg(Arg::with_name("print-format") .long("print-format") .help("Output the line format after the timestamp for each record") ) .arg(Arg::with_name("timestamp-format") .long("timestamp-format") .help("instead of \"%F %T\", use this strftime format") .takes_value(true) ) .arg(Arg::with_name("timestamp-nanos") .long("timestamp-nanos") .help("Print timestamps as nanoseconds since the unix epoch") .conflicts_with("timestamp-format") ) .arg(Arg::with_name("timestamp-seconds") .long("timestamp-seconds") .help("Print timestamps as seconds since the unix epoch (rounded down if necessary)") .conflicts_with("timestamp-format") .conflicts_with("timestamp-nanos") ) .arg(Arg::with_name("before") .long("before") .help("read values before (but not including) this key") .takes_value(true) .conflicts_with("filter") ) .arg(Arg::with_name("after") .long("after") .help("read values after (and including) this key") .takes_value(true) .conflicts_with("filter") ) ) .get_matches(); let dir = matches.value_of_os("dir").expect("--dir"); let dir = std::path::Path::new(dir); if let Some(matches) = matches.subcommand_matches("add") { let format = matches.value_of("format").unwrap(); let nocheck = matches.is_present("unsafe-nocheck"); let ts_format = matches.value_of("timestamp-format"); add(&dir, format, ts_format, 
nocheck); } else if let Some(matches) = matches.subcommand_matches("compact") { let gegnum = matches.value_of_os("gegnum"); let ts_format = matches.value_of("timestamp-format").unwrap_or("%FT%T"); let nocheck = matches.is_present("unsafe-nocheck"); compact( &dir, matches.is_present("major"), gegnum, ts_format, nocheck, ).expect("compacting"); } else if let Some(matches) = matches.subcommand_matches("read") { let print_format = matches.is_present("print-format"); let timestamp_format = matches.value_of("timestamp-format") .unwrap_or("%F %T"); let timestamp_nanos = matches.is_present("timestamp-nanos"); let timestamp_seconds = matches.is_present("timestamp-seconds"); let after = matches.value_of("after"); let before = matches.value_of("before"); let filter = matches.value_of("filter"); let stdout = std::io::stdout(); let mut stdout = std::io::BufWriter::new(stdout.lock()); let db = DatabaseReader::new(dir)?; let print_record_format = if print_format { formatted::PrintRecordFormat::Yes } else { formatted::PrintRecordFormat::No }; let print_timestamp = if timestamp_nanos { formatted::PrintTimestamp::Nanos } else if timestamp_seconds { formatted::PrintTimestamp::Seconds } else { formatted::PrintTimestamp::FormatString(timestamp_format) }; macro_rules! filter { ($filter:expr) => { for record in $filter { formatted::print_record2( &record, &mut stdout, print_timestamp, print_record_format )?; writeln!(&mut stdout, "")?; } }; } match (after, before, filter) { (Some(after), None, None) => filter!(db.get_range(after ..)), (None, Some(before), None) => filter!(db.get_range( .. before)), (Some(after), Some(before), None) => filter!(db.get_range(after .. 
before)), (None, None, Some(filter)) => filter!(db.get_filter(&Wildcard::new(filter))), _ => unreachable!(), } } else { eprintln!("A command must be specified (read, add, compact)"); std::process::exit(1); } Ok(()) } fn add(dir: &Path, fmt: &str, ts_format: Option<&str>, nocheck: bool) { let db = DatabaseReader::new(dir).expect("opening db"); let mut tx = CreateTx::new(dir).expect("creating tx"); let stdin = std::io::stdin(); let mut stdin = stdin.lock(); formatted::add_from_stream(&mut tx, &db, fmt, &mut stdin, ts_format, nocheck) .expect("adding value"); tx.commit().expect("failed to commit transaction"); } fn compact( dir: &Path, major: bool, gegnum: Option<&std::ffi::OsStr>, ts_format: &str, nocheck: bool) -> Result<(), crate::WriteFailure> { use fs2::FileExt; let lock = File::create(dir.join(".compact"))?; lock.lock_exclusive()?; let db; if major { db = DatabaseReader::new(dir)?; } else { db = DatabaseReader::without_main_db(dir)?; } let db = std::sync::Arc::new(db); let mut compacted = CreateTx::new(dir)?; if let Some(gegnum) = gegnum { let mut child = std::process::Command::new("/bin/sh") .arg("-c") .arg(gegnum) .stdin(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped()) .spawn() .expect("unable to run --gegnum process"); let childinput = child.stdin.take().expect("process had no stdin"); let mut childinput = std::io::BufWriter::new(childinput); let ts_format_copy = ts_format.to_owned(); let reader_db = db.clone(); let reader_thread = std::thread::spawn( move || -> std::io::Result<()> { let timestamp_format = formatted::PrintTimestamp::FormatString(&ts_format_copy); let reader = reader_db.get_range(..); for record in reader { formatted::print_record2( &record, &mut childinput, timestamp_format, formatted::PrintRecordFormat::Yes, )?; writeln!(&mut childinput, "")?; } Ok(()) } ); let childoutput = child.stdout.take().expect("process had no stdout"); let mut childoutput = std::io::BufReader::new(childoutput); formatted::add_from_stream_with_fmt( 
&mut compacted, &db, &mut childoutput, Some(ts_format), nocheck, )?; reader_thread.join() .expect("failed to join subprocess writing thread") .expect("child writer failed"); let result = child.wait()?; if !result.success() { panic!("child process failed: cancelling compact"); } } else { { let ps = db.transaction_paths(); if ps.len() == 1 && ps[0].file_name().expect("filename") == "main" { eprintln!("nothing to do"); return Ok(()); } } let reader = db.get_range(..); let mut n=0u64; for record in reader { compacted.add_record( record.key(), record.format(), record.value(), )?; n+= 1; } eprintln!("compacted {} records", n); } if major { compacted.commit_to(&dir.join("main")) .expect("failed to replace main database"); } else { compacted.commit() .expect("failed to commit compacted database"); } for txfile in db.transaction_paths() { if txfile.file_name().expect("filename in txfile") == "main" { continue; } if let Err(e) = std::fs::remove_file(&txfile) { eprintln!("warning: failed to remove {:?}: {}", txfile, e); } } Ok(()) }
use sonnerie::formatted; use std::path::Path; use std::fs::File; use sonnerie::*; use std::io::Write; fn main() -> std::io::Result<()> { use clap::{SubCommand,Arg}; let matches = clap::App::new("sonnerie") .version("0.5.8") .author("Charles Samuels <kalle@eventures.vc>") .about("A compressed timeseries database") .arg(Arg::with_name("dir") .long("dir") .short("d") .help("store data here in this directory. Create a \"main\" file here first.") .required(true) .takes_value(true) ) .subcommand( SubCommand::with_name("add") .about("adds records") .arg(Arg::with_name("format") .short("f") .long("format") .takes_value(true) .required(true) ) .arg(Arg::with_name("timestamp-format") .long("timestamp-format") .help("instead of nanoseconds since the epoch, use this strftime format") .takes_value(true) ) .arg(Arg::with_name("unsafe-nocheck") .long("unsafe-nocheck") .help("suppress the format coherency check (makes insertions faster)") ) ) .subcommand( SubCommand::with_name("compact") .about("merge transactions") .arg(Arg::with_name("major") .short("M") .long("major") .help("compact everything into a new main database") ) .arg(Arg::with_name("gegnum") .long("gegnum") .help("Run this command, writing compacted data as if by \"read\" \ into the process's stdin, and reading its stdout as if by \"add\". \ This is useful for removing or modifying data. \ It is recommended to backup the database first \ (or make hard links of the files). 
You probably want to \ use this with --major to get the entire database.") .takes_value(true) ) .arg(Arg::with_name("timestamp-format") .long("timestamp-format") .help("with --gegnum, instead of nanoseconds since the epoch, use this strftime format") .takes_value(true) .requires("gegnum") .takes_value(true) ) .arg(Arg::with_name("unsafe-nocheck") .long("unsafe-nocheck") .help("suppress the format coherency check (makes insertions faster)") .requires("gegnum") ) ) .subcommand( SubCommand::with_name("read") .about("reads records") .arg(Arg::with_name("filter") .help("select the keys to print out, \"%\" is the wildcard") .takes_value(true) .required_unless_one(&["before", "after"]) ) .arg(Arg::with_name("print-format") .long("print-format") .help("Output the line format after the timestamp for each record") ) .arg(Arg::with_name("timestamp-format") .long("timestamp-format") .help("instead of \"%F %T\", use this strftime format") .takes_value(true) ) .arg(Arg::with_name("timestamp-nanos") .long("timestamp-nanos") .help("Print timestamps as nanoseconds since the unix epoch") .conflicts_with("timestamp-format") ) .arg(Arg::with_name("timestamp-seconds") .long("timestamp-seconds") .help("Pr
check = matches.is_present("unsafe-nocheck"); let ts_format = matches.value_of("timestamp-format"); add(&dir, format, ts_format, nocheck); } else if let Some(matches) = matches.subcommand_matches("compact") { let gegnum = matches.value_of_os("gegnum"); let ts_format = matches.value_of("timestamp-format").unwrap_or("%FT%T"); let nocheck = matches.is_present("unsafe-nocheck"); compact( &dir, matches.is_present("major"), gegnum, ts_format, nocheck, ).expect("compacting"); } else if let Some(matches) = matches.subcommand_matches("read") { let print_format = matches.is_present("print-format"); let timestamp_format = matches.value_of("timestamp-format") .unwrap_or("%F %T"); let timestamp_nanos = matches.is_present("timestamp-nanos"); let timestamp_seconds = matches.is_present("timestamp-seconds"); let after = matches.value_of("after"); let before = matches.value_of("before"); let filter = matches.value_of("filter"); let stdout = std::io::stdout(); let mut stdout = std::io::BufWriter::new(stdout.lock()); let db = DatabaseReader::new(dir)?; let print_record_format = if print_format { formatted::PrintRecordFormat::Yes } else { formatted::PrintRecordFormat::No }; let print_timestamp = if timestamp_nanos { formatted::PrintTimestamp::Nanos } else if timestamp_seconds { formatted::PrintTimestamp::Seconds } else { formatted::PrintTimestamp::FormatString(timestamp_format) }; macro_rules! filter { ($filter:expr) => { for record in $filter { formatted::print_record2( &record, &mut stdout, print_timestamp, print_record_format )?; writeln!(&mut stdout, "")?; } }; } match (after, before, filter) { (Some(after), None, None) => filter!(db.get_range(after ..)), (None, Some(before), None) => filter!(db.get_range( .. before)), (Some(after), Some(before), None) => filter!(db.get_range(after .. 
before)), (None, None, Some(filter)) => filter!(db.get_filter(&Wildcard::new(filter))), _ => unreachable!(), } } else { eprintln!("A command must be specified (read, add, compact)"); std::process::exit(1); } Ok(()) } fn add(dir: &Path, fmt: &str, ts_format: Option<&str>, nocheck: bool) { let db = DatabaseReader::new(dir).expect("opening db"); let mut tx = CreateTx::new(dir).expect("creating tx"); let stdin = std::io::stdin(); let mut stdin = stdin.lock(); formatted::add_from_stream(&mut tx, &db, fmt, &mut stdin, ts_format, nocheck) .expect("adding value"); tx.commit().expect("failed to commit transaction"); } fn compact( dir: &Path, major: bool, gegnum: Option<&std::ffi::OsStr>, ts_format: &str, nocheck: bool) -> Result<(), crate::WriteFailure> { use fs2::FileExt; let lock = File::create(dir.join(".compact"))?; lock.lock_exclusive()?; let db; if major { db = DatabaseReader::new(dir)?; } else { db = DatabaseReader::without_main_db(dir)?; } let db = std::sync::Arc::new(db); let mut compacted = CreateTx::new(dir)?; if let Some(gegnum) = gegnum { let mut child = std::process::Command::new("/bin/sh") .arg("-c") .arg(gegnum) .stdin(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped()) .spawn() .expect("unable to run --gegnum process"); let childinput = child.stdin.take().expect("process had no stdin"); let mut childinput = std::io::BufWriter::new(childinput); let ts_format_copy = ts_format.to_owned(); let reader_db = db.clone(); let reader_thread = std::thread::spawn( move || -> std::io::Result<()> { let timestamp_format = formatted::PrintTimestamp::FormatString(&ts_format_copy); let reader = reader_db.get_range(..); for record in reader { formatted::print_record2( &record, &mut childinput, timestamp_format, formatted::PrintRecordFormat::Yes, )?; writeln!(&mut childinput, "")?; } Ok(()) } ); let childoutput = child.stdout.take().expect("process had no stdout"); let mut childoutput = std::io::BufReader::new(childoutput); formatted::add_from_stream_with_fmt( 
&mut compacted, &db, &mut childoutput, Some(ts_format), nocheck, )?; reader_thread.join() .expect("failed to join subprocess writing thread") .expect("child writer failed"); let result = child.wait()?; if !result.success() { panic!("child process failed: cancelling compact"); } } else { { let ps = db.transaction_paths(); if ps.len() == 1 && ps[0].file_name().expect("filename") == "main" { eprintln!("nothing to do"); return Ok(()); } } let reader = db.get_range(..); let mut n=0u64; for record in reader { compacted.add_record( record.key(), record.format(), record.value(), )?; n+= 1; } eprintln!("compacted {} records", n); } if major { compacted.commit_to(&dir.join("main")) .expect("failed to replace main database"); } else { compacted.commit() .expect("failed to commit compacted database"); } for txfile in db.transaction_paths() { if txfile.file_name().expect("filename in txfile") == "main" { continue; } if let Err(e) = std::fs::remove_file(&txfile) { eprintln!("warning: failed to remove {:?}: {}", txfile, e); } } Ok(()) }
int timestamps as seconds since the unix epoch (rounded down if necessary)") .conflicts_with("timestamp-format") .conflicts_with("timestamp-nanos") ) .arg(Arg::with_name("before") .long("before") .help("read values before (but not including) this key") .takes_value(true) .conflicts_with("filter") ) .arg(Arg::with_name("after") .long("after") .help("read values after (and including) this key") .takes_value(true) .conflicts_with("filter") ) ) .get_matches(); let dir = matches.value_of_os("dir").expect("--dir"); let dir = std::path::Path::new(dir); if let Some(matches) = matches.subcommand_matches("add") { let format = matches.value_of("format").unwrap(); let no
random
[ { "content": "#[deprecated]\n\npub fn print_record<W: std::io::Write>(\n\n\trecord: &crate::record::OwnedRecord,\n\n\tout: &mut W,\n\n) -> std::io::Result<()>\n\n{\n\n\tlet fmt = parse_row_format(record.format());\n\n\tlet key = record.key();\n\n\tlet ts = &record.value()[0..8];\n\n\tlet value = &record.value()...
Rust
yewdux/src/dispatch.rs
Quetzal2/yewdux
2329d74ef86a75dc6c0b37bcba147febd4327b14
use std::rc::Rc; use std::{cell::RefCell, future::Future}; use yew::{Callback, Properties}; use crate::{ service::{ServiceBridge, ServiceOutput, ServiceRequest, ServiceResponse}, store::Store, }; type Model<T> = <T as Store>::Model; pub trait Dispatcher { type Store: Store; #[doc(hidden)] fn bridge(&self) -> Rc<RefCell<ServiceBridge<Self::Store>>>; fn send(&self, msg: impl Into<<Self::Store as Store>::Input>) { self.bridge().borrow_mut().send_store(msg.into()) } fn callback<E, M>(&self, f: impl Fn(E) -> M + 'static) -> Callback<E> where M: Into<<Self::Store as Store>::Input>, { let bridge = self.bridge(); let f = Rc::new(f); Callback::from(move |e| { let msg = f(e); bridge.borrow_mut().send_store(msg.into()) }) } fn callback_once<E, M>(&self, f: impl Fn(E) -> M + 'static) -> Callback<E> where M: Into<<Self::Store as Store>::Input>, { let bridge = self.bridge(); let f = Rc::new(f); Callback::once(move |e| { let msg = f(e); bridge.borrow_mut().send_store(msg.into()) }) } fn reduce<F, R>(&self, f: F) where F: FnOnce(&mut Model<Self::Store>) -> R + 'static, { self.bridge() .borrow_mut() .send_service(ServiceRequest::Reduce(Box::new(move |state| { f(state); }))) } fn reduce_callback<F, R, E>(&self, f: F) -> Callback<E> where F: Fn(&mut Model<Self::Store>) -> R + 'static, E: 'static, { let bridge = self.bridge(); let f = Rc::new(f); Callback::from(move |_| { bridge.borrow_mut().send_service(ServiceRequest::Reduce({ let f = f.clone(); Box::new(move |state| { f(state); }) })) }) } fn reduce_callback_with<F, R, E>(&self, f: F) -> Callback<E> where F: Fn(&mut Model<Self::Store>, E) -> R + 'static, E: 'static, { let bridge = self.bridge(); let f = Rc::new(f); Callback::from(move |e: E| { let f = f.clone(); bridge.borrow_mut().send_service(ServiceRequest::Reduce({ let f = f.clone(); Box::new(move |state| { f(state, e); }) })) }) } fn reduce_callback_once<F, R, E>(&self, f: F) -> Callback<E> where F: FnOnce(&mut Model<Self::Store>) -> R + 'static, E: 'static, { let bridge = 
self.bridge(); Callback::once(move |_| { bridge .borrow_mut() .send_service(ServiceRequest::Reduce(Box::new(move |state| { f(state); }))) }) } fn reduce_callback_once_with<F, R, E>(&self, f: F) -> Callback<E> where F: FnOnce(&mut Model<Self::Store>, E) -> R + 'static, E: 'static, { let bridge = self.bridge(); Callback::once(move |e: E| { bridge .borrow_mut() .send_service(ServiceRequest::Reduce(Box::new(move |state| { f(state, e); }))) }) } fn future<F, FU, OUT>(&self, f: F) where F: FnOnce(Self) -> FU + 'static, FU: Future<Output = OUT> + 'static, OUT: 'static, Self: Clone + 'static, { let this = self.clone(); self.bridge() .borrow_mut() .send_service(ServiceRequest::Future(Box::pin(async move { f(this).await; }))) } fn future_callback<F, FU, OUT, E>(&self, f: F) -> Callback<E> where F: Fn(Self) -> FU + 'static, FU: Future<Output = OUT>, OUT: 'static, Self: Clone + 'static, E: 'static, { let this = self.clone(); let bridge = this.bridge(); let f = Rc::new(f); Callback::from(move |_| { let this = this.clone(); bridge .borrow_mut() .send_service(ServiceRequest::Future(Box::pin({ let this = this.clone(); let f = f.clone(); async move { f(this).await; } }))) }) } fn future_callback_with<F, FU, OUT, E>(&self, f: F) -> Callback<E> where F: Fn(Self, E) -> FU + 'static, FU: Future<Output = OUT>, OUT: 'static, Self: Clone + 'static, E: 'static, { let this = self.clone(); let bridge = this.bridge(); let f = Rc::new(f); Callback::from(move |e| { let this = this.clone(); bridge .borrow_mut() .send_service(ServiceRequest::Future(Box::pin({ let this = this.clone(); let f = f.clone(); async move { f(this, e).await; } }))) }) } fn future_callback_once<F, FU, OUT, E>(&self, f: F) -> Callback<E> where F: FnOnce(Self) -> FU + 'static, FU: Future<Output = OUT>, OUT: 'static, Self: Clone + 'static, E: 'static, { let this = self.clone(); let bridge = this.bridge().clone(); Callback::once(move |_| { bridge .borrow_mut() .send_service(ServiceRequest::Future(Box::pin({ async move { 
f(this).await; } }))) }) } fn future_callback_once_with<F, FU, OUT, E>(&self, f: F) -> Callback<E> where F: FnOnce(Self, E) -> FU + 'static, FU: Future<Output = OUT>, OUT: 'static, Self: Clone + 'static, E: 'static, { let this = self.clone(); let bridge = this.bridge().clone(); Callback::once(move |e| { bridge .borrow_mut() .send_service(ServiceRequest::Future(Box::pin({ async move { f(this, e).await; } }))) }) } } pub struct Dispatch<STORE: Store, SCOPE: 'static = STORE> { pub(crate) bridge: Rc<RefCell<ServiceBridge<STORE, SCOPE>>>, } impl<STORE: Store, SCOPE: 'static> Dispatch<STORE, SCOPE> { pub fn new() -> Self { Self { bridge: Rc::new(RefCell::new(ServiceBridge::new(Callback::noop()))), } } pub fn bridge( on_state: Callback<Rc<STORE::Model>>, on_output: Callback<STORE::Output>, ) -> Self { let cb = Callback::from(move |msg| match msg { ServiceOutput::Store(msg) => on_output.emit(msg), ServiceOutput::Service(msg) => match msg { ServiceResponse::State(state) => on_state.emit(state), }, }); Self { bridge: Rc::new(RefCell::new(ServiceBridge::new(cb))), } } pub fn bridge_state(on_state: Callback<Rc<STORE::Model>>) -> Self { let cb = Callback::from(move |msg| match msg { ServiceOutput::Store(_) => {} ServiceOutput::Service(msg) => match msg { ServiceResponse::State(state) => on_state.emit(state), }, }); Self { bridge: Rc::new(RefCell::new(ServiceBridge::new(cb))), } } } impl<STORE: Store> Dispatcher for Dispatch<STORE> { type Store = STORE; fn bridge(&self) -> Rc<RefCell<ServiceBridge<Self::Store>>> { Rc::clone(&self.bridge) } } impl<STORE: Store, SCOPE: 'static> Clone for Dispatch<STORE, SCOPE> { fn clone(&self) -> Self { Self { bridge: self.bridge.clone(), } } } impl<STORE: Store, SCOPE: 'static> PartialEq for Dispatch<STORE, SCOPE> { fn eq(&self, other: &Self) -> bool { Rc::ptr_eq(&self.bridge, &other.bridge) } } impl<STORE: Store, SCOPE: 'static> std::fmt::Debug for Dispatch<STORE, SCOPE> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 
f.debug_struct("Dispatch").finish() } } impl<STORE: Store, SCOPE: 'static> Default for Dispatch<STORE, SCOPE> { fn default() -> Self { Self::new() } } #[derive(Properties)] pub struct DispatchProps<STORE: Store, SCOPE: 'static = STORE> { #[prop_or_default] pub(crate) state: RefCell<Option<Rc<Model<STORE>>>>, #[prop_or_default] pub(crate) dispatch: RefCell<Dispatch<STORE, SCOPE>>, } impl<STORE: Store, SCOPE: 'static> DispatchProps<STORE, SCOPE> { pub(crate) fn new(on_state: Callback<Rc<STORE::Model>>) -> Self { Self { state: Default::default(), dispatch: Dispatch::bridge_state(on_state).into(), } } pub fn state(&self) -> Rc<Model<STORE>> { Rc::clone( &self .state .borrow() .as_ref() .expect("State accessed prematurely. Missing WithDispatch?"), ) } } impl<STORE: Store> Dispatcher for DispatchProps<STORE> { type Store = STORE; fn bridge(&self) -> Rc<RefCell<ServiceBridge<Self::Store>>> { self.dispatch.borrow().bridge() } } impl<STORE: Store> WithDispatchProps for DispatchProps<STORE> { type Store = STORE; fn dispatch(&self) -> &DispatchProps<Self::Store> { self } } impl<STORE: Store, SCOPE: 'static> Default for DispatchProps<STORE, SCOPE> { fn default() -> Self { Self { state: Default::default(), dispatch: Default::default(), } } } impl<STORE: Store, SCOPE: 'static> Clone for DispatchProps<STORE, SCOPE> { fn clone(&self) -> Self { Self { state: self.state.clone(), dispatch: self.dispatch.clone(), } } } impl<STORE: Store, SCOPE: 'static> PartialEq for DispatchProps<STORE, SCOPE> { fn eq(&self, other: &Self) -> bool { self.dispatch == other.dispatch && self .state .borrow() .as_ref() .zip(other.state.borrow().as_ref()) .map(|(a, b)| Rc::ptr_eq(a, b)) .unwrap_or(false) } } impl<STORE: Store, SCOPE: 'static> std::fmt::Debug for DispatchProps<STORE, SCOPE> where STORE::Model: std::fmt::Debug, { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("DispatchProps") .field("state", &self.state) .finish() } } pub trait WithDispatchProps { type 
Store: Store; fn dispatch(&self) -> &DispatchProps<Self::Store>; } #[cfg(test)] pub mod tests { use crate::prelude::BasicStore; use super::*; #[test] fn dispatch_impl_debug() { #[derive(Debug)] struct Foo { dispatch: Dispatch<BasicStore<()>>, } } #[test] fn dispatch_props_impl_debug() { #[derive(Debug)] struct Foo { dispatch: DispatchProps<BasicStore<()>>, } } }
use std::rc::Rc; use std::{cell::RefCell, future::Future}; use yew::{Callback, Properties}; use crate::{ service::{ServiceBridge, ServiceOutput, ServiceRequest, ServiceResponse}, store::Store, }; type Model<T> = <T as Store>::Model; pub trait Dispatcher { type Store: Store; #[doc(hidden)] fn bridge(&self) -> Rc<RefCell<ServiceBridge<Self::Store>>>; fn send(&self, msg: impl Into<<Self::Store as Store>::Input>) { self.bridge().borrow_mut().send_store(msg.into()) } fn callback<E, M>(&self, f: impl Fn(E) -> M + 'static) -> Callback<E> where M: Into<<Self::Store as Store>::Input>, { let bridge = self.bridge(); let f = Rc::new(f); Callback::from(move |e| { let msg = f(e); bridge.borrow_mut().send_store(msg.into()) }) } fn callback_once<E, M>(&self, f: impl Fn(E) -> M + 'static) -> Callback<E> where M: Into<<Self::Store as Store>::Input>, { let bridge = self.bridge(); let f = Rc::new(f); Callback::once(move |e| { let msg = f(e); bridge.borrow_mut().send_store(msg.into()) }) } fn reduce<F, R>(&self, f: F) where F: FnOnce(&mut Model<Self::Store>) -> R + 'static, { self.bridge() .borrow_mut() .send_service(ServiceRequest::Reduce(Box::new(move |state| { f(state); }))) } fn reduce_callback<F, R, E>(&self, f: F) -> Callback<E> where F: Fn(&mut Model<Self::Store>) -> R + 'static, E: 'static, { let bridge = self.bridge(); let f = Rc::new(f); Callback::from(move |_| { bridge.borrow_mut().send_service(ServiceRequest::Reduce({ let f = f.clone(); Box::new(move |state| { f(state); }) })) }) } fn reduce_callback_with<F, R, E>(&self, f: F) -> Callback<E> where F: Fn(&mut Model<Self::Store>, E) -> R + 'static, E: 'static, { let bridge = self.bridge(); let f = Rc::new(f); Callback::from(move |e: E| { let f = f.clone(); bridge.borrow_mut().send_service(ServiceRequest::Reduce({ let f = f.clone(); Box::new(move |state| { f(state, e); }) })) }) } fn reduce_callback_once<F, R, E>(&self, f: F) -> Callback<E> where F: FnOnce(&mut Model<Self::Store>) -> R + 'static, E: 'static, { let bridge = 
self.bridge(); Callback::once(move |_| { bridge .borrow_mut() .send_service(ServiceRequest::Reduce(Box::new(move |state| { f(state); }))) }) } fn reduce_callback_once_with<F, R, E>(&self, f: F) -> Callback<E> where F: FnOnce(&mut Model<Self::Store>, E) -> R + 'static, E: 'static, { let bridge = self.bridge(); Callback::once(move |e: E| { bridge .borrow_mut() .send_service(ServiceRequest::Reduce(Box::new(move |state| { f(state, e); }))) }) } fn future<F, FU, OUT>(&self, f: F) where F: FnOnce(Self) -> FU + 'static, FU: Future<Output = OUT> + 'static, OUT: 'static, Self: Clone + 'static, { let this = self.clone(); self.bridge() .borrow_mut() .send_service(ServiceRequest::Future(Box::pin(async move { f(this).await; }))) } fn future_callback<F, FU, OUT, E>(&self, f: F) -> Callback<E> where F: Fn(Self) -> FU + 'static, FU: Future<Output = OUT>, OUT: 'static, Self: Clone + 'static, E: 'static, { let this = self.clone(); let bridge = this.bridge(); let f = Rc::new(f); Callback::from(move |_| { let this = this.clone(); bridge .borrow_mut() .send_service(ServiceRequest::Future(Box::pin({ let this = this.clone(); let f = f.clone(); async move { f(this).await; } }))) }) } fn future_callback_with<F, FU, OUT, E>(&self, f: F) -> Callback<E> where F: Fn(Self, E) -> FU + 'static, FU: Future<Output = OUT>, OUT: 'static, Self: Clone + 'static, E: 'static, { let this = self.clone(); let bridge = this.bridge(); let f = Rc::new(f); Callback::from(move |e| { let this = this.clone(); bridge .borrow_mut() .send_service(ServiceRequest::Future(Box::pin({ let this = this.clone(); let f = f.clone(); async move { f(this, e).await; } }))) }) } fn future_callback_once<F, FU, OUT, E>(&self, f: F) -> Callback<E> where F: FnOnce(Self) -> FU + 'static, FU: Future<Output = OUT>, OUT: 'static, Self: Clone + 'static, E: 'static, { let this = self.clone(); let bridge = this.bridge().clone(); Callback::once(move |_| { bridge .borrow_mut() .send_service(ServiceRequest::Future(Box::pin({ async move { 
f(this).await; } }))) }) } fn future_callback_once_with<F, FU, OUT, E>(&self, f: F) -> Callback<E> where F: FnOnce(Self, E) -> FU + 'static, FU: Future<Output = OUT>, OUT: 'static, Self: Clone + 'static, E: 'static, { let this = self.clone(); let bridge = this.bridge().clone(); Callback::once(move |e| { bridge .borrow_mut() .send_service(ServiceRequest::Future(Box::pin({ async move { f(this, e).await; } }))) }) } } pub struct Dispatch<STORE: Store, SCOPE: 'static = STORE> { pub(crate) bridge: Rc<RefCell<ServiceBridge<STORE, SCOPE>>>, } impl<STORE: Store, SCOPE: 'static> Dispatch<STORE, SCOPE> { pub fn new() -> Self { Self { bridge: Rc::new(RefCell::new(ServiceBridge::new(Callback::noop()))), } } pub fn bridge( on_state: Callback<Rc<STORE::Model>>, on_output: Callback<STORE::Output>, ) -> Self { let cb = Callback::from(move |msg| match msg { ServiceOutput::Store(msg) => on_output.emit(msg), ServiceOutput::Service(msg) => match msg { ServiceResponse::State(state) => on_state.emit(state), }, }); Self { bridge: Rc::new(RefCell::new(ServiceBridge::new(cb))), } } pub fn bridge_state(on_state: Callback<Rc<STORE::Model>>) -> Self { let cb = Callback::from(move |msg| match msg { ServiceOutput::Store(_) => {} ServiceOutput::Service(msg) => match msg { ServiceResponse::State(state) => on_state.emit(state), }, }); Self { bridge: Rc::new(RefCell::new(ServiceBridge::new(cb))), } } } impl<STORE: Store> Dispatcher for Dispatch<STORE> { type Store = STORE; fn bridge(&self) -> Rc<RefCell<ServiceBridge<Self::Store>>> { Rc::clone(&self.bridge) } } impl<STORE: Store, SCOPE: 'static> Clone for Dispatch<STORE, SCOPE> { fn clone(&self) -> Self { Self { bridge: self.bridge.clone(), } } } impl<STORE: Store, SCOPE: 'static> PartialEq for Dispatch<STORE, SCOPE> { fn eq(&self, other: &Self) -> bool { Rc::ptr_eq(&self.bridge, &other.bridge) } } impl<STORE: Store, SCOPE: 'static> std::fmt::Debug for Dispatch<STORE, SCOPE> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 
f.debug_struct("Dispatch").finish() } } impl<STORE: Store, SCOPE: 'static> Default for Dispatch<STORE, SCOPE> { fn default() -> Self { Self::new() } } #[derive(Properties)] pub struct DispatchProps<STORE: Store, SCOPE: 'static = STORE> { #[prop_or_default] pub(crate) state: RefCell<Option<Rc<Model<STORE>>>>, #[prop_or_default] pub(crate) dispatch: RefCell<Dispatch<STORE, SCOPE>>, } impl<STORE: Store, SCOPE: 'static> DispatchProps<STORE, SCOPE> { pub(crate) fn new(on_state: Callback<Rc<STORE::Model>>) -> Self { Self { state: Default::default(), dispatch: Dispatch::bridge_state(on_state).into(), } } pub fn state(&self) -> Rc<Model<STORE>> { Rc::clone( &self .state .borrow() .as_ref() .expect("State accessed prematurely. Missing WithDispatch?"), ) } } impl<STORE: Store> Dispatcher for DispatchProps<STORE> { type Store = STORE; fn bridge(&self) -> Rc<RefCell<ServiceBridge<Self::Store>>> { self.dispatch.borrow().bridge() } } impl<STORE: Store> WithDispatchProps for DispatchProps<STORE> { type Store = STORE; fn dispatch(&self) -> &DispatchProps<Self::Store> { self } } impl<STORE: Store, SCOPE: 'static> Default for DispatchProps<STORE, SCOPE> { fn default() -> Self { Self { state: Default::default(), dispatch: Default::default(), } } } impl<STORE: Store, SCOPE: 'static> Clone for DispatchProps<STORE, SCOPE> {
} impl<STORE: Store, SCOPE: 'static> PartialEq for DispatchProps<STORE, SCOPE> { fn eq(&self, other: &Self) -> bool { self.dispatch == other.dispatch && self .state .borrow() .as_ref() .zip(other.state.borrow().as_ref()) .map(|(a, b)| Rc::ptr_eq(a, b)) .unwrap_or(false) } } impl<STORE: Store, SCOPE: 'static> std::fmt::Debug for DispatchProps<STORE, SCOPE> where STORE::Model: std::fmt::Debug, { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("DispatchProps") .field("state", &self.state) .finish() } } pub trait WithDispatchProps { type Store: Store; fn dispatch(&self) -> &DispatchProps<Self::Store>; } #[cfg(test)] pub mod tests { use crate::prelude::BasicStore; use super::*; #[test] fn dispatch_impl_debug() { #[derive(Debug)] struct Foo { dispatch: Dispatch<BasicStore<()>>, } } #[test] fn dispatch_props_impl_debug() { #[derive(Debug)] struct Foo { dispatch: DispatchProps<BasicStore<()>>, } } }
fn clone(&self) -> Self { Self { state: self.state.clone(), dispatch: self.dispatch.clone(), } }
function_block-full_function
[ { "content": "/// This hook allows getting a [`Dispatch`] to the store.\n\n///\n\n/// Do not use the `state` method on the [`Dispatch`]. The dispatch should only be used to create callbacks.\n\n/// The proper way to access the state is via the [`use_store`] hook.\n\n///\n\n/// # Example\n\n/// ```ignore\n\n/// ...
Rust
src/generation/city/landmass_shape.rs
ScottyThePilot/glt-mc-generator
1666b9ca0035030cec34dc8a02b2a249c10870d1
use std::collections::VecDeque; use glam::{DVec2, IVec2, Vec2}; use grid::SparseGrid; use noise::{Fbm, MultiFractal, NoiseFn, Perlin}; use rand::Rng; use crate::utility::{cardinal4, cardinal8}; const MIN_BUILDING_SIZE: u32 = 5; const MAX_BUILDING_SIZE: u32 = 9; const MIN_BUILDING_HEIGHT: u32 = 2; const MAX_BUILDING_HEIGHT: u32 = 8; const PILLAR_EDGE_DISTANCE: usize = 12; const PILLAR_SPACING: usize = 32; #[derive(Debug, Clone)] pub struct LandmassShape { grid: SparseGrid<LandmassCell> } impl LandmassShape { pub fn generate_new(seed: u32, size: f64) -> Self { let grid = generate_landmass_shape(seed, size); LandmassShape { grid } } pub fn generate_pillar_points(&self) -> Vec<IVec2> { generate_mount_points(&self.grid, PILLAR_EDGE_DISTANCE, PILLAR_SPACING) } pub fn generate_building_shapes<R: Rng>(&self, rng: &mut R) -> Vec<BuildingShape> { generate_building_shapes(rng, &self.grid) } #[inline] pub fn sample(&self, pos: IVec2) -> Option<LandmassCell> { self.grid.get(pos).copied() } #[inline] pub fn is_edge_at(&self, pos: IVec2) -> bool { self.grid.get(pos).map_or(false, |cell| cell.edge) } #[inline] pub fn min(&self) -> IVec2 { self.grid.min().expect("unreachable") } #[inline] pub fn max(&self) -> IVec2 { self.grid.max().expect("unreachable") } } #[derive(Debug, Clone, Copy)] pub struct LandmassCell { pub ordering: usize, pub edge_distance: usize, pub edge: bool } impl LandmassCell { fn new(ordering: usize, edge_distance: usize, edge: bool) -> Self { LandmassCell { ordering, edge_distance, edge } } } const DISTANCE_POWER: i32 = 4; const MAX_ORDERING: f32 = u32::MAX as f32; fn generate_landmass_shape(seed: u32, size: f64) -> SparseGrid<LandmassCell> { assert!(size >= 1.0, "landmass size may not be less than 1"); discover(landmass_generator(seed, size, 128.0)) } fn discover(noise: impl NoiseFn<f64, 2>) -> SparseGrid<LandmassCell> { use std::f32::consts::{PI, TAU}; #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum Value { Present, Boundary, BoundaryFinal { index: usize } } 
#[inline] fn boundary_at(grid: &SparseGrid<Value>, pos: IVec2) -> bool { matches!(grid.get(pos), Some(&Value::Boundary)) } let grid = { let mut q = VecDeque::from([IVec2::ZERO]); let mut grid: SparseGrid<Value> = SparseGrid::new(); while let Some(pos) = q.pop_front() { let value = noise.get(pos.as_dvec2()); if value > 0.0 { grid.put(pos, Value::Present); for candidate in cardinal4(pos) { if !grid.contains(candidate) && !q.contains(&candidate) { q.push_back(candidate); }; }; } else { grid.put(pos, Value::Boundary); }; }; grid }; let (all_edges, outer_edge_root) = { let mut all_edges: Vec<IVec2> = Vec::new(); let outer_edge_root = grid.cells() .filter_map(|(pos, value)| match *value { Value::Present => None, Value::Boundary => Some(pos), Value::BoundaryFinal { .. } => unreachable!() }) .inspect(|&pos| all_edges.push(pos)) .max_by_key(|&pos| pos.abs().max_element()) .expect("unreachable"); (all_edges, outer_edge_root) }; let grid = { let mut grid = grid; let mut index = 0; let mut q = VecDeque::from([outer_edge_root]); while let Some(pos) = q.pop_front() { grid.put(pos, Value::BoundaryFinal { index }); for candidate in cardinal8(pos) { if boundary_at(&grid, candidate) && !q.contains(&candidate) { q.push_back(candidate); if index == 0 { break }; }; }; index += 1; }; grid }; let (grid, outer_edges) = { let mut grid = grid; let (q, outer_edges) = all_edges.into_iter() .partition::<Vec<IVec2>, _>(|&pos| boundary_at(&grid, pos)); let mut q = VecDeque::from(q); while let Some(pos) = q.pop_front() { grid.put(pos, Value::Present); for candidate in cardinal4(pos) { if grid.get(candidate) == None && !q.contains(&candidate) { q.push_back(candidate); }; }; }; let len = outer_edges.len(); let outer_edges: Vec<_> = outer_edges.into_iter() .map(|outer_edge| { let index = match grid[outer_edge] { Value::BoundaryFinal { index } => index, _ => unreachable!() }; let a = (index as f32 / len as f32) * TAU; (outer_edge, Vec2::new(a.cos(), a.sin())) }) .collect(); (grid, outer_edges) }; fn 
get_ordering_and_dist(outer_edges: &[(IVec2, Vec2)], pos: IVec2) -> (usize, usize) { const INIT: (Vec2, Option<f32>) = (Vec2::ZERO, None); let (totaled_vector, dist) = outer_edges.into_iter() .fold(INIT, |(acc_vector, acc_dist), &(outer_edge, vector)| { let dist = outer_edge.as_vec2().distance(pos.as_vec2()); let acc_vector = acc_vector + vector * dist.powi(-DISTANCE_POWER); let acc_dist = acc_dist.map_or(dist, |m| m.min(dist)); (acc_vector, Some(acc_dist)) }); let a = f32::atan2(-totaled_vector.y, -totaled_vector.x); let ordering = ((a + PI) / TAU * MAX_ORDERING).floor() as usize; (ordering, dist.expect("unreachable").floor() as usize) } #[inline] fn get_ordering_from_index(index: usize, len: usize) -> usize { (index as f32 / len as f32 * MAX_ORDERING).floor() as usize } grid .cells() .map(|(pos, value)| { (pos, match *value { Value::Present => { let (ordering, distance) = get_ordering_and_dist(&outer_edges, pos); LandmassCell::new(ordering, distance, false) }, Value::BoundaryFinal { index } => LandmassCell::new(get_ordering_from_index(index, outer_edges.len()), 0, true), Value::Boundary => unreachable!() }) }) .collect() } fn generate_mount_points(grid: &SparseGrid<LandmassCell>, distance: usize, spacing: usize) -> Vec<IVec2> { let mut points = grid.cells() .filter(|&(_, value)| value.edge_distance == distance) .map(|(pos, value)| (pos, value.ordering)) .collect::<Vec<(IVec2, usize)>>(); let mount_point_count = points.len() / spacing; let adjusted_spacing = points.len() as f32 / mount_point_count as f32; points.sort_unstable_by_key(|&(_, ordering)| ordering); points.into_iter().enumerate() .filter_map(|(i, (pos, _))| { let i = (i as f32 % adjusted_spacing).floor() as usize; (i == 0).then(|| pos) }) .collect() } fn landmass_generator(seed: u32, size: f64, resolution: f64) -> impl NoiseFn<f64, 2> { Fbm::<Perlin>::new(seed) .set_octaves(8) .set_persistence(0.25) .multiply_constant(0.5) .scale_point_by(2.0) .add(OriginDistance::new(size)) 
.scale_point_by(resolution.recip()) } struct OriginDistance { offset: f64 } impl OriginDistance { pub fn new(offset: f64) -> Self { OriginDistance { offset } } } impl NoiseFn<f64, 2> for OriginDistance { fn get(&self, point: impl Into<[f64; 2]>) -> f64 { let dist = DVec2::from(point.into()).length(); (self.offset - dist).clamp(-1.0, 1.0) } } #[inline] pub(super) fn random_building_height<R: Rng>(rng: &mut R) -> u32 { rng.gen_range(MIN_BUILDING_HEIGHT..MAX_BUILDING_HEIGHT) } #[derive(Debug, Clone)] pub struct BuildingShape { pub(super) edge_min: IVec2, pub(super) edge_max: IVec2 } fn generate_building_shapes<R: Rng>(rng: &mut R, grid: &SparseGrid<LandmassCell>) -> Vec<BuildingShape> { fn generate_next_building<R: Rng>(rng: &mut R, grid: &SparseGrid<Value>) -> Option<BuildingShape> { let size_x = rng.gen_range(MIN_BUILDING_SIZE..MAX_BUILDING_SIZE); let size_y = rng.gen_range(MIN_BUILDING_SIZE..MAX_BUILDING_SIZE); let size = IVec2::new(size_x as i32, size_y as i32); grid.cells() .filter_map(|(pos, _)| { let building = BuildingShape { edge_min: pos, edge_max: pos + size - IVec2::ONE }; get_neighbor_count_if_vacant(grid, &building) .map(|n| (n, building)) }) .max_by_key(|e| e.0) .map(|(_, building)| building) } fn put_building_in_vacancy(grid: &mut SparseGrid<Value>, building: &BuildingShape, i: usize) { for x in building.edge_min.x..=building.edge_max.x { for y in building.edge_min.y..=building.edge_max.y { if let Some(value @ &mut Value::Vacant) = grid.get_mut(IVec2::new(x, y)) { *value = Value::Occupied(i); } else { panic!("`put_building_in_vacancy` requires that all positions be vacant"); }; }; }; } fn get_neighbor_count_if_vacant(grid: &SparseGrid<Value>, building: &BuildingShape) -> Option<usize> { let min = building.edge_min - IVec2::ONE; let max = building.edge_max + IVec2::ONE; let mut neighbor_count = 0; for x in min.x..=max.x { let is_edge_x = x == min.x || x == max.x; for y in min.y..=max.y { let is_edge_y = y == min.y || y == max.y; match (is_edge_x || 
is_edge_y, grid.get(IVec2::new(x, y))) { (true, Some(&Value::Occupied(_))) => neighbor_count += 1, (false, None | Some(&Value::Occupied(_))) => return None, (true, None) => return None, _ => () }; }; }; Some(neighbor_count) } #[derive(Debug, Copy, Clone, PartialEq, Eq)] enum Value { Vacant, Occupied(usize) } let mut grid = grid.cells() .filter_map(|(pos, _)| match ivec2_rem_euclid_2(pos) { true => Some((pos / 2, Value::Vacant)), false => None }) .collect::<SparseGrid<Value>>(); let mut i = 0; let mut buildings = Vec::new(); while let Some(building) = generate_next_building(rng, &grid) { put_building_in_vacancy(&mut grid, &building, i); buildings.push(building); i += 1; }; buildings } #[inline] fn ivec2_rem_euclid_2(s: IVec2) -> bool { s.x.rem_euclid(2) == 0 && s.y.rem_euclid(2) == 0 }
use std::collections::VecDeque; use glam::{DVec2, IVec2, Vec2}; use grid::SparseGrid; use noise::{Fbm, MultiFractal, NoiseFn, Perlin}; use rand::Rng; use crate::utility::{cardinal4, cardinal8}; const MIN_BUILDING_SIZE: u32 = 5; const MAX_BUILDING_SIZE: u32 = 9; const MIN_BUILDING_HEIGHT: u32 = 2; const MAX_BUILDING_HEIGHT: u32 = 8; const PILLAR_EDGE_DISTANCE: usize = 12; const PILLAR_SPACING: usize = 32; #[derive(Debug, Clone)] pub struct LandmassShape { grid: SparseGrid<LandmassCell> } impl LandmassShape { pub fn generate_new(seed: u32, size: f64) -> Self { let grid = generate_landmass_shape(seed, size); LandmassShape { grid } } pub fn generate_pillar_points(&self) -> Vec<IVec2> { generate_mount_points(&self.grid, PILLAR_EDGE_DISTANCE, PILLAR_SPACING) } pub fn generate_building_shapes<R: Rng>(&self, rng: &mut R) -> Vec<BuildingShape> { generate_building_shapes(rng, &self.grid) } #[inline] pub fn sample(&self, pos: IVec2) -> Option<LandmassCell> { self.grid.get(pos).copied() } #[inline] pub fn is_edge_at(&self, pos: IVec2) -> bool { self.grid.get(pos).map_or(false, |cell| cell.edge) } #[inline] pub fn min(&self) -> IVec2 { self.grid.min().expect("unreachable") } #[inline] pub fn max(&self) -> IVec2 { self.grid.max().expect("unreachable") } } #[derive(Debug, Clone, Copy)] pub struct LandmassCell { pub ordering: usize, pub edge_distance: usize, pub edge: bool } impl LandmassCell { fn new(ordering: usize, edge_distance: usize, edge: bool) -> Self { LandmassCell { ordering, edge_distance, edge } } } const DISTANCE_POWER: i32 = 4; const MAX_ORDERING: f32 = u32::MAX as f32; fn generate_landmass_shape(seed: u32, size: f64) -> SparseGrid<LandmassCell> { assert!(size >= 1.0, "landmass size may not be less than 1"); discover(landmass_generator(seed, size, 128.0)) } fn discover(noise: impl NoiseFn<f64, 2>) -> SparseGrid<LandmassCell> { use std::f32::consts::{PI, TAU}; #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum Value { Present, Boundary, BoundaryFinal { index: usize } } 
#[inline] fn boundary_at(grid: &SparseGrid<Value>, pos: IVec2) -> bool { matches!(grid.get(pos), Some(&Value::Boundary)) } let grid = { let mut q = VecDeque::from([IVec2::ZERO]); let mut grid: SparseGrid<Value> = SparseGrid::new(); while let Some(pos) = q.pop_front() { let value = noise.get(pos.as_dvec2()); if value > 0.0 { grid.put(pos, Value::Present); for candidate in cardinal4(pos) { if !grid.contains(candidate) && !q.contains(&candidate) { q.push_back(candidate); }; }; } else { grid.put(pos, Value::Boundary); }; }; grid }; let (all_edges, outer_edge_root) = { let mut all_edges: Vec<IVec2> = Vec::new(); let outer_edge_root = grid.cells() .filter_map(|(pos, value)| match *value { Value::Present => None, Value::Boundary => Some(pos), Value::BoundaryFinal { .. } => unreachable!() }) .inspect(|&pos| all_edges.push(pos)) .max_by_key(|&pos| pos.abs().max_element()) .expect("unreachable"); (all_edges, outer_edge_root) }; let grid = { let mut grid = grid; let mut index = 0; let mut q = VecDeque::from([outer_edge_root]); while let Some(pos) = q.pop_front() { grid.put(pos, Value::BoundaryFinal { index }); for candidate in cardinal8(pos) { if boundary_at(&grid, candidate) && !q.contains(&candidate) { q.push_back(candidate); if index == 0 { break }; }; }; index += 1; }; grid }; let (grid, outer_edges) = { let mut grid = grid; let (q, outer_edges) = all_edges.into_iter() .partition::<Vec<IVec2>, _>(|&pos| boundary_at(&grid, pos)); let mut q = VecDeque::from(q); while let Some(pos) = q.pop_front() { grid.put(pos, Value::Present); for candidate in cardinal4(pos) { if grid.get(candidate) == None && !q.contains(&candidate) { q.push_back(candidate); }; }; }; let len = outer_edges.len(); let outer_edges: Vec<_> = outer_edges.into_iter() .map(|outer_edge| { let index = match grid[outer_edge] { Value::BoundaryFinal { index } => index, _ => unreachable!() }; let a = (index as f32 / len as f32) * TAU; (outer_edge, Vec2::new(a.cos(), a.sin())) }) .collect(); (grid, outer_edges) }; fn 
get_ordering_and_dist(outer_edges: &[(IVec2, Vec2)], pos: IVec2) -> (usize, usize) { const INIT: (Vec2, Option<f32>) = (Vec2::ZERO, None); let (totaled_vector, dist) = outer_edges.into_iter() .fold(INIT, |(acc_vector, acc_dist), &(outer_edge, vector)| { let dist = outer_edge.as_vec2().distance(pos.as_vec2()); let acc_vector = acc_vector + vector * dist.powi(-DISTANCE_POWER); let acc_dist = acc_dist.map_or(dist, |m| m.min(dist)); (acc_vector, Some(acc_dist)) }); let a = f32::atan2(-totaled_vector.y, -totaled_vector.x); let ordering = ((a + PI) / TAU * MAX_ORDERING).floor() as usize; (ordering, dist.expect("unreachable").floor() as usize) } #[inline] fn get_ordering_from_index(index: usize, len: usize) -> usize { (index as f32 / len as f32 * MAX_ORDERING).floor() as usize } grid .cells() .map(|(pos, value)| { (pos, match *value { Value::Present => { let (ordering, distance) = get_ordering_and_dist(&outer_edges, pos); LandmassCell::new(ordering, distance, false) }, Value::BoundaryFinal { index } => LandmassCell::new(get_ordering_from_index(index, outer_edges.len()), 0, true), Value::Boundary => unreachable!() }) }) .collect() } fn generate_mount_points(grid: &SparseGrid<LandmassCell>, distance: usize, spacing: usize) -> Vec<IVec2> { let mut points = grid.cells() .filter(|&(_, value)| value.edge_distance == distance) .map(|(pos, value)| (pos, value.ordering)) .collect::<Vec<(IVec2, usize)>>(); let mount_point_count = points.len() / spacing; let adjusted_spacing = points.len() as f32 / mount_point_count as f32; points.sort_unstable_by_key(|&(_, ordering)| ordering); points.into_iter().enumerate() .filter_map(|(i, (pos, _))| { let i = (i as f32 % adjusted_spacing).floor() as usize; (i == 0).then(|| pos) }) .collect() } fn landmass_generator(seed: u32, size: f64, resolution: f64) -> impl NoiseFn<f64, 2> { Fbm::<Perlin>::new(seed) .set_octaves(8) .set_persistence(0.25) .multiply_constant(0.5) .scale_point_by(2.0) .add(OriginDistance::new(size)) 
.scale_point_by(resolution.recip()) } struct OriginDistance { offset: f64 } impl OriginDistance { pub fn new(offset: f64) -> Self { OriginDistance { offset } } } impl NoiseFn<f64, 2> for OriginDistance { fn get(&self, point: impl Into<[f64; 2]>) -> f64 { let dist = DVec2::from(point.into()).length(); (self.offset - dist).clamp(-1.0, 1.0) } } #[inline] pub(super) fn random_building_height<R: Rng>(rng: &mut R) -> u32 { rng.gen_range(MIN_BUILDING_HEIGHT..MAX_BUILDING_HEIGHT) } #[derive(Debug, Clone)] pub struct BuildingShape { pub(super) edge_min: IVec2, pub(super) edge_max: IVec2 } fn generate_building_shapes<R: Rng>(rng: &mut R, grid: &SparseGrid<LandmassCell>) -> Vec<BuildingShape> { fn generate_next_building<R: Rng>(rng: &mut R, grid: &SparseGrid<Value>) -> Option<BuildingShape> { let size_x = rng.gen_range(MIN_BUILDING_SIZE..MAX_BUILDING_SIZE); let size_y = rng.gen_range(MIN_BUILDING_SIZE..MAX_BUILDING_SIZE); let size = IVec2::new(size_x as i32, size_y as i32); grid.cells() .filter_map(|(pos, _)| { let building = BuildingShape { edge_min: pos, edge_max: pos + size - IVec2::ONE }; get_neighbor_count_if_vacant(grid, &building) .map(|n| (n, building)) }) .max_by_key(|e| e.0) .map(|(_, building)| building) } fn put_building_in_vacancy(grid: &mut SparseGrid<Value>, building: &BuildingShape, i: usize) { for x in building.edge_min.x..=building.edge_max.x { for y in building.edge_min.
fn get_neighbor_count_if_vacant(grid: &SparseGrid<Value>, building: &BuildingShape) -> Option<usize> { let min = building.edge_min - IVec2::ONE; let max = building.edge_max + IVec2::ONE; let mut neighbor_count = 0; for x in min.x..=max.x { let is_edge_x = x == min.x || x == max.x; for y in min.y..=max.y { let is_edge_y = y == min.y || y == max.y; match (is_edge_x || is_edge_y, grid.get(IVec2::new(x, y))) { (true, Some(&Value::Occupied(_))) => neighbor_count += 1, (false, None | Some(&Value::Occupied(_))) => return None, (true, None) => return None, _ => () }; }; }; Some(neighbor_count) } #[derive(Debug, Copy, Clone, PartialEq, Eq)] enum Value { Vacant, Occupied(usize) } let mut grid = grid.cells() .filter_map(|(pos, _)| match ivec2_rem_euclid_2(pos) { true => Some((pos / 2, Value::Vacant)), false => None }) .collect::<SparseGrid<Value>>(); let mut i = 0; let mut buildings = Vec::new(); while let Some(building) = generate_next_building(rng, &grid) { put_building_in_vacancy(&mut grid, &building, i); buildings.push(building); i += 1; }; buildings } #[inline] fn ivec2_rem_euclid_2(s: IVec2) -> bool { s.x.rem_euclid(2) == 0 && s.y.rem_euclid(2) == 0 }
y..=building.edge_max.y { if let Some(value @ &mut Value::Vacant) = grid.get_mut(IVec2::new(x, y)) { *value = Value::Occupied(i); } else { panic!("`put_building_in_vacancy` requires that all positions be vacant"); }; }; }; }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn cardinal8(center: IVec2) -> impl Iterator<Item = IVec2> {\n\n CARDINAL8.into_iter().map(move |offset| offset + center)\n\n}\n\n\n\n\n\n\n", "file_path": "src/utility.rs", "rank": 5, "score": 181594.5483579758 }, { "content": "fn sample_checkered(size: u32, ...
Rust
services/mgmt/security/src/package_preview_2021_12/models.rs
reidav/azure-sdk-for-rust
3a6695ea0c81f326db78bae0b22a5785fbbd8c75
#![doc = "generated by AutoRust"] #![allow(non_camel_case_types)] #![allow(unused_imports)] use serde::{Deserialize, Serialize}; #[doc = "Describes an Azure resource with location"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct AzureTrackedResourceLocation { #[doc = "Location where the resource is stored"] #[serde(default, skip_serializing_if = "Option::is_none")] pub location: Option<String>, } impl AzureTrackedResourceLocation { pub fn new() -> Self { Self::default() } } #[doc = "Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.)."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct CloudError { #[doc = "The error detail."] #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<CloudErrorBody>, } impl CloudError { pub fn new() -> Self { Self::default() } } #[doc = "The error detail."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct CloudErrorBody { #[doc = "The error code."] #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>, #[doc = "The error message."] #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>, #[doc = "The error target."] #[serde(default, skip_serializing_if = "Option::is_none")] pub target: Option<String>, #[doc = "The error details."] #[serde(default, skip_serializing_if = "Vec::is_empty")] pub details: Vec<CloudErrorBody>, #[doc = "The error additional info."] #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")] pub additional_info: Vec<ErrorAdditionalInfo>, } impl CloudErrorBody { pub fn new() -> Self { Self::default() } } #[doc = "Entity tag is used for comparing two or more entities from the same requested resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct ETag { #[doc = "Entity tag is used 
for comparing two or more entities from the same requested resource."] #[serde(default, skip_serializing_if = "Option::is_none")] pub etag: Option<String>, } impl ETag { pub fn new() -> Self { Self::default() } } #[doc = "The resource management error additional info."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct ErrorAdditionalInfo { #[doc = "The additional info type."] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, #[doc = "The additional info."] #[serde(default, skip_serializing_if = "Option::is_none")] pub info: Option<serde_json::Value>, } impl ErrorAdditionalInfo { pub fn new() -> Self { Self::default() } } #[doc = "Describes an Azure resource with kind"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Kind { #[doc = "Kind of the resource"] #[serde(default, skip_serializing_if = "Option::is_none")] pub kind: Option<String>, } impl Kind { pub fn new() -> Self { Self::default() } } #[doc = "Describes an Azure resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Resource { #[doc = "Resource Id"] #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[doc = "Resource name"] #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[doc = "Resource type"] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } impl Resource { pub fn new() -> Self { Self::default() } } #[doc = "Security Scanner resource"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Scanner { #[serde(flatten)] pub tracked_resource: TrackedResource, #[doc = "Metadata pertaining to creation and last modification of the resource."] #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")] pub system_data: Option<SystemData>, } impl Scanner { pub fn new() -> Self { Self::default() } } 
#[doc = "Page of a Scanners list"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct ScannersList { #[doc = "Collection of Scanners in this page"] #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<Scanner>, #[doc = "The URI to fetch the next page"] #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } impl ScannersList { pub fn new() -> Self { Self::default() } } #[doc = "A list of key value pairs that describe the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Tags { #[doc = "A list of key value pairs that describe the resource."] #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<serde_json::Value>, } impl Tags { pub fn new() -> Self { Self::default() } } #[doc = "Describes an Azure tracked resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct TrackedResource { #[serde(flatten)] pub resource: Resource, #[serde(flatten)] pub azure_tracked_resource_location: AzureTrackedResourceLocation, #[serde(flatten)] pub kind: Kind, #[serde(flatten)] pub e_tag: ETag, #[serde(flatten)] pub tags: Tags, } impl TrackedResource { pub fn new() -> Self { Self::default() } } #[doc = "Metadata pertaining to creation and last modification of the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct SystemData { #[doc = "The identity that created the resource."] #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")] pub created_by: Option<String>, #[doc = "The type of identity that created the resource."] #[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")] pub created_by_type: Option<system_data::CreatedByType>, #[doc = "The timestamp of resource creation (UTC)."] #[serde(rename = "createdAt", default, skip_serializing_if = "Option::is_none")] pub created_at: Option<String>, #[doc 
= "The identity that last modified the resource."] #[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")] pub last_modified_by: Option<String>, #[doc = "The type of identity that last modified the resource."] #[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")] pub last_modified_by_type: Option<system_data::LastModifiedByType>, #[doc = "The timestamp of resource last modification (UTC)"] #[serde(rename = "lastModifiedAt", default, skip_serializing_if = "Option::is_none")] pub last_modified_at: Option<String>, } impl SystemData { pub fn new() -> Self { Self::default() } } pub mod system_data { use super::*; #[doc = "The type of identity that created the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum CreatedByType { User, Application, ManagedIdentity, Key, } #[doc = "The type of identity that last modified the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum LastModifiedByType { User, Application, ManagedIdentity, Key, } }
#![doc = "generated by AutoRust"] #![allow(non_camel_case_types)] #![allow(unused_imports)] use serde::{Deserialize, Serialize}; #[doc = "Describes an Azure resource with location"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct AzureTrackedResourceLocation { #[doc = "Location where the resource is stored"] #[serde(default, skip_serializing_if = "Option::is_none")] pub location: Option<String>, } impl AzureTrackedResourceLocation { pub fn new() -> Self { Self::default() } } #[doc = "Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.)."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct CloudError { #[doc = "The error detail."] #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<CloudErrorBody>, } impl CloudError { pub fn new() -> Self { Self::default() } } #[doc = "The error detail."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct CloudErrorBody { #[doc = "The error code."] #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>, #[doc = "The error message."] #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>, #[doc = "The error target."] #[serde(default, skip_serializing_if = "Option::is_none")] pub target: Option<String>, #[doc = "The error details."] #[serde(default, skip_serializing_if = "Vec::is_empty")] pub details: Vec<CloudErrorBody>, #[doc = "The error additional info."] #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")] pub additional_info: Vec<ErrorAdditionalInfo>, } impl CloudErrorBody { pub fn new() -> Self { Self::default() } } #[doc = "Entity tag is used for comparing two or more entities from the same requested resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct ETag { #[doc = "Entity tag is used 
for comparing two or more entities from the same requested resource."] #[serde(default, skip_serializing_if = "Option::is_none")] pub etag: Option<String>, } impl ETag { pub fn new() -> Self { Self::default() } } #[doc = "The resource management error additional info."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct ErrorAdditionalInfo { #[doc = "The additional info type."] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, #[doc = "The additional info."] #[serde(default, skip_serializing_if = "Option::is_none")] pub info: Option<serde_json::Value>, } impl ErrorAdditionalInfo { pub fn new() -> Self { Self::default() } } #[doc = "Describes an Azure resource with kind"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Kind { #[doc = "Kind of the resource"] #[serde(default, skip_serializing_if = "Option::is_none")] pub kind: Option<String>, } impl Kind { pub fn new() -> Self { Self::default() } } #[doc = "Describes an Azure resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Resource { #[doc = "Resource Id"] #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[doc = "Resource name"] #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[doc = "Resource type"] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } impl Resource { pub fn new() -> Self { Self::default() } } #[doc = "Security Scanner resource"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Scanner { #[serde(flatten)] pub tracked_resource: TrackedResource, #[doc = "Metadata pertaining to creation and last modification of the resource."] #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")] pub system_data: Option<SystemData>, } impl Scanner { pub fn new() -> Self { Self::default() } } 
#[doc = "Page of a Scanners list"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct ScannersList { #[doc = "Collection of Scanners in this page"] #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<Scanner>, #[doc = "The URI to fetch the next page"] #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } impl ScannersList { pub fn new() -> Self { Self::default() } } #[doc = "A list of key value pairs that describe the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Tags { #[doc = "A list of key value pairs that describe the resource."] #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<serde_json::Value>, } impl Tags { pub fn new() -> Self { Self::default() } } #[doc = "Describes an Azure tracked resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct TrackedResource { #[serde(flatten)] pub resource: Resource, #[serde(flatten)] pub azure_tracked_resource_location: AzureTrackedResourceLocation, #[serde(flatten)] pub kind: Kind, #[serde(flatten)] pub e_tag: ETag, #[serde(flatten)] pub tags: Tags, } impl TrackedResource { pub fn new() -> Self { Self::default() } } #[doc = "Metadata pertaining to creation and last modification of the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct SystemData { #[doc = "The identity that created the resource."] #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")] pub created_by: Option<String>, #[doc = "The type of identity that created the resource."] #[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")] pub created_by_type: Option<system_data::CreatedByType>, #[doc = "The timestamp of resource creation (UTC)."] #[se
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum LastModifiedByType { User, Application, ManagedIdentity, Key, } }
rde(rename = "createdAt", default, skip_serializing_if = "Option::is_none")] pub created_at: Option<String>, #[doc = "The identity that last modified the resource."] #[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")] pub last_modified_by: Option<String>, #[doc = "The type of identity that last modified the resource."] #[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")] pub last_modified_by_type: Option<system_data::LastModifiedByType>, #[doc = "The timestamp of resource last modification (UTC)"] #[serde(rename = "lastModifiedAt", default, skip_serializing_if = "Option::is_none")] pub last_modified_at: Option<String>, } impl SystemData { pub fn new() -> Self { Self::default() } } pub mod system_data { use super::*; #[doc = "The type of identity that created the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum CreatedByType { User, Application, ManagedIdentity, Key, } #[doc = "The type of identity that last modified the resource."]
random
[]
Rust
crates/core/src/utils/string_utils.rs
bartlomieju/dprint
da162e4a63d981d663834019cd61dee863d5540f
pub fn get_line_number_of_pos(text: &str, pos: usize) -> usize { let text_bytes = text.as_bytes(); let mut line_count = 1; for i in 0..pos { if text_bytes.get(i) == Some(&('\n' as u8)) { line_count += 1; } } line_count } pub fn get_column_number_of_pos(text: &str, pos: usize) -> usize { let line_start_byte_pos = get_line_start_byte_pos(text, pos); return text[line_start_byte_pos..pos].chars().count() + 1; } fn get_line_start_byte_pos(text: &str, pos: usize) -> usize { let text_bytes = text.as_bytes(); for i in (0..pos).rev() { if text_bytes.get(i) == Some(&('\n' as u8)) { return i + 1; } } 0 } pub fn format_diagnostic(range: Option<(usize, usize)>, message: &str, file_text: &str) -> String { let mut result = String::new(); if let Some((error_start, _)) = range { let line_number = get_line_number_of_pos(file_text, error_start); let column_number = get_column_number_of_pos(file_text, error_start); result.push_str(&format!("Line {}, column {}: ", line_number, column_number)) } result.push_str(message); if let Some(range) = range { result.push_str("\n\n"); let code = get_range_text_highlight(file_text, range) .lines().map(|l| format!(" {}", l)) .collect::<Vec<_>>() .join("\n"); result.push_str(&code); } return result; } fn get_range_text_highlight(file_text: &str, range: (usize, usize)) -> String { let ((text_start, text_end), (error_start, error_end)) = get_text_and_error_range(range, file_text); let sub_text = &file_text[text_start..text_end]; let mut result = String::new(); let lines = sub_text.lines().collect::<Vec<_>>(); let line_count = lines.len(); for (i, line) in lines.iter().enumerate() { let is_last_line = i == line_count - 1; if i > 2 && !is_last_line { continue; } if i > 0 { result.push_str("\n"); } if i == 2 && !is_last_line { result.push_str("..."); continue; } result.push_str(line); result.push_str("\n"); let start_pos = if i == 0 { error_start } else { 0 }; let end_pos = if is_last_line { get_column_number_of_pos(sub_text, error_end) - 1 } else { 
line.chars().count() }; result.push_str(&" ".repeat(start_pos)); result.push_str(&"~".repeat(end_pos - start_pos)); } return result; fn get_text_and_error_range(range: (usize, usize), file_text: &str) -> ((usize, usize), (usize, usize)) { let (start, end) = range; let start_column_number_byte_count = start - get_line_start_byte_pos(file_text, start); let line_end = get_line_end(file_text, end); let text_start = start - std::cmp::min(20, start_column_number_byte_count); let text_end = std::cmp::min(line_end, end + 10); let error_start = start - text_start; let error_end = error_start + (end - start); ((text_start, text_end), (error_start, error_end)) } fn get_line_end(text: &str, pos: usize) -> usize { let mut pos = pos; for c in text.chars().skip(pos) { if c == '\n' { break; } pos += 1; } pos } } #[cfg(test)] mod tests { use super::*; #[test] fn should_get_line_number_of_single_line() { assert_eq!(get_line_number_of_pos("testing", 3), 1); } #[test] fn should_get_last_line_when_above_length() { assert_eq!(get_line_number_of_pos("t\nt", 50), 2); } #[test] fn should_get_line_when_at_first_pos_on_line() { assert_eq!(get_line_number_of_pos("t\ntest\nt", 2), 2); } #[test] fn should_get_line_when_at_last_pos_on_line() { assert_eq!(get_line_number_of_pos("t\ntest\nt", 6), 2); } #[test] fn should_get_column_for_first_line() { assert_eq!(get_column_number_of_pos("testing\nthis", 3), 4); } #[test] fn should_get_column_for_second_line() { assert_eq!(get_column_number_of_pos("test\nthis", 6), 2); } #[test] fn should_get_column_for_start_of_text() { assert_eq!(get_column_number_of_pos("test\nthis", 0), 1); } #[test] fn should_get_column_for_start_of_line() { assert_eq!(get_column_number_of_pos("test\nthis", 5), 1); } #[test] fn should_get_range_highlight_for_full_text_one_line() { let message = get_range_text_highlight("testtinga", (0, 9)); assert_eq!( message, concat!( "testtinga\n", "~~~~~~~~~" ) ); } #[test] fn should_get_range_highlight_for_full_text_multi_lines() { let 
message = get_range_text_highlight("test\nt\naa", (0, 9)); assert_eq!( message, concat!( "test\n", "~~~~\n", "t\n", "~\n", "aa\n", "~~" ) ); } #[test] fn should_get_range_highlight_on_one_line() { let message = get_range_text_highlight("testtinga testing test", (10, 17)); assert_eq!( message, concat!( "testtinga testing test\n", " ~~~~~~~" ) ); } #[test] fn should_get_range_highlight_on_second_line() { let message = get_range_text_highlight("test\ntest\ntest", (5, 9)); assert_eq!( message, concat!( "test\n", "~~~~" ) ); } #[test] fn should_get_range_highlight_on_multi_lines_within() { let message = get_range_text_highlight("test\ntest test\ntest test\nasdf", (10, 19)); assert_eq!( message, concat!( "test test\n", " ~~~~\n", "test test\n", "~~~~" ) ); } #[test] fn should_display_when_there_are_three_lines() { let message = get_range_text_highlight("test\nasdf\n1234\ntest\nasdf\n1234\ntest\n", (5, 19)); assert_eq!( message, concat!( "asdf\n", "~~~~\n", "1234\n", "~~~~\n", "test\n", "~~~~" ) ); } #[test] fn should_ignore_when_there_are_more_than_three_lines() { let message = get_range_text_highlight("test\nasdf\n1234\ntest\nasdf\n1234\ntest\n", (5, 24)); assert_eq!( message, concat!( "asdf\n", "~~~~\n", "1234\n", "~~~~\n", "...\n", "asdf\n", "~~~~" ) ); } #[test] fn should_show_only_twenty_chars_of_first_line() { let message = get_range_text_highlight("test asdf 1234 fdsa dsfa test", (25, 29)); assert_eq!( message, concat!( "asdf 1234 fdsa dsfa test\n", " ~~~~", ) ); } #[test] fn should_show_only_ten_chars_of_last_line() { let message = get_range_text_highlight("test asdf 1234 fdsa dsfa test", (10, 14)); assert_eq!( message, concat!( "test asdf 1234 fdsa dsfa\n", " ~~~~", ) ); } }
pub fn get_line_number_of_pos(text: &str, pos: usize) -> usize { let text_bytes = text.as_bytes(); let mut line_count = 1; for i in 0..pos { if text_bytes.get(i) == Some(&('\n' as u8)) { line_count += 1; } } line_count } pub fn get_column_number_of_pos(text: &str, pos: usize) -> usize { let line_start_byte_pos = get_line_start_byte_pos(text, pos); return text[line_start_byte_pos..pos].chars().count() + 1; } fn get_line_start_byte_pos(text: &str, pos: usize) -> usize { let text_bytes = text.as_bytes(); for i in (0..pos).rev() { if text_bytes.get(i) == Some(&('\n' as u8)) { return i + 1; } } 0 } pub fn format_diagnostic(range: Option<(usize, usize)>, message: &str, file_text: &str) -> String { let mut result = String::new(); if let Some((error_start, _)) = range { let line_number = get_line_number_of_pos(file_text, error_start); let column_number = get_column_number_of_pos(file_text, error_start); result.push_str(&format!("Line {}, column {}: ", line_number, column_number)) } result.push_str(message); if let Some(range) = range { result.push_str("\n\n"); let code = get_range_text_highlight(file_text, range) .lines().map(|l| format!(" {}", l)) .collect::<Vec<_>>() .join("\n"); result.push_str(&code); } return result; } fn get_range_text_highlight(file_text: &str, range: (usize, usize)) -> String { let ((text_start, text_end), (error_start, error_end)) = get_text_and_error_range(range, file_text); let sub_text = &file_text[text_start..text_end]; let mut result = String::new(); let lines = sub_text.lines().collect::<Vec<_>>(); let line_count = lines.len(); for (i, line) in lines.iter().enumerate() { let is_last_line = i == line_count - 1; if i > 2 && !is_last_line { continue; } if i > 0 { result.push_str("\n"); } if i == 2 && !is_last_line { result.push_str("..."); continue; } result.push_str(line); result.push_str("\n"); let start_pos = if i == 0 { error_start } else { 0 }; let end_pos = if is_last_line { get_column_number_of_pos(sub_text, error_end) - 1 } else { 
line.chars().count() }; result.push_str(&" ".repeat(start_pos)); result.push_str(&"~".repeat(end_pos - start_pos)); } return result; fn get_
); } #[test] fn should_get_range_highlight_on_second_line() { let message = get_range_text_highlight("test\ntest\ntest", (5, 9)); assert_eq!( message, concat!( "test\n", "~~~~" ) ); } #[test] fn should_get_range_highlight_on_multi_lines_within() { let message = get_range_text_highlight("test\ntest test\ntest test\nasdf", (10, 19)); assert_eq!( message, concat!( "test test\n", " ~~~~\n", "test test\n", "~~~~" ) ); } #[test] fn should_display_when_there_are_three_lines() { let message = get_range_text_highlight("test\nasdf\n1234\ntest\nasdf\n1234\ntest\n", (5, 19)); assert_eq!( message, concat!( "asdf\n", "~~~~\n", "1234\n", "~~~~\n", "test\n", "~~~~" ) ); } #[test] fn should_ignore_when_there_are_more_than_three_lines() { let message = get_range_text_highlight("test\nasdf\n1234\ntest\nasdf\n1234\ntest\n", (5, 24)); assert_eq!( message, concat!( "asdf\n", "~~~~\n", "1234\n", "~~~~\n", "...\n", "asdf\n", "~~~~" ) ); } #[test] fn should_show_only_twenty_chars_of_first_line() { let message = get_range_text_highlight("test asdf 1234 fdsa dsfa test", (25, 29)); assert_eq!( message, concat!( "asdf 1234 fdsa dsfa test\n", " ~~~~", ) ); } #[test] fn should_show_only_ten_chars_of_last_line() { let message = get_range_text_highlight("test asdf 1234 fdsa dsfa test", (10, 14)); assert_eq!( message, concat!( "test asdf 1234 fdsa dsfa\n", " ~~~~", ) ); } }
text_and_error_range(range: (usize, usize), file_text: &str) -> ((usize, usize), (usize, usize)) { let (start, end) = range; let start_column_number_byte_count = start - get_line_start_byte_pos(file_text, start); let line_end = get_line_end(file_text, end); let text_start = start - std::cmp::min(20, start_column_number_byte_count); let text_end = std::cmp::min(line_end, end + 10); let error_start = start - text_start; let error_end = error_start + (end - start); ((text_start, text_end), (error_start, error_end)) } fn get_line_end(text: &str, pos: usize) -> usize { let mut pos = pos; for c in text.chars().skip(pos) { if c == '\n' { break; } pos += 1; } pos } } #[cfg(test)] mod tests { use super::*; #[test] fn should_get_line_number_of_single_line() { assert_eq!(get_line_number_of_pos("testing", 3), 1); } #[test] fn should_get_last_line_when_above_length() { assert_eq!(get_line_number_of_pos("t\nt", 50), 2); } #[test] fn should_get_line_when_at_first_pos_on_line() { assert_eq!(get_line_number_of_pos("t\ntest\nt", 2), 2); } #[test] fn should_get_line_when_at_last_pos_on_line() { assert_eq!(get_line_number_of_pos("t\ntest\nt", 6), 2); } #[test] fn should_get_column_for_first_line() { assert_eq!(get_column_number_of_pos("testing\nthis", 3), 4); } #[test] fn should_get_column_for_second_line() { assert_eq!(get_column_number_of_pos("test\nthis", 6), 2); } #[test] fn should_get_column_for_start_of_text() { assert_eq!(get_column_number_of_pos("test\nthis", 0), 1); } #[test] fn should_get_column_for_start_of_line() { assert_eq!(get_column_number_of_pos("test\nthis", 5), 1); } #[test] fn should_get_range_highlight_for_full_text_one_line() { let message = get_range_text_highlight("testtinga", (0, 9)); assert_eq!( message, concat!( "testtinga\n", "~~~~~~~~~" ) ); } #[test] fn should_get_range_highlight_for_full_text_multi_lines() { let message = get_range_text_highlight("test\nt\naa", (0, 9)); assert_eq!( message, concat!( "test\n", "~~~~\n", "t\n", "~\n", "aa\n", "~~" ) ); } 
#[test] fn should_get_range_highlight_on_one_line() { let message = get_range_text_highlight("testtinga testing test", (10, 17)); assert_eq!( message, concat!( "testtinga testing test\n", " ~~~~~~~" )
random
[ { "content": "pub fn format_text(text: &str, config: &Configuration) -> Result<String, String> {\n\n let print_items = parse_items(text, config)?;\n\n\n\n Ok(print(print_items, PrintOptions {\n\n indent_width: config.indent_width,\n\n max_width: config.line_width,\n\n use_tabs: config...
Rust
src/app.rs
Netdex/niinii
d2fa91f3c16b1bdc20d7799a79e1d354247cd55e
use std::sync::mpsc; use imgui::*; use crate::{ backend::renderer::Env, gloss::{Gloss, GlossError, Glossator}, translation::{self, Translation}, view::{mixins::help_marker, rikai::RikaiView, settings::SettingsView}, }; const ERROR_MODAL_TITLE: &str = "Error"; #[derive(thiserror::Error, Debug)] enum Error { #[error(transparent)] Gloss(#[from] GlossError), #[error(transparent)] DeepL(#[from] deepl_api::Error), } #[derive(Debug)] enum Message { Gloss(Result<Gloss, GlossError>), Translation(Result<Translation, deepl_api::Error>), } #[derive(Debug)] enum State { Error(Error), Processing, None, } pub struct App { channel_tx: mpsc::Sender<Message>, channel_rx: mpsc::Receiver<Message>, input_text: String, last_clipboard: String, request_gloss_text: Option<String>, show_imgui_demo: bool, show_settings: bool, show_raw: bool, show_metrics_window: bool, show_style_editor: bool, settings: SettingsView, state: State, glossator: Glossator, rikai: RikaiView, } impl App { pub fn new(settings: SettingsView) -> Self { let (channel_tx, channel_rx) = mpsc::channel(); let glossator = Glossator::new(&settings); App { channel_tx, channel_rx, input_text: "".into(), last_clipboard: "".into(), request_gloss_text: None, show_imgui_demo: false, show_settings: false, show_raw: false, show_metrics_window: false, show_style_editor: false, settings, state: State::None, glossator, rikai: RikaiView::new(), } } fn request_gloss(&self, text: &str) { let channel_tx = self.channel_tx.clone(); let glossator = &self.glossator; let text = text.to_owned(); let variants = if self.settings.more_variants { 5 } else { 1 }; rayon::spawn(enclose! 
{ (glossator) move || { let gloss = glossator.gloss(&text, variants); let _ = channel_tx.send(Message::Gloss(gloss)); }}); } fn request_translation(&self, text: &str) { let channel_tx = self.channel_tx.clone(); let text = text.to_owned(); let deepl_api_key = self.settings.deepl_api_key.clone(); rayon::spawn(move || { let translation = translation::translate(&deepl_api_key, &text); let _ = channel_tx.send(Message::Translation(translation)); }); } fn transition(&mut self, ui: &Ui, state: State) { if let State::Error(err) = &state { log::error!("{}", err); ui.open_popup(ERROR_MODAL_TITLE); } self.state = state; } fn poll(&mut self, ui: &Ui) { match self.channel_rx.try_recv() { Ok(Message::Gloss(Ok(gloss))) => { let should_translate = !gloss.root.is_flat(); if self.settings.auto_translate && should_translate { self.request_translation(&gloss.root.text_flatten()); } else { self.transition(ui, State::None); self.rikai.set_translation(None); } self.rikai.set_gloss(Some(gloss)); } Ok(Message::Translation(Ok(translation))) => { self.rikai.set_translation(Some(translation)); self.transition(ui, State::None) } Ok(Message::Gloss(Err(err))) => { self.transition(ui, State::Error(err.into())); } Ok(Message::Translation(Err(err))) => { self.transition(ui, State::Error(err.into())); } Err(mpsc::TryRecvError::Empty) => {} x => { log::error!("unhandled message: {:?}", x); } } match &self.state { State::Error(_) | State::None => { if let Some(request_gloss_text) = self.request_gloss_text.clone() { self.request_gloss_text = None; self.transition(ui, State::Processing); self.request_gloss(&request_gloss_text); } } _ => (), }; if self.settings.watch_clipboard { if let Some(clipboard) = ui.clipboard_text() { if clipboard != self.last_clipboard { self.input_text = clipboard.clone(); self.last_clipboard = clipboard.clone(); self.request_gloss_text = Some(clipboard); } } } } fn show_main_menu(&mut self, _env: &mut Env, ui: &Ui) { if let Some(_token) = ui.begin_menu_bar() { if let Some(_menu) 
= ui.begin_menu("Options") { if MenuItem::new("Watch clipboard") .selected(self.settings.watch_clipboard) .build(ui) { self.settings.watch_clipboard = !self.settings.watch_clipboard; } ui.separator(); if MenuItem::new("Settings").build(ui) { self.show_settings = true; } } if let Some(_menu) = ui.begin_menu("View") { if MenuItem::new("Show input") .selected(self.settings.show_manual_input) .build(ui) { self.settings.show_manual_input = !self.settings.show_manual_input; } ui.separator(); if MenuItem::new("Raw").build(ui) { self.show_raw = true; } if MenuItem::new("Style Editor").build(ui) { self.show_style_editor = true; } if MenuItem::new("Debugger").build(ui) { self.show_metrics_window = true; } if MenuItem::new("ImGui Demo").build(ui) { self.show_imgui_demo = true; } } } } fn show_error_modal(&mut self, _env: &mut Env, ui: &Ui) { if let State::Error(err) = &self.state { PopupModal::new(ERROR_MODAL_TITLE) .always_auto_resize(true) .build(ui, || { let _wrap_token = ui.push_text_wrap_pos_with_pos(300.0); ui.text(err.to_string()); ui.separator(); if ui.button_with_size("OK", [120.0, 0.0]) { ui.close_current_popup(); } }); } } fn show_deepl_usage(&self, ui: &Ui) { if let Some(Translation::DeepL { deepl_usage, .. 
}) = self.rikai.translation() { ui.same_line(); let fraction = deepl_usage.character_count as f32 / deepl_usage.character_limit as f32; ProgressBar::new(fraction) .overlay_text(format!( "DeepL API usage: {}/{} ({:.2}%)", deepl_usage.character_count, deepl_usage.character_limit, fraction * 100.0 )) .size([350.0, 0.0]) .build(ui); } } pub fn ui(&mut self, env: &mut Env, ui: &mut Ui, run: &mut bool) { let io = ui.io(); let mut niinii = Window::new("niinii") .opened(run) .menu_bar(true) .draw_background(!self.settings().transparent); if !self.settings().overlay_mode { niinii = niinii .position([0.0, 0.0], Condition::Always) .size(io.display_size, Condition::Always) .no_decoration() }; niinii.build(ui, || { self.show_main_menu(env, ui); if self.settings().show_manual_input { let _disable_input = ui.begin_disabled(matches!(self.state, State::Processing)); if ui .input_text_multiline("", &mut self.input_text, [0.0, 50.0]) .enter_returns_true(true) .build() { self.request_gloss_text = Some(self.input_text.clone()); } if ui.button_with_size("Gloss", [120.0, 0.0]) { self.request_gloss_text = Some(self.input_text.clone()); } ui.same_line(); let should_translate = self .rikai .gloss() .map_or_else(|| false, |x| !x.root.is_flat()); { let mut _disable_tl = ui.begin_disabled(!should_translate || self.rikai.translation().is_some()); if ui.button_with_size("Translate", [120.0, 0.0]) { self.transition(ui, State::Processing); if let Some(gloss) = self.rikai.gloss() { self.request_translation(&gloss.root.text_flatten()); } } } if !should_translate && ui.is_item_hovered_with_flags(ItemHoveredFlags::ALLOW_WHEN_DISABLED) { ui.tooltip(|| ui.text("Text does not require translation")); } } self.show_deepl_usage(ui); { let _disable_ready = ui.begin_disabled(!matches!(self.state, State::None)); self.rikai.ui(env, ui, &self.settings, &mut self.show_raw); if let State::Processing = &self.state { ui.set_mouse_cursor(Some(MouseCursor::NotAllowed)); } } self.show_error_modal(env, ui); 
self.poll(ui); }); if self.show_imgui_demo { ui.show_demo_window(&mut self.show_imgui_demo); } if self.show_settings { if let Some(_token) = Window::new("Settings").always_auto_resize(true).begin(ui) { self.settings.ui(ui); ui.separator(); if ui.button_with_size("OK", [120.0, 0.0]) { self.show_settings = false; } ui.same_line(); ui.text("* Restart to apply these changes"); } } if self.show_metrics_window { ui.show_metrics_window(&mut self.show_metrics_window); } if self.show_style_editor { let mut show_style_editor = self.show_style_editor; Window::new("Style Editor") .opened(&mut show_style_editor) .menu_bar(true) .build(ui, || { ui.menu_bar(|| { if ui.button("Save") { self.settings_mut().set_style(Some(&ui.clone_style())); } if ui.button("Reset") { self.settings_mut().set_style(None); } if self.settings.style.is_some() { ui.menu_with_enabled("Style saved", false, || {}); help_marker(ui, "Saved style will be restored on start-up. Reset will clear the stored style."); } }); ui.show_default_style_editor(); }); self.show_style_editor = show_style_editor; } } pub fn settings(&self) -> &SettingsView { &self.settings } pub fn settings_mut(&mut self) -> &mut SettingsView { &mut self.settings } }
use std::sync::mpsc; use imgui::*; use crate::{ backend::renderer::Env, gloss::{Gloss, GlossError, Glossator}, translation::{self, Translation}, view::{mixins::help_marker, rikai::RikaiView, settings::SettingsView}, }; const ERROR_MODAL_TITLE: &str = "Error"; #[derive(thiserror::Error, Debug)] enum Error { #[error(transparent)] Gloss(#[from] GlossError), #[error(transparent)] DeepL(#[from] deepl_api::Error), } #[derive(Debug)] enum Message { Gloss(Result<Gloss, GlossError>), Translation(Result<Translation, deepl_api::Error>), } #[derive(Debug)] enum State { Error(Error), Processing, None, } pub struct App { channel_tx: mpsc::Sender<Message>, channel_rx: mpsc::Receiver<Message>, input_text: String, last_clipboard: String, request_gloss_text: Option<String>, show_imgui_demo: bool, show_settings: bool, show_raw: bool, show_metrics_window: bool, show_style_editor: bool, settings: SettingsView, state: State, glossator: Glossator, rikai: RikaiView, } impl App { pub fn new(settings: SettingsView) -> Self { let (channel_tx, channel_rx) = mpsc::channel(); let glossator = Glossator::new(&settings); App { channel_tx, channel_rx, input_text: "".into(), last_clipboard: "".into(), request_gloss_text: None, show_imgui_demo: false, show_settings: false, show_raw: false, show_metrics_window: false, show_style_editor: false, settings, state: State::None, glossator, rikai: RikaiView::new(), } } fn request_gloss(&self, text: &st
fn request_translation(&self, text: &str) { let channel_tx = self.channel_tx.clone(); let text = text.to_owned(); let deepl_api_key = self.settings.deepl_api_key.clone(); rayon::spawn(move || { let translation = translation::translate(&deepl_api_key, &text); let _ = channel_tx.send(Message::Translation(translation)); }); } fn transition(&mut self, ui: &Ui, state: State) { if let State::Error(err) = &state { log::error!("{}", err); ui.open_popup(ERROR_MODAL_TITLE); } self.state = state; } fn poll(&mut self, ui: &Ui) { match self.channel_rx.try_recv() { Ok(Message::Gloss(Ok(gloss))) => { let should_translate = !gloss.root.is_flat(); if self.settings.auto_translate && should_translate { self.request_translation(&gloss.root.text_flatten()); } else { self.transition(ui, State::None); self.rikai.set_translation(None); } self.rikai.set_gloss(Some(gloss)); } Ok(Message::Translation(Ok(translation))) => { self.rikai.set_translation(Some(translation)); self.transition(ui, State::None) } Ok(Message::Gloss(Err(err))) => { self.transition(ui, State::Error(err.into())); } Ok(Message::Translation(Err(err))) => { self.transition(ui, State::Error(err.into())); } Err(mpsc::TryRecvError::Empty) => {} x => { log::error!("unhandled message: {:?}", x); } } match &self.state { State::Error(_) | State::None => { if let Some(request_gloss_text) = self.request_gloss_text.clone() { self.request_gloss_text = None; self.transition(ui, State::Processing); self.request_gloss(&request_gloss_text); } } _ => (), }; if self.settings.watch_clipboard { if let Some(clipboard) = ui.clipboard_text() { if clipboard != self.last_clipboard { self.input_text = clipboard.clone(); self.last_clipboard = clipboard.clone(); self.request_gloss_text = Some(clipboard); } } } } fn show_main_menu(&mut self, _env: &mut Env, ui: &Ui) { if let Some(_token) = ui.begin_menu_bar() { if let Some(_menu) = ui.begin_menu("Options") { if MenuItem::new("Watch clipboard") .selected(self.settings.watch_clipboard) .build(ui) { 
self.settings.watch_clipboard = !self.settings.watch_clipboard; } ui.separator(); if MenuItem::new("Settings").build(ui) { self.show_settings = true; } } if let Some(_menu) = ui.begin_menu("View") { if MenuItem::new("Show input") .selected(self.settings.show_manual_input) .build(ui) { self.settings.show_manual_input = !self.settings.show_manual_input; } ui.separator(); if MenuItem::new("Raw").build(ui) { self.show_raw = true; } if MenuItem::new("Style Editor").build(ui) { self.show_style_editor = true; } if MenuItem::new("Debugger").build(ui) { self.show_metrics_window = true; } if MenuItem::new("ImGui Demo").build(ui) { self.show_imgui_demo = true; } } } } fn show_error_modal(&mut self, _env: &mut Env, ui: &Ui) { if let State::Error(err) = &self.state { PopupModal::new(ERROR_MODAL_TITLE) .always_auto_resize(true) .build(ui, || { let _wrap_token = ui.push_text_wrap_pos_with_pos(300.0); ui.text(err.to_string()); ui.separator(); if ui.button_with_size("OK", [120.0, 0.0]) { ui.close_current_popup(); } }); } } fn show_deepl_usage(&self, ui: &Ui) { if let Some(Translation::DeepL { deepl_usage, .. 
}) = self.rikai.translation() { ui.same_line(); let fraction = deepl_usage.character_count as f32 / deepl_usage.character_limit as f32; ProgressBar::new(fraction) .overlay_text(format!( "DeepL API usage: {}/{} ({:.2}%)", deepl_usage.character_count, deepl_usage.character_limit, fraction * 100.0 )) .size([350.0, 0.0]) .build(ui); } } pub fn ui(&mut self, env: &mut Env, ui: &mut Ui, run: &mut bool) { let io = ui.io(); let mut niinii = Window::new("niinii") .opened(run) .menu_bar(true) .draw_background(!self.settings().transparent); if !self.settings().overlay_mode { niinii = niinii .position([0.0, 0.0], Condition::Always) .size(io.display_size, Condition::Always) .no_decoration() }; niinii.build(ui, || { self.show_main_menu(env, ui); if self.settings().show_manual_input { let _disable_input = ui.begin_disabled(matches!(self.state, State::Processing)); if ui .input_text_multiline("", &mut self.input_text, [0.0, 50.0]) .enter_returns_true(true) .build() { self.request_gloss_text = Some(self.input_text.clone()); } if ui.button_with_size("Gloss", [120.0, 0.0]) { self.request_gloss_text = Some(self.input_text.clone()); } ui.same_line(); let should_translate = self .rikai .gloss() .map_or_else(|| false, |x| !x.root.is_flat()); { let mut _disable_tl = ui.begin_disabled(!should_translate || self.rikai.translation().is_some()); if ui.button_with_size("Translate", [120.0, 0.0]) { self.transition(ui, State::Processing); if let Some(gloss) = self.rikai.gloss() { self.request_translation(&gloss.root.text_flatten()); } } } if !should_translate && ui.is_item_hovered_with_flags(ItemHoveredFlags::ALLOW_WHEN_DISABLED) { ui.tooltip(|| ui.text("Text does not require translation")); } } self.show_deepl_usage(ui); { let _disable_ready = ui.begin_disabled(!matches!(self.state, State::None)); self.rikai.ui(env, ui, &self.settings, &mut self.show_raw); if let State::Processing = &self.state { ui.set_mouse_cursor(Some(MouseCursor::NotAllowed)); } } self.show_error_modal(env, ui); 
self.poll(ui); }); if self.show_imgui_demo { ui.show_demo_window(&mut self.show_imgui_demo); } if self.show_settings { if let Some(_token) = Window::new("Settings").always_auto_resize(true).begin(ui) { self.settings.ui(ui); ui.separator(); if ui.button_with_size("OK", [120.0, 0.0]) { self.show_settings = false; } ui.same_line(); ui.text("* Restart to apply these changes"); } } if self.show_metrics_window { ui.show_metrics_window(&mut self.show_metrics_window); } if self.show_style_editor { let mut show_style_editor = self.show_style_editor; Window::new("Style Editor") .opened(&mut show_style_editor) .menu_bar(true) .build(ui, || { ui.menu_bar(|| { if ui.button("Save") { self.settings_mut().set_style(Some(&ui.clone_style())); } if ui.button("Reset") { self.settings_mut().set_style(None); } if self.settings.style.is_some() { ui.menu_with_enabled("Style saved", false, || {}); help_marker(ui, "Saved style will be restored on start-up. Reset will clear the stored style."); } }); ui.show_default_style_editor(); }); self.show_style_editor = show_style_editor; } } pub fn settings(&self) -> &SettingsView { &self.settings } pub fn settings_mut(&mut self) -> &mut SettingsView { &mut self.settings } }
r) { let channel_tx = self.channel_tx.clone(); let glossator = &self.glossator; let text = text.to_owned(); let variants = if self.settings.more_variants { 5 } else { 1 }; rayon::spawn(enclose! { (glossator) move || { let gloss = glossator.gloss(&text, variants); let _ = channel_tx.send(Message::Gloss(gloss)); }}); }
function_block-function_prefixed
[ { "content": "pub fn translate(deepl_api_key: &str, text: &str) -> Result<Translation, deepl_api::Error> {\n\n let text = filter_text(text);\n\n let deepl = DeepL::new(deepl_api_key.to_string());\n\n let deepl_text = deepl\n\n .translate(\n\n None,\n\n TranslatableTextList ...
Rust
near/oysterpack-smart-near/src/domain/public_key.rs
oysterpack/oysterpack-smart
f2985636d7c035de06e319f7ac8bec92e1293362
use crate::asserts::ERR_INVALID; use crate::Error; use near_sdk::json_types::Base58PublicKey; use near_sdk::{ borsh::{self, BorshDeserialize, BorshSerialize}, serde::{Deserialize, Deserializer, Serialize, Serializer}, }; use std::fmt::{Debug, Formatter}; use std::{ convert::{TryFrom, TryInto}, fmt::{self, Display}, }; #[derive(BorshDeserialize, BorshSerialize, Clone, Copy, PartialEq, Eq, Hash)] pub enum PublicKey { ED25519([u8; 32]), SECP256K1(([u8; 32], [u8; 32])), } impl Serialize for PublicKey { fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error> where S: Serializer, { let key = Base58PublicKey::from(*self); Serialize::serialize(&key, serializer) } } impl<'de> Deserialize<'de> for PublicKey { fn deserialize<D>(deserializer: D) -> Result<Self, <D as Deserializer<'de>>::Error> where D: Deserializer<'de>, { let key: Base58PublicKey = Deserialize::deserialize(deserializer)?; Ok(key.into()) } } impl TryFrom<&[u8]> for PublicKey { type Error = Error<String>; fn try_from(value: &[u8]) -> Result<Self, Self::Error> { match value.len() { 33 if value[0] == 0 => Ok(Self::ED25519((&value[1..]).try_into().unwrap())), 65 if value[0] == 1 => Ok(Self::SECP256K1(( (&value[1..33]).try_into().unwrap(), (&value[33..]).try_into().unwrap(), ))), _ => Err(ERR_INVALID.error("invalid public key".to_string())), } } } impl From<PublicKey> for Vec<u8> { fn from(key: PublicKey) -> Self { match key { PublicKey::ED25519(k) => { let mut key = Vec::with_capacity(33); key.push(0); for b in k.iter() { key.push(*b); } key } PublicKey::SECP256K1((k1, k2)) => { let mut key = Vec::with_capacity(65); key.push(1); for b in k1.iter() { key.push(*b); } for b in k2.iter() { key.push(*b); } key } } } } impl From<Base58PublicKey> for PublicKey { fn from(key: Base58PublicKey) -> Self { key.0.as_slice().try_into().unwrap() } } impl From<PublicKey> for Base58PublicKey { fn from(key: PublicKey) -> Self { Self(key.into()) } } impl Display for PublicKey { fn fmt(&self, 
f: &mut Formatter<'_>) -> fmt::Result { let key = Base58PublicKey::from(*self); let s: String = (&key).try_into().map_err(|_| fmt::Error)?; Display::fmt(&s, f) } } impl Debug for PublicKey { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { Display::fmt(&self, f) } } #[cfg(test)] mod tests { use super::*; use near_sdk::serde_json; #[test] fn from_vec_ed25519() { let key = [0_u8; 33]; let key: PublicKey = key[..].try_into().unwrap(); println!("{}", key); match key { PublicKey::ED25519(_) => {} PublicKey::SECP256K1(_) => panic!("expected ED25519"), } } #[test] fn from_vec_secp256k1() { let key = [1_u8; 65]; let key: PublicKey = key[..].try_into().unwrap(); println!("{}", key); match key { PublicKey::ED25519(_) => panic!("expected ED25519"), PublicKey::SECP256K1(_) => {} } } #[test] fn json_serde() { let key = [1_u8; 65]; let key: PublicKey = key[..].try_into().unwrap(); let json = serde_json::to_string(&key).unwrap(); println!("json: {}", json); let key2: PublicKey = serde_json::from_str(&json).unwrap(); assert_eq!(key, key2); } #[test] fn into_base58() { let key = [1_u8; 65]; let key: PublicKey = key[..].try_into().unwrap(); let base58_key: Base58PublicKey = key.into(); println!("{}", serde_json::to_string(&base58_key).unwrap()); let base58_key2: Base58PublicKey = serde_json::from_str(serde_json::to_string(&base58_key).unwrap().as_str()).unwrap(); assert_eq!(base58_key, base58_key2); } }
use crate::asserts::ERR_INVALID; use crate::Error; use near_sdk::json_types::Base58PublicKey; use near_sdk::{ borsh::{self, BorshDeserialize, BorshSerialize}, serde::{Deserialize, Deserializer, Serialize, Serializer}, }; use std::fmt::{Debug, Formatter}; use std::{ convert::{TryFrom, TryInto}, fmt::{self, Display}, }; #[derive(BorshDeserialize, BorshSerialize, Clone, Copy, PartialEq, Eq, Hash)] pub enum PublicKey { ED25519([u8; 32]), SECP256K1(([u8; 32], [u8; 32])), } impl Serialize for PublicKey { fn
} impl<'de> Deserialize<'de> for PublicKey { fn deserialize<D>(deserializer: D) -> Result<Self, <D as Deserializer<'de>>::Error> where D: Deserializer<'de>, { let key: Base58PublicKey = Deserialize::deserialize(deserializer)?; Ok(key.into()) } } impl TryFrom<&[u8]> for PublicKey { type Error = Error<String>; fn try_from(value: &[u8]) -> Result<Self, Self::Error> { match value.len() { 33 if value[0] == 0 => Ok(Self::ED25519((&value[1..]).try_into().unwrap())), 65 if value[0] == 1 => Ok(Self::SECP256K1(( (&value[1..33]).try_into().unwrap(), (&value[33..]).try_into().unwrap(), ))), _ => Err(ERR_INVALID.error("invalid public key".to_string())), } } } impl From<PublicKey> for Vec<u8> { fn from(key: PublicKey) -> Self { match key { PublicKey::ED25519(k) => { let mut key = Vec::with_capacity(33); key.push(0); for b in k.iter() { key.push(*b); } key } PublicKey::SECP256K1((k1, k2)) => { let mut key = Vec::with_capacity(65); key.push(1); for b in k1.iter() { key.push(*b); } for b in k2.iter() { key.push(*b); } key } } } } impl From<Base58PublicKey> for PublicKey { fn from(key: Base58PublicKey) -> Self { key.0.as_slice().try_into().unwrap() } } impl From<PublicKey> for Base58PublicKey { fn from(key: PublicKey) -> Self { Self(key.into()) } } impl Display for PublicKey { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let key = Base58PublicKey::from(*self); let s: String = (&key).try_into().map_err(|_| fmt::Error)?; Display::fmt(&s, f) } } impl Debug for PublicKey { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { Display::fmt(&self, f) } } #[cfg(test)] mod tests { use super::*; use near_sdk::serde_json; #[test] fn from_vec_ed25519() { let key = [0_u8; 33]; let key: PublicKey = key[..].try_into().unwrap(); println!("{}", key); match key { PublicKey::ED25519(_) => {} PublicKey::SECP256K1(_) => panic!("expected ED25519"), } } #[test] fn from_vec_secp256k1() { let key = [1_u8; 65]; let key: PublicKey = key[..].try_into().unwrap(); println!("{}", key); match key { 
PublicKey::ED25519(_) => panic!("expected ED25519"), PublicKey::SECP256K1(_) => {} } } #[test] fn json_serde() { let key = [1_u8; 65]; let key: PublicKey = key[..].try_into().unwrap(); let json = serde_json::to_string(&key).unwrap(); println!("json: {}", json); let key2: PublicKey = serde_json::from_str(&json).unwrap(); assert_eq!(key, key2); } #[test] fn into_base58() { let key = [1_u8; 65]; let key: PublicKey = key[..].try_into().unwrap(); let base58_key: Base58PublicKey = key.into(); println!("{}", serde_json::to_string(&base58_key).unwrap()); let base58_key2: Base58PublicKey = serde_json::from_str(serde_json::to_string(&base58_key).unwrap().as_str()).unwrap(); assert_eq!(base58_key, base58_key2); } }
serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error> where S: Serializer, { let key = Base58PublicKey::from(*self); Serialize::serialize(&key, serializer) }
function_block-function_prefixed
[]
Rust
src/librustpkg/package_source.rs
thomcc/rust
c6515ee6a7f424679e2b53336974a991dd3b71c8
use target::*; use package_id::PkgId; use core::path::Path; use core::option::*; use core::{os, run, str, vec}; use context::*; use crate::Crate; use messages::*; use path_util::pkgid_src_in_workspace; use util::compile_crate; use version::{ExactRevision, SemanticVersion, NoVersion}; pub struct PkgSrc { root: Path, dst_dir: Path, id: PkgId, libs: ~[Crate], mains: ~[Crate], tests: ~[Crate], benchs: ~[Crate], } condition! { build_err: (~str) -> (); } impl PkgSrc { pub fn new(src_dir: &Path, dst_dir: &Path, id: &PkgId) -> PkgSrc { PkgSrc { root: copy *src_dir, dst_dir: copy *dst_dir, id: copy *id, libs: ~[], mains: ~[], tests: ~[], benchs: ~[] } } fn check_dir(&self) -> Path { use conditions::nonexistent_package::cond; debug!("Pushing onto root: %s | %s", self.id.remote_path.to_str(), self.root.to_str()); let dir; let dirs = pkgid_src_in_workspace(&self.id, &self.root); debug!("Checking dirs: %?", dirs); let path = dirs.iter().find_(|&d| os::path_exists(d)); match path { Some(d) => dir = copy *d, None => dir = match self.fetch_git() { None => cond.raise((copy self.id, ~"supplied path for package dir does not \ exist, and couldn't interpret it as a URL fragment")), Some(d) => d } } if !os::path_is_dir(&dir) { cond.raise((copy self.id, ~"supplied path for package dir is a \ non-directory")); } dir } pub fn fetch_git(&self) -> Option<Path> { let mut local = self.root.push("src"); local = local.push(self.id.to_str()); os::remove_dir_recursive(&local); let url = fmt!("https://%s", self.id.remote_path.to_str()); let branch_args = match self.id.version { NoVersion => ~[], ExactRevision(ref s) => ~[~"--branch", copy *s], SemanticVersion(ref s) => ~[~"--branch", s.to_str()] }; note(fmt!("Fetching package: git clone %s %s %?", url, local.to_str(), branch_args)); if run::process_output("git", ~[~"clone", copy url, local.to_str()] + branch_args).status != 0 { note(fmt!("fetching %s failed: can't clone repository", url)); None } else { Some(local) } } pub fn 
package_script_option(&self, cwd: &Path) -> Option<Path> { let maybe_path = cwd.push("pkg.rs"); if os::path_exists(&maybe_path) { Some(maybe_path) } else { None } } fn stem_matches(&self, p: &Path) -> bool { let self_id = self.id.local_path.filestem(); if self_id == p.filestem() { return true; } else { for self_id.iter().advance |pth| { if pth.starts_with("rust_") && match p.filestem() { Some(s) => str::eq_slice(s, pth.slice(5, pth.len())), None => false } { return true; } } } false } fn push_crate(cs: &mut ~[Crate], prefix: uint, p: &Path) { assert!(p.components.len() > prefix); let mut sub = Path(""); for vec::slice(p.components, prefix, p.components.len()).iter().advance |c| { sub = sub.push(*c); } debug!("found crate %s", sub.to_str()); cs.push(Crate::new(&sub)); } pub fn find_crates(&mut self) { use conditions::missing_pkg_files::cond; let dir = self.check_dir(); debug!("Called check_dir, I'm in %s", dir.to_str()); let prefix = dir.components.len(); debug!("Matching against %?", self.id.local_path.filestem()); for os::walk_dir(&dir) |pth| { match pth.filename() { Some(~"lib.rs") => PkgSrc::push_crate(&mut self.libs, prefix, pth), Some(~"main.rs") => PkgSrc::push_crate(&mut self.mains, prefix, pth), Some(~"test.rs") => PkgSrc::push_crate(&mut self.tests, prefix, pth), Some(~"bench.rs") => PkgSrc::push_crate(&mut self.benchs, prefix, pth), _ => () } } if self.libs.is_empty() && self.mains.is_empty() && self.tests.is_empty() && self.benchs.is_empty() { note("Couldn't infer any crates to build.\n\ Try naming a crate `main.rs`, `lib.rs`, \ `test.rs`, or `bench.rs`."); cond.raise(copy self.id); } debug!("found %u libs, %u mains, %u tests, %u benchs", self.libs.len(), self.mains.len(), self.tests.len(), self.benchs.len()) } fn build_crates(&self, ctx: &Ctx, dst_dir: &Path, src_dir: &Path, crates: &[Crate], cfgs: &[~str], what: OutputType) { for crates.iter().advance |&crate| { let path = &src_dir.push_rel(&crate.file).normalize(); note(fmt!("build_crates: compiling 
%s", path.to_str())); note(fmt!("build_crates: destination dir is %s", dst_dir.to_str())); let result = compile_crate(ctx, &self.id, path, dst_dir, crate.flags, crate.cfgs + cfgs, false, what); if !result { build_err::cond.raise(fmt!("build failure on %s", path.to_str())); } debug!("Result of compiling %s was %?", path.to_str(), result); } } pub fn build(&self, ctx: &Ctx, dst_dir: Path, cfgs: ~[~str]) { let dir = self.check_dir(); debug!("Building libs in %s", dir.to_str()); self.build_crates(ctx, &dst_dir, &dir, self.libs, cfgs, Lib); debug!("Building mains"); self.build_crates(ctx, &dst_dir, &dir, self.mains, cfgs, Main); debug!("Building tests"); self.build_crates(ctx, &dst_dir, &dir, self.tests, cfgs, Test); debug!("Building benches"); self.build_crates(ctx, &dst_dir, &dir, self.benchs, cfgs, Bench); } }
use target::*; use package_id::PkgId; use core::path::Path; use core::option::*; use core::{os, run, str, vec}; use context::*; use crate::Crate; use messages::*; use path_util::pkgid_src_in_workspace; use util::compile_crate; use version::{ExactRevision, SemanticVersion, NoVersion}; pub struct PkgSrc { root: Path, dst_dir: Path, id: PkgId, libs: ~[Crate], mains: ~[Crate], tests: ~[Crate], benchs: ~[Crate], } condition! { build_err: (~str) -> (); } impl PkgSrc { pub fn new(src_dir: &Path, dst_dir: &Path, id: &PkgId) -> PkgSrc { PkgSrc { root: copy *src_dir, dst_dir: copy *dst_dir, id: copy *id, libs: ~[], mains: ~[], tests: ~[], benchs: ~[] } } fn check_dir(&self) -> Path { use conditions::nonexistent_package::cond; debug!("Pushing onto root: %s | %s", self.id.remote_path.to_str(), self.root.to_str()); let dir; let dirs = pkgid_src_in_workspace(&self.id, &self.root); debug!("Checking dirs: %?", dirs); let path = dirs.iter().find_(|&d| os::path_exists(d)); match path { Some(d) => dir = copy *d, None => dir = match self.fetch_git() { None => cond.raise((copy self.id, ~"supplied path for package dir does not \ exist, and couldn't interpret it as a URL fragment")), Some(d) => d } } if !os::path_is_dir(&dir) { cond.raise((copy self.id, ~"supplied path for package dir is a \ non-directory")); } dir } pub fn fetch_git(&self) -> Option<Path> { let mut local = self.root.push("src"); local = local.push(self.id.to_str()); os::remove_dir_recursive(&local); let url = fmt!("https://%s", self.id.remote_path.to_str()); let branch_args = match self.id.version { NoVersion => ~[], ExactRevision(ref s) => ~[~"--branch", copy *s], SemanticVersion(ref s) => ~[~"--branch", s.to_str()] }; note(fmt!("Fetching package: git clone %s %s %?", url, local.to_str(), branch_args)); if run::process_output("git", ~[~"clone", copy url, local.to_str()] + branch_args).status != 0 { note(fmt!("fetching %s failed: can't clone repository", url)); None } else { Some(local) } } pub fn 
package_script_option(&self, cwd: &Path) -> Option<Path> { let maybe_path = cwd.push("pkg.rs"); if os::path_exists(&maybe_path) { Some(maybe_path) } else { None } } fn stem_matches(&self, p: &Path) -> bool { let self_id = self.id.local_path.filestem(); if self_id == p.filestem()
&dir, self.libs, cfgs, Lib); debug!("Building mains"); self.build_crates(ctx, &dst_dir, &dir, self.mains, cfgs, Main); debug!("Building tests"); self.build_crates(ctx, &dst_dir, &dir, self.tests, cfgs, Test); debug!("Building benches"); self.build_crates(ctx, &dst_dir, &dir, self.benchs, cfgs, Bench); } }
{ return true; } else { for self_id.iter().advance |pth| { if pth.starts_with("rust_") && match p.filestem() { Some(s) => str::eq_slice(s, pth.slice(5, pth.len())), None => false } { return true; } } } false } fn push_crate(cs: &mut ~[Crate], prefix: uint, p: &Path) { assert!(p.components.len() > prefix); let mut sub = Path(""); for vec::slice(p.components, prefix, p.components.len()).iter().advance |c| { sub = sub.push(*c); } debug!("found crate %s", sub.to_str()); cs.push(Crate::new(&sub)); } pub fn find_crates(&mut self) { use conditions::missing_pkg_files::cond; let dir = self.check_dir(); debug!("Called check_dir, I'm in %s", dir.to_str()); let prefix = dir.components.len(); debug!("Matching against %?", self.id.local_path.filestem()); for os::walk_dir(&dir) |pth| { match pth.filename() { Some(~"lib.rs") => PkgSrc::push_crate(&mut self.libs, prefix, pth), Some(~"main.rs") => PkgSrc::push_crate(&mut self.mains, prefix, pth), Some(~"test.rs") => PkgSrc::push_crate(&mut self.tests, prefix, pth), Some(~"bench.rs") => PkgSrc::push_crate(&mut self.benchs, prefix, pth), _ => () } } if self.libs.is_empty() && self.mains.is_empty() && self.tests.is_empty() && self.benchs.is_empty() { note("Couldn't infer any crates to build.\n\ Try naming a crate `main.rs`, `lib.rs`, \ `test.rs`, or `bench.rs`."); cond.raise(copy self.id); } debug!("found %u libs, %u mains, %u tests, %u benchs", self.libs.len(), self.mains.len(), self.tests.len(), self.benchs.len()) } fn build_crates(&self, ctx: &Ctx, dst_dir: &Path, src_dir: &Path, crates: &[Crate], cfgs: &[~str], what: OutputType) { for crates.iter().advance |&crate| { let path = &src_dir.push_rel(&crate.file).normalize(); note(fmt!("build_crates: compiling %s", path.to_str())); note(fmt!("build_crates: destination dir is %s", dst_dir.to_str())); let result = compile_crate(ctx, &self.id, path, dst_dir, crate.flags, crate.cfgs + cfgs, false, what); if !result { build_err::cond.raise(fmt!("build failure on %s", path.to_str())); } 
debug!("Result of compiling %s was %?", path.to_str(), result); } } pub fn build(&self, ctx: &Ctx, dst_dir: Path, cfgs: ~[~str]) { let dir = self.check_dir(); debug!("Building libs in %s", dir.to_str()); self.build_crates(ctx, &dst_dir,
random
[ { "content": "// xfail-test\n\npub fn main() { let early_error: @fn(str) -> ! = {|msg| fail!() }; }\n", "file_path": "src/test/run-pass/issue-1516.rs", "rank": 0, "score": 706562.2886381538 }, { "content": "pub fn main() { let mut v: ~[int] = ~[]; }\n", "file_path": "src/test/run-pass/...
Rust
research/gaia-x/pegasus/pegasus/src/operator/iteration/feedback.rs
bmmcq/GraphScope
a480d941f3a3f1270ddc0570e72059e6a34dab24
use crate::api::Notification; use crate::communication::input::{new_input_session, InputProxy}; use crate::communication::output::{new_output, OutputProxy}; use crate::errors::JobExecError; use crate::graph::Port; use crate::operator::{DefaultNotify, Notifiable, OperatorCore}; use crate::progress::{EndSignal, Weight}; use crate::tag::tools::map::TidyTagMap; use crate::{Data, Tag}; pub(crate) struct IterSyncOperator<D: Data> { observer: TidyTagMap<usize>, notify: DefaultNotify, _ph: std::marker::PhantomData<D>, } impl<D: Data> IterSyncOperator<D> { pub fn new(scope_level: u32) -> Self { IterSyncOperator { observer: TidyTagMap::new(scope_level), notify: DefaultNotify::Single, _ph: std::marker::PhantomData, } } } impl<D: Data> OperatorCore for IterSyncOperator<D> { fn on_receive( &mut self, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<(), JobExecError> { let mut input = new_input_session::<D>(&inputs[0]); let output = new_output::<D>(&outputs[0]); input.for_each_batch(|dataset| { if dataset.is_last() { let mut count = self.observer.remove(&dataset.tag).unwrap_or(0); if !dataset.is_empty() { count += 1; output.push_batch_mut(dataset)?; } if let Some(end) = dataset.take_end() { if count == 0 { let sync = end.tag.clone(); debug_worker!("detect if termination on {:?}", sync); let end = EndSignal::new(sync, Weight::all()); outputs[1].notify_end(end)?; } output.notify_end(end)?; } } else { if !dataset.is_empty() { let cnt = self .observer .get_mut_or_else(&dataset.tag, || 0); *cnt += 1; output.push_batch_mut(dataset)?; } } Ok(()) }) } } impl<D: Data> Notifiable for IterSyncOperator<D> { fn on_notify(&mut self, n: Notification, outputs: &[Box<dyn OutputProxy>]) -> Result<(), JobExecError> { if outputs.len() > 0 { match self.notify { DefaultNotify::Single => { assert_eq!(n.port, 0); let end = n.take_end(); if outputs.len() > 1 { for output in &outputs[1..] 
{ output.notify_end(end.clone())?; } } outputs[0].notify_end(end)?; Ok(()) } _ => unreachable!(), } } else { Ok(()) } } fn on_cancel( &mut self, port: Port, tag: Tag, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<bool, JobExecError> { assert_eq!(port.port, 0); inputs[0].cancel_scope(&tag); inputs[0].propagate_cancel(&tag)?; outputs[0].skip(&tag)?; Ok(true) } } pub(crate) struct FeedbackOperator<D: Data> { pub _scope_level: u32, max_iters: u32, observer: TidyTagMap<()>, _ph: std::marker::PhantomData<D>, } impl<D: Data> FeedbackOperator<D> { pub fn new(_scope_level: u32, max_iters: u32) -> Self { FeedbackOperator { _scope_level, max_iters, observer: TidyTagMap::new(_scope_level - 1), _ph: std::marker::PhantomData, } } } impl<D: Data> OperatorCore for FeedbackOperator<D> { fn on_receive( &mut self, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<(), JobExecError> { let mut input = new_input_session::<D>(&inputs[0]); let output = new_output::<D>(&outputs[0]); input.for_each_batch(|dataset| { let mut session = output.new_session(&dataset.tag)?; for d in dataset.drain() { session.give(d)?; } if let Some(end) = dataset.take_end() { assert!(end.tag.len() > 0); let cur = end.tag.current_uncheck(); let p = end.tag.to_parent_uncheck(); if cur == 0 { debug_worker!("observe {:?} in iteration;", p); self.observer.insert(p, ()); session.notify_end(end)?; } else if cur == self.max_iters - 1 { debug_worker!("observe {:?} out iteration;", p); self.observer.remove(&p); session.notify_end(end)?; } else if self.observer.contains_key(&p) { session.notify_end(end)?; } else { debug_worker!("redundant end {:?}, ignore;", end.tag); } } Ok(()) })?; let mut sync_input = new_input_session::<D>(&inputs[1]); sync_input.for_each_batch(|dataset| { assert!(dataset.is_empty()); assert!(dataset.is_last()); if let Some(mut end) = dataset.take_end() { let p = end.tag.to_parent_uncheck(); if let Some(_) = self.observer.remove(&p) { 
debug_worker!( "observe {:?} terminate on {}th iteration;", p, end.tag.current_uncheck() ); end.tag = end.tag.advance_to(self.max_iters - 1); output.notify_end(end)?; } } Ok(()) }) } } impl<D: Data> Notifiable for FeedbackOperator<D> { fn on_notify(&mut self, n: Notification, outputs: &[Box<dyn OutputProxy>]) -> Result<(), JobExecError> { debug_worker!("feedback: on notify of {:?} on {:?}", n.tag(), n.port); if n.port == 0 { let end = n.take_end(); if end.tag.is_root() { outputs[0].notify_end(end)?; } } Ok(()) } fn on_cancel( &mut self, port: Port, tag: Tag, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<bool, JobExecError> { assert_eq!(port.port, 0); inputs[0].cancel_scope(&tag); inputs[0].propagate_cancel(&tag)?; outputs[0].skip(&tag)?; Ok(true) } }
use crate::api::Notification; use crate::communication::input::{new_input_session, InputProxy}; use crate::communication::output::{new_output, OutputProxy}; use crate::errors::JobExecError; use crate::graph::Port; use crate::operator::{DefaultNotify, Notifiable, OperatorCore}; use crate::progress::{EndSignal, Weight}; use crate::tag::tools::map::TidyTagMap; use crate::{Data, Tag}; pub(crate) struct IterSyncOperator<D: Data> { observer: TidyTagMap<usize>, notify: DefaultNotify, _ph: std::marker::PhantomData<D>, } impl<D: Data> IterSyncOperator<D> { pub fn new(scope_level: u32) -> Self { IterSyncOperator { observer: TidyTagMap::new(scope_level), notify: DefaultNotify::Single, _ph: std::marker::PhantomData, } } } impl<D: Data> OperatorCore for IterSyncOperator<D> { fn on_receive( &mut self, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<(), JobExecError> { let mut input = new_input_session::<D>(&inputs[0]); let output = new_output::<D>(&outputs[0]); input.for_each_batch(|dataset| { if dataset.is_last() { let mut count = self.observer.remove(&dataset.tag).unwrap_or(0);
if let Some(end) = dataset.take_end() { if count == 0 { let sync = end.tag.clone(); debug_worker!("detect if termination on {:?}", sync); let end = EndSignal::new(sync, Weight::all()); outputs[1].notify_end(end)?; } output.notify_end(end)?; } } else { if !dataset.is_empty() { let cnt = self .observer .get_mut_or_else(&dataset.tag, || 0); *cnt += 1; output.push_batch_mut(dataset)?; } } Ok(()) }) } } impl<D: Data> Notifiable for IterSyncOperator<D> { fn on_notify(&mut self, n: Notification, outputs: &[Box<dyn OutputProxy>]) -> Result<(), JobExecError> { if outputs.len() > 0 { match self.notify { DefaultNotify::Single => { assert_eq!(n.port, 0); let end = n.take_end(); if outputs.len() > 1 { for output in &outputs[1..] { output.notify_end(end.clone())?; } } outputs[0].notify_end(end)?; Ok(()) } _ => unreachable!(), } } else { Ok(()) } } fn on_cancel( &mut self, port: Port, tag: Tag, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<bool, JobExecError> { assert_eq!(port.port, 0); inputs[0].cancel_scope(&tag); inputs[0].propagate_cancel(&tag)?; outputs[0].skip(&tag)?; Ok(true) } } pub(crate) struct FeedbackOperator<D: Data> { pub _scope_level: u32, max_iters: u32, observer: TidyTagMap<()>, _ph: std::marker::PhantomData<D>, } impl<D: Data> FeedbackOperator<D> { pub fn new(_scope_level: u32, max_iters: u32) -> Self { FeedbackOperator { _scope_level, max_iters, observer: TidyTagMap::new(_scope_level - 1), _ph: std::marker::PhantomData, } } } impl<D: Data> OperatorCore for FeedbackOperator<D> { fn on_receive( &mut self, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<(), JobExecError> { let mut input = new_input_session::<D>(&inputs[0]); let output = new_output::<D>(&outputs[0]); input.for_each_batch(|dataset| { let mut session = output.new_session(&dataset.tag)?; for d in dataset.drain() { session.give(d)?; } if let Some(end) = dataset.take_end() { assert!(end.tag.len() > 0); let cur = end.tag.current_uncheck(); let 
p = end.tag.to_parent_uncheck(); if cur == 0 { debug_worker!("observe {:?} in iteration;", p); self.observer.insert(p, ()); session.notify_end(end)?; } else if cur == self.max_iters - 1 { debug_worker!("observe {:?} out iteration;", p); self.observer.remove(&p); session.notify_end(end)?; } else if self.observer.contains_key(&p) { session.notify_end(end)?; } else { debug_worker!("redundant end {:?}, ignore;", end.tag); } } Ok(()) })?; let mut sync_input = new_input_session::<D>(&inputs[1]); sync_input.for_each_batch(|dataset| { assert!(dataset.is_empty()); assert!(dataset.is_last()); if let Some(mut end) = dataset.take_end() { let p = end.tag.to_parent_uncheck(); if let Some(_) = self.observer.remove(&p) { debug_worker!( "observe {:?} terminate on {}th iteration;", p, end.tag.current_uncheck() ); end.tag = end.tag.advance_to(self.max_iters - 1); output.notify_end(end)?; } } Ok(()) }) } } impl<D: Data> Notifiable for FeedbackOperator<D> { fn on_notify(&mut self, n: Notification, outputs: &[Box<dyn OutputProxy>]) -> Result<(), JobExecError> { debug_worker!("feedback: on notify of {:?} on {:?}", n.tag(), n.port); if n.port == 0 { let end = n.take_end(); if end.tag.is_root() { outputs[0].notify_end(end)?; } } Ok(()) } fn on_cancel( &mut self, port: Port, tag: Tag, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<bool, JobExecError> { assert_eq!(port.port, 0); inputs[0].cancel_scope(&tag); inputs[0].propagate_cancel(&tag)?; outputs[0].skip(&tag)?; Ok(true) } }
if !dataset.is_empty() { count += 1; output.push_batch_mut(dataset)?; }
if_condition
[ { "content": "pub fn new_input_session<D: Data>(input: &Box<dyn InputProxy>) -> InputSession<D> {\n\n RefWrapInput::<D>::downcast(input).new_session()\n\n}\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/communication/input/mod.rs", "rank": 0, "score": 305640.9122772126 ...
Rust
src/io/tls/rustls_io.rs
fulara/rust-mysql-simple
f985535a15ea689517d03b4b90fda0bee7d99115
#![cfg(feature = "rustls-tls")] use std::{ convert::TryInto, fs::File, io::{self, Read}, sync::Arc, }; use bufstream::BufStream; use rustls::{ client::{ServerCertVerifier, WebPkiVerifier}, Certificate, ClientConfig, OwnedTrustAnchor, RootCertStore, }; use rustls_pemfile::certs; use crate::{ io::{Stream, TcpStream}, Result, SslOpts, }; impl Stream { pub fn make_secure(self, host: url::Host, ssl_opts: SslOpts) -> Result<Stream> { if self.is_socket() { return Ok(self); } let domain = match host { url::Host::Domain(domain) => domain, url::Host::Ipv4(ip) => ip.to_string(), url::Host::Ipv6(ip) => ip.to_string(), }; let mut root_store = RootCertStore::empty(); root_store.add_server_trust_anchors(webpki_roots::TLS_SERVER_ROOTS.0.iter().map(|ta| { OwnedTrustAnchor::from_subject_spki_name_constraints( ta.subject, ta.spki, ta.name_constraints, ) })); if let Some(root_cert_path) = ssl_opts.root_cert_path() { let mut root_cert_data = vec![]; let mut root_cert_file = File::open(root_cert_path)?; root_cert_file.read_to_end(&mut root_cert_data)?; let mut root_certs = Vec::new(); for cert in certs(&mut &*root_cert_data)? { root_certs.push(Certificate(cert)); } if root_certs.is_empty() && !root_cert_data.is_empty() { root_certs.push(Certificate(root_cert_data)); } for cert in &root_certs { root_store.add(cert)?; } } let config_builder = ClientConfig::builder() .with_safe_defaults() .with_root_certificates(root_store.clone()); let mut config = if let Some(identity) = ssl_opts.client_identity() { let (cert_chain, priv_key) = identity.load()?; config_builder.with_single_cert(cert_chain, priv_key)? 
} else { config_builder.with_no_client_auth() }; let server_name = domain .as_str() .try_into() .map_err(|_| webpki::InvalidDnsNameError)?; let mut dangerous = config.dangerous(); let web_pki_verifier = WebPkiVerifier::new(root_store, None); let dangerous_verifier = DangerousVerifier::new( ssl_opts.accept_invalid_certs(), ssl_opts.skip_domain_validation(), web_pki_verifier, ); dangerous.set_certificate_verifier(Arc::new(dangerous_verifier)); match self { Stream::TcpStream(tcp_stream) => match tcp_stream { TcpStream::Insecure(insecure_stream) => { let inner = insecure_stream .into_inner() .map_err(io::Error::from) .unwrap(); let conn = rustls::ClientConnection::new(Arc::new(config), server_name).unwrap(); let secure_stream = rustls::StreamOwned::new(conn, inner); Ok(Stream::TcpStream(TcpStream::Secure(BufStream::new( secure_stream, )))) } TcpStream::Secure(_) => Ok(Stream::TcpStream(tcp_stream)), }, _ => unreachable!(), } } } struct DangerousVerifier { accept_invalid_certs: bool, skip_domain_validation: bool, verifier: WebPkiVerifier, } impl DangerousVerifier { fn new( accept_invalid_certs: bool, skip_domain_validation: bool, verifier: WebPkiVerifier, ) -> Self { Self { accept_invalid_certs, skip_domain_validation, verifier, } } } impl ServerCertVerifier for DangerousVerifier { fn verify_server_cert( &self, end_entity: &Certificate, intermediates: &[Certificate], server_name: &rustls::ServerName, scts: &mut dyn Iterator<Item = &[u8]>, ocsp_response: &[u8], now: std::time::SystemTime, ) -> std::result::Result<rustls::client::ServerCertVerified, rustls::Error> { if self.accept_invalid_certs { Ok(rustls::client::ServerCertVerified::assertion()) } else { match self.verifier.verify_server_cert( end_entity, intermediates, server_name, scts, ocsp_response, now, ) { Ok(assertion) => Ok(assertion), Err(ref e) if e.to_string().contains("CertNotValidForName") && self.skip_domain_validation => { Ok(rustls::client::ServerCertVerified::assertion()) } Err(e) => Err(e), } } } }
#![cfg(feature = "rustls-tls")] use std::{ convert::TryInto, fs::File, io::{self, Read}, sync::Arc, }; use bufstream::BufStream; use rustls::{ client::{ServerCertVerifier, WebPkiVerifier}, Certificate, ClientConfig, OwnedTrustAnchor, RootCertStore, }; use rustls_pemfile::certs; use crate::{ io::{Stream, TcpStream}, Result, SslOpts, }; impl Stream { pub fn make_secure(self, host: url::Host, ssl_opts: SslOpts) -> Result<Stream> { if self.is_socket() { return Ok(self); } let domain = match host { url::Host::Domain(domain) => domain, url::Host::Ipv4(ip) => ip.to_string(), url::Host::Ipv6(ip) => ip.to_string(), }; let mut root_store = RootCertStore::empty(); root_store.add_server_trust_anchors(webpki_roots::TLS_SERVER_ROOTS.0.iter().map(|ta| { OwnedTrustAnchor::from_subject_spki_name_constraints( ta.subject, ta.spki, ta.name_constraints, ) })); if let Some(root_cert_path) = ssl_opts.root_cert_path() { let mut root_cert_data = vec![]; let mut root_cert_file = File::open(root_cert_path)?; root_cert_file.read_to_end(&mut root_cert_data)?; let mut root_certs = Vec::new(); for cert in certs(&mut &*root_cert_data)? { root_certs.push(Certificate(cert)); } if root_certs.is_empty() && !root_cert_data.is_empty() { root_certs.push(Certificate(root_cert_data)); } for cert in &root_certs { root_store.add(cert)?; } } let config_builder = ClientConfig::builder() .with_safe_defaults() .with_root_certificates(root_store.clone()); let mut config = if let Some(identity) = ssl_opts.client_identity() { let (cert_chain, priv_key) = identity.load()?; config_builder.with_single_cert(cert_chain, priv_key)? 
} else { config_builder.with_no_client_auth() }; let server_name = domain .as_str() .try_into() .map_err(|_| webpki::InvalidDnsNameError)?; let mut dangerous = config.dangerous(); let web_pki_verifier = WebPkiVerifier::new(root_store, None); let dangerous_verifier = DangerousVerifier::new( ssl_opts.accept_invalid_certs(), ssl_opts.skip_domain_validation(), web_pki_verifier, ); dangerous.set_certificate_verifier(Arc::new(dangerous_verifier)); match self { Stream::TcpStream(tcp_stream) => match tcp_stream { TcpStream::Insecure(insecure_stream) => { let inner = insecure_stream .into_inner() .map_err(io::Error::from) .unwrap(); let conn = rustls::ClientConnection::new(Arc::new(config), server_name).unwrap(); let secure_stream = rustls::StreamOwned::new(conn, inner); Ok(Stream::TcpStream(TcpStream::Secure(BufStream::new( secure_stream, )))) } TcpStream::Secure(_) => Ok(Stream::TcpStream(tcp_stream)), }, _ => unreachable!(), } } } struct DangerousVerifier { accept_invalid_certs: bool, skip_domain_validation: bool, verifier: WebPkiVerifier, } impl DangerousVerifier { fn new( accept_invalid_certs: bool, skip_domain_validation: bool, verifier: WebPkiVerifier, ) -> Self { Self { accept_invalid_certs, skip_domain_validation, verifier, } } } impl ServerCertVerifier for DangerousVerifier { fn verify_server_cert( &self, end_entity: &Certificate, intermediates: &[Certificate], server_name: &rustls::ServerName, scts: &mut dyn Iterator<Item = &[u8]>, ocsp_response: &[u8], now: std::time::SystemTime, ) -> std::result::Result<rustls::client::ServerCertVerified, rustls::Error> { if self.accept_invalid_certs { Ok(rustls::client::ServerCertVerified::assertion()) } else { match self.verifier.verify_server_cer
}
t( end_entity, intermediates, server_name, scts, ocsp_response, now, ) { Ok(assertion) => Ok(assertion), Err(ref e) if e.to_string().contains("CertNotValidForName") && self.skip_domain_validation => { Ok(rustls::client::ServerCertVerified::assertion()) } Err(e) => Err(e), } } }
function_block-function_prefixed
[ { "content": " /// Trait for protocol markers [`crate::Binary`] and [`crate::Text`].\n\n pub trait Protocol: crate::conn::query_result::Protocol {}\n\n impl Protocol for crate::Binary {}\n\n impl Protocol for crate::Text {}\n\n}\n\n\n\n#[doc(inline)]\n\npub use crate::myc::params;\n\n\n\n#[doc(hidde...
Rust
tests/fuzz.rs
johannesvollmer/rust-openexr
404e2a123fb8191e86067c3d56fc045113d02161
use std::panic::{catch_unwind}; use rand::rngs::{StdRng}; use rand::{Rng}; extern crate exr; use exr::prelude::*; use std::path::PathBuf; use std::ffi::OsStr; use std::fs::File; use std::io::{Write, Cursor}; use exr::image::read::read_first_rgba_layer_from_file; use exr::image::pixel_vec::PixelVec; fn exr_files(path: &'static str, filter: bool) -> impl Iterator<Item=PathBuf> { walkdir::WalkDir::new(path).into_iter().map(std::result::Result::unwrap) .filter(|entry| entry.path().is_file()) .filter(move |entry| !filter || entry.path().extension() == Some(OsStr::new("exr"))) .map(walkdir::DirEntry::into_path) } #[test] pub fn fuzzed(){ for ref file in exr_files("tests/images/fuzzed", false) { let _ = read().no_deep_data().largest_resolution_level().all_channels() .first_valid_layer().all_attributes().pedantic().from_file(file); let _ = read().no_deep_data().all_resolution_levels().all_channels() .all_layers().all_attributes().pedantic().from_file(file); } } #[test] pub fn damaged(){ let mut passed = true; for ref file in exr_files("tests/images/invalid", false) { let result = catch_unwind(move || { let _meta_data = MetaData::read_from_file(file, false)?; { let _minimal = read().no_deep_data() .largest_resolution_level() .rgba_channels( |_size, _channels| (), |_: &mut (), _position: Vec2<usize>, _pixel: (Sample, Sample, Sample, Sample)| {} ) .first_valid_layer().all_attributes() .from_file(&file)?; } { let _minimal = read().no_deep_data() .largest_resolution_level() .rgba_channels( |_size, _channels| (), |_: &mut (), _position: Vec2<usize>, _pixel: (Sample, Sample, Sample, Sample)| {} ) .all_layers().all_attributes() .pedantic() .from_file(&file)?; } { let _rgba = read_first_rgba_layer_from_file( file, PixelVec::<(Sample, Sample, Sample, Sample)>::constructor, PixelVec::set_pixel )?; } { let _full = read_all_data_from_file(file)?; } Ok(()) }); passed = passed && match result { Ok(Err(Error::Invalid(message))) => { println!("✓ Recognized as invalid ({}): {:?}", message, 
file); true }, Ok(Err(Error::NotSupported(message))) => { println!("- Unsupported ({}): {:?}", message, file); true }, Ok(Err(Error::Io(error))) => { println!("✗ Unexpected IO Error: {:?}, {:?}", file, error); false }, Err(_) => { println!("✗ Not recognized as invalid: {:?}", file); false }, Ok(Ok(_)) => { let meta_data = MetaData::read_from_file(file, true); if let Err(error) = meta_data { println!("✓ Recognized as invalid when pedantic ({}): {:?}", error, file); true } else { println!("✗ Oh no, there is nothing wrong with: {:#?}", file); false } }, _ => unreachable!(), }; } assert!(passed, "A damaged file was not handled correctly"); } #[test] #[ignore] pub fn fuzz(){ println!("started fuzzing"); let files: Vec<PathBuf> = exr_files("tests/images", true).collect(); let seed = [92,1,0,130,211,8,21,70,74,4,9,5,0,23,0,3,20,25,6,5,229,30,0,34,218,0,40,7,5,2,7,0,]; let mut random: StdRng = rand::SeedableRng::from_seed(seed); let mut records = File::create("tests/images/fuzzed/list.txt").unwrap(); records.write_all(format!("seed = {:?}", seed).as_bytes()).unwrap(); let start_index = 0; for fuzz_index in 0 .. 1024_u64 * 2048 * 4 { let file_1_name = &files[random.gen_range(0 .. files.len())]; let mutation_point = random.gen::<f32>().powi(3); let mutation = random.gen::<u8>(); if fuzz_index >= start_index { let mut file = std::fs::read(file_1_name).unwrap(); let index = ((mutation_point * file.len() as f32) as usize + 4) % file.len(); file[index] = mutation; let file = file.as_slice(); let result = catch_unwind(move || { let read_all_data = read().no_deep_data() .all_resolution_levels().all_channels().all_layers().all_attributes(); match read_all_data.from_buffered(Cursor::new(file)) { Err(Error::Invalid(error)) => println!("✓ No Panic. [{}]: Invalid: {}.", fuzz_index, error), Err(Error::NotSupported(error)) => println!("- No Panic. 
[{}]: Unsupported: {}.", fuzz_index, error), _ => {}, } }); if let Err(_) = result { records.write_all(fuzz_index.to_string().as_bytes()).unwrap(); records.flush().unwrap(); let seed = seed.iter().map(|num| num.to_string()).collect::<Vec<String>>().join("-"); let mut saved = File::create(format!("tests/images/fuzzed/fuzz_{}_{}.exr", fuzz_index, seed)).unwrap(); saved.write_all(file).unwrap(); println!("✗ PANIC! [{}]", fuzz_index); } } } }
use std::panic::{catch_unwind}; use rand::rngs::{StdRng}; use rand::{Rng}; extern crate exr; use exr::prelude::*; use std::path::PathBuf; use std::ffi::OsStr; use std::fs::File; use std::io::{Write, Cursor}; use exr::image::read::read_first_rgba_layer_from_file; use exr::image::pixel_vec::PixelVec; fn exr_files(path: &'static str, filter: bool) -> impl Iterator<Item=PathBuf> { walkdir::WalkDir::new(path).into_iter().map(std::result::Result::unwrap) .filter(|entry| entry.path().is_file()) .filter(move |entry| !filter || entry.path().extension() == Some(OsStr::new("exr"))) .map(walkdir::DirEntry::into_path) } #[test] pub fn fuzzed(){ for ref file in exr_files("tests/images/fuzzed", false) { let _ = read().no_deep_data().largest_resolution_level().all_channels() .first_valid_layer().all_attributes().pedantic().from_file(file); let _ = read().no_deep_data().all_resolution_levels().all_channels() .all_layers().all_attributes().pedantic().from_file(file); } } #[test] pub fn damaged(){ let mut passed = true; for ref file in exr_files("tests/images/invalid", false) { let result = catch_unwind(move || { let _meta_data = MetaData::read_from_file(file, false)?; { let _minimal = read().no_deep_data() .largest_resolution_level() .rgba_channels( |_size, _channels| (), |_: &mut (), _position: Vec2<usize>, _pixel: (Sample, Sample, Sample, Sample)| {} ) .first_valid_layer().all_attributes() .from_file(&file)?; } { let _minimal = read().no_deep_data() .largest_resolution_level() .rgba_channels( |_size, _channels| (), |_: &mut (), _position: Vec2<usize>, _pixel: (Sample, Sample, Sample, Sample)| {} ) .all_layers().all_attributes() .pedantic() .from_file(&file)?; } { let _rgba = read_first_rgba_layer_from_file( file, PixelVec::<(Sample, Sample, Sample, Sample)>::constructor, PixelVec::set_pixel )?; } { let _full = read_all_data_from_file(file)?; } Ok(()) }); passed = passed && match result { Ok(Err(Error::Invalid(message))) => { println!("✓ Recognized as invalid ({}): {:?}", message, 
file); true }, Ok(Err(Error::NotSupported(message))) => { println!("- Unsupported ({}): {:?}", message, file); true }, Ok(Err(Error::Io(error))) => { println!("✗ Unexpected IO Error: {:?}, {:?}", file, error); false }, Err(_) => { println!("✗ Not recognized as invalid: {:?}", file); false }, Ok(Ok(_)) => { let meta_data = MetaData::read_from_file(file, true); if let Err(error) = meta_data { println!("✓ Recognized as invalid when
tes(); match read_all_data.from_buffered(Cursor::new(file)) { Err(Error::Invalid(error)) => println!("✓ No Panic. [{}]: Invalid: {}.", fuzz_index, error), Err(Error::NotSupported(error)) => println!("- No Panic. [{}]: Unsupported: {}.", fuzz_index, error), _ => {}, } }); if let Err(_) = result { records.write_all(fuzz_index.to_string().as_bytes()).unwrap(); records.flush().unwrap(); let seed = seed.iter().map(|num| num.to_string()).collect::<Vec<String>>().join("-"); let mut saved = File::create(format!("tests/images/fuzzed/fuzz_{}_{}.exr", fuzz_index, seed)).unwrap(); saved.write_all(file).unwrap(); println!("✗ PANIC! [{}]", fuzz_index); } } } }
pedantic ({}): {:?}", error, file); true } else { println!("✗ Oh no, there is nothing wrong with: {:#?}", file); false } }, _ => unreachable!(), }; } assert!(passed, "A damaged file was not handled correctly"); } #[test] #[ignore] pub fn fuzz(){ println!("started fuzzing"); let files: Vec<PathBuf> = exr_files("tests/images", true).collect(); let seed = [92,1,0,130,211,8,21,70,74,4,9,5,0,23,0,3,20,25,6,5,229,30,0,34,218,0,40,7,5,2,7,0,]; let mut random: StdRng = rand::SeedableRng::from_seed(seed); let mut records = File::create("tests/images/fuzzed/list.txt").unwrap(); records.write_all(format!("seed = {:?}", seed).as_bytes()).unwrap(); let start_index = 0; for fuzz_index in 0 .. 1024_u64 * 2048 * 4 { let file_1_name = &files[random.gen_range(0 .. files.len())]; let mutation_point = random.gen::<f32>().powi(3); let mutation = random.gen::<u8>(); if fuzz_index >= start_index { let mut file = std::fs::read(file_1_name).unwrap(); let index = ((mutation_point * file.len() as f32) as usize + 4) % file.len(); file[index] = mutation; let file = file.as_slice(); let result = catch_unwind(move || { let read_all_data = read().no_deep_data() .all_resolution_levels().all_channels().all_layers().all_attribu
random
[ { "content": "pub fn decompress_bytes(mut remaining: Bytes<'_>, expected_byte_size: usize, pedantic: bool) -> Result<ByteVec> {\n\n let mut decompressed = Vec::with_capacity(expected_byte_size.min(8*2048));\n\n\n\n while !remaining.is_empty() && decompressed.len() != expected_byte_size {\n\n let co...
Rust
src/bin/join.rs
andreaskipf/parquet-sampler
c04e6f07e1872c6c8e1ab85e699206f0e804db6b
use std::collections::HashSet; use std::env; use std::fs::File; use std::path::Path; use parquet::file::reader::{FileReader, SerializedFileReader}; use parquet::schema::types::Type; fn main() { let args: Vec<String> = env::args().collect(); let in_file_name = &args[1]; let in_file_col_name = &args[2]; let semi_join_file_name = &args[3]; let semi_join_file_col_name = &args[4]; let out_file_name = &args[5]; println!("input file: {}", in_file_name); println!("output file: {}", out_file_name); println!("semi join file: {}", semi_join_file_name); println!("semi join col: {}", semi_join_file_col_name); let semi_join_file = File::open(&Path::new(semi_join_file_name)).unwrap(); let semi_join_reader = SerializedFileReader::new(semi_join_file).unwrap(); let semi_join_file_metadata = semi_join_reader.metadata().file_metadata(); let num_build_rows = semi_join_file_metadata.num_rows(); println!("num build side rows: {}", num_build_rows); println!("building hash table..."); let mut ht = HashSet::<String>::new(); let fields = semi_join_file_metadata.schema().get_fields(); let mut selected_fields = fields.to_vec(); selected_fields.retain(|f| f.name() == semi_join_file_col_name); assert_eq!(selected_fields.len(), 1); let schema_projection = Type::group_type_builder("schema") .with_fields(&mut selected_fields) .build() .unwrap(); let mut curr_row = 0; let mut iter = semi_join_reader .get_row_iter(Some(schema_projection)) .unwrap(); while let Some(row) = iter.next() { if curr_row % 1000000 == 0 { println!( "read {}% of input", (curr_row as f64 * 100.0 / num_build_rows as f64) as usize ); } let val = row .get_column_iter() .map(|c| c.1.to_string()) .nth(0) .unwrap(); ht.insert(val); curr_row += 1; } println!("finished building hash table (num entries: {})", ht.len()); let in_file = File::open(&Path::new(in_file_name)).unwrap(); let in_reader = SerializedFileReader::new(in_file).unwrap(); let in_file_metadata = in_reader.metadata().file_metadata(); let num_probe_rows = 
in_file_metadata.num_rows(); println!("num probe side rows: {}", num_probe_rows); let probe_side_fields = in_file_metadata.schema().get_fields(); let mut probe_side_col_idx = 0; let probe_side_fields_vec = probe_side_fields.to_vec(); for f in probe_side_fields.to_vec() { if f.name() == in_file_col_name { break; } probe_side_col_idx += 1; } assert_ne!(probe_side_col_idx, probe_side_fields_vec.len()); println!("probing hash table..."); let mut output_rows: Vec<parquet::record::Row> = Vec::new(); curr_row = 0; iter = in_reader.get_row_iter(None).unwrap(); while let Some(row) = iter.next() { if curr_row % 1000000 == 0 { println!( "read {}% of input", (curr_row as f64 * 100.0 / num_probe_rows as f64) as usize ); } let val = row .get_column_iter() .map(|c| c.1.to_string()) .nth(probe_side_col_idx) .unwrap(); if ht.contains(&val) { output_rows.push(row); } curr_row += 1; } println!("finished probing hash table"); println!( "semi-join reduced input by {} rows", num_probe_rows as usize - output_rows.len() ); parquet_sampler::write_output(&output_rows, in_file_metadata.schema(), out_file_name); }
use std::collections::HashSet; use std::env; use std::fs::File; use std::path::Path; use parquet::file::reader::{FileReader, SerializedFileReader}; use parquet::schema::types::Type; fn main() { let args: Vec<String> = env::args().collect(); let in_file_name = &args[1]; let in_file_col_name = &args[2]; let semi_join_file_name = &args[3]; let semi_join_file_col_name = &args[4]; let out_file_name = &args[5]; println!("input file: {}", in_file_name); println!("output file: {}", out_file_name); println!("semi join file: {}", semi_join_file_name); println!("semi join col: {}", semi_join_file_col_name); let semi_join_file = File::open(&Path::new(semi_join_file_name)).unwrap(); let semi_join_reader = SerializedFileReader::new(semi_join_file).unwrap(); let semi_join_file_metadata = semi_join_reader.metadata().file_metadata(); let num_build_rows = semi_join_file_metadata.num_rows(); println!("num build side rows: {}", num_build_rows); println!("building hash table..."); let mut ht = HashSet::<String>::new(); let fields = semi_join_file_metadata.schema().get_fields(); let mut selected_fields = fields.to_vec(); selected_fields.retain(|f| f.name() == semi_join_file_col_name); assert_eq!(selected_fields.len(), 1); let schema_projection = Type::group_type_builder("schema") .with_fields(&mut selected_fields) .build() .unwrap(); let mut curr_row = 0; let mut iter = semi_join_reader .get_row_iter(Some(schema_projection)) .unwrap(); while let Some(row) = iter.next() { if curr_row % 1000000 == 0 { println!( "read {}% of input", (curr_row as f64 * 100.0 / num_build_rows as f64) as usize ); } let val = row .get_column_iter() .map(|c| c.1.to_string()) .nth(0) .unwrap(); ht.insert(val); curr_row += 1; } println!("finished building hash table (num entries: {})", ht.len()); let in_file = File::open(&Path::new(in_file_name)).unwrap(); let in_reader = SerializedFileReader::new(in_file).unwrap(); let in_file_metadata = in_reader.metadata().file_metadata(); let num_probe_rows = 
in_file_metadata.num_rows(); println!("num probe side rows: {}", num_probe_rows); let probe_side_fields = in_file_metadata.schema().get_fields(); let mut probe_side_col_idx = 0; let probe_side_fields_vec = probe_side_fields.to_vec(); for f in probe_side_fields.to_vec() { if f.name() == in_file_col_name { break; } probe_side_col_idx += 1; } assert_ne!(probe_side_col_idx, probe_side_fields_vec.len()); println!("probing hash table..."); let mut output_rows: Vec<parquet::record::Row> = Vec::new(); curr_row = 0; iter = in_reader.get_row_iter(None).unwrap(); while let Some(row) = iter.next() {
let val = row .get_column_iter() .map(|c| c.1.to_string()) .nth(probe_side_col_idx) .unwrap(); if ht.contains(&val) { output_rows.push(row); } curr_row += 1; } println!("finished probing hash table"); println!( "semi-join reduced input by {} rows", num_probe_rows as usize - output_rows.len() ); parquet_sampler::write_output(&output_rows, in_file_metadata.schema(), out_file_name); }
if curr_row % 1000000 == 0 { println!( "read {}% of input", (curr_row as f64 * 100.0 / num_probe_rows as f64) as usize ); }
if_condition
[ { "content": "pub fn write_output(rows: &Vec<parquet::record::Row>, schema: &Type, file_name: &String) {\n\n // Create output parquet file.\n\n let path = Path::new(&file_name);\n\n let file = File::create(&path).unwrap();\n\n\n\n // Parquet writer requires the schema as a string.\n\n let mut buf = Vec::ne...
Rust
src/day18/mod.rs
hulthe/aoc2002
1bb5fdbd6dcce9670becaf1d6db81c23d21a57d4
#[derive(Clone, Copy, Debug)] pub enum Token { Op(Op), Num(u64), LParen, RParen, } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum Op { Add, Mul, } #[derive(Clone, Copy, Debug, PartialEq, Eq)] enum RPN { Op(Op), Num(u64), } pub fn parse<'a>(input: &'a str) -> impl Iterator<Item = impl Iterator<Item = Token> + 'a> + 'a { input.lines().map(|line| { line.chars().filter_map(|c| { Some(match c { ')' => Token::RParen, '(' => Token::LParen, '*' => Token::Op(Op::Mul), '+' => Token::Op(Op::Add), '0' => Token::Num(0), '1' => Token::Num(1), '2' => Token::Num(2), '3' => Token::Num(3), '4' => Token::Num(4), '5' => Token::Num(5), '6' => Token::Num(6), '7' => Token::Num(7), '8' => Token::Num(8), '9' => Token::Num(9), ' ' => return None, _ => panic!("unexpected char: '{}'", c), }) }) }) } fn to_rpn<F>(input: &str, has_higher_prescedence: F) -> Vec<RPN> where F: Fn(Op, Op) -> bool, { #[derive(Clone, Copy, Debug)] enum StackElem { StackFrame, Op(Op), } fn first_true() -> impl Iterator<Item = bool> { use std::iter; iter::once(true).chain(iter::repeat(false)) } let mut stack = vec![]; let mut output = vec![]; for (line, is_first) in parse(input).zip(first_true()) { for token in line { match token { Token::Num(num) => output.push(RPN::Num(num)), Token::Op(op) => { match stack.last().copied() { None | Some(StackElem::StackFrame) => {} Some(StackElem::Op(prev_op)) if has_higher_prescedence(op, prev_op) => {} Some(StackElem::Op(prev_op)) => { output.push(RPN::Op(prev_op)); stack.pop(); } } stack.push(StackElem::Op(op)); } Token::LParen => stack.push(StackElem::StackFrame), Token::RParen => loop { match stack.pop() { None | Some(StackElem::StackFrame) => break, Some(StackElem::Op(op)) => output.push(RPN::Op(op)), } }, } } while let Some(elem) = stack.pop() { match elem { StackElem::Op(op) => output.push(RPN::Op(op)), StackElem::StackFrame => {} } } if !is_first { output.push(RPN::Op(Op::Add)); } } output } fn evalutate_rpn(ops: &[RPN]) -> u64 { let mut stack = Vec::new(); for &op in 
ops { match op { RPN::Num(num) => stack.push(num), RPN::Op(op) => { let rhs = stack.pop().expect("invalid op arg count, was 0"); let lhs = stack.pop().expect("invalid op arg count, was 1"); let result = match op { Op::Add => lhs + rhs, Op::Mul => lhs * rhs, }; stack.push(result); } } } debug_assert_eq!( stack.len(), 1, "invalid input, stack len was {} instead of 1", stack.len() ); stack.pop().unwrap() } pub fn part1(input: &str) -> u64 { let rpn = to_rpn( input, |_, _| false, /* no operator has a higher prescedence */ ); evalutate_rpn(&rpn) } pub fn part2(input: &str) -> u64 { let rpn = to_rpn( input, |op1, op2| (op1, op2) == (Op::Add, Op::Mul), ); evalutate_rpn(&rpn) } #[cfg(test)] mod tests { use super::{part1, part2}; #[test] pub fn test_part1() { let input = include_str!("test-input"); assert_eq!(part1(input), 26335); } #[test] pub fn test_part2() { let input = include_str!("test-input"); assert_eq!(part2(input), 693891); } }
#[derive(Clone, Copy, Debug)] pub enum Token { Op(Op), Num(u64), LParen, RParen, } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum Op { Add, Mul, } #[derive(Clone, Copy, Debug, PartialEq, Eq)] enum RPN { Op(Op), Num(u64), } pub fn parse<'a>(input: &'a str) -> impl Iterator<Item = impl Iterator<Item = Token> + 'a> + 'a { input.lines().map(|line| { line.chars().filter_map(|c| { Some(match c { ')' => Token::RParen, '(' => Token::LParen, '*' => Token::Op(Op::Mul), '+' => Token::Op(Op::Add), '0' => Token::Num(0), '1' => Token::Num(1), '2' => Token::Num(2), '3' => Token::Num(3), '4' => Token::Num(4), '5' => Token::Num(5), '6' => Token::Num(6), '7' => Token::Num(7), '8' => Token::Num(8), '9' => Token::Num(9), ' ' => return None, _ => panic!("unexpected char: '{}'", c), }) }) }) } fn to_rpn<F>(input: &str, has_higher_prescedence: F) -> Vec<RPN> where F: Fn(Op, Op) -> bool, { #[derive(Clone, Copy, Debug)] enum StackElem { StackFrame, Op(Op), } fn first_true() -> impl Iterator<Item = bool> { use std::iter; iter::once(true).chain(iter::repeat(false)) } let mut stack = vec![]; let mut output = vec![]; for (line, is_first) in parse(input).zip(first_true()) { for token in line { match token { Token::Num(num) => output.push(RPN::Num(num)), Token::Op(op) => { match stack.last().copied() { None | Some(StackElem::StackFrame) => {} Some(StackElem::Op(prev_op)) if has_higher_prescedence(op, prev_op) => {} Some(StackElem::Op(prev_op)) => { output.push(RPN::Op(prev_op)); stack.pop(); } } stack.push(StackElem::Op(op)); } Token::LParen => stack.push(StackElem::StackFrame), Token::RParen => loop { match stack.pop() { None | Some(StackElem::StackFrame) => brea
fn evalutate_rpn(ops: &[RPN]) -> u64 { let mut stack = Vec::new(); for &op in ops { match op { RPN::Num(num) => stack.push(num), RPN::Op(op) => { let rhs = stack.pop().expect("invalid op arg count, was 0"); let lhs = stack.pop().expect("invalid op arg count, was 1"); let result = match op { Op::Add => lhs + rhs, Op::Mul => lhs * rhs, }; stack.push(result); } } } debug_assert_eq!( stack.len(), 1, "invalid input, stack len was {} instead of 1", stack.len() ); stack.pop().unwrap() } pub fn part1(input: &str) -> u64 { let rpn = to_rpn( input, |_, _| false, /* no operator has a higher prescedence */ ); evalutate_rpn(&rpn) } pub fn part2(input: &str) -> u64 { let rpn = to_rpn( input, |op1, op2| (op1, op2) == (Op::Add, Op::Mul), ); evalutate_rpn(&rpn) } #[cfg(test)] mod tests { use super::{part1, part2}; #[test] pub fn test_part1() { let input = include_str!("test-input"); assert_eq!(part1(input), 26335); } #[test] pub fn test_part2() { let input = include_str!("test-input"); assert_eq!(part2(input), 693891); } }
k, Some(StackElem::Op(op)) => output.push(RPN::Op(op)), } }, } } while let Some(elem) = stack.pop() { match elem { StackElem::Op(op) => output.push(RPN::Op(op)), StackElem::StackFrame => {} } } if !is_first { output.push(RPN::Op(Op::Add)); } } output }
function_block-function_prefixed
[ { "content": "/// Map every (parent, num, child) combination of the input into a closure\n\nfn parse_into<'a, F>(input: &'a str, mut into: F)\n\nwhere\n\n F: FnMut(&'a str, usize, &'a str),\n\n{\n\n for capture in input.lines().flat_map(|line| LINE_RGX.captures(line)) {\n\n let parent = capture.nam...
Rust
exonum/src/runtime/execution_context.rs
mobilipia/milestone-core
53134d0816264f9c60d3b86e28947669c3ccc609
use crate::{ blockchain::Schema as CoreSchema, crypto::{Hash, PublicKey}, helpers::{Height, ValidateInput}, merkledb::{access::Prefixed, BinaryValue, Fork}, runtime::{ ArtifactId, BlockchainData, CallSite, CallType, Caller, CoreError, Dispatcher, DispatcherSchema, ExecutionError, InstanceDescriptor, InstanceId, InstanceQuery, InstanceSpec, InstanceStatus, MethodId, SUPERVISOR_INSTANCE_ID, }, }; const ACCESS_ERROR_STR: &str = "An attempt to access blockchain data after execution error."; #[derive(Debug)] pub struct ExecutionContext<'a> { pub(crate) fork: &'a mut Fork, caller: Caller, interface_name: &'a str, instance: InstanceDescriptor, transaction_hash: Option<Hash>, dispatcher: &'a Dispatcher, call_stack_depth: u64, has_child_call_error: &'a mut bool, } impl<'a> ExecutionContext<'a> { pub const MAX_CALL_STACK_DEPTH: u64 = 128; pub(crate) fn for_transaction( dispatcher: &'a Dispatcher, fork: &'a mut Fork, has_child_call_error: &'a mut bool, instance: InstanceDescriptor, author: PublicKey, transaction_hash: Hash, ) -> Self { Self::new( dispatcher, fork, has_child_call_error, instance, Caller::Transaction { author }, Some(transaction_hash), ) } pub(crate) fn for_block_call( dispatcher: &'a Dispatcher, fork: &'a mut Fork, has_child_call_error: &'a mut bool, instance: InstanceDescriptor, ) -> Self { Self::new( dispatcher, fork, has_child_call_error, instance, Caller::Blockchain, None, ) } fn new( dispatcher: &'a Dispatcher, fork: &'a mut Fork, has_child_call_error: &'a mut bool, instance: InstanceDescriptor, caller: Caller, transaction_hash: Option<Hash>, ) -> Self { Self { dispatcher, fork, instance, caller, transaction_hash, interface_name: "", call_stack_depth: 0, has_child_call_error, } } pub fn transaction_hash(&self) -> Option<Hash> { self.transaction_hash } pub fn data(&self) -> BlockchainData<&Fork> { if *self.has_child_call_error { panic!(ACCESS_ERROR_STR); } BlockchainData::new(self.fork, &self.instance.name) } pub fn service_data(&self) -> Prefixed<&Fork> { 
self.data().for_executing_service() } pub fn caller(&self) -> &Caller { &self.caller } pub fn instance(&self) -> &InstanceDescriptor { &self.instance } pub fn in_genesis_block(&self) -> bool { let core_schema = self.data().for_core(); core_schema.next_height() == Height(0) } pub fn interface_name(&self) -> &str { self.interface_name } #[doc(hidden)] pub fn supervisor_extensions(&mut self) -> SupervisorExtensions<'_> { if self.instance.id != SUPERVISOR_INSTANCE_ID { panic!("`supervisor_extensions` called within a non-supervisor service"); } SupervisorExtensions(self.reborrow(self.instance.clone())) } pub(crate) fn initiate_adding_service( &mut self, spec: InstanceSpec, constructor: impl BinaryValue, ) -> Result<(), ExecutionError> { debug_assert!(spec.validate().is_ok(), "{:?}", spec.validate()); let runtime = self .dispatcher .runtime_by_id(spec.artifact.runtime_id) .ok_or(CoreError::IncorrectRuntime)?; let context = self.reborrow(spec.as_descriptor()); runtime .initiate_adding_service(context, &spec.artifact, constructor.into_bytes()) .map_err(|mut err| { self.should_rollback(); err.set_runtime_id(spec.artifact.runtime_id) .set_call_site(|| CallSite::new(spec.id, CallType::Constructor)); err })?; DispatcherSchema::new(&*self.fork) .initiate_adding_service(spec) .map_err(From::from) } fn reborrow(&mut self, instance: InstanceDescriptor) -> ExecutionContext<'_> { if *self.has_child_call_error { panic!(ACCESS_ERROR_STR); } ExecutionContext { fork: &mut *self.fork, caller: self.caller.clone(), transaction_hash: self.transaction_hash, instance, interface_name: self.interface_name, dispatcher: self.dispatcher, call_stack_depth: self.call_stack_depth, has_child_call_error: self.has_child_call_error, } } fn child_context<'s>( &'s mut self, interface_name: &'s str, instance: InstanceDescriptor, fallthrough_auth: bool, ) -> ExecutionContext<'s> { if *self.has_child_call_error { panic!(ACCESS_ERROR_STR); } let caller = if fallthrough_auth { self.caller.clone() } else { 
Caller::Service { instance_id: self.instance.id, } }; ExecutionContext { caller, transaction_hash: self.transaction_hash, dispatcher: self.dispatcher, instance, fork: &mut *self.fork, interface_name, call_stack_depth: self.call_stack_depth + 1, has_child_call_error: self.has_child_call_error, } } pub(crate) fn should_rollback(&mut self) { *self.has_child_call_error = true; } } #[doc(hidden)] pub trait ExecutionContextUnstable { fn make_child_call<'q>( &mut self, called_instance: impl Into<InstanceQuery<'q>>, interface_name: &str, method_id: MethodId, arguments: &[u8], fallthrough_auth: bool, ) -> Result<(), ExecutionError>; } impl ExecutionContextUnstable for ExecutionContext<'_> { fn make_child_call<'q>( &mut self, called_instance: impl Into<InstanceQuery<'q>>, interface_name: &str, method_id: MethodId, arguments: &[u8], fallthrough_auth: bool, ) -> Result<(), ExecutionError> { if self.call_stack_depth + 1 >= Self::MAX_CALL_STACK_DEPTH { let err = CoreError::stack_overflow(Self::MAX_CALL_STACK_DEPTH); return Err(err); } let descriptor = self .dispatcher .get_service(called_instance) .ok_or(CoreError::IncorrectInstanceId)?; let instance_id = descriptor.id; let (runtime_id, runtime) = self .dispatcher .runtime_for_service(instance_id) .ok_or(CoreError::IncorrectRuntime)?; let context = self.child_context(interface_name, descriptor, fallthrough_auth); runtime .execute(context, method_id, arguments) .map_err(|mut err| { self.should_rollback(); err.set_runtime_id(runtime_id).set_call_site(|| { CallSite::new( instance_id, CallType::Method { interface: interface_name.to_owned(), id: method_id, }, ) }); err }) } } #[doc(hidden)] #[derive(Debug)] pub struct SupervisorExtensions<'a>(pub(super) ExecutionContext<'a>); impl<'a> SupervisorExtensions<'a> { pub fn start_artifact_registration(&self, artifact: &ArtifactId, spec: Vec<u8>) { Dispatcher::commit_artifact(self.0.fork, artifact, spec); } pub fn initiate_adding_service( &mut self, instance_spec: InstanceSpec, constructor: 
impl BinaryValue, ) -> Result<(), ExecutionError> { self.0 .child_context("", self.0.instance.clone(), false) .initiate_adding_service(instance_spec, constructor) } pub fn initiate_stopping_service(&self, instance_id: InstanceId) -> Result<(), ExecutionError> { Dispatcher::initiate_stopping_service(self.0.fork, instance_id) } pub fn initiate_resuming_service( &mut self, instance_id: InstanceId, artifact: ArtifactId, params: impl BinaryValue, ) -> Result<(), ExecutionError> { let state = DispatcherSchema::new(&*self.0.fork) .get_instance(instance_id) .ok_or(CoreError::IncorrectInstanceId)?; if state.status != Some(InstanceStatus::Stopped) { return Err(CoreError::ServiceNotStopped.into()); } let mut spec = state.spec; spec.artifact = artifact; let runtime = self .0 .dispatcher .runtime_by_id(spec.artifact.runtime_id) .ok_or(CoreError::IncorrectRuntime)?; runtime .initiate_resuming_service( self.0.child_context("", spec.as_descriptor(), false), &spec.artifact, params.into_bytes(), ) .map_err(|mut err| { self.0.should_rollback(); err.set_runtime_id(spec.artifact.runtime_id) .set_call_site(|| CallSite::new(instance_id, CallType::Resume)); err })?; DispatcherSchema::new(&*self.0.fork) .initiate_resuming_service(instance_id, spec.artifact) .map_err(From::from) } pub fn writeable_core_schema(&self) -> CoreSchema<&Fork> { CoreSchema::new(self.0.fork) } pub fn initiate_migration( &self, new_artifact: ArtifactId, old_service: &str, ) -> Result<(), ExecutionError> { self.0 .dispatcher .initiate_migration(self.0.fork, new_artifact, old_service) } pub fn rollback_migration(&self, service_name: &str) -> Result<(), ExecutionError> { Dispatcher::rollback_migration(self.0.fork, service_name) } pub fn commit_migration( &self, service_name: &str, migration_hash: Hash, ) -> Result<(), ExecutionError> { Dispatcher::commit_migration(self.0.fork, service_name, migration_hash) } pub fn flush_migration(&mut self, service_name: &str) -> Result<(), ExecutionError> { 
Dispatcher::flush_migration(self.0.fork, service_name) } }
use crate::{ blockchain::Schema as CoreSchema, crypto::{Hash, PublicKey}, helpers::{Height, ValidateInput}, merkledb::{access::Prefixed, BinaryValue, Fork}, runtime::{ ArtifactId, BlockchainData, CallSite, CallType, Caller, CoreError, Dispatcher, DispatcherSchema, ExecutionError, InstanceDescriptor, InstanceId, InstanceQuery, InstanceSpec, InstanceStatus, MethodId, SUPERVISOR_INSTANCE_ID, }, }; const ACCESS_ERROR_STR: &str = "An attempt to access blockchain data after execution error."; #[derive(Debug)] pub struct ExecutionContext<'a> { pub(crate) fork: &'a mut Fork, caller: Caller, interface_name: &'a str, instance: InstanceDescriptor, transaction_hash: Option<Hash>, dispatcher: &'a Dispatcher, call_stack_depth: u64, has_child_call_error: &'a mut bool, } impl<'a> ExecutionContext<'a> { pub const MAX_CALL_STACK_DEPTH: u64 = 128; pub(crate) fn for_transaction( dispatcher: &'a Dispatcher, fork: &'a mut Fork, has_child_call_error: &'a mut bool, instance: InstanceDescriptor, author: PublicKey, transaction_hash: Hash, ) -> Self { Self::new( dispatcher, fork, has_child_call_error, instance, Caller::Transaction { author }, Some(transaction_hash), ) } pub(crate) fn for_block_call( dispatcher: &'a Dispatcher, fork: &'a mut Fork, has_child_call_error: &'a mut bool, instance: InstanceDescriptor, ) -> Self { Self::new( dispatcher, fork, has_child_call_error, instance, Caller::Blockchain, None, ) } fn new( dispatcher: &'a Dispatcher, fork: &'a mut Fork, has_child_call_error: &'a mut bool, instance: InstanceDescriptor, caller: Caller, transaction_hash: Option<Hash>, ) -> Self { Self { dispatcher, fork, instance, caller, transaction_hash, interface_name: "", call_stack_depth: 0, has_child_call_error, } } pub fn transaction_hash(&self) -> Option<Hash> { self.transaction_hash } pub fn data(&self) -> BlockchainData<&Fork> { if *self.has_child_call_error { panic!(ACCESS_ERROR_STR); } BlockchainData::new(self.fork, &self.instance.name) } pub fn service_data(&self) -> Prefixed<&Fork> { 
self.data().for_executing_service() } pub fn caller(&self) -> &Caller { &self.caller } pub fn instance(&self) -> &InstanceDescriptor { &self.instance } pub fn in_genesis_block(&self) -> bool { let core_schema = self.data().for_core(); core_schema.next_height() == Height(0) } pub fn interface_name(&self) -> &str { self.interface_name } #[doc(hidden)] pub fn supervisor_extensions(&mut self) -> SupervisorExtensions<'_> { if self.instance.id != SUPERVISOR_INSTANCE_ID { panic!("`supervisor_extensions` called within a non-supervisor service"); } SupervisorExtensions(self.reborrow(self.instance.clone())) } pub(crate) fn initiate_adding_service( &mut self, spec: InstanceSpec, constr
rrectInstanceId)?; if state.status != Some(InstanceStatus::Stopped) { return Err(CoreError::ServiceNotStopped.into()); } let mut spec = state.spec; spec.artifact = artifact; let runtime = self .0 .dispatcher .runtime_by_id(spec.artifact.runtime_id) .ok_or(CoreError::IncorrectRuntime)?; runtime .initiate_resuming_service( self.0.child_context("", spec.as_descriptor(), false), &spec.artifact, params.into_bytes(), ) .map_err(|mut err| { self.0.should_rollback(); err.set_runtime_id(spec.artifact.runtime_id) .set_call_site(|| CallSite::new(instance_id, CallType::Resume)); err })?; DispatcherSchema::new(&*self.0.fork) .initiate_resuming_service(instance_id, spec.artifact) .map_err(From::from) } pub fn writeable_core_schema(&self) -> CoreSchema<&Fork> { CoreSchema::new(self.0.fork) } pub fn initiate_migration( &self, new_artifact: ArtifactId, old_service: &str, ) -> Result<(), ExecutionError> { self.0 .dispatcher .initiate_migration(self.0.fork, new_artifact, old_service) } pub fn rollback_migration(&self, service_name: &str) -> Result<(), ExecutionError> { Dispatcher::rollback_migration(self.0.fork, service_name) } pub fn commit_migration( &self, service_name: &str, migration_hash: Hash, ) -> Result<(), ExecutionError> { Dispatcher::commit_migration(self.0.fork, service_name, migration_hash) } pub fn flush_migration(&mut self, service_name: &str) -> Result<(), ExecutionError> { Dispatcher::flush_migration(self.0.fork, service_name) } }
uctor: impl BinaryValue, ) -> Result<(), ExecutionError> { debug_assert!(spec.validate().is_ok(), "{:?}", spec.validate()); let runtime = self .dispatcher .runtime_by_id(spec.artifact.runtime_id) .ok_or(CoreError::IncorrectRuntime)?; let context = self.reborrow(spec.as_descriptor()); runtime .initiate_adding_service(context, &spec.artifact, constructor.into_bytes()) .map_err(|mut err| { self.should_rollback(); err.set_runtime_id(spec.artifact.runtime_id) .set_call_site(|| CallSite::new(spec.id, CallType::Constructor)); err })?; DispatcherSchema::new(&*self.fork) .initiate_adding_service(spec) .map_err(From::from) } fn reborrow(&mut self, instance: InstanceDescriptor) -> ExecutionContext<'_> { if *self.has_child_call_error { panic!(ACCESS_ERROR_STR); } ExecutionContext { fork: &mut *self.fork, caller: self.caller.clone(), transaction_hash: self.transaction_hash, instance, interface_name: self.interface_name, dispatcher: self.dispatcher, call_stack_depth: self.call_stack_depth, has_child_call_error: self.has_child_call_error, } } fn child_context<'s>( &'s mut self, interface_name: &'s str, instance: InstanceDescriptor, fallthrough_auth: bool, ) -> ExecutionContext<'s> { if *self.has_child_call_error { panic!(ACCESS_ERROR_STR); } let caller = if fallthrough_auth { self.caller.clone() } else { Caller::Service { instance_id: self.instance.id, } }; ExecutionContext { caller, transaction_hash: self.transaction_hash, dispatcher: self.dispatcher, instance, fork: &mut *self.fork, interface_name, call_stack_depth: self.call_stack_depth + 1, has_child_call_error: self.has_child_call_error, } } pub(crate) fn should_rollback(&mut self) { *self.has_child_call_error = true; } } #[doc(hidden)] pub trait ExecutionContextUnstable { fn make_child_call<'q>( &mut self, called_instance: impl Into<InstanceQuery<'q>>, interface_name: &str, method_id: MethodId, arguments: &[u8], fallthrough_auth: bool, ) -> Result<(), ExecutionError>; } impl ExecutionContextUnstable for ExecutionContext<'_> 
{ fn make_child_call<'q>( &mut self, called_instance: impl Into<InstanceQuery<'q>>, interface_name: &str, method_id: MethodId, arguments: &[u8], fallthrough_auth: bool, ) -> Result<(), ExecutionError> { if self.call_stack_depth + 1 >= Self::MAX_CALL_STACK_DEPTH { let err = CoreError::stack_overflow(Self::MAX_CALL_STACK_DEPTH); return Err(err); } let descriptor = self .dispatcher .get_service(called_instance) .ok_or(CoreError::IncorrectInstanceId)?; let instance_id = descriptor.id; let (runtime_id, runtime) = self .dispatcher .runtime_for_service(instance_id) .ok_or(CoreError::IncorrectRuntime)?; let context = self.child_context(interface_name, descriptor, fallthrough_auth); runtime .execute(context, method_id, arguments) .map_err(|mut err| { self.should_rollback(); err.set_runtime_id(runtime_id).set_call_site(|| { CallSite::new( instance_id, CallType::Method { interface: interface_name.to_owned(), id: method_id, }, ) }); err }) } } #[doc(hidden)] #[derive(Debug)] pub struct SupervisorExtensions<'a>(pub(super) ExecutionContext<'a>); impl<'a> SupervisorExtensions<'a> { pub fn start_artifact_registration(&self, artifact: &ArtifactId, spec: Vec<u8>) { Dispatcher::commit_artifact(self.0.fork, artifact, spec); } pub fn initiate_adding_service( &mut self, instance_spec: InstanceSpec, constructor: impl BinaryValue, ) -> Result<(), ExecutionError> { self.0 .child_context("", self.0.instance.clone(), false) .initiate_adding_service(instance_spec, constructor) } pub fn initiate_stopping_service(&self, instance_id: InstanceId) -> Result<(), ExecutionError> { Dispatcher::initiate_stopping_service(self.0.fork, instance_id) } pub fn initiate_resuming_service( &mut self, instance_id: InstanceId, artifact: ArtifactId, params: impl BinaryValue, ) -> Result<(), ExecutionError> { let state = DispatcherSchema::new(&*self.0.fork) .get_instance(instance_id) .ok_or(CoreError::Inco
random
[ { "content": "#[doc(hidden)]\n\npub fn remove_local_migration_result(fork: &Fork, service_name: &str) {\n\n Schema::new(fork)\n\n .local_migration_results()\n\n .remove(service_name);\n\n}\n", "file_path": "exonum/src/runtime/dispatcher/schema.rs", "rank": 0, "score": 440123.2529413...
Rust
weave/tests/forest/product.rs
tclchiam/weave-ce
03c7b01b50111c48f6d1b471a23638825d0dbf0e
use std::fmt::Debug; use weave::Forest; pub fn left_is_empty_right_is_unit<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unit(&["1", "2"]); let forest2 = F::empty(); let expected = F::empty(); (forest1, forest2, expected) } pub fn overlapping_unit_forests<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unit(&["1", "2"]); let forest2 = F::unit(&["1"]); let expected = F::many(&[ vec!["1", "2"], ]); (forest1, forest2, expected) } pub fn overlapping_many_forest_and_double_unit_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::many(&[ vec!["1", "2"], vec!["2", "3"] ]); let forest2 = F::unit(&["3", "4"]); let expected = F::many(&[ vec!["1", "2", "3", "4"], vec!["2", "3", "4"] ]); (forest1, forest2, expected) } pub fn overlapping_many_forest_and_unique_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::many(&[ vec!["1", "2"], vec!["2", "3"] ]); let forest2 = F::unique(&["3", "4"]); let expected = F::many(&[ vec!["1", "2", "3"], vec!["1", "2", "4"], vec!["2", "3"], vec!["2", "3", "4"] ]); (forest1, forest2, expected) } pub fn disjoint_unit_forest_and_single_unit_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unit(&["1", "2"]); let forest2 = F::unit(&["3"]); let expected = F::many(&[ vec!["1", "2", "3"], ]); (forest1, forest2, expected) } pub fn disjoint_unit_forest_and_double_unit_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unit(&["1", "2"]); let forest2 = F::unit(&["3", "4"]); let expected = F::unit(&["1", "2", "3", "4"]); (forest1, forest2, expected) } pub fn disjoint_many_forest_and_single_unit_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::many(&[ vec!["1", "2"], vec!["2", "3"] ]); let forest2 = F::unit(&["4"]); let expected = F::many(&[ vec!["1", "2", "4"], vec!["2", "3", "4"] ]); (forest1, forest2, expected) } pub fn 
disjoint_many_forest_and_unique_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::many(&[ vec!["1", "2"], vec!["5", "6"] ]); let forest2 = F::unique(&["3", "4"]); let expected = F::many(&[ vec!["1", "2", "3"], vec!["1", "2", "4"], vec!["5", "6", "3"], vec!["5", "6", "4"], ]); (forest1, forest2, expected) } pub fn disjoint_many_forest_and_double_unit_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::many(&[ vec!["1", "2"], vec!["5", "6"] ]); let forest2 = F::unit(&["3", "4"]); let expected = F::many(&[ vec!["1", "2", "3", "4"], vec!["5", "6", "3", "4"], ]); (forest1, forest2, expected) } pub fn disjoint_unique_forests<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unique(&["1", "2"]); let forest2 = F::unique(&["3", "4"]); let expected = F::many(&[ vec!["1", "3"], vec!["1", "4"], vec!["2", "3"], vec!["2", "4"], ]); (forest1, forest2, expected) } pub fn forests_are_disjoint_many<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unique(&["1", "2"]); let forest2 = F::many(&[ vec!["3", "4"], vec!["7", "8"] ]); let expected = F::many(&[ vec!["1", "3", "4"], vec!["1", "7", "8"], vec!["2", "3", "4"], vec!["2", "7", "8"], ]); (forest1, forest2, expected) }
use std::fmt::Debug; use weave::Forest; pub fn left_is_empty_right_is_unit<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unit(&["1", "2"]); let forest2 = F::empty(); let expected = F::empty(); (forest1, forest2, expected) } pub fn overlapping_unit_forests<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unit(&["1", "2"]); let forest2 = F::unit(&["1"]); let expected = F::many(&[ vec!["1", "2"], ]); (forest1, forest2, expected) } pub fn overlapping_many_forest_and_double_unit_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::many(&[ vec!["1", "2"], vec!["2", "3"] ]); let forest2 = F::unit(&["3", "4"]); let expected = F::many(&[ vec!["1", "2", "3", "4"], vec!["2", "3", "4"] ]); (forest1, forest2, expected) } pub fn overlapping_many_forest_and_unique_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::many(&[ vec!["1", "2"], vec!["2", "3"] ]); let forest2 = F::unique(&["3", "4"]); let expected = F::many(&[ vec!["1", "2", "3"], vec!["1", "2", "4"], vec!["2", "3"], vec!["2", "3", "4"] ]); (forest1, forest2, expected) } pub fn disjoint_unit_forest_and_single_unit_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unit(&["1", "2"]); let forest2 = F::unit(&["3"]); let expected = F::many(&[ vec!["1", "2", "3"], ]); (forest1, forest2, expected) } pub fn disjoint_unit_forest_and_double_unit_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unit(&["1", "2"]); let forest2 = F::unit(&["3", "4"]); let expected = F::unit(&["1", "2", "3", "4"]); (forest1, forest2, expected) } pub fn disjoint_many_forest_and_single_unit_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::many(&[ vec!["1", "2"], vec!["2", "3"] ]); let forest2 = F::unit(&["4"]); let expected = F::many(&[ vec!["1", "2", "4"], vec!["2", "3", "4"] ]); (forest1, forest2, expected) } pub fn 
disjoint_many_forest_and_unique_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::many(&[ vec!["1", "2"], vec!["5", "6"] ]); let forest2 = F::unique(&["3", "4"]); let expected = F::many(&[ vec!["1", "2", "3"], vec!["1", "2", "4"], vec!["5", "6", "3"], vec!["5", "6", "4"], ]); (forest1, forest2, expected) } pub fn disjoint_many_forest_and_double_unit_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::many(&[ vec!["1", "2"], vec!["5", "6"] ]); let forest2
let forest2 = F::unique(&["3", "4"]); let expected = F::many(&[ vec!["1", "3"], vec!["1", "4"], vec!["2", "3"], vec!["2", "4"], ]); (forest1, forest2, expected) } pub fn forests_are_disjoint_many<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unique(&["1", "2"]); let forest2 = F::many(&[ vec!["3", "4"], vec!["7", "8"] ]); let expected = F::many(&[ vec!["1", "3", "4"], vec!["1", "7", "8"], vec!["2", "3", "4"], vec!["2", "7", "8"], ]); (forest1, forest2, expected) }
= F::unit(&["3", "4"]); let expected = F::many(&[ vec!["1", "2", "3", "4"], vec!["5", "6", "3", "4"], ]); (forest1, forest2, expected) } pub fn disjoint_unique_forests<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, F, F) { let forest1 = F::unique(&["1", "2"]);
random
[ { "content": "pub fn empty_forest<'a, F: Forest<&'a str> + Debug + Eq + Clone>() -> (F, &'a str, F) {\n\n let forest = F::empty();\n\n let element = \"1\";\n\n\n\n let expected = F::empty();\n\n\n\n (forest, element, expected)\n\n}\n\n\n", "file_path": "weave/tests/forest/subset_not.rs", "ra...
Rust
kernel/system/src/ia_32e/cpu/pic.rs
VenmoTools/OperatingSystem
479ce57719c67541ade7d15ed22904d937b80cc4
use super::{Port, UnsafePort}; const EOI: u8 = 0x20; const ICW4: u8 = 0x01; const ICW1: u8 = 0x11; const ICW3_M: u8 = 0x04; const ICW3_S: u8 = 0x02; const MASKED: u8 = 0xff; #[derive(Debug)] struct Pic { offset: u8, command: UnsafePort<u8>, data: UnsafePort<u8>, } impl Pic { fn handle_interrupt(&self, interrupt_id: u8) -> bool { self.offset <= interrupt_id && interrupt_id < self.offset + 8 } unsafe fn end_interrupt(&mut self) { self.command.write(EOI); } } #[derive(Debug)] pub struct ChainedPics { main: Pic, slave: Pic, } impl ChainedPics { pub const unsafe fn new(offset_1: u8, offset_2: u8) -> ChainedPics { ChainedPics { main: Pic { offset: offset_1, command: UnsafePort::new(0x20), data: UnsafePort::new(0x21), }, slave: Pic { offset: offset_2, command: UnsafePort::new(0xA0), data: UnsafePort::new(0xA1), }, } } pub unsafe fn initialize(&mut self) { let mut wait_port: Port<u32> = Port::new(0x80); let mut wait = || { wait_port.write(0) }; let saved_mask1 = self.main.data.read(); let saved_mask2 = self.slave.data.read(); self.main.command.write(ICW1); wait(); self.slave.command.write(ICW1); wait(); self.main.data.write(self.main.offset); wait(); self.slave.data.write(self.slave.offset); wait(); self.main.data.write(ICW3_M); wait(); self.slave.data.write(ICW3_S); wait(); self.main.data.write(ICW4); wait(); self.slave.data.write(ICW4); wait(); self.main.data.write(saved_mask1); self.slave.data.write(saved_mask2); } pub unsafe fn disable_8259a(&mut self) { self.main.data.write(MASKED); self.slave.data.write(MASKED); } pub fn handles_interrupt(&self, interrupt_id: u8) -> bool { self.main.handle_interrupt(interrupt_id) || self.slave.handle_interrupt(interrupt_id) } pub unsafe fn notify_end_of_interrupt(&mut self, interrupt_id: u8) { if self.handles_interrupt(interrupt_id) { if self.slave.handle_interrupt(interrupt_id) { self.slave.end_interrupt(); } self.main.end_interrupt(); } } }
use super::{Port, UnsafePort}; const EOI: u8 = 0x20; const ICW4: u8 = 0x01; const ICW1: u8 = 0x11; const ICW3_M: u8 = 0x04; const ICW3_S: u8 = 0x02; const MASKED: u8
offset: offset_1, command: UnsafePort::new(0x20), data: UnsafePort::new(0x21), }, slave: Pic { offset: offset_2, command: UnsafePort::new(0xA0), data: UnsafePort::new(0xA1), }, } } pub unsafe fn initialize(&mut self) { let mut wait_port: Port<u32> = Port::new(0x80); let mut wait = || { wait_port.write(0) }; let saved_mask1 = self.main.data.read(); let saved_mask2 = self.slave.data.read(); self.main.command.write(ICW1); wait(); self.slave.command.write(ICW1); wait(); self.main.data.write(self.main.offset); wait(); self.slave.data.write(self.slave.offset); wait(); self.main.data.write(ICW3_M); wait(); self.slave.data.write(ICW3_S); wait(); self.main.data.write(ICW4); wait(); self.slave.data.write(ICW4); wait(); self.main.data.write(saved_mask1); self.slave.data.write(saved_mask2); } pub unsafe fn disable_8259a(&mut self) { self.main.data.write(MASKED); self.slave.data.write(MASKED); } pub fn handles_interrupt(&self, interrupt_id: u8) -> bool { self.main.handle_interrupt(interrupt_id) || self.slave.handle_interrupt(interrupt_id) } pub unsafe fn notify_end_of_interrupt(&mut self, interrupt_id: u8) { if self.handles_interrupt(interrupt_id) { if self.slave.handle_interrupt(interrupt_id) { self.slave.end_interrupt(); } self.main.end_interrupt(); } } }
= 0xff; #[derive(Debug)] struct Pic { offset: u8, command: UnsafePort<u8>, data: UnsafePort<u8>, } impl Pic { fn handle_interrupt(&self, interrupt_id: u8) -> bool { self.offset <= interrupt_id && interrupt_id < self.offset + 8 } unsafe fn end_interrupt(&mut self) { self.command.write(EOI); } } #[derive(Debug)] pub struct ChainedPics { main: Pic, slave: Pic, } impl ChainedPics { pub const unsafe fn new(offset_1: u8, offset_2: u8) -> ChainedPics { ChainedPics { main: Pic {
random
[ { "content": "pub fn add_scan_code(code: u8) {\n\n let lock = scan_queue_mut();\n\n if let Err(_) = lock.push(code) {\n\n println!(\"scan code queue full dropping keyboard input\")\n\n } else {\n\n SCAN_CODE_WAKER.wake();\n\n }\n\n}\n\n\n\npub struct ScanCodeStream;\n\n\n\nimpl ScanCod...
Rust
src/sfc.rs
sambuc/ironsea_index_sfc_dbc
09cdd3d569848d8281a7de5ec90805a43fb1f7eb
#![allow(clippy::type_repetition_in_bounds)] use std::cmp::PartialEq; use std::fmt::Debug; use std::hash::Hash; use std::iter::FromIterator; use std::ops::Index; pub use ironsea_index::IndexedDestructured; pub use ironsea_index::Record; pub use ironsea_index::RecordFields; use serde::Deserialize; use serde::Serialize; use super::cell_space::CellSpace; use super::morton::MortonCode; use super::morton::MortonEncoder; use super::morton::MortonValue; type SFCCode = MortonCode; type SFCOffset = u32; const MAX_K: usize = 3; #[derive(Clone, Debug)] struct Limit<V> { idx: usize, position: Vec<V>, } #[derive(Clone, Debug)] struct Limits<'a, V> { start: Limit<&'a V>, end: Limit<&'a V>, } #[derive(Clone, Debug, Deserialize, Serialize)] struct SFCRecord<F> { offsets: [SFCOffset; MAX_K], fields: F, } #[derive(Clone, Debug, Deserialize, Serialize)] struct SFCCell<F> { code: MortonCode, records: Vec<SFCRecord<F>>, } #[derive(Clone, Debug, Deserialize, Serialize)] pub struct SpaceFillingCurve<F, K, V> where F: PartialEq, K: Debug + FromIterator<V> + Index<usize, Output = V>, V: Clone + Debug + From<usize> + Ord, { dimensions: usize, morton: MortonEncoder, space: CellSpace<K, V>, index: Vec<SFCCell<F>>, } impl<F, K, V> SpaceFillingCurve<F, K, V> where F: PartialEq, K: Debug + FromIterator<V> + Index<usize, Output = V>, V: Clone + Debug + From<usize> + Hash + Ord, { pub fn new<I, R>(iter: I, dimensions: usize, cell_bits: usize) -> Self where I: Clone + Iterator<Item = R>, R: Debug + Record<K> + RecordFields<F>, { let mut index = SpaceFillingCurve { dimensions, morton: MortonEncoder::new(dimensions, cell_bits), space: CellSpace::new(iter.clone(), dimensions, cell_bits), index: vec![], }; let mut flat_table = vec![]; let (nb_records, _) = iter.size_hint(); for record in iter.into_iter() { let position = record.key(); match index.space.key(&position) { Ok((cell_ids, offsets)) => match index.encode(&cell_ids) { Ok(code) => { let offsets = offsets.iter().map(|i| *i as 
SFCOffset).collect::<Vec<_>>(); flat_table.push(( code, SFCRecord { offsets: *array_ref!(offsets, 0, MAX_K), fields: record.fields(), }, )) } Err(e) => error!("Unable to encode position {:#?}: {}", cell_ids, e), }, Err(e) => error!("Invalid position {:#?}: {}", position, e), } } debug!("Processed {:#?} records into the index", nb_records); flat_table.sort_unstable_by(|a, b| a.0.cmp(&b.0)); let nb_records = flat_table.len(); let mut current_cell_code = flat_table[0].0; let mut count = 0; index.index.push(SFCCell { code: current_cell_code, records: vec![], }); for (code, record) in flat_table { if code == current_cell_code { index.index[count].records.push(record); } else { index.index.push(SFCCell { code, records: vec![record], }); current_cell_code = code; count += 1; } } debug!("Inserted {:#?} records into the index", nb_records); index } pub fn find_by_value<'s>(&'s self, value: &'s F) -> Box<dyn Iterator<Item = K> + 's> { Box::new( self.index .iter() .map(|cell| (cell, cell.records.iter())) .flat_map(move |(cell, records)| { records.filter_map(move |record| { if &record.fields == value { if let Ok(key) = self.position(cell.code, &record.offsets) { Some(key) } else { None } } else { None } }) }), ) } fn encode(&self, cell_ids: &[usize]) -> Result<SFCCode, String> { let mut t = vec![]; for v in cell_ids.iter() { t.push(*v as MortonValue); } self.morton.encode(&t) } fn last(&self) -> (Vec<usize>, Vec<usize>) { self.space.last() } fn value(&self, code: SFCCode, offsets: &[SFCOffset]) -> Result<Vec<&V>, String> { Ok(self.space.value( self.morton .decode(code) .iter() .map(|e| *e as usize) .collect(), offsets.iter().map(|e| *e as usize).collect(), )?) 
} fn position(&self, code: SFCCode, offsets: &[SFCOffset]) -> Result<K, String> { let position = self.value(code, offsets)?; Ok(position.iter().map(|i| (*i).clone()).collect()) } fn limits(&self, start: &K, end: &K) -> Result<Limits<V>, String> { trace!("limits: {:?} - {:?}", start, end); let (cells, offsets) = self.space.key_down(start)?; let code = self.encode(&cells)?; let idx = match self.index.binary_search_by(|e| e.code.cmp(&code)) { Err(e) => { if e > 0 { e - 1 } else { 0 } } Ok(c) => c, }; let position = self.space.value(cells, offsets)?; let start = Limit { idx, position }; let (cells, offsets) = self.space.key_up(end)?; let code = self.encode(&cells)?; let idx = match self.index.binary_search_by(|e| e.code.cmp(&code)) { Err(e) => { if e >= self.index.len() { self.index.len() } else { e } } Ok(c) => c + 1, }; let position = self.space.value(cells, offsets)?; let end = Limit { idx, position }; trace!("limits: {:?} - {:?}", start, end); Ok(Limits { start, end }) } } impl<F, K, V> IndexedDestructured<F, K> for SpaceFillingCurve<F, K, V> where F: PartialEq, K: Debug + FromIterator<V> + Index<usize, Output = V>, V: Clone + Debug + From<usize> + Hash + Ord, { fn find<'i>(&'i self, key: &K) -> Box<dyn Iterator<Item = &F> + 'i> { if let Ok((cell_ids, offsets)) = self.space.key(key) { match self.encode(&cell_ids) { Err(e) => error!("{}", e), Ok(code) => { if let Ok(cell) = self.index.binary_search_by(|a| a.code.cmp(&code)) { return Box::new(self.index[cell].records.iter().filter_map( move |record| { let mut select = true; for (k, o) in offsets.iter().enumerate().take(self.dimensions) { select &= record.offsets[k] == (*o as SFCOffset); } if select { Some(&record.fields) } else { None } }, )); } } } } Box::new(Vec::with_capacity(0).into_iter()) } fn find_range<'i>(&'i self, start: &K, end: &K) -> Box<dyn Iterator<Item = (K, &F)> + 'i> { match self.limits(start, end) { Ok(limits) => { let iter = (limits.start.idx..limits.end.idx) .filter_map(move |idx| { match 
self.value(self.index[idx].code, &self.index[idx].records[0].offsets) { Err(_) => None, Ok(first) => Some((idx, first)), } }) .filter_map(move |(idx, first)| { let (cell_ids, last_offsets) = self.last(); match self.space.value(cell_ids, last_offsets) { Err(_) => None, Ok(last) => Some((idx, first, last)), } }) .map(move |(idx, first, last)| { let limits = limits.clone(); let b: Box<dyn Iterator<Item = _>> = if limits.start.position <= first && first <= limits.end.position && limits.start.position <= last && last <= limits.end.position { Box::new(self.index[idx].records.iter().filter_map(move |record| { let code = self.index[idx].code; if let Ok(key) = self.position(code, &record.offsets) { Some((key, &record.fields)) } else { None } })) } else { Box::new(self.index[idx].records.iter().filter_map(move |record| { let code = self.index[idx].code; if let Ok(pos) = self.value(code, &record.offsets) { if limits.start.position <= pos && pos <= limits.end.position { if let Ok(key) = self.position(code, &record.offsets) { Some((key, &record.fields)) } else { None } } else { None } } else { None } })) }; b }) .flatten(); Box::new(iter) } Err(e) => { error!("find_range: limits failed: {}", e); Box::new(Vec::with_capacity(0).into_iter()) } } } } /* impl<F, K, V> Store for SpaceFillingCurve<F, K, V> where F: PartialEq + Serialize, K: Debug + Serialize + FromIterator<V> + Index<usize, Output = V>, V: Clone + Debug + From<usize> + Ord + Serialize, { fn store<W>(&mut self, writer: W) -> io::Result<()> where W: std::io::Write, { match bincode::serialize_into(writer, &self) { Ok(_) => Ok(()), Err(e) => Err(io::Error::new(io::ErrorKind::WriteZero, e)), } } } impl<F, K, V> Load for SpaceFillingCurve<F, K, V> where F: PartialEq + DeserializeOwned, K: Debug + DeserializeOwned + FromIterator<V> + Index<usize, Output = V>, V: Clone + Debug + DeserializeOwned + From<usize> + Ord, { fn load<Re: io::Read>(reader: Re) -> io::Result<Self> { match bincode::deserialize_from(reader) { Ok(data) => 
Ok(data), Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)), } } // only required for store_mapped_file fn load_slice(from: &[u8]) -> io::Result<Self> { match bincode::deserialize(from) { Ok(data) => Ok(data), Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)), } } } */
#![allow(clippy::type_repetition_in_bounds)] use std::cmp::PartialEq; use std::fmt::Debug; use std::hash::Hash; use std::iter::FromIterator; use std::ops::Index; pub use ironsea_index::IndexedDestructured; pub use ironsea_index::Record; pub use ironsea_index::RecordFields; use serde::Deserialize; use serde::Serialize; use super::cell_space::CellSpace; use super::morton::MortonCode; use super::morton::MortonEncoder; use super::morton::MortonValue; type SFCCode = MortonCode; type SFCOffset = u32; const MAX_K: usize = 3; #[derive(Clone, Debug)] struct Limit<V> { idx: usize, position: Vec<V>, } #[derive(Clone, Debug)] struct Limits<'a, V> { start: Limit<&'a V>, end: Limit<&'a V>, } #[derive(Clone, Debug, Deserialize, Serialize)] struct SFCRecord<F> { offsets: [SFCOffset; MAX_K], fields: F, } #[derive(Clone, Debug, Deserialize, Serialize)] struct SFCCell<F> { code: MortonCode, records: Vec<SFCRecord<F>>, } #[derive(Clone, Debug, Deserialize, Serialize)] pub struct SpaceFillingCurve<F, K, V> where F: PartialEq, K: Debug + FromIterator<V> + Index<usize, Output = V>, V: Clone + Debug + From<usize> + Ord, { dimensions: usize, morton: MortonEncoder, space: CellSpace<K, V>, index: Vec<SFCCell<F>>, } impl<F, K, V> SpaceFillingCurve<F, K, V> where F: PartialEq, K: Debug + FromIterator<V> + Index<usize, Output = V>, V: Clone + Debug + From<usize> + Hash + Ord, { pub fn new<I, R>(iter: I, dimensions: usize, cell_bits: usize) -> Self where I: Clone + Iterator<Item = R>, R: Debug + Record<K> + RecordFields<F>, { let mut index = SpaceFillingCurve { dimensions, morton: MortonEncoder::new(dimensions, cell_bits), space: CellSpace::new(iter.clone(), dimensions, cell_bits), index: vec![], }; let mut flat_table = vec![]; let (nb_records, _) = iter.size_hint(); for record in iter.into_iter() { let position = record.key(); match index.space.key(&position) { Ok((cell_ids, offsets)) => match index.encode(&cell_ids) { Ok(code) => { let offsets = offsets.iter().map(|i| *i as 
SFCOffset).collect::<Vec<_>>(); flat_table.push(( code, SFCRecord { offsets: *array_ref!(offsets, 0, MAX_K), fields: record.fields(), }, )) } Err(e) => error!("Unable to encode position {:#?}: {}", cell_ids, e), }, Err(e) => error!("Invalid position {:#?}: {}", position, e), } } debug!("Processed {:#?} records into the index", nb_records); flat_table.sort_unstable_by(|a, b| a.0.cmp(&b.0)); let nb_records = flat_table.len(); let mut current_cell_code = flat_table[0].0; let mut count = 0; index.index.push(SFCCell { code: current_cell_code, records: vec![], }); for (code, record) in flat_table { if code == current_cell_code { index.index[count].records.push(record); } else { index.index.push(SFCCell { code, records: vec![record], }); current_cell_code = code; count += 1; } } debug!("Inserted {:#?} records into the index", nb_records); index } pub fn find_by_value<'s>(&'s self, value: &'s F) -> Box<dyn Iterator<Item = K> + 's> { Box::new( self.index .iter() .map(|cell| (cell, cell.records.iter())) .flat_map(move |(cell, records)| { records.filter_map(move |record| { if &record.fields == value { if let Ok(key) = self.position(cell.code, &record.offsets) { Some(key) } else { None } } else { None } }) }), ) } fn encode(&self, cell_ids: &[usize]) -> Result<SFCCode, String> { let mut t = vec![]; for v in cell_ids.iter() { t.push(*v as MortonValue); } self.morton.encode(&t) } fn last(&self) -> (Vec<usize>, Vec<usize>) { self.space.last() } fn value(&self, code: SFCCode, offsets: &[SFCOffset]) -> Result<Vec<&V>, String> { Ok(self.space.value( self.morton .decode(code) .iter() .map(|e| *e as usize) .collect(), offsets.iter().map(|e| *e as usize).collect(), )?) 
} fn position(&self, code: SFCCode, offsets: &[SFCOffset]) -> Result<K, String> { let position = self.value(code, offsets)?; Ok(position.iter().map(|i| (*i).clone()).collect()) } fn limits(&self, start: &K, end: &K) -> Result<Limits<V>, String> { trace!("limits: {:?} - {:?}", start, end); let (cells, offsets) = self.space.key_down(start)?; let code = self.encode(&cells)?; let idx = match self.index.binary_search_by(|e| e.code.cmp(&code)) { Err(e) => { if e > 0 { e - 1 } else { 0 } } Ok(c) => c, }; let position = self.space.value(cells, offsets)?; let start = Limit { idx, position }; let (cells, offsets) = self.space.key_up(end)?; let code = self.encode(&cells)?; let idx = match self.index.binary_search_by(|e| e.code.cmp(&code)) { Err(e) => { if e >= self.index.len() { self.index.len() } else { e } } Ok(c) => c + 1, }; let position = self.space.value(cells, offsets)?; let end = Limit { idx, position }; trace!("limits: {:?} - {:?}", start, end); Ok(Limits { start, end }) } } impl<F, K, V> IndexedDestructured<F, K> for SpaceFillingCurve<F, K, V> where F: PartialEq, K: Debug + FromIterator<V> + Index<usize, Output = V>, V: Clone + Debug + From<usize> + Hash + Ord, { fn find<'i>(&'i self, key: &K) -> Box<dyn Iterator<Item = &F> + 'i> { if let Ok((cell_ids, offsets)) = self.space.key(key) { match self.encode(&cell_ids) { Err(e) => error!("{}", e), Ok(code) => { if let Ok(cell) = self.index.binary_search_by(|a| a.code.cmp(&code)) { return Box::new(self.index[cell].records.iter().filter_map( move |record| { let mut select = true; for (k, o) in offsets.iter().enumerate().take(self.dimensions) { select &= record.offsets[k] == (*o as SFCOffset); } if select { Some(&record.fields) } else { Non
fn find_range<'i>(&'i self, start: &K, end: &K) -> Box<dyn Iterator<Item = (K, &F)> + 'i> { match self.limits(start, end) { Ok(limits) => { let iter = (limits.start.idx..limits.end.idx) .filter_map(move |idx| { match self.value(self.index[idx].code, &self.index[idx].records[0].offsets) { Err(_) => None, Ok(first) => Some((idx, first)), } }) .filter_map(move |(idx, first)| { let (cell_ids, last_offsets) = self.last(); match self.space.value(cell_ids, last_offsets) { Err(_) => None, Ok(last) => Some((idx, first, last)), } }) .map(move |(idx, first, last)| { let limits = limits.clone(); let b: Box<dyn Iterator<Item = _>> = if limits.start.position <= first && first <= limits.end.position && limits.start.position <= last && last <= limits.end.position { Box::new(self.index[idx].records.iter().filter_map(move |record| { let code = self.index[idx].code; if let Ok(key) = self.position(code, &record.offsets) { Some((key, &record.fields)) } else { None } })) } else { Box::new(self.index[idx].records.iter().filter_map(move |record| { let code = self.index[idx].code; if let Ok(pos) = self.value(code, &record.offsets) { if limits.start.position <= pos && pos <= limits.end.position { if let Ok(key) = self.position(code, &record.offsets) { Some((key, &record.fields)) } else { None } } else { None } } else { None } })) }; b }) .flatten(); Box::new(iter) } Err(e) => { error!("find_range: limits failed: {}", e); Box::new(Vec::with_capacity(0).into_iter()) } } } } /* impl<F, K, V> Store for SpaceFillingCurve<F, K, V> where F: PartialEq + Serialize, K: Debug + Serialize + FromIterator<V> + Index<usize, Output = V>, V: Clone + Debug + From<usize> + Ord + Serialize, { fn store<W>(&mut self, writer: W) -> io::Result<()> where W: std::io::Write, { match bincode::serialize_into(writer, &self) { Ok(_) => Ok(()), Err(e) => Err(io::Error::new(io::ErrorKind::WriteZero, e)), } } } impl<F, K, V> Load for SpaceFillingCurve<F, K, V> where F: PartialEq + DeserializeOwned, K: Debug + 
DeserializeOwned + FromIterator<V> + Index<usize, Output = V>, V: Clone + Debug + DeserializeOwned + From<usize> + Ord, { fn load<Re: io::Read>(reader: Re) -> io::Result<Self> { match bincode::deserialize_from(reader) { Ok(data) => Ok(data), Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)), } } // only required for store_mapped_file fn load_slice(from: &[u8]) -> io::Result<Self> { match bincode::deserialize(from) { Ok(data) => Ok(data), Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)), } } } */
e } }, )); } } } } Box::new(Vec::with_capacity(0).into_iter()) }
function_block-function_prefixed
[ { "content": "#[derive(Clone, Debug, Deserialize, Serialize)]\n\nstruct CellDictionary<K, V> {\n\n table: Vec<Cell<V>>,\n\n max_offset: usize,\n\n _marker: marker::PhantomData<K>,\n\n}\n\n\n\nimpl<K, V> CellDictionary<K, V>\n\nwhere\n\n K: Debug + Index<usize, Output = V>,\n\n V: Clone + Debug + ...
Rust
src/world/mod.rs
tgolsson/physics2d-rs
ff7b4cf4f06fa54c43148fbb2e26cdb6342fb50c
mod body; mod collections; pub mod debug; mod transform; pub(crate) use self::body::BodyPair; pub use self::body::{Body, BodyId, Material}; pub(crate) use self::collections::{Bodies, ConstraintsMap}; pub use self::transform::Transform; use crate::{ collision::broad_phase::{BoundsTreeBroadPhase, BroadPhase}, collision::{collide, ContactConstraint}, joint::Joint, }; use collections::ConstraintSolverMap; #[derive(Clone)] pub struct World { bodies: Bodies, broad_phase: BoundsTreeBroadPhase, contact_constraints: ConstraintsMap<ContactConstraint>, joints: ConstraintsMap<Joint>, pub velocity_iterations: u8, pub position_iterations: u8, } impl Default for World { fn default() -> World { World::new(8, 2) } } impl World { pub fn new(velocity_iterations: u8, position_iterations: u8) -> World { World { bodies: Bodies::default(), broad_phase: BoundsTreeBroadPhase::new(), contact_constraints: ConstraintsMap::default(), joints: ConstraintsMap::default(), velocity_iterations, position_iterations, } } pub fn add_body(&mut self, mut body: Body) -> BodyId { body.proxy_id = self.broad_phase.create_proxy(&body); self.bodies.add(body) } pub fn add_joint(&mut self, bodies: (BodyId, BodyId), joint: Joint) { let bodies = BodyPair::new(bodies.0, bodies.1); let body_joints = self.joints.entry(bodies).or_insert_with(Vec::new); body_joints.push(joint); } pub fn get_joints(&self, bodies: (BodyId, BodyId)) -> Option<&Vec<Joint>> { let bodies = BodyPair::new(bodies.0, bodies.1); self.joints.get(&bodies) } pub fn get_joints_mut(&mut self, bodies: (BodyId, BodyId)) -> Option<&mut Vec<Joint>> { let bodies = BodyPair::new(bodies.0, bodies.1); self.joints.get_mut(&bodies) } pub fn get_body(&self, body_id: BodyId) -> Option<&Body> { self.bodies.get(body_id) } pub fn get_body_mut(&mut self, body_id: BodyId) -> Option<&mut Body> { self.bodies.get_mut(body_id) } pub fn bodies_iter(&self) -> impl Iterator<Item = &Body> { self.bodies.iter() } pub fn body_count(&self) -> usize { self.bodies.len() } pub fn 
update(&mut self, dt: f32) { for body in self.bodies.iter_mut() { body.update(dt); self.broad_phase.update_proxy(body.proxy_id, body); } { let bodies = &self.bodies; self.contact_constraints .retain(|pair, _| pair.with(bodies, |a, b| a.bounds.intersects(&b.bounds))); } self.broad_phase .new_potential_pairs(&self.bodies, &mut self.contact_constraints); self.broad_phase.post_update(); { let bodies = &self.bodies; self.contact_constraints.retain(|pair, constraints| { let body_a = &bodies[pair.0]; let body_b = &bodies[pair.1]; if let Some(new_contacts) = collide(body_a, body_b) { let new_constraints = if !constraints.is_empty() { ContactConstraint::with_persistent_contacts(constraints, &new_contacts) } else { ContactConstraint::with_contacts(&new_contacts) }; *constraints = new_constraints; true } else { false } }); } for body in self.bodies.iter_mut() { body.integrate_force(dt); } self.contact_constraints .initialize_velocity(&self.bodies, dt); self.contact_constraints .warm_start_velocity(&mut self.bodies, dt); self.joints.initialize_velocity(&self.bodies, dt); self.joints.warm_start_velocity(&mut self.bodies, dt); for _ in 0..self.velocity_iterations { self.joints.solve_velocity(&mut self.bodies, dt); self.contact_constraints .solve_velocity(&mut self.bodies, dt); } for body in self.bodies.iter_mut() { body.integrate_velocity(dt); } self.contact_constraints .warm_start_position(&mut self.bodies, dt); for _ in 0..self.position_iterations { self.joints.solve_position(&mut self.bodies, dt); self.contact_constraints .solve_position(&mut self.bodies, dt); } } }
mod body; mod collections; pub mod debug; mod transform; pub(crate) use self::body::BodyPair; pub use self::body::{Body, BodyId, Material}; pub(crate) use self::collections::{Bodies, ConstraintsMap}; pub use self::transform::Transform; use crate::{ collision::broad_phase::{BoundsTreeBroadPhase, BroadPhase}, collision::{collide, ContactConstraint}, joint::Joint, }; use collections::ConstraintSolverMap; #[derive(Clone)] pub struct World { bodies: Bodies, broad_phase: BoundsTreeBroadPhase, contact_constraints: ConstraintsMap<ContactConstraint>, joints: ConstraintsMap<Joint>, pub velocity_iterations: u8, pub position_iterations: u8, } impl Default for World { fn default() -> World { World::new(8, 2) } } impl World { pub fn new(velocity_iterations: u8, position_iterations: u8) -> World { World { bodies: Bodies::default(), broad_phase: BoundsTreeBroadPhase::new(), contact_constraints: ConstraintsMap::default(), joints: ConstraintsMap::default(), velocity_iterations, position_iterations, } } pub fn add_body(&mut self, mut body: Body) -> BodyId { body.proxy_id = self.broad_phase.create_proxy(&body); self.bodies.add(body) } pub fn add_joint(&mut self, bodies: (BodyId, BodyId), joint: Joint) { let bodies = BodyPair::new(bodies.0, bodies.1); let body_joints = self.joints.entry(bodies).or_insert_with(Vec::new); body_joints.push(joint); } pub fn get_joints(&self, bodies: (BodyId, BodyId)) -> Option<&Vec<Joint>> { let bodies = BodyPair::new(bodies.0, bodies.1); self.joints.get(&bodies) } pub fn get_joints_mut(&mut self, bodies: (BodyId, BodyId)) -> Option<&mut Vec<Joint>> { let bodies = BodyPair::new(bodies.0, bodies.1); self.joints.get_mut(&bodies) } pub fn get_body(&self, body_id: BodyId) -> Option<&Body> { self.bodies.get(body_id) } pub fn get_body_mut(&mut self, body_id: BodyId) -> Option<&mut Body> { self.bodies.get_mut(body_id) } pub fn bodies_iter(&self) -> impl Iterator<Item = &Body> { self.bodies.iter() } pub fn body_count(&self) -> usize { self.bodies.len() } pub fn 
update(&mut self, dt: f32) { for body in self.bodies.iter_mut() { body.update(dt); self.broad_phase.update_proxy(body.proxy_id, body); } { let bodies = &self.bodies; self.contact_constraints .retain(|pair, _| pair.with(bodies, |a, b| a.bounds.intersects(&b.bounds))); } self.broad_phase .new_potential_pairs(&self.bodies, &mut self.contact_constraints); self.broad_phase.post_update(); { let bodies = &self.bodies; self.contact_constraints.retain(|pair, constraints| { let body_a = &bodies[pair.0]; let body_b = &bodies[pair.1]; if let Some(new_contacts) = collide(body_a, body_b) { let new_constraints = if !constraints.is_empty() { ContactConstraint::with_persistent_contacts(constraints, &new_contacts) } else { ContactConstraint::with_contacts(&new_contacts) }; *constraints = new_constraints; true } else { false } }); } for body in self.bodies.iter_mut() { body.integrate_force(dt); } self.contact_constraints .in
}
itialize_velocity(&self.bodies, dt); self.contact_constraints .warm_start_velocity(&mut self.bodies, dt); self.joints.initialize_velocity(&self.bodies, dt); self.joints.warm_start_velocity(&mut self.bodies, dt); for _ in 0..self.velocity_iterations { self.joints.solve_velocity(&mut self.bodies, dt); self.contact_constraints .solve_velocity(&mut self.bodies, dt); } for body in self.bodies.iter_mut() { body.integrate_velocity(dt); } self.contact_constraints .warm_start_position(&mut self.bodies, dt); for _ in 0..self.position_iterations { self.joints.solve_position(&mut self.bodies, dt); self.contact_constraints .solve_position(&mut self.bodies, dt); } }
function_block-function_prefixed
[ { "content": "/// Clamps (limits) the value of `x` in the inclusive range from `0.0` to `1.0`.\n\n///\n\n/// If `x < 0.0`, `0.0` is returned. If `x > 1.0`, `1.0` is returned.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use physics2d::math::clamp01;\n\n///\n\n/// assert_eq!(clamp01(-10.0), 0.0);\n\n/// a...
Rust
c2rust-refactor/src/scripting/ast_visitor.rs
martin-t/c2rust
f565bc63d87f15f79b5b232cb7155eb8ec5bbfe2
use rlua::prelude::{LuaFunction, LuaResult, LuaTable}; macro_rules! call_lua_visitor_method { ($obj: expr , $method: ident ($($params: expr),*)) => { let opt_visit_method: Option<LuaFunction> = $obj.get(stringify!($method))?; if let Some(visit_method) = opt_visit_method { let proceed = visit_method.call::<_, bool>(($obj.clone(), $($params.clone()),*))?; if !proceed { return Ok(()); } } }; } pub(crate) struct LuaAstVisitor<'lua> { visitor: LuaTable<'lua> } impl<'lua> LuaAstVisitor<'lua> { pub fn new(visitor: LuaTable<'lua>) -> Self { LuaAstVisitor { visitor, } } pub fn visit_crate(&self, lua_crate: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_crate(lua_crate)); self.visit_mod(lua_crate.get("module")?)?; Ok(()) } pub fn visit_mod(&self, module: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_mod(module)); let items: LuaTable = module.get("items")?; for item in items.sequence_values::<LuaTable>() { self.visit_item(item?)?; } Ok(()) } pub fn visit_impl(&self, imp: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_impl(imp)); let items: LuaTable = imp.get("items")?; for item in items.sequence_values::<LuaTable>() { let item = item?; let kind: String = item.get("kind")?; match kind.as_str() { "ImplMethod" => { self.visit_fn_like(item)?; }, ref e => unimplemented!("visit_impl: Impl kind: {:?}", e), } } Ok(()) } pub fn visit_item(&self, item: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_item(item)); match item.get::<_, String>("kind")?.as_str() { "Fn" => { self.visit_fn_like(item)?; }, "Impl" => { self.visit_impl(item)?; }, ref e => warn!("visit_item: Found unsupported item kind: {:?}", e), } Ok(()) } pub fn visit_expr(&self, expr: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_expr(expr)); match expr.get::<_, String>("kind")?.as_str() { "Box" => { let boxed = expr.get("expr")?; self.visit_expr(boxed)?; }, 
"AssignOp" | "Binary" | "Assign" => { let lhs = expr.get("lhs")?; let rhs = expr.get("rhs")?; self.visit_expr(lhs)?; self.visit_expr(rhs)?; }, "Array" => { let values: LuaTable = expr.get("values")?; for val in values.sequence_values::<LuaTable>() { self.visit_expr(val?)?; } }, "Path" => { }, "Lit" => { }, "InlineAsm" => { let inputs: LuaTable = expr.get("inputs")?; let outputs: LuaTable = expr.get("outputs")?; for input in inputs.sequence_values::<LuaTable>() { let input = input?; let expr = input.get("expr")?; self.visit_expr(expr)?; } for output in outputs.sequence_values::<LuaTable>() { let output = output?; let expr = output.get("expr")?; self.visit_expr(expr)?; } }, "Unary" => { let expr = expr.get("expr")?; self.visit_expr(expr)?; }, "Call" => { let path = expr.get("path")?; let args: LuaTable = expr.get("args")?; self.visit_expr(path)?; for arg in args.sequence_values::<LuaTable>() { self.visit_expr(arg?)?; } }, "MethodCall" => { let args: LuaTable = expr.get("args")?; for arg in args.sequence_values::<LuaTable>() { self.visit_expr(arg?)?; } }, "Index" => { let indexed = expr.get("indexed")?; let index = expr.get("index")?; self.visit_expr(indexed)?; self.visit_expr(index)?; }, "AddrOf" => { let expr = expr.get("expr")?; self.visit_expr(expr)?; }, "Try" => { let expr = expr.get("expr")?; self.visit_expr(expr)?; }, "Match" => { let match_expr = expr.get("expr")?; let arms: LuaTable = expr.get("arms")?; for arm in arms.sequence_values::<LuaTable>() { let arm = arm?; let body = arm.get("body")?; let opt_guard = arm.get("guard")?; self.visit_expr(body)?; if let Some(guard) = opt_guard { self.visit_expr(guard)?; } } self.visit_expr(match_expr)?; }, "Cast" => { let expr = expr.get("expr")?; self.visit_expr(expr)?; }, "If" => { let cond = expr.get("cond")?; let then = expr.get("then")?; let opt_else = expr.get("else")?; self.visit_expr(cond)?; self.visit_block(then)?; if let Some(els) = opt_else { self.visit_expr(els)?; } }, "Block" => { let block = 
expr.get("block")?; self.visit_block(block)? }, "Tup" => { let exprs: LuaTable = expr.get("exprs")?; for expr in exprs.sequence_values::<LuaTable>() { self.visit_expr(expr?)?; } }, "Paren" => { let expr = expr.get("expr")?; self.visit_expr(expr)? }, "Field" => { let expr = expr.get("expr")?; self.visit_expr(expr)? }, "Loop" => { let block = expr.get("block")?; self.visit_block(block)? }, "While" => { let block = expr.get("block")?; let cond = expr.get("cond")?; self.visit_expr(cond)?; self.visit_block(block)? }, "Ret" => { let opt_val = expr.get("value")?; if let Some(value) = opt_val { self.visit_expr(value)?; } }, ref e => warn!("visit_expr: Found unsupported expr {}", e), } Ok(()) } pub fn visit_stmt(&self, stmt: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_stmt(stmt)); match stmt.get::<_, String>("kind")?.as_str() { "Expr" | "Semi" => { let expr = stmt.get("expr")?; self.visit_expr(expr)?; }, "Local" => { self.visit_local(stmt)?; }, "Item" => { let item = stmt.get("item")?; self.visit_item(item)?; }, ref e => warn!("visit_stmt: Unsupported Stmt kind: {}", e), } Ok(()) } pub fn visit_local(&self, local: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_local(local)); let opt_init = local.get("init")?; if let Some(init) = opt_init { self.visit_expr(init)?; } Ok(()) } pub fn visit_block(&self, block: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_block(block)); let stmts: LuaTable = block.get("stmts")?; for stmt in stmts.sequence_values::<LuaTable>() { self.visit_stmt(stmt?)?; } Ok(()) } pub fn visit_fn_like(&self, fn_like: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_fn_like(fn_like)); let opt_block = fn_like.get("block")?; if let Some(block) = opt_block { self.visit_block(block)?; } Ok(()) } pub fn finish(&self) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,finish()); Ok(()) } }
use rlua::prelude::{LuaFunction, LuaResult, LuaTable}; macro_rules! call_lua_visitor_method { ($obj: expr , $method: ident ($($params: expr),*)) => { let opt_visit_method: Option<LuaFunction> = $obj.get(stringify!($method))?; if let Some(visit_method) = opt_visit_method { let proceed = visit_method.call::<_, bool>(($obj.clone(), $($params.clone()),*))?; if !proceed { return Ok(()); } } }; } pub(crate) struct LuaAstVisitor<'lua> { visitor: LuaTable<'lua> } impl<'lua> LuaAstVisitor<'lua> { pub fn new(visitor: LuaTable<'lua>) -> Self { LuaAstVisitor { visitor, } } pub fn visit_crate(&self, lua_crate: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_crate(lua_crate)); self.visit_mod(lua_crate.get("module")?)?; Ok(()) } pub fn visit_mod(&self, module: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_mod(module)); let items: LuaTable = module.get("items")?; for item in items.sequence_values::<LuaTable>() { self.visit_item(item?)?; } Ok(()) } pub fn visit_impl(&self, imp: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_impl(imp)); let items: LuaTable = imp.get("items")?; for item in items.sequence_values::<LuaTable>() { let item = item?; let kind: String = item.get("kind")?; match kind.as_str() { "ImplMethod" => { self.visit_fn_like(item)?; }, ref e => unimplemented!("visit_impl: Impl kind: {:?}", e), } } Ok(()) } pub fn visit_item(&self, item: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_item(item)); match item.get::<_, String>("kind")?.as_str() { "Fn" => { self.visit_fn_like(item)?; }, "Impl" => { self.visit_impl(item)?; }, ref e => warn!("visit_item: Found unsupported item kind: {:?}", e), } Ok(()) } pub fn visit_expr(&self, expr: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_expr(expr)); match expr.get::<_, String>("kind")?.as_str() { "Box" => { let boxed = expr.get("expr")?; self.visit_expr(boxed)?; }, 
"AssignOp" | "Binary" | "Assign" => { let lhs = expr.get("lhs")?; let rhs = expr.get("rhs")?; self.visit_expr(lhs)?; self.visit_expr(rhs)?; }, "Array" => { let values: LuaTable = expr.get("values")?; for val in values.sequence_values::<LuaTable>() { self.visit_expr(val?)?; } }, "Path" => { }, "Lit" => { }, "InlineAsm" => { let inputs: LuaTable = expr.get("inputs")?; let outputs: LuaTable = expr.get("outputs")?; for input in inputs.sequence_values::<LuaTable>() { let input = input?; let expr = input.get("expr")?; self.visit_expr(expr)?; } for output in outputs.sequence_values::<LuaTable>() { let output = output?; let expr = output.get("expr")?; self.visit_expr(expr)?; } }, "Unary" => { let expr = expr.get("expr")?; self.visit_expr(expr)?; }, "Call" => { let path = expr.get("path")?; let args: LuaTable = expr.get("args")?; self.visit_expr(path)?; for arg in args.sequence_values::<LuaTable>() { self.visit_expr(arg?)?; } }, "MethodCall" => { let args: LuaTable = expr.get("args")?; for arg in args.sequence_values::<LuaTable>() { self.visit_expr(arg?)?; } }, "Index" => { let indexed = expr.get("indexed")?; let index = expr.get("index")?; self.visit_expr(indexed)?; self.visit_expr(index)?; }, "AddrOf" => { let expr = expr.get("expr")?; self.visit_expr(expr)?; }, "Try" => { let expr = expr.get("expr")?; self.visit_expr(expr)?; }, "Match" => { let match_expr = expr.get("expr")?; let arms: LuaTable = expr.get("arms")?; for arm in arms.sequence_values::<LuaTable>() { let arm = arm?; let body = arm.get("body")?; let opt_guard = arm.get("guard")?; self.visit_expr(body)?; if let Some(guard) = opt_guard { self.visit_expr(guard)?; } } self.visit_expr(match_expr)?; }, "Cast" => { let expr = expr.get("expr")?; self.visit_expr(expr)?; }, "If" => { let cond = expr.get("cond")?; let then = expr.get("then")?; let opt_else = expr.get("else")?; self.visit_expr(cond)?; self.visit_block(then)?; if let Some(els) = opt_else { self.visit_expr(els)?; } }, "Block" => { let block = 
expr.get("block")?; self.visit_block(block)? }, "Tup" => { let exprs: LuaTable = expr.get("exprs")?; for expr in exprs.sequence_values::<LuaTable>() { self.visit_expr(expr?)?; } }, "Paren" => { let expr = expr.get("expr")?; self.visit_expr(expr)? }, "Field" => { let expr = expr.get("expr")?; self.visit_expr(expr)? }, "Loop" => { let block = expr.get("block")?; self.visit_block(block)? }, "While" => { let block = expr.get("block")?; let cond = expr.get("cond")?; self.visit_expr(cond)?; self.visit_block(block)? }, "Ret" => { let opt_val = expr.get("value")?; if let Some(value) = opt_val { self.visit_expr(value)?; } }, ref e => warn!("visit_expr: Found unsupported expr {}", e), } Ok(()) } pub fn visit_stmt(&self, stmt: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_stmt(stmt)); match stmt.get::<_, String>("kind")?.as_str() { "Expr" | "Semi" => { let expr = stmt.get("expr")?; self.visit_expr(expr)?; }, "Local" => { self.visit_local(stmt)?; }, "Item" => { let item = stmt.get("item")?; self.visit_item(item)?; }, ref e => warn!("visit_stmt: Unsupported Stmt kind: {}", e), } Ok(()) } pub fn visit_local(&self, local: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_local(local)); let opt_init = local.get("init")?; if let Some(init) = opt_init { self.visit_expr(init)?; } Ok(()) } pub fn visit_block(&self, block: LuaTable<'lua>) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,visit_block(block)); let stmts: LuaTable = block.get("stmts")?; for stmt in stmts.sequence_values::<LuaTable>() { self.visit_stmt(stmt?)?; } Ok(()) } pub fn visit_fn_like(&self, fn_like: LuaTable<'lua>) -> LuaResult<()> {
pub fn finish(&self) -> LuaResult<()> { call_lua_visitor_method!(self.visitor,finish()); Ok(()) } }
call_lua_visitor_method!(self.visitor,visit_fn_like(fn_like)); let opt_block = fn_like.get("block")?; if let Some(block) = opt_block { self.visit_block(block)?; } Ok(()) }
function_block-function_prefix_line
[ { "content": "fn build_struct_update(path: Path, fields: Vec<Field>, base: P<Expr>) -> Stmt {\n\n mk().semi_stmt(\n\n mk().assign_expr(\n\n &base,\n\n mk().struct_expr_base(path, fields, Some(&base))))\n\n}\n\n\n\n\n\n/// # `rename_struct` Command\n\n/// \n\n/// Obsolete - use `r...
Rust
examples/d3d12/compute/main.rs
LNSEAB/dxplr
06aa9329e889dbefaeb4e28669a8eb5547c3a1ea
use dxplr::d3d::IBlob; use dxplr::d3d12::{ ICommandAllocator, ICommandQueue, IDebug, IDescriptorHeap, IDevice, IFence, IGraphicsCommandList, IResource, }; use dxplr::{d3d, d3d12, dxgi}; use std::fs::File; use std::io::{BufReader, Read}; fn main() { let _d3d12_debug = { let debug = d3d12::get_debug_interface::<d3d12::Debug>().unwrap(); debug.enable_debug_layer(); debug }; let device = d3d12::create_device::<d3d12::Device>(None, d3d::FeatureLevel(12, 0)).unwrap(); let cmd_allocator = device .create_command_allocator::<d3d12::CommandAllocator>(d3d12::CommandListType::Direct) .unwrap(); let cmd_queue = device .create_command_queue::<d3d12::CommandQueue>( &d3d12::CommandQueueDesc::new().list_type(d3d12::CommandListType::Direct), ) .unwrap(); let cmd_list = device .create_command_list::<d3d12::GraphicsCommandList>( 0, d3d12::CommandListType::Direct, &cmd_allocator, None, ) .unwrap(); cmd_list.close().unwrap(); let fence = device.create_fence::<d3d12::Fence>(0, None).unwrap(); let event = dxplr::EventHandle::new(); let root_signature = { let desc = d3d12::RootSignatureDesc { parameters: Some(vec![d3d12::RootParameter::DescriptorTable { descriptor_ranges: vec![d3d12::DescriptorRange { range_type: d3d12::DescriptorRangeType::UAV, num_descriptors: 1, base_shader_register: 0, register_space: 0, offset_in_descriptors_from_table_start: d3d12::DescriptorRange::OFFSET_APPEND, }], shader_visibility: d3d12::ShaderVisibility::All, }]), static_samplers: None, flags: None, }; let data = d3d12::serialize_root_signature(&desc, d3d::RootSignatureVersion(1, 0)).unwrap(); device.create_root_signature(0, data.as_slice()).unwrap() }; let pipeline = { let file = File::open("examples/d3d12/compute/compute.hlsl").unwrap(); let mut reader = BufReader::new(file); let mut data = Vec::new(); reader.read_to_end(&mut data).unwrap(); let cs_bin = d3d::compile( &data, Some("compute.hlsl"), None, None, "cs_main", "cs_5_0", Some(d3d::CompileFlags::Debug), None, ) .unwrap(); device 
.create_compute_pipeline_state( &d3d12::ComputePipelineStateDesc::new() .root_signature(&root_signature) .cs((&cs_bin).into()), ) .unwrap() }; let descriptor_heap = device .create_descriptor_heap::<d3d12::DescriptorHeap>( &d3d12::DescriptorHeapDesc::new() .heap_type(d3d12::DescriptorHeapType::CBVSRVUAV) .num_descriptors(1) .flags(d3d12::DescriptorHeapFlags::ShaderVisible), ) .unwrap(); let buffer = device .create_committed_resource::<d3d12::Resource>( &d3d12::HeapProperties::new().heap_type(d3d12::HeapType::Default), None, &d3d12::ResourceDesc::new() .dimension(d3d12::ResourceDimension::Buffer) .width(256) .height(1) .format(dxgi::Format::Unknown) .layout(d3d12::TextureLayout::RowMajor) .flags(d3d12::ResourceFlags::AllowUnorderedAccess), d3d12::ResourceStates::UnorderedAccess, None, ) .unwrap(); unsafe { let handle = descriptor_heap.get_cpu_descriptor_handle_for_heap_start(); device.create_unordered_access_view( Some(&buffer), None, Some(&d3d12::UnorderedAccessViewDesc::Buffer { format: dxgi::Format::Unknown, first_element: 0, num_elements: 64, structure_byte_stride: 4, counter_offset_in_bytes: 0, flags: None, }), handle, ); } let upload_buffer = device .create_committed_resource::<d3d12::Resource>( &d3d12::HeapProperties::new().heap_type(d3d12::HeapType::Upload), None, &d3d12::ResourceDesc::new() .dimension(d3d12::ResourceDimension::Buffer) .width(256) .height(1) .format(dxgi::Format::Unknown) .layout(d3d12::TextureLayout::RowMajor), d3d12::ResourceStates::GenericRead, None, ) .unwrap(); unsafe { let data = upload_buffer.map(0, None).unwrap(); let a = (0..10).collect::<Vec<_>>(); std::ptr::copy_nonoverlapping( a.as_ptr() as *const u8, data.as_mut_ptr() as *mut u8, (std::mem::size_of::<u32>() * 10) as usize, ); upload_buffer.unmap(0, None); } let readback_buffer = device .create_committed_resource::<d3d12::Resource>( &d3d12::HeapProperties::new().heap_type(d3d12::HeapType::ReadBack), None, &d3d12::ResourceDesc::new() .dimension(d3d12::ResourceDimension::Buffer) 
.width(256) .height(1) .format(dxgi::Format::Unknown) .layout(d3d12::TextureLayout::RowMajor), d3d12::ResourceStates::CopyDest, None, ) .unwrap(); cmd_allocator.reset().unwrap(); cmd_list.reset(&cmd_allocator, Some(&pipeline)).unwrap(); cmd_list.resource_barrier(&[d3d12::ResourceBarrier::Transition { flags: None, resource: &buffer, subresource: d3d12::RESOURCE_BARRIER_ALL_SUBRESOURCES, state_before: d3d12::ResourceStates::UnorderedAccess, state_after: d3d12::ResourceStates::CopyDest, }]); cmd_list.copy_resource(&buffer, &upload_buffer); cmd_list.resource_barrier(&[d3d12::ResourceBarrier::Transition { flags: None, resource: &buffer, subresource: d3d12::RESOURCE_BARRIER_ALL_SUBRESOURCES, state_before: d3d12::ResourceStates::CopyDest, state_after: d3d12::ResourceStates::UnorderedAccess, }]); cmd_list.close().unwrap(); cmd_queue.execute_command_lists(&[cmd_list.as_command_list()]); cmd_queue.signal(&fence, 1).unwrap(); if fence.get_completed_value() < 1 { fence.set_event_on_completion(1, &event).unwrap(); event.wait(None); } cmd_allocator.reset().unwrap(); cmd_list.reset(&cmd_allocator, Some(&pipeline)).unwrap(); cmd_list.set_descriptor_heaps(&[&descriptor_heap]); cmd_list.set_compute_root_signature(&root_signature); cmd_list.set_compute_root_descriptor_table( 0, descriptor_heap.get_gpu_descriptor_handle_for_heap_start(), ); cmd_list.dispatch(1, 1, 1); cmd_list.resource_barrier(&[d3d12::ResourceBarrier::Transition { flags: None, resource: &buffer, subresource: d3d12::RESOURCE_BARRIER_ALL_SUBRESOURCES, state_before: d3d12::ResourceStates::UnorderedAccess, state_after: d3d12::ResourceStates::CopySource, }]); cmd_list.copy_resource(&readback_buffer, &buffer); cmd_list.close().unwrap(); cmd_queue.execute_command_lists(&[cmd_list.as_command_list()]); cmd_queue.signal(&fence, 2).unwrap(); if fence.get_completed_value() < 2 { fence.set_event_on_completion(2, &event).unwrap(); event.wait(None); } let mut values: [u32; 10] = [0; 10]; unsafe { let data = readback_buffer .map(0, 
Some(d3d12::Range::new(0, 256))) .unwrap(); std::ptr::copy_nonoverlapping( data.as_ptr(), values.as_mut_ptr() as *mut u8, std::mem::size_of::<u32>() * 10, ); readback_buffer.unmap(0, None); } for i in &values { print!("{}, ", i); } println!(""); }
use dxplr::d3d::IBlob; use dxplr::d3d12::{ ICommandAllocator, ICommandQueue, IDebug, IDescriptorHeap, IDevice, IFence, IGraphicsCommandList, IResource, }; use dxplr::{d3d, d3d12, dxgi}; use std::fs::File; use std::io::{BufReader, Read};
fn main() { let _d3d12_debug = { let debug = d3d12::get_debug_interface::<d3d12::Debug>().unwrap(); debug.enable_debug_layer(); debug }; let device = d3d12::create_device::<d3d12::Device>(None, d3d::FeatureLevel(12, 0)).unwrap(); let cmd_allocator = device .create_command_allocator::<d3d12::CommandAllocator>(d3d12::CommandListType::Direct) .unwrap(); let cmd_queue = device .create_command_queue::<d3d12::CommandQueue>( &d3d12::CommandQueueDesc::new().list_type(d3d12::CommandListType::Direct), ) .unwrap(); let cmd_list = device .create_command_list::<d3d12::GraphicsCommandList>( 0, d3d12::CommandListType::Direct, &cmd_allocator, None, ) .unwrap(); cmd_list.close().unwrap(); let fence = device.create_fence::<d3d12::Fence>(0, None).unwrap(); let event = dxplr::EventHandle::new(); let root_signature = { let desc = d3d12::RootSignatureDesc { parameters: Some(vec![d3d12::RootParameter::DescriptorTable { descriptor_ranges: vec![d3d12::DescriptorRange { range_type: d3d12::DescriptorRangeType::UAV, num_descriptors: 1, base_shader_register: 0, register_space: 0, offset_in_descriptors_from_table_start: d3d12::DescriptorRange::OFFSET_APPEND, }], shader_visibility: d3d12::ShaderVisibility::All, }]), static_samplers: None, flags: None, }; let data = d3d12::serialize_root_signature(&desc, d3d::RootSignatureVersion(1, 0)).unwrap(); device.create_root_signature(0, data.as_slice()).unwrap() }; let pipeline = { let file = File::open("examples/d3d12/compute/compute.hlsl").unwrap(); let mut reader = BufReader::new(file); let mut data = Vec::new(); reader.read_to_end(&mut data).unwrap(); let cs_bin = d3d::compile( &data, Some("compute.hlsl"), None, None, "cs_main", "cs_5_0", Some(d3d::CompileFlags::Debug), None, ) .unwrap(); device .create_compute_pipeline_state( &d3d12::ComputePipelineStateDesc::new() .root_signature(&root_signature) .cs((&cs_bin).into()), ) .unwrap() }; let descriptor_heap = device .create_descriptor_heap::<d3d12::DescriptorHeap>( &d3d12::DescriptorHeapDesc::new() 
.heap_type(d3d12::DescriptorHeapType::CBVSRVUAV) .num_descriptors(1) .flags(d3d12::DescriptorHeapFlags::ShaderVisible), ) .unwrap(); let buffer = device .create_committed_resource::<d3d12::Resource>( &d3d12::HeapProperties::new().heap_type(d3d12::HeapType::Default), None, &d3d12::ResourceDesc::new() .dimension(d3d12::ResourceDimension::Buffer) .width(256) .height(1) .format(dxgi::Format::Unknown) .layout(d3d12::TextureLayout::RowMajor) .flags(d3d12::ResourceFlags::AllowUnorderedAccess), d3d12::ResourceStates::UnorderedAccess, None, ) .unwrap(); unsafe { let handle = descriptor_heap.get_cpu_descriptor_handle_for_heap_start(); device.create_unordered_access_view( Some(&buffer), None, Some(&d3d12::UnorderedAccessViewDesc::Buffer { format: dxgi::Format::Unknown, first_element: 0, num_elements: 64, structure_byte_stride: 4, counter_offset_in_bytes: 0, flags: None, }), handle, ); } let upload_buffer = device .create_committed_resource::<d3d12::Resource>( &d3d12::HeapProperties::new().heap_type(d3d12::HeapType::Upload), None, &d3d12::ResourceDesc::new() .dimension(d3d12::ResourceDimension::Buffer) .width(256) .height(1) .format(dxgi::Format::Unknown) .layout(d3d12::TextureLayout::RowMajor), d3d12::ResourceStates::GenericRead, None, ) .unwrap(); unsafe { let data = upload_buffer.map(0, None).unwrap(); let a = (0..10).collect::<Vec<_>>(); std::ptr::copy_nonoverlapping( a.as_ptr() as *const u8, data.as_mut_ptr() as *mut u8, (std::mem::size_of::<u32>() * 10) as usize, ); upload_buffer.unmap(0, None); } let readback_buffer = device .create_committed_resource::<d3d12::Resource>( &d3d12::HeapProperties::new().heap_type(d3d12::HeapType::ReadBack), None, &d3d12::ResourceDesc::new() .dimension(d3d12::ResourceDimension::Buffer) .width(256) .height(1) .format(dxgi::Format::Unknown) .layout(d3d12::TextureLayout::RowMajor), d3d12::ResourceStates::CopyDest, None, ) .unwrap(); cmd_allocator.reset().unwrap(); cmd_list.reset(&cmd_allocator, Some(&pipeline)).unwrap(); 
cmd_list.resource_barrier(&[d3d12::ResourceBarrier::Transition { flags: None, resource: &buffer, subresource: d3d12::RESOURCE_BARRIER_ALL_SUBRESOURCES, state_before: d3d12::ResourceStates::UnorderedAccess, state_after: d3d12::ResourceStates::CopyDest, }]); cmd_list.copy_resource(&buffer, &upload_buffer); cmd_list.resource_barrier(&[d3d12::ResourceBarrier::Transition { flags: None, resource: &buffer, subresource: d3d12::RESOURCE_BARRIER_ALL_SUBRESOURCES, state_before: d3d12::ResourceStates::CopyDest, state_after: d3d12::ResourceStates::UnorderedAccess, }]); cmd_list.close().unwrap(); cmd_queue.execute_command_lists(&[cmd_list.as_command_list()]); cmd_queue.signal(&fence, 1).unwrap(); if fence.get_completed_value() < 1 { fence.set_event_on_completion(1, &event).unwrap(); event.wait(None); } cmd_allocator.reset().unwrap(); cmd_list.reset(&cmd_allocator, Some(&pipeline)).unwrap(); cmd_list.set_descriptor_heaps(&[&descriptor_heap]); cmd_list.set_compute_root_signature(&root_signature); cmd_list.set_compute_root_descriptor_table( 0, descriptor_heap.get_gpu_descriptor_handle_for_heap_start(), ); cmd_list.dispatch(1, 1, 1); cmd_list.resource_barrier(&[d3d12::ResourceBarrier::Transition { flags: None, resource: &buffer, subresource: d3d12::RESOURCE_BARRIER_ALL_SUBRESOURCES, state_before: d3d12::ResourceStates::UnorderedAccess, state_after: d3d12::ResourceStates::CopySource, }]); cmd_list.copy_resource(&readback_buffer, &buffer); cmd_list.close().unwrap(); cmd_queue.execute_command_lists(&[cmd_list.as_command_list()]); cmd_queue.signal(&fence, 2).unwrap(); if fence.get_completed_value() < 2 { fence.set_event_on_completion(2, &event).unwrap(); event.wait(None); } let mut values: [u32; 10] = [0; 10]; unsafe { let data = readback_buffer .map(0, Some(d3d12::Range::new(0, 256))) .unwrap(); std::ptr::copy_nonoverlapping( data.as_ptr(), values.as_mut_ptr() as *mut u8, std::mem::size_of::<u32>() * 10, ); readback_buffer.unmap(0, None); } for i in &values { print!("{}, ", i); } 
println!(""); }
function_block-full_function
[]
Rust
src/main.rs
satylogin/rss-update
5df10fa56ba515df06b6b94605f13b53de772415
pub(crate) mod config; pub(crate) mod display; pub(crate) mod feeds; pub(crate) mod readlist; use chrono::{DateTime, NaiveDate, Utc}; use clap::{App, Arg, ArgMatches}; use std::error::Error; use std::fs; use std::path::Path; pub(crate) fn base_dir() -> String { let base_path = Path::new(&dirs::home_dir().unwrap()).join(".rss-update-cli"); String::from(base_path.to_str().unwrap()) } const APP: &str = "rss-update"; const VERSION: &str = "0.1"; const ABOUT: &str = "To track and fetch updates on rss feeds."; const UNREAD: &str = "unread"; const UNREAD_ABOUT: &str = "Display contents of read list on terminal."; const ADD: &str = "add"; const ADD_ABOUT: &str = "Add new feed source to track."; const SETUP: &str = "setup"; const SETUP_ABOUT: &str = "Set up config for traking feeds."; const TRACKING: &str = "tracking"; const TRACKING_ABOUT: &str = "Lists feeds that are currently being tracked along with its metadata."; const REMOVE: &str = "remove"; const REMOVE_ABOUT: &str = "to remove feed from tracking"; const READ: &str = "read"; const READ_ABOUT: &str = "to mark post as read."; const USER_DATE_FORMAT: &str = "%Y-%m-%d"; fn parse_args() -> ArgMatches<'static> { App::new(APP) .version(VERSION) .about(ABOUT) .subcommand(App::new(UNREAD).about(UNREAD_ABOUT)) .subcommand( App::new(ADD) .about(ADD_ABOUT) .arg(Arg::from_usage( "--from [DATE] 'date to start tracking in YYYY-MM-DD (remember to pad with 0)'", )) .arg(Arg::from_usage("--feed [FEED] 'rss feed to track'").required(true)), ) .subcommand(App::new(SETUP).about(SETUP_ABOUT)) .subcommand(App::new(TRACKING).about(TRACKING_ABOUT)) .subcommand(App::new(REMOVE).about(REMOVE_ABOUT).arg( Arg::from_usage("--feed [FEED] `rss feed to remove from tracking.`").required(true), )) .subcommand( App::new(READ) .about(READ_ABOUT) .arg(Arg::from_usage("--post [URL] `post url to mark as read.`").required(true)), ) .get_matches() } fn unread() -> Result<(), Box<dyn Error>> { display::display_feeds(readlist::unread()?) 
} fn add_feed(args: &ArgMatches<'_>) -> Result<(), Box<dyn Error>> { let tracking_date = args.value_of("from").map_or(Utc::now(), |d| { let d = NaiveDate::parse_from_str(d, USER_DATE_FORMAT).ok().unwrap(); DateTime::from_utc(d.and_hms(0, 0, 0), Utc) }); config::update(config::Config { feed: args.value_of("feed").unwrap().to_string(), updated: Some(tracking_date), })?; Ok(()) } fn setup() -> Result<(), Box<dyn Error>> { fs::create_dir_all(base_dir())?; config::setup()?; readlist::setup()?; Ok(()) } fn tracking() -> Result<(), Box<dyn Error>> { display::display_configs(config::get()?) } fn remove_feed(args: &ArgMatches<'_>) -> Result<(), Box<dyn Error>> { let feed = args.value_of("feed").unwrap().to_string(); config::remove(&feed)?; Ok(()) } fn mark_read(args: &ArgMatches<'_>) -> Result<(), Box<dyn Error>> { let post = args.value_of("post").unwrap().to_string(); readlist::mark_read(&post)?; Ok(()) } async fn fetch_new_feeds() -> Result<(), Box<dyn Error>> { let configs = config::get()?; let conext = feeds::feeds_and_config(configs, Utc::now()).await?; let readlist = readlist::update(conext.feeds)?; config::replace(conext.configs)?; display::display_feeds(readlist) } #[tokio::main] async fn main() -> Result<(), Box<dyn Error>> { let args = parse_args(); match args.subcommand() { (UNREAD, Some(_)) => unread(), (ADD, Some(s_args)) => add_feed(s_args), (SETUP, Some(_)) => setup(), (TRACKING, Some(_)) => tracking(), (REMOVE, Some(s_args)) => remove_feed(s_args), (READ, Some(s_args)) => mark_read(s_args), _ => fetch_new_feeds().await, } }
pub(crate) mod config; pub(crate) mod display; pub(crate) mod feeds; pub(crate) mod readlist; use chrono::{DateTime, NaiveDate, Utc}; use clap::{App, Arg, ArgMatches}; use std::error::Error; use std::fs; use std::path::Path; pub(crate) fn base_dir() -> String { let base_path = Path::new(&dirs::home_dir().unwrap()).join(".rss-update-cli"); String::from(base_path.to_str().unwrap()) } const APP: &str = "rss-update"; const VERSION: &str = "0.1"; const ABOUT: &str = "To track and fetch updates on rss feeds."; const UNREAD: &str = "unread"; const UNREAD_ABOUT: &str = "Display contents of read list on terminal."; const ADD: &str = "add"; const ADD_ABOUT: &str = "Add new feed source to track."; const SETUP: &str = "setup"; const SETUP_ABOUT: &str = "Set up config for traking feeds."; const TRACKING: &str = "tracking"; const TRACKING_ABOUT: &str = "Lists feeds that are currently being tracked along with its metadata."; const REMOVE: &str = "remove"; const REMOVE_ABOUT: &str = "to remove feed from tracking"; const READ: &str = "read"; const READ_ABOUT: &str = "to mark post as read."; const USER_DATE_FORMAT: &str = "%Y-%m-%d"; fn parse_args() -> ArgMatches<'static> { App::new(APP) .version(VERSION) .about(ABOUT) .subcommand(App::new(UNREAD).about(UNREAD_ABOUT)) .subcommand( App::new(ADD) .about(ADD_ABOUT) .arg(Arg::from_usage( "--from [DATE] 'date to start tracking in YYYY-MM-DD (remember to pad with 0)'", )) .arg(Arg::from_usage("--feed [FEED] 'rss feed to track'").required(true)), ) .subcommand(App::new(SETUP).about(SETUP_ABOUT)) .subcommand(App::new(TRACKING).about(TRACKING_ABOUT)) .subcommand(App::new(REMOVE).about(REMOVE_ABOUT).arg( Arg::from_usage("--feed [FEED] `rss feed to remove from tracking.`").required(true), )) .subcommand( App::new(READ) .about(READ_ABOUT) .arg(Arg::from_usage("--post [URL] `post url to mark as read.`").required(true)), ) .get_matches() } fn unread() -> Result<(), Box<dyn Error>> { display::display_feeds(readlist::unread()?) 
} fn add_feed(args: &ArgMatches<'_>) -> Result<(), Box<dyn Error>> { let tracking_date = args.value_of("from").map_or(Utc::now(), |d| { let d = NaiveDate::parse_from_str(d, USER_DATE_FORMAT).ok().unwrap(); DateTime::from_utc(d.and_hms(0, 0, 0), Utc) }); config::update(config::Config { feed: args.value_of("feed").unwrap().to_string(), updated: Some(tracking_date), })?; Ok(()) } fn setup() -> Result<(), Box<dyn Error>> { fs::create_dir_all(base_dir())?; config::setup()?; readlist::setup()?; Ok(()) } fn tracking() -> Result<(), Box<dyn Error>> { display::display_configs(config::get()?) } fn remove_feed(args: &ArgMatches<'_>) -> Result<(), Box<dyn Error>> { let feed = args.value_of("feed").unwrap().to_string(); config::remove(&feed)?; Ok(()) } fn mark_read(args: &ArgMatches<'_>) -> Result<(), Box<dyn Error>> { let post = args.value_of("post").unwrap().to_string(); readlist::mark_read(&post)?; Ok(()) } async fn fetch_new_feeds() -> Result<(), Box<dyn Error>> { let configs = config::get()?; let conext = feeds::feeds_and_config(configs, Utc::now()).await?; let readlist = readlist::update(conext.feeds)?; config::replace(conext.configs)?; display::display_feeds(readlist) } #[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> { let args = parse_args(); match args.subcommand() { (UNREAD, Some(_)) => unread(), (ADD, Some(s_args)) => add_feed(s_args), (SETUP, Some(_)) => setup(), (TRACKING, Some(_)) => tracking(), (REMOVE, Some(s_args)) => remove_feed(s_args), (READ, Some(s_args)) => mark_read(s_args), _ => fetch_new_feeds().await, } }
function_block-full_function
[ { "content": "fn _mark_read(mut readlist: ReadList, post: &str) -> ReadList {\n\n for to_read in readlist.values_mut() {\n\n *to_read = to_read\n\n .iter()\n\n .filter(|p| **p != post)\n\n .map(std::clone::Clone::clone)\n\n .collect::<Vec<_>>();\n\n }\n\n...
Rust
weechat/src/hooks/modifier.rs
troethe/rust-weechat
8533abf0e000659f567e404d3c8aa0d773eff685
use libc::c_char; use std::{borrow::Cow, ffi::CStr, os::raw::c_void, ptr}; use weechat_sys::{t_gui_buffer, t_weechat_plugin}; use super::Hook; use crate::{buffer::Buffer, LossyCString, Weechat}; #[cfg_attr(feature = "docs", doc(cfg(unsound)))] pub struct ModifierHook { _hook: Hook, _hook_data: Box<ModifierHookData>, } struct ModifierHookData { callback: Box<dyn ModifierCallback>, weechat_ptr: *mut t_weechat_plugin, } pub enum ModifierData<'a> { String(Cow<'a, str>), Buffer(Buffer<'a>), } impl<'a> ModifierData<'a> { fn pointer_is_buffer(modifier_name: &str) -> bool { if modifier_name.starts_with("bar_condition_") { true } else { matches!( modifier_name, "bar_condition_yyy" | "history_add" | "input_text_content" | "input_text_display" | "input_text_display_with_cursor" | "input_text_for_buffer" ) } } fn from_name( weechat: &'a Weechat, modifier_name: &str, data: *const c_char, ) -> Option<ModifierData<'a>> { if data.is_null() { return None; } let modifier_data = unsafe { CStr::from_ptr(data).to_string_lossy() }; if ModifierData::pointer_is_buffer(modifier_name) { if modifier_data.len() < 2 || !modifier_data.starts_with("0x") { None } else { let ptr = u64::from_str_radix(&modifier_data[2..], 16).ok()?; Some(ModifierData::Buffer( weechat.buffer_from_ptr(ptr as *mut t_gui_buffer), )) } } else { Some(ModifierData::String(modifier_data)) } } } pub trait ModifierCallback { fn callback( &mut self, weechat: &Weechat, modifier_name: &str, data: Option<ModifierData>, string: Cow<str>, ) -> Option<String>; } impl<T: FnMut(&Weechat, &str, Option<ModifierData>, Cow<str>) -> Option<String> + 'static> ModifierCallback for T { fn callback( &mut self, weechat: &Weechat, modifier_name: &str, data: Option<ModifierData>, string: Cow<str>, ) -> Option<String> { self(weechat, modifier_name, data, string) } } impl ModifierHook { #[cfg_attr(feature = "docs", doc(cfg(unsound)))] pub fn new(modifier_name: &str, callback: impl ModifierCallback + 'static) -> Result<Self, ()> { unsafe extern "C" 
fn c_hook_cb( pointer: *const c_void, _data: *mut c_void, modifier_name: *const c_char, modifier_data: *const c_char, string: *const c_char, ) -> *mut c_char { let hook_data: &mut ModifierHookData = { &mut *(pointer as *mut ModifierHookData) }; let cb = &mut hook_data.callback; let modifier_name = CStr::from_ptr(modifier_name).to_str().unwrap_or_default(); let string = if string.is_null() { Cow::from("") } else { CStr::from_ptr(string).to_string_lossy() }; let weechat = Weechat::from_ptr(hook_data.weechat_ptr); let data = ModifierData::from_name(&weechat, modifier_name, modifier_data); let modified_string = cb.callback(&weechat, modifier_name, data, string); if let Some(modified_string) = modified_string { let string_length = modified_string.len(); let modified_string = LossyCString::new(modified_string); let strndup = weechat.get().strndup.unwrap(); strndup(modified_string.as_ptr(), string_length as i32) } else { ptr::null_mut() } } Weechat::check_thread(); let weechat = unsafe { Weechat::weechat() }; let data = Box::new(ModifierHookData { callback: Box::new(callback), weechat_ptr: weechat.ptr, }); let data_ref = Box::leak(data); let hook_modifier = weechat.get().hook_modifier.unwrap(); let modifier_name = LossyCString::new(modifier_name); let hook_ptr = unsafe { hook_modifier( weechat.ptr, modifier_name.as_ptr(), Some(c_hook_cb), data_ref as *const _ as *const c_void, ptr::null_mut(), ) }; let hook_data = unsafe { Box::from_raw(data_ref) }; let hook = Hook { ptr: hook_ptr, weechat_ptr: weechat.ptr, }; if hook_ptr.is_null() { Err(()) } else { Ok(Self { _hook: hook, _hook_data: hook_data, }) } } }
use libc::c_char; use std::{borrow::Cow, ffi::CStr, os::raw::c_void, ptr}; use weechat_sys::{t_gui_buffer, t_weechat_plugin}; use super::Hook; use crate::{buffer::Buffer, LossyCString, Weechat}; #[cfg_attr(feature = "docs", doc(cfg(unsound)))] pub struct ModifierHook { _hook: Hook, _hook_data: Box<ModifierHookData>, } struct ModifierHookData { callback: Box<dyn ModifierCallback>, weechat_ptr: *mut t_weechat_plugin, } pub enum ModifierData<'a> { String(Cow<'a, str>), Buffer(Buffer<'a>), } impl<'a> ModifierData<'a> { fn pointer_is_buffer(modifier_name: &str) -> bool { if modifier_name.starts_with("bar_condition_") { true } else { matches!( modifier_name, "bar_condition_yyy" | "history_add" | "input_text_content" | "input_text_display" | "input_text_display_with_cursor" | "input_text_for_buffer" ) } } fn from_name( weechat: &'a Weechat, modifier_name: &str, data: *const c_char, ) -> Option<ModifierData<'a>> { if data.is_null() { return None; } let modifier_data = unsafe { CStr::from_ptr(data).to_string_lossy() }; if ModifierData::pointer_is_buffer(modifier_name) { if modifier_data.len() < 2 || !modifier_data.starts_with("0x") { None } else { let ptr = u64::from_str_radix(&modifier_data[2..], 16).ok()?; Some(ModifierData::Buffer( weechat.buffer_from_ptr(ptr as *mut t_gui_buffer), )) } } else { Some(ModifierData::String(modifier_data)) } } } pub trait ModifierCallback { fn callback( &mut self, weechat: &Weechat, modifier_name: &str, data: Option<ModifierData>, string: Cow<str>, ) -> Option<String>; } impl<T: FnMut(&Weechat, &str, Option<ModifierData>, Cow<str>) -> Option<String> + 'static> ModifierCallback for T { fn callback( &mut self, weechat: &Weechat, modifier_name: &str, data: Option<ModifierData>, string: Cow<str>, ) -> Option<String> { self(weechat, modifier_name, data, string) } } impl ModifierHook { #[cfg_attr(feature = "docs", doc(cfg(unsound)))] pub fn new(modifier_name: &str, callback: impl ModifierCallback + 'static) -> Result<Self, ()> { unsafe extern "C" 
fn c_hook_cb( pointer: *const c_void, _data: *mut c_void, modifier_name: *const c_char, modifier_data: *const c_char, string: *const c_char, ) -> *mut c_char { let hook_data: &mut ModifierHookData = { &mut *(pointer as *mut ModifierHookData) }; let cb = &mut hook_data.callback; let modifier_name = CStr::from_ptr(modifier_name).to_str().unwrap_or_default(); let string = if string.is_null() { Cow::from("") } else { CStr::from_ptr(string).to_string_lossy() }; let weechat = Weechat::from_ptr(hook_data.weechat_ptr); let data = ModifierData::from_name(&weechat, modifier_name, modifier_data); let modified_string = cb.callback(&weechat, modifier_name, data, string); if let Some(modified_string) = modified_string { let string_length = modified_string.len(); let modified_string = LossyCString::new(modified_string); let strndup = weechat.get().strndup.unwrap(); strndup(modified_string.as_ptr(), string_length as i32) } else { ptr::null_mut() } } Weechat::check_thread(); let weechat = unsaf
}
e { Weechat::weechat() }; let data = Box::new(ModifierHookData { callback: Box::new(callback), weechat_ptr: weechat.ptr, }); let data_ref = Box::leak(data); let hook_modifier = weechat.get().hook_modifier.unwrap(); let modifier_name = LossyCString::new(modifier_name); let hook_ptr = unsafe { hook_modifier( weechat.ptr, modifier_name.as_ptr(), Some(c_hook_cb), data_ref as *const _ as *const c_void, ptr::null_mut(), ) }; let hook_data = unsafe { Box::from_raw(data_ref) }; let hook = Hook { ptr: hook_ptr, weechat_ptr: weechat.ptr, }; if hook_ptr.is_null() { Err(()) } else { Ok(Self { _hook: hook, _hook_data: hook_data, }) } }
function_block-function_prefixed
[ { "content": "/// Trait for the bar item callback\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n\npub trait BarItemCallback: 'static {\n\n /// The callback that should be called after the bar items...
Rust
src/widget/menu/menu_entry.rs
codec-abc/kas
5193ecd446e2147e7eb60e0081c1f66af049f244
use std::fmt::{self, Debug}; use super::Menu; use kas::class::{CloneText, HasBool, SetAccel}; use kas::draw::TextClass; use kas::event::VirtualKeyCodes; use kas::layout::{RulesSetter, RulesSolver}; use kas::prelude::*; use kas::widget::{AccelLabel, CheckBoxBare}; #[widget(config=noauto)] #[handler(handle=noauto)] #[derive(Clone, Debug, Default, Widget)] pub struct MenuEntry<M: Clone + Debug + 'static> { #[widget_core] core: kas::CoreData, keys: VirtualKeyCodes, label: PreparedText, underline: usize, label_off: Coord, msg: M, } impl<M: Clone + Debug + 'static> WidgetConfig for MenuEntry<M> { fn configure(&mut self, mgr: &mut Manager) { mgr.add_accel_keys(self.id(), &self.keys); } fn key_nav(&self) -> bool { true } } impl<M: Clone + Debug + 'static> Layout for MenuEntry<M> { fn size_rules(&mut self, size_handle: &mut dyn SizeHandle, axis: AxisInfo) -> SizeRules { let size = size_handle.menu_frame(); self.label_off = size.into(); let frame_rules = SizeRules::extract_fixed(axis.is_vertical(), size + size, Margins::ZERO); let text_rules = size_handle.text_bound(&mut self.label, TextClass::LabelSingle, axis); text_rules.surrounded_by(frame_rules, true) } fn set_rect(&mut self, rect: Rect, align: AlignHints) { self.core.rect = rect; self.label.update_env(|env| { env.set_bounds(rect.size.into()); env.set_align(align.unwrap_or(Align::Default, Align::Centre)); }); } fn draw(&self, draw_handle: &mut dyn DrawHandle, mgr: &event::ManagerState, disabled: bool) { draw_handle.menu_entry(self.core.rect, self.input_state(mgr, disabled)); let pos = self.core.rect.pos + self.label_off; if mgr.show_accel_labels() { draw_handle.text_with_underline( pos, Coord::ZERO, &self.label, TextClass::LabelSingle, self.underline, ); } else { draw_handle.text(pos, &self.label, TextClass::LabelSingle); } } } impl<M: Clone + Debug + 'static> MenuEntry<M> { pub fn new<S: Into<AccelString>>(label: S, msg: M) -> Self { let label = label.into(); let text = PreparedText::new_single(label.text().into()); 
let underline = label.underline(); let keys = label.take_keys(); MenuEntry { core: Default::default(), keys, label: text, underline, label_off: Coord::ZERO, msg, } } pub fn set_msg(&mut self, msg: M) { self.msg = msg; } } impl<M: Clone + Debug + 'static> CloneText for MenuEntry<M> { fn clone_text(&self) -> kas::text::RichText { self.label.clone_text() } } impl<M: Clone + Debug + 'static> SetAccel for MenuEntry<M> { fn set_accel_string(&mut self, label: AccelString) -> TkAction { let text = label.text().to_string(); self.keys = label.take_keys(); self.label.set_and_prepare(text) } } impl<M: Clone + Debug + 'static> event::Handler for MenuEntry<M> { type Msg = M; fn handle(&mut self, _: &mut Manager, event: Event) -> Response<M> { match event { Event::Activate => self.msg.clone().into(), event => Response::Unhandled(event), } } } impl<M: Clone + Debug> Menu for MenuEntry<M> {} #[handler(msg = M, generics = <> where M: From<VoidMsg>)] #[widget(config=noauto)] #[derive(Clone, Default, Widget)] pub struct MenuToggle<M: 'static> { #[widget_core] core: CoreData, layout_data: layout::FixedRowStorage<[SizeRules; 3], [u32; 2]>, #[widget] checkbox: CheckBoxBare<M>, #[widget] label: AccelLabel, } impl<M: 'static> Debug for MenuToggle<M> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "MenuToggle {{ core: {:?}, layout_data: {:?}, checkbox: {:?}, label: {:?} }}", self.core, self.layout_data, self.checkbox, self.label, ) } } impl<M: 'static> MenuToggle<M> { #[inline] pub fn new_on<T: Into<AccelString>, F>(f: F, label: T) -> Self where F: Fn(bool) -> M + 'static, { MenuToggle { core: Default::default(), layout_data: Default::default(), checkbox: CheckBoxBare::new_on(f), label: AccelLabel::new(label), } } #[inline] pub fn state(mut self, state: bool) -> Self { self.checkbox = self.checkbox.state(state); self } } impl MenuToggle<VoidMsg> { #[inline] pub fn new<T: Into<AccelString>>(label: T) -> Self { MenuToggle { core: Default::default(), layout_data: 
Default::default(), checkbox: CheckBoxBare::new(), label: AccelLabel::new(label), } } #[inline] pub fn on_toggle<M, F>(self, f: F) -> MenuToggle<M> where F: Fn(bool) -> M + 'static, { MenuToggle { core: self.core, layout_data: self.layout_data, checkbox: self.checkbox.on_toggle(f), label: self.label, } } } impl<M: 'static> WidgetConfig for MenuToggle<M> { fn configure(&mut self, mgr: &mut Manager) { mgr.add_accel_keys(self.checkbox.id(), self.label.keys()); } } impl<M: 'static> Layout for MenuToggle<M> { fn size_rules( &mut self, size_handle: &mut dyn SizeHandle, axis: AxisInfo, ) -> kas::layout::SizeRules { let mut solver = layout::RowSolver::new(axis, (kas::Right, 2usize), &mut self.layout_data); let child = &mut self.checkbox; solver.for_child(&mut self.layout_data, 0usize, |axis| { child.size_rules(size_handle, axis) }); let child = &mut self.label; solver.for_child(&mut self.layout_data, 1usize, |axis| { child.size_rules(size_handle, axis) }); solver.finish(&mut self.layout_data) } fn set_rect(&mut self, rect: Rect, align: AlignHints) { self.core.rect = rect; let mut setter = layout::RowSetter::<_, [u32; 2], _>::new( rect, (kas::Right, 2usize), align, &mut self.layout_data, ); let align = kas::AlignHints::NONE; self.checkbox.set_rect( setter.child_rect(&mut self.layout_data, 0usize), align.clone(), ); self.label .set_rect(setter.child_rect(&mut self.layout_data, 1usize), align); } fn find_id(&self, coord: Coord) -> Option<WidgetId> { if !self.rect().contains(coord) { return None; } Some(self.checkbox.id()) } fn draw(&self, draw_handle: &mut dyn DrawHandle, mgr: &event::ManagerState, disabled: bool) { let state = self.checkbox.input_state(mgr, disabled); draw_handle.menu_entry(self.core.rect, state); self.checkbox.draw(draw_handle, mgr, state.disabled); self.label.draw(draw_handle, mgr, state.disabled); } } impl<M: From<VoidMsg>> Menu for MenuToggle<M> {} impl<M: 'static> HasBool for MenuToggle<M> { #[inline] fn get_bool(&self) -> bool { 
self.checkbox.get_bool() } #[inline] fn set_bool(&mut self, state: bool) -> TkAction { self.checkbox.set_bool(state) } }
use std::fmt::{self, Debug}; use super::Menu; use kas::class::{CloneText, HasBool, SetAccel}; use kas::draw::TextClass; use kas::event::VirtualKeyCodes; use kas::layout::{RulesSetter, RulesSolver}; use kas::prelude::*; use kas::widget::{AccelLabel, CheckBoxBare}; #[widget(config=noauto)] #[handler(handle=noauto)] #[derive(Clone, Debug, Default, Widget)] pub struct MenuEntry<M: Clone + Debug + 'static> { #[widget_core] core: kas::CoreData, keys: VirtualKeyCodes, label: PreparedText, underline: usize, label_off: Coord, msg: M, } impl<M: Clone + Debug + 'static> WidgetConfig for MenuEntry<M> { fn configure(&mut self, mgr: &mut Manager) { mgr.add_accel_keys(self.id(), &self.keys); } fn key_nav(&self) -> bool { true } } impl<M: Clone + Debug + 'static> Layout for MenuEntry<M> { fn size_rules(&mut self, size_handle: &mut dyn SizeHandle, axis: AxisInfo) -> SizeRules { let size = size_handle.menu_frame(); self.label_off = size.into(); let frame_rules = SizeRules::extract_fixed(axis.is_vertical(), size + size, Margins::ZERO); let text_rules = size_handle.text_bound(&mut self.label, TextClass::LabelSingle, axis); text_rules.surrounded_by(frame_rules, true) } fn set_rect(&mut self, rect: Rect, align: AlignHints) { self.core.rect = rect; self.label.update_env(|env| { env.set_bounds(rect.size.into()); env.set_align(align.unwrap_or(Align::Default, Align::Centre)); }); } fn draw(&self, draw_handle: &mut dyn DrawHandle, mgr: &event::ManagerState, disabled: bool) { draw_handle.menu_entry(self.core.rect, self.input_state(mgr, disabled)); let pos = self.core.rect.pos + self.label_off; if mgr.show_accel_labels() { draw_handle.text_with_underline( pos, Coord::ZERO, &self.label, TextClass::LabelSingle, self.underline, ); } else { draw_handle.text(pos, &self.label, TextClass::LabelSingle); } } } impl<M: Clone + Debug + 'static> MenuEntry<M> { pub fn new<S: Into<AccelString>>(label: S, msg: M) -> Self { let label = label.into(); let text = PreparedText::new_single(label.text().into()); 
let underline = label.underline(); let keys = label.take_keys(); MenuEntry { core: Default::default(), keys, label: text, underline, label_off: Coord::ZERO, msg, } } pub fn set_msg(&mut self, msg: M) { self.msg = msg; } } impl<M: Clone + Debug + 'static> CloneText for MenuEntry<M> { fn clone_text(&self) -> kas::text::RichText { self.label.clone_text() } } impl<M: Clone + Debug + 'static> SetAccel for MenuEntry<M> { fn set_accel_string(&mut self, label: AccelString) -> TkAction { let text = label.text().to_string(); self.keys = label.take_keys(); self.label.set_and_prepare(text) } } impl<M: Clone + Debug + 'static> event::Handler for MenuEntry<M> { type Msg = M; fn handle(&mut self, _: &mut Manager, event: Event) -> Response<M> { match event { Event::Activate => self.msg.clone().into(), event => Response::Unhandled(event), } } } impl<M: Clone + Debug> Menu for MenuEntry<M> {} #[handler(msg = M, generics = <> where M: From<VoidMsg>)] #[widget(config=noauto)] #[derive(Clone, Default, Widget)] pub struct MenuToggle<M: 'static> { #[widget_core] core: CoreData, layout_data: layout::FixedRowStorage<[SizeRules; 3], [u32; 2]>, #[widget] checkbox: CheckBoxBare<M>, #[widget] label: AccelLabel, } impl<M: 'static> Debug for MenuToggle<M> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "MenuToggle {{ core: {:?}, layout_data: {:?}, checkbox: {:?}, label: {:?} }}", self.core, self.layout_data, self.checkbox, self.label, ) } } impl<M: 'static> MenuToggle<M> { #[inline] pub fn new_on<T: Into<AccelString>, F>(f: F, label: T) -> Self where F: Fn(bool) -> M + 'static, { MenuToggle { core: Default::default(), layout_data: Default::default(), checkbox: CheckBoxBare::new_on(f), label: AccelLabel::new(label), } } #[inline] pub fn state(mut self, state: bool) -> Self { self.checkbox = self.checkbox.state(state); self } } impl MenuToggle<VoidMsg> { #[inline]
#[inline] pub fn on_toggle<M, F>(self, f: F) -> MenuToggle<M> where F: Fn(bool) -> M + 'static, { MenuToggle { core: self.core, layout_data: self.layout_data, checkbox: self.checkbox.on_toggle(f), label: self.label, } } } impl<M: 'static> WidgetConfig for MenuToggle<M> { fn configure(&mut self, mgr: &mut Manager) { mgr.add_accel_keys(self.checkbox.id(), self.label.keys()); } } impl<M: 'static> Layout for MenuToggle<M> { fn size_rules( &mut self, size_handle: &mut dyn SizeHandle, axis: AxisInfo, ) -> kas::layout::SizeRules { let mut solver = layout::RowSolver::new(axis, (kas::Right, 2usize), &mut self.layout_data); let child = &mut self.checkbox; solver.for_child(&mut self.layout_data, 0usize, |axis| { child.size_rules(size_handle, axis) }); let child = &mut self.label; solver.for_child(&mut self.layout_data, 1usize, |axis| { child.size_rules(size_handle, axis) }); solver.finish(&mut self.layout_data) } fn set_rect(&mut self, rect: Rect, align: AlignHints) { self.core.rect = rect; let mut setter = layout::RowSetter::<_, [u32; 2], _>::new( rect, (kas::Right, 2usize), align, &mut self.layout_data, ); let align = kas::AlignHints::NONE; self.checkbox.set_rect( setter.child_rect(&mut self.layout_data, 0usize), align.clone(), ); self.label .set_rect(setter.child_rect(&mut self.layout_data, 1usize), align); } fn find_id(&self, coord: Coord) -> Option<WidgetId> { if !self.rect().contains(coord) { return None; } Some(self.checkbox.id()) } fn draw(&self, draw_handle: &mut dyn DrawHandle, mgr: &event::ManagerState, disabled: bool) { let state = self.checkbox.input_state(mgr, disabled); draw_handle.menu_entry(self.core.rect, state); self.checkbox.draw(draw_handle, mgr, state.disabled); self.label.draw(draw_handle, mgr, state.disabled); } } impl<M: From<VoidMsg>> Menu for MenuToggle<M> {} impl<M: 'static> HasBool for MenuToggle<M> { #[inline] fn get_bool(&self) -> bool { self.checkbox.get_bool() } #[inline] fn set_bool(&mut self, state: bool) -> TkAction { 
self.checkbox.set_bool(state) } }
pub fn new<T: Into<AccelString>>(label: T) -> Self { MenuToggle { core: Default::default(), layout_data: Default::default(), checkbox: CheckBoxBare::new(), label: AccelLabel::new(label), } }
function_block-full_function
[ { "content": "struct WidgetHeirarchy<'a>(&'a dyn WidgetConfig, usize);\n\nimpl<'a> fmt::Display for WidgetHeirarchy<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n write!(\n\n f,\n\n \"\\n{}{}\\t{}\\tpos={:?}\\tsize={:?}\",\n\n \"- \".re...
Rust
src/sc-core/src/services/public_api/mod.rs
vinimin/fluvio
142c050a2f1aaa83aeda19705fedd670fffaf1a1
mod public_server; mod flv; mod kf; mod api { pub use super::flv::api_versions_req::*; pub use super::kf::metadata_req::*; pub use super::flv::create_topics_req::*; pub use super::flv::delete_topics_req::*; pub use super::flv::fetch_topics_req::*; pub use super::flv::topic_composition_req::*; pub use super::flv::create_custom_spus_req::*; pub use super::flv::delete_custom_spus_req::*; pub use super::flv::fetch_spu_req::*; pub use super::flv::create_spu_groups_req::*; pub use super::flv::delete_spu_groups_req::*; pub use super::flv::fetch_spu_groups_req::*; } use std::sync::Arc; use std::fmt::Debug; use log::info; use log::debug; use serde::Serialize; use serde::de::DeserializeOwned; use sc_api::PublicRequest; use sc_api::ScApiKey; use kf_service::KfApiServer; use public_server::PublicService; use k8_metadata::metadata::InputObjectMeta; use k8_metadata::metadata::InputK8Obj; use k8_metadata::metadata::K8List; use k8_metadata::metadata::Spec as K8Spec; use k8_metadata_client::MetadataClient; use crate::core::ShareLocalStores; use crate::metadata::K8WSUpdateService; use crate::core::LocalStores; pub type SharedPublicContext<C> = Arc<PublicContext<C>>; pub type PublicApiServer<C> = KfApiServer<PublicRequest, ScApiKey, SharedPublicContext<C>, PublicService<C>>; pub fn create_public_server<C>( metadata: ShareLocalStores, k8_ws: K8WSUpdateService<C>, namespace: String, ) -> PublicApiServer<C> where C: MetadataClient { let addr = metadata.config().public_endpoint.addr.clone(); info!("start public api service at: {}", addr); KfApiServer::new( addr, Arc::new(PublicContext { metadata, k8_ws, namespace, }), PublicService::new(), ) } #[derive(Clone)] pub struct PublicContext<C> { metadata: ShareLocalStores, k8_ws: K8WSUpdateService<C>, namespace: String, } impl <C>PublicContext<C> where C: MetadataClient { pub fn k8_client(&self) -> &C { self.k8_ws.client() } pub fn k8_ws(&self) -> &K8WSUpdateService<C> { &self.k8_ws } pub fn metadata(&self) -> &LocalStores { &self.metadata } 
pub async fn create<S>( &self, name: String, spec: S ) -> Result<(),C::MetadataClientError> where S: K8Spec + Serialize + Default + Debug + Clone + DeserializeOwned + Send, <S as K8Spec>::Status: Default + Debug + Serialize + DeserializeOwned + Send { debug!("creating k8 spec: {:#?}",spec); let input = InputK8Obj { api_version: S::api_version(), kind: S::kind(), metadata: InputObjectMeta { name, namespace: self.namespace.clone(), ..Default::default() }, spec, ..Default::default() }; let client = self.k8_ws.client(); client.apply(input).await?; Ok(()) } pub async fn delete<S>( &self, name: &str, ) -> Result<(),C::MetadataClientError> where S: K8Spec + Serialize + Default + Debug + Clone + DeserializeOwned , <S as K8Spec>::Status: Default + Debug + DeserializeOwned { debug!("deleting k8 obj: {}",name); let meta = InputObjectMeta { name: name.to_owned(), namespace: self.namespace.clone(), ..Default::default() }; let client = self.k8_ws.client(); client.delete_item::<S,_>(&meta).await?; Ok(()) } pub async fn retrieve_items<S>( &self ) -> Result<K8List<S,S::Status>, C::MetadataClientError> where S: K8Spec, K8List<S,S::Status>: DeserializeOwned, { let client = self.k8_ws.client(); client.retrieve_items::<S>(&self.namespace).await } }
mod public_server; mod flv; mod kf; mod api { pub use super::flv::api_versions_req::*; pub use super::kf::metadata_req::*; pub use super::flv::create_topics_req::*; pub use super::flv::delete_topics_req::*; pub use super::flv::fetch_topics_req::*; pub use super::flv::topic_composition_req::*; pub use super::flv::create_custom_spus_req::*; pub use super::flv::delete_custom_spus_req::*; pub use super::flv::fetch_spu_req::*; pub use super::flv::create_spu_groups_req::*; pub use super::flv::delete_spu_groups_req::*; pub use super::flv::fetch_spu_groups_req::*; } use std::sync::Arc; use std::fmt::Debug; use log::info; use log::debug; use serde::Serialize; use serde::de::DeserializeOwned; use sc_api::PublicRequest; use sc_api::ScApiKey; use kf_service::KfApiServer; use public_server::PublicService; use k8_metadata::metadata::InputObjectMeta; use k8_metadata::metadata::InputK8Obj; use k8_metadata::metadata::K8List; use k8_metadata::metadata::Spec as K8Spec; use k8_metadata_client::MetadataClient; use crate::core::ShareLocalStores; use crate::metadata::K8WSUpdateService; use crate::core::LocalStores; pub type SharedPublicContext<C> = Arc<PublicContext<C>>; pub type PublicApiServer<C> = KfApiServer<PublicRequest, ScApiKey, SharedPublicContext<C>, PublicService<C>>; pub fn create_public_server<C>( metadata: ShareLocalStores, k8_ws: K8WSUpdateService<C>, namespace: String, ) -> PublicApiServer<C> where C: MetadataClient { let addr = metadata.config().public_endpoint.addr.clone(); info!("start public api service at: {}", addr); KfApiServer::new( addr, Arc::new(PublicContext { metadata, k8_ws, namespace, }), PublicService::new(), ) } #[derive(Clone)] pub struct PublicContext<C> { metadata: ShareLocalStores, k8_ws: K8WSUpdateService<C>, namespace: String, } impl <C>PublicContext<C> where C: MetadataClient { pub fn k8_client(&self) -> &C { self.k8_ws.client() } pub fn k8_ws(&self) -> &K8WSUpdateService<C> { &self.k8_ws } pub fn metadata(&self) -> &LocalStores { &self.metadata } 
pub async fn create<S>( &self, name: String, spec: S ) -> Result<(),C::MetadataClientError> where S: K8Spec + Serialize + Default + Debug + Clone + DeserializeOwned + Send, <S as K8Spec>::Status: Default + Debug + Serialize + DeserializeOwned + Send { debug!("creating k8 spec: {:#?}",spec); let input = InputK8Obj { api_v
Ok(()) } pub async fn delete<S>( &self, name: &str, ) -> Result<(),C::MetadataClientError> where S: K8Spec + Serialize + Default + Debug + Clone + DeserializeOwned , <S as K8Spec>::Status: Default + Debug + DeserializeOwned { debug!("deleting k8 obj: {}",name); let meta = InputObjectMeta { name: name.to_owned(), namespace: self.namespace.clone(), ..Default::default() }; let client = self.k8_ws.client(); client.delete_item::<S,_>(&meta).await?; Ok(()) } pub async fn retrieve_items<S>( &self ) -> Result<K8List<S,S::Status>, C::MetadataClientError> where S: K8Spec, K8List<S,S::Status>: DeserializeOwned, { let client = self.k8_ws.client(); client.retrieve_items::<S>(&self.namespace).await } }
ersion: S::api_version(), kind: S::kind(), metadata: InputObjectMeta { name, namespace: self.namespace.clone(), ..Default::default() }, spec, ..Default::default() }; let client = self.k8_ws.client(); client.apply(input).await?;
random
[ { "content": "// start server\n\npub fn create_public_server(addr: SocketAddr, ctx: DefaultSharedGlobalContext) -> PublicApiServer\n\n{\n\n info!(\"starting SPU: {} at public service at: {}\", ctx.local_spu_id(),addr);\n\n\n\n KfApiServer::new(addr, ctx, PublicService::new())\n\n}\n", "file_path": "sr...
Rust
src/filter.rs
jos61404/ffm
e8afb9025a457013c05d5012069c416492aa4135
use crate::file::{isolation_extension, isolation_name}; use console::style; use indicatif::ProgressBar; use std::convert::TryInto; use std::path::*; #[derive(Debug)] pub struct FilterDataStruct { pub path: PathBuf, pub dir_name: String, pub file_name: String, } pub fn name( pb_filter_name: ProgressBar, pb_copy_file: &ProgressBar, filter_path: &String, paths: &Vec<PathBuf>, filters: &Vec<String>, ) -> std::io::Result<Vec<FilterDataStruct>> { let mut filter_data: Vec<FilterDataStruct> = Vec::new(); pb_filter_name.set_length(paths.len().try_into().unwrap()); for path in paths { let file_stem = path.file_stem().unwrap(); pb_filter_name.set_message(&format!( "{} 過濾名稱 : {:#?}", style("[4/6]").bold().dim(), file_stem )); pb_filter_name.inc(1); for filter in filters { let filter = filter.as_str(); let file_stem = file_stem.to_str().unwrap(); if filter == file_stem { let file_name = path.file_name().unwrap().to_str().unwrap(); filter_data.push(FilterDataStruct { path: path.to_owned(), dir_name: filter.to_string(), file_name: file_name.to_string(), }); isolation_name( &pb_filter_name, &pb_copy_file, filter_path, FilterDataStruct { path: path.to_owned(), dir_name: filter.to_string(), file_name: file_name.to_string(), }, )?; } } } pb_filter_name.set_message(&format!( "{} 過濾名稱 : {}", style("[4/6]").bold().dim(), "完成" )); pb_filter_name.finish(); return Ok(filter_data); } pub fn extension( pb_filter_extension: ProgressBar, pb_copy_file: &ProgressBar, filter_path: &String, paths: &Vec<PathBuf>, filters: &Vec<String>, ) -> std::io::Result<Vec<FilterDataStruct>> { let mut filter_data: Vec<FilterDataStruct> = Vec::new(); pb_filter_extension.set_length(paths.len().try_into().unwrap()); for path in paths { let file_stem = path.file_stem().unwrap(); pb_filter_extension.set_message(&format!( "{} 過濾副名 : {:#?}", style("[5/6]").bold().dim(), file_stem )); pb_filter_extension.inc(1); if path.exists() == true && path.extension().is_some() == true { let file_extension = 
path.extension().unwrap(); for filter in filters { let filter = filter.as_str(); let file_extension = file_extension.to_str().unwrap(); if filter == file_extension { filter_data.push(FilterDataStruct { path: path.to_owned(), dir_name: filter.to_string(), file_name: path.file_name().unwrap().to_str().unwrap().to_string(), }); isolation_extension( &pb_filter_extension, &pb_copy_file, filter_path, FilterDataStruct { path: path.to_owned(), dir_name: filter.to_string(), file_name: path.file_name().unwrap().to_str().unwrap().to_string(), }, )?; } } } } pb_filter_extension.set_message(&format!( "{} 過濾副名 : {}", style("[5/6]").bold().dim(), "完成" )); pb_filter_extension.finish(); return Ok(filter_data); }
use crate::file::{isolation_extension, isolation_name}; use console::style; use indicatif::ProgressBar; use std::convert::TryInto; use std::path::*; #[derive(Debug)] pub struct FilterDataStruct { pub path: PathBuf, pub dir_name: String, pub file_name: String, } pub fn name( pb_filter_name: ProgressBar, pb_copy_file: &ProgressBar, filter_path: &String, paths: &Vec<PathBuf>, filters: &Vec<String>, ) -> std::io::Result<Vec<FilterDataStruct>> { let mut filter_data: Vec<FilterDataStruct> = Vec::new(); pb_filter_name.set_length(paths.len().try_into().unwrap()); for path in paths { let file_stem = path.file_stem().unwrap(); pb_filter_name.set_message(&format!( "{} 過濾名稱 : {:#?}", style("[4/6]").bold().dim(), file_stem )); pb_filter_name.inc(1); for filter in filters { let filter = filter.as_str(); let file_stem = file_stem.to_str().unwrap(); if filter == file_stem { let file_name = path.file_name().unwrap().to_str().unwrap(); filter_data.push(FilterDataStruct { path: path.to_owned(), dir_name: filter.to_string(), file_name: file_name.to_string(), }); isolation_name( &pb_filter_name, &pb_copy_file, filter_path, FilterDataStruct { path: path.to_owned(), dir_name: filter.to_string(), file_name: file_name.to_string(), }, )?; } } } pb_filt
_message(&format!( "{} 過濾副名 : {}", style("[5/6]").bold().dim(), "完成" )); pb_filter_extension.finish(); return Ok(filter_data); }
er_name.set_message(&format!( "{} 過濾名稱 : {}", style("[4/6]").bold().dim(), "完成" )); pb_filter_name.finish(); return Ok(filter_data); } pub fn extension( pb_filter_extension: ProgressBar, pb_copy_file: &ProgressBar, filter_path: &String, paths: &Vec<PathBuf>, filters: &Vec<String>, ) -> std::io::Result<Vec<FilterDataStruct>> { let mut filter_data: Vec<FilterDataStruct> = Vec::new(); pb_filter_extension.set_length(paths.len().try_into().unwrap()); for path in paths { let file_stem = path.file_stem().unwrap(); pb_filter_extension.set_message(&format!( "{} 過濾副名 : {:#?}", style("[5/6]").bold().dim(), file_stem )); pb_filter_extension.inc(1); if path.exists() == true && path.extension().is_some() == true { let file_extension = path.extension().unwrap(); for filter in filters { let filter = filter.as_str(); let file_extension = file_extension.to_str().unwrap(); if filter == file_extension { filter_data.push(FilterDataStruct { path: path.to_owned(), dir_name: filter.to_string(), file_name: path.file_name().unwrap().to_str().unwrap().to_string(), }); isolation_extension( &pb_filter_extension, &pb_copy_file, filter_path, FilterDataStruct { path: path.to_owned(), dir_name: filter.to_string(), file_name: path.file_name().unwrap().to_str().unwrap().to_string(), }, )?; } } } } pb_filter_extension.set
random
[ { "content": "pub fn add(original_path: &Path, file_path: String, file_name: String) -> std::io::Result<()> {\n\n let mut log_path = String::from(&file_path);\n\n log_path.push_str(\"/log.txt\");\n\n\n\n let mut log_file = fs::OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n ...
Rust
crates/rune-cli/src/inspect.rs
ferrous-systems/rune
11d752759003ee2707efb904f596cbe8f9ee07a6
use std::{ collections::{BTreeMap, HashMap}, path::PathBuf, }; use anyhow::{Context, Error}; use build_info::BuildInfo; use hotg_rune_syntax::{ hir::{HirId, Rune, SourceKind}, yaml::{Type, Value}, }; use serde::{Serialize, Serializer}; use strum::VariantNames; use wasmparser::{Parser, Payload}; use crate::Format; #[derive(Debug, Clone, PartialEq, structopt::StructOpt)] pub struct Inspect { #[structopt( short, long, help = "The format to use when printing output", default_value = "text", possible_values = Format::VARIANTS, parse(try_from_str) )] format: Format, #[structopt(help = "The Rune to inspect", parse(from_os_str))] rune: PathBuf, } impl Inspect { pub fn execute(self) -> Result<(), Error> { let wasm = std::fs::read(&self.rune).with_context(|| { format!("Unable to read \"{}\"", self.rune.display()) })?; let meta = Metadata::from_wasm_binary(&wasm); match self.format { Format::Json => { let s = serde_json::to_string_pretty(&meta) .context("Unable to format the metadata as JSON")?; println!("{}", s); }, Format::Text => print_meta(&meta), } Ok(()) } } fn print_meta(meta: &Metadata) { if let Some(build_info) = &meta.rune_cli_build_info { let git = build_info .version_control .as_ref() .expect("The project uses version control") .git() .expect("The project uses git"); println!( "Compiled by: {} v{} ({} {})", build_info.crate_info.name, build_info.crate_info.version, git.commit_short_id, git.commit_timestamp.date().naive_utc(), ); } if let Some(SimplifiedRune { capabilities }) = &meta.simplified_rune { if !capabilities.is_empty() { print_capabilities(&capabilities); } } } fn print_capabilities(capabilities: &BTreeMap<String, SimplifiedCapability>) { println!("Capabilities:"); for (name, value) in capabilities { let SimplifiedCapability { capability_type, outputs, parameters, } = value; println!(" {} ({})", name, capability_type); if !outputs.is_empty() { println!(" Outputs:"); for output in outputs { println!(" - {}{:?}", output.name, output.dimensions); } } if 
!parameters.is_empty() { println!(" Parameters:"); for (key, value) in parameters { println!(" - {}: {:?}", key, value); } } } } #[derive(Debug, Default, Clone, serde::Serialize)] pub(crate) struct Metadata { rune_cli_build_info: Option<BuildInfo>, #[serde(skip)] rune: Option<Rune>, simplified_rune: Option<SimplifiedRune>, } impl Metadata { pub(crate) fn from_wasm_binary(wasm: &[u8]) -> Self { Metadata::from_custom_sections(wasm_custom_sections(wasm)) } fn from_custom_sections<'a>( sections: impl Iterator<Item = CustomSection<'a>>, ) -> Self { let mut meta = Metadata::default(); for section in sections { match section.name { hotg_rune_codegen::GRAPH_CUSTOM_SECTION => { match serde_json::from_slice(section.data) { Ok(rune) => { meta.simplified_rune = Some(SimplifiedRune::from_rune(&rune)); meta.rune = Some(rune); }, Err(e) => { log::warn!( "Unable to deserialize the Rune graph: {}", e ); }, } }, hotg_rune_codegen::VERSION_CUSTOM_SECTION => { match serde_json::from_slice(section.data) { Ok(v) => { meta.rune_cli_build_info = Some(v); }, Err(e) => { log::warn!( "Unable to deserialize the version: {}", e ); }, } }, _ => {}, } } meta } pub(crate) fn take_rune(&mut self) -> Option<Rune> { self.rune.take() } } #[derive(Debug, Clone, serde::Serialize)] struct SimplifiedRune { capabilities: BTreeMap<String, SimplifiedCapability>, } impl SimplifiedRune { fn from_rune(rune: &Rune) -> Self { let mut capabilities = BTreeMap::new(); for (&id, node) in &rune.stages { let name = rune.names[id].to_string(); let outputs = node .output_slots .iter() .map(|slot| rune.slots[slot].element_type) .map(|type_id| resolve_type(&rune, type_id)) .collect(); match &node.stage { hotg_rune_syntax::hir::Stage::Source( hotg_rune_syntax::hir::Source { kind, parameters }, ) => { let kind = kind.clone(); let parameters = parameters.clone(); capabilities.insert( name, SimplifiedCapability { capability_type: kind, parameters, outputs, }, ); }, hotg_rune_syntax::hir::Stage::Sink(_) => {}, 
hotg_rune_syntax::hir::Stage::Model(_) => {}, hotg_rune_syntax::hir::Stage::ProcBlock(_) => {}, } } SimplifiedRune { capabilities } } } fn resolve_type(rune: &Rune, type_id: HirId) -> Type { let (primitive, dims) = match &rune.types[&type_id] { hotg_rune_syntax::hir::Type::Primitive(p) => (p, vec![1]), hotg_rune_syntax::hir::Type::Buffer { underlying_type, dimensions, } => match &rune.types[underlying_type] { hotg_rune_syntax::hir::Type::Primitive(p) => { (p, dimensions.clone()) }, _ => unreachable!(), }, hotg_rune_syntax::hir::Type::Unknown | hotg_rune_syntax::hir::Type::Any => { unreachable!("All types should have been resolved") }, }; Type { name: primitive.rust_name().to_string(), dimensions: dims, } } #[derive(Debug, Clone, serde::Serialize)] struct SimplifiedCapability { #[serde(serialize_with = "serialize_source_kind")] capability_type: SourceKind, outputs: Vec<Type>, parameters: HashMap<String, Value>, } fn serialize_source_kind<S: Serializer>( kind: &SourceKind, ser: S, ) -> Result<S::Ok, S::Error> { kind.to_string().serialize(ser) } fn wasm_custom_sections( wasm: &[u8], ) -> impl Iterator<Item = CustomSection<'_>> + '_ { Parser::default() .parse_all(wasm) .filter_map(Result::ok) .filter_map(|payload| match payload { Payload::CustomSection { name, data, .. } => { Some(CustomSection { name, data }) }, _ => None, }) } #[derive(Debug, Copy, Clone, PartialEq)] struct CustomSection<'a> { name: &'a str, data: &'a [u8], }
use std::{ collections::{BTreeMap, HashMap}, path::PathBuf, }; use anyhow::{Context, Error}; use build_info::BuildInfo; use hotg_rune_syntax::{ hir::{HirId, Rune, SourceKind}, yaml::{Type, Value}, }; use serde::{Serialize, Serializer}; use strum::VariantNames; use wasmparser::{Parser, Payload}; use crate::Format; #[derive(Debug, Clone, PartialEq, structopt::StructOpt)] pub struct Inspect { #[structopt( short, long, help = "The format to use when printing output", default_value = "text", possible_values = Format::VARIANTS, parse(try_from_str) )] format: Format, #[structopt(help = "The Rune to inspect", parse(from_os_str))] rune: PathBuf, } impl Inspect { pub fn execute(self) -> Result<(), Error> { let wasm = std::fs::read(&self.rune).with_context(|| { format!("Unable to read \"{}\"", self.rune.display()) })?; let meta = Metadata::from_wasm_binary(&wasm); match self.format { Format::Json => { let s = serde_json::to_string_pretty(&meta) .context("Unable to format the metadata as JSON")?; println!("{}", s); }, Format::Text => print_meta(&meta), } Ok(()) } } fn print_meta(meta: &Metadata) { if let Some(build_info) = &meta.rune_cli_build_info { let git = build_info .version_control .as_ref() .expect("The project uses version control") .git() .expect("The project uses git"); println!( "Compiled by: {} v{} ({} {})", build_info.crate_info.name, build_info.crate_info.version, git.commit_short_id, git.commit_timestamp.date().naive_utc(), ); } if let Some(SimplifiedRune { capabilities }) = &meta.simplified_rune { if !capabilities.is_empty() { print_capabilities(&capabilities); } } } fn print_capabilities(capabilities: &BTreeMap<String, SimplifiedCapability>) { println!("Capabilities:"); for (name, value) in capabilities { let SimplifiedCapability { capability_type, outputs, parameters, } = value; println!(" {} ({})", name, capability_type); if !outputs.is_empty() { println!(" Outputs:"); for output in outputs { println!(" - {}{:?}", output.name, output.dimensions); } } if 
!parameters.is_empty() { println!(" Parameters:"); for (key, value) in parameters { println!(" - {}: {:?}", key, value); } } } } #[derive(Debug, Default, Clone, serde::Serialize)] pub(crate) struct Metadata { rune_cli_build_info: Option<BuildInfo>, #[serde(skip)] rune: Option<Rune>, simplified_rune: Option<SimplifiedRune>, } impl Metadata { pub(crate) fn from_wasm_binary(wasm: &[u8]) -> Self { Metadata::from_custom_sections(wasm_custom_sections(wasm)) } fn from_custom_sections<'a>( sections: impl Iterator<Item = CustomSection<'a>>, ) -> Self { let mut meta = Metadata::default(); for section in sections { match section.name { hotg_rune_codegen::GRAPH_CUSTOM_SECTION => { match serde_json::from_slice(section.data) { Ok(rune) => { meta.simplified_rune = Some(SimplifiedRune::from_rune(&rune)); meta.rune = Some(rune); }, Err(e) => { log::warn!( "Unable to deserialize the Rune graph: {}", e ); }, } }, hotg_rune_codegen::VERSION_CUSTOM_SECTION => { match serde_json::from_slice(section.data) { Ok(v) => { meta.rune_cli_build_info = Some(v); }, Err(e) => { log::warn!( "Unable to deserialize the version: {}", e ); }, } }, _ => {}, } } meta } pub(crate) fn take_rune(&mut self) -> Option<Rune> { self.rune.take() } } #[derive(Debug, Clone, serde::Serialize)] struct SimplifiedRune { capabilities: BTreeMap<String, SimplifiedCapability>, } impl SimplifiedRune { fn from_rune(rune: &Rune) -> Self { let mut capabilities = BTreeMap::new(); for (&id, node) in &rune.stages { let name = rune.names[id].to_string(); let outputs = node .output_slots .iter() .map(|slot| rune.slots[slot].element_type) .map(|type_id| resolve_type(&rune, type_id)) .collect(); match &node.stage { hotg_rune_syntax::hir::Stage::Source( hotg_rune_syntax::hir::Source { kind, parameters }, ) => { let kind = kind.clone(); let parameters = parameters.clone(); capabilities.insert( name, SimplifiedCapability { capability_type: kind, parameter
} fn resolve_type(rune: &Rune, type_id: HirId) -> Type { let (primitive, dims) = match &rune.types[&type_id] { hotg_rune_syntax::hir::Type::Primitive(p) => (p, vec![1]), hotg_rune_syntax::hir::Type::Buffer { underlying_type, dimensions, } => match &rune.types[underlying_type] { hotg_rune_syntax::hir::Type::Primitive(p) => { (p, dimensions.clone()) }, _ => unreachable!(), }, hotg_rune_syntax::hir::Type::Unknown | hotg_rune_syntax::hir::Type::Any => { unreachable!("All types should have been resolved") }, }; Type { name: primitive.rust_name().to_string(), dimensions: dims, } } #[derive(Debug, Clone, serde::Serialize)] struct SimplifiedCapability { #[serde(serialize_with = "serialize_source_kind")] capability_type: SourceKind, outputs: Vec<Type>, parameters: HashMap<String, Value>, } fn serialize_source_kind<S: Serializer>( kind: &SourceKind, ser: S, ) -> Result<S::Ok, S::Error> { kind.to_string().serialize(ser) } fn wasm_custom_sections( wasm: &[u8], ) -> impl Iterator<Item = CustomSection<'_>> + '_ { Parser::default() .parse_all(wasm) .filter_map(Result::ok) .filter_map(|payload| match payload { Payload::CustomSection { name, data, .. } => { Some(CustomSection { name, data }) }, _ => None, }) } #[derive(Debug, Copy, Clone, PartialEq)] struct CustomSection<'a> { name: &'a str, data: &'a [u8], }
s, outputs, }, ); }, hotg_rune_syntax::hir::Stage::Sink(_) => {}, hotg_rune_syntax::hir::Stage::Model(_) => {}, hotg_rune_syntax::hir::Stage::ProcBlock(_) => {}, } } SimplifiedRune { capabilities } }
function_block-function_prefixed
[ { "content": "fn generate_graph(w: &mut dyn Write, rune: &Rune) -> Result<(), Error> {\n\n writeln!(w, \"digraph {{\")?;\n\n writeln!(w, \" rankdir=TD;\")?;\n\n writeln!(w, \" node [shape=plaintext];\")?;\n\n\n\n declare_nodes(w, &rune.stages, &rune.names)?;\n\n declare_edges(w, &rune)?;\n\n\n\...
Rust
src/helpers.rs
jmagnuson/uds
e35887f3468d5917c4a36b8a6936732efeb2b9ee
/* See each function for copyright holders */ use std::os::unix::io::{RawFd, AsRawFd, IntoRawFd}; use std::io::{self, ErrorKind}; use std::mem; use libc::{c_int, sockaddr, socklen_t, AF_UNIX}; use libc::{bind, connect, getsockname, getpeername}; use libc::{socket, accept, close, listen, socketpair}; use libc::{ioctl, FIONBIO, FIOCLEX, FIONCLEX}; use libc::{fcntl, F_DUPFD_CLOEXEC, EINVAL, dup}; #[cfg(any(target_os="illumos", target_os="solaris"))] use libc::{F_GETFD, F_SETFD, FD_CLOEXEC}; #[cfg(not(any(target_vendor="apple", target_os="illumos", target_os="solaris")))] use libc::{SOCK_CLOEXEC, SOCK_NONBLOCK}; #[cfg(not(any(target_vendor="apple", target_os="netbsd", target_os="illumos", target_os="solaris")))] use libc::{accept4, ENOSYS}; #[cfg(target_vendor="apple")] use libc::{setsockopt, SOL_SOCKET, SO_NOSIGPIPE, c_void}; use crate::addr::*; const LISTEN_BACKLOG: c_int = 10; #[cfg(not(target_vendor="apple"))] pub use libc::MSG_NOSIGNAL; #[cfg(target_vendor="apple")] pub const MSG_NOSIGNAL: c_int = 0; pub fn set_cloexec(fd: RawFd, close_on_exec: bool) -> Result<(), io::Error> { let op = if close_on_exec {FIOCLEX} else {FIONCLEX}; match cvt!(unsafe { ioctl(fd, op) }) { Ok(_) => Ok(()), #[cfg(any(target_os="illumos", target_os="solaris"))] Err(ref e) if e.kind() == ErrorKind::InvalidInput => { unsafe { let prev = cvt!(fcntl(fd, F_GETFD))?; let change_to = if close_on_exec {prev | FD_CLOEXEC} else {prev & !FD_CLOEXEC}; if change_to != prev { cvt!(fcntl(fd, F_SETFD, change_to))?; } Ok(()) } }, Err(e) => Err(e), } } pub fn set_nonblocking(fd: RawFd, nonblocking: bool) -> Result<(), io::Error> { cvt!(unsafe { ioctl(fd, FIONBIO, &mut (nonblocking as c_int)) })?; Ok(()) } type SetSide = unsafe extern "C" fn(RawFd, *const sockaddr, socklen_t) -> c_int; unsafe fn set_unix_addr(socket: RawFd, set_side: SetSide, addr: &UnixSocketAddr) -> Result<(), io::Error> { let (addr, len) = addr.as_raw_general(); loop { if set_side(socket, addr, len) != -1 { break Ok(()); } let err = 
io::Error::last_os_error(); if err.kind() != ErrorKind::Interrupted { break Err(err); } } } pub fn bind_to(socket: RawFd, addr: &UnixSocketAddr) -> Result<(), io::Error> { unsafe { set_unix_addr(socket, bind, addr) } } pub fn connect_to(socket: RawFd, addr: &UnixSocketAddr) -> Result<(), io::Error> { unsafe { set_unix_addr(socket, connect, addr) } } type GetSide = unsafe extern "C" fn(RawFd, *mut sockaddr, *mut socklen_t) -> c_int; unsafe fn get_unix_addr(socket: RawFd, get_side: GetSide) -> Result<UnixSocketAddr, io::Error> { UnixSocketAddr::new_from_ffi(|addr_ptr, addr_len| { match get_side(socket, addr_ptr, addr_len) { -1 => Err(io::Error::last_os_error()), _ => Ok(()), } }).map(|((), addr)| addr ) } pub fn local_addr(socket: RawFd) -> Result<UnixSocketAddr, io::Error> { unsafe { get_unix_addr(socket, getsockname) } } pub fn peer_addr(socket: RawFd) -> Result<UnixSocketAddr, io::Error> { unsafe { get_unix_addr(socket, getpeername) } } pub struct Socket(RawFd); impl Drop for Socket { fn drop(&mut self) { unsafe { close(self.0) }; } } impl IntoRawFd for Socket { fn into_raw_fd(self) -> RawFd { let fd = self.0; mem::forget(self); fd } } impl AsRawFd for Socket { fn as_raw_fd(&self) -> RawFd { self.0 } } impl Socket { fn set_nosigpipe(&self, nosigpipe: bool) -> Result<(), io::Error> { #![allow(unused_variables)] #[cfg(target_vendor="apple")] { unsafe { let nosigpipe = &(nosigpipe as c_int) as *const c_int as *const c_void; let int_size = mem::size_of::<c_int>() as socklen_t; cvt!(setsockopt(self.0, SOL_SOCKET, SO_NOSIGPIPE, nosigpipe, int_size))?; } } Ok(()) } pub fn new(socket_type: c_int, nonblocking: bool) -> Result<Self, io::Error> { #[cfg(not(any(target_vendor="apple", target_os="illumos", target_os="solaris")))] { let type_flags = socket_type | SOCK_CLOEXEC | if nonblocking {SOCK_NONBLOCK} else {0}; match cvt!(unsafe { socket(AF_UNIX, type_flags, 0) }) { Ok(fd) => return Ok(Socket(fd)), Err(ref e) if e.raw_os_error() == Some(EINVAL) => {/*try without*/} Err(e) 
=> return Err(e), } } let fd = cvt!(unsafe { socket(AF_UNIX, socket_type, 0) })?; let socket = Socket(fd); set_cloexec(socket.0, true)?; socket.set_nosigpipe(true)?; if nonblocking { set_nonblocking(socket.0, true)?; } Ok(socket) } pub fn accept_from(fd: RawFd, nonblocking: bool) -> Result<(Self, UnixSocketAddr), io::Error> { unsafe { UnixSocketAddr::new_from_ffi(|addr_ptr, len_ptr| { #[cfg(any( target_os="linux", target_os="android", target_os="freebsd", target_os="dragonfly", target_os="openbsd" ))] { let flags = SOCK_CLOEXEC | if nonblocking {SOCK_NONBLOCK} else {0}; match cvt_r!(accept4(fd, addr_ptr, len_ptr, flags)) { Ok(fd) => return Ok(Socket(fd)), Err(ref e) if e.raw_os_error() == Some(ENOSYS) => {/*try normal accept()*/}, Err(e) => return Err(e), } } let fd = cvt_r!(accept(fd, addr_ptr, len_ptr))?; let socket = Socket(fd); set_cloexec(socket.0, true)?; socket.set_nosigpipe(true)?; if nonblocking { set_nonblocking(socket.0, true)?; } Ok(socket) }) } } pub fn start_listening(&self) -> Result<(), io::Error> { cvt!(unsafe { listen(self.0, LISTEN_BACKLOG) }).map(|_| () ) } pub fn try_clone_from(fd: RawFd) -> Result<Self, io::Error> { match cvt!(unsafe { fcntl(fd, F_DUPFD_CLOEXEC, 0) }) { Ok(cloned) => { let socket = Socket(cloned); socket.set_nosigpipe(true)?; return Ok(socket); }, Err(ref e) if e.raw_os_error() == Some(EINVAL) => {/*try dup() instead*/} Err(e) => return Err(e), } let cloned = cvt!(unsafe { dup(fd) })?; let socket = Socket(cloned); set_cloexec(socket.0, true)?; socket.set_nosigpipe(true)?; Ok(socket) } pub fn pair(socket_type: c_int, nonblocking: bool) -> Result<(Self, Self), io::Error> { let mut fd_buf = [-1; 2]; #[cfg(not(any(target_vendor="apple", target_os="illumos", target_os="solaris")))] { let type_flags = socket_type | SOCK_CLOEXEC | if nonblocking {SOCK_NONBLOCK} else {0}; match cvt!(unsafe { socketpair(AF_UNIX, type_flags, 0, fd_buf[..].as_mut_ptr()) }) { Ok(_) => return Ok((Socket(fd_buf[0]), Socket(fd_buf[1]))), Err(ref e) if 
e.raw_os_error() == Some(EINVAL) => {/*try without*/} Err(e) => return Err(e), } } cvt!(unsafe { socketpair(AF_UNIX, socket_type, 0, fd_buf[..].as_mut_ptr()) })?; let a = Socket(fd_buf[0]); let b = Socket(fd_buf[1]); set_cloexec(a.0, true)?; set_cloexec(b.0, true)?; a.set_nosigpipe(true)?; b.set_nosigpipe(true)?; if nonblocking { set_nonblocking(a.0, true)?; set_nonblocking(b.0, true)?; } Ok((a, b)) } }
/* See each function for copyright holders */ use std::os::unix::io::{RawFd, AsRawFd, IntoRawFd}; use std::io::{self, ErrorKind}; use std::mem; use libc::{c_int, sockaddr, socklen_t, AF_UNIX}; use libc::{bind, connect, getsockname, getpeername}; use libc::{socket, accept, close, listen, socketpair}; use libc::{ioctl, FIONBIO, FIOCLEX, FIONCLEX}; use libc::{fcntl, F_DUPFD_CLOEXEC, EINVAL, dup}; #[cfg(any(target_os="illumos", target_os="solaris"))] use libc::{F_GETFD, F_SETFD, FD_CLOEXEC}; #[cfg(not(any(target_vendor="apple", target_os="illumos", target_os="solaris")))] use libc::{SOCK_CLOEXEC, SOCK_NONBLOCK}; #[cfg(not(any(target_vendor="apple", target_os="netbsd", target_os="illumos", target_os="solaris")))] use libc::{accept4, ENOSYS}; #[cfg(target_vendor="apple")] use libc::{setsockopt, SOL_SOCKET, SO_NOSIGPIPE, c_void}; use crate::addr::*; const LISTEN_BACKLOG: c_int = 10; #[cfg(not(target_vendor="apple"))] pub use libc::MSG_NOSIGNAL; #[cfg(target_vendor="apple")] pub const MSG_NOSIGNAL: c_int = 0; pub fn set_cloexec(fd: RawFd, close_on_exec: bool) -> Result<(), io::Error> { let op = if close_on_exec {FIOCLEX} else {FIONCLEX}; match cvt!(unsafe { ioctl(fd, op) }) { Ok(_) => Ok(()), #[cfg(any(target_os="illumos", target_os="solaris"))] Err(ref e) if e.kind() == ErrorKind::InvalidInput => { unsafe { let prev = cvt!(fcntl(fd, F_GETFD))?; let change_to = if close_on_exec {prev | FD_CLOEXEC} else {prev & !FD_CLOEXEC}; if change_to != prev { cvt!(fcntl(fd, F_SETFD, change_to))?; } Ok(()) } }, Err(e) => Err(e), } } pub fn set_nonblocking(fd: RawFd, nonblocking: bool) -> Result<(), io::Error> { cvt!(unsafe { ioctl(fd, FIONBIO, &mut (nonblocking as c_int)) })?; Ok(()) } type SetSide = unsafe extern "C" fn(RawFd, *const sockaddr, socklen_t) -> c_int; unsafe fn set_unix_addr(socket: RawFd, set_side: SetSide, addr: &UnixSocketAddr) -> Result<(), io::Error> { let (addr, len) = addr.as_raw_general(); loop { if set_side(socket, addr, len) != -1 { break Ok(()); }
pub fn bind_to(socket: RawFd, addr: &UnixSocketAddr) -> Result<(), io::Error> { unsafe { set_unix_addr(socket, bind, addr) } } pub fn connect_to(socket: RawFd, addr: &UnixSocketAddr) -> Result<(), io::Error> { unsafe { set_unix_addr(socket, connect, addr) } } type GetSide = unsafe extern "C" fn(RawFd, *mut sockaddr, *mut socklen_t) -> c_int; unsafe fn get_unix_addr(socket: RawFd, get_side: GetSide) -> Result<UnixSocketAddr, io::Error> { UnixSocketAddr::new_from_ffi(|addr_ptr, addr_len| { match get_side(socket, addr_ptr, addr_len) { -1 => Err(io::Error::last_os_error()), _ => Ok(()), } }).map(|((), addr)| addr ) } pub fn local_addr(socket: RawFd) -> Result<UnixSocketAddr, io::Error> { unsafe { get_unix_addr(socket, getsockname) } } pub fn peer_addr(socket: RawFd) -> Result<UnixSocketAddr, io::Error> { unsafe { get_unix_addr(socket, getpeername) } } pub struct Socket(RawFd); impl Drop for Socket { fn drop(&mut self) { unsafe { close(self.0) }; } } impl IntoRawFd for Socket { fn into_raw_fd(self) -> RawFd { let fd = self.0; mem::forget(self); fd } } impl AsRawFd for Socket { fn as_raw_fd(&self) -> RawFd { self.0 } } impl Socket { fn set_nosigpipe(&self, nosigpipe: bool) -> Result<(), io::Error> { #![allow(unused_variables)] #[cfg(target_vendor="apple")] { unsafe { let nosigpipe = &(nosigpipe as c_int) as *const c_int as *const c_void; let int_size = mem::size_of::<c_int>() as socklen_t; cvt!(setsockopt(self.0, SOL_SOCKET, SO_NOSIGPIPE, nosigpipe, int_size))?; } } Ok(()) } pub fn new(socket_type: c_int, nonblocking: bool) -> Result<Self, io::Error> { #[cfg(not(any(target_vendor="apple", target_os="illumos", target_os="solaris")))] { let type_flags = socket_type | SOCK_CLOEXEC | if nonblocking {SOCK_NONBLOCK} else {0}; match cvt!(unsafe { socket(AF_UNIX, type_flags, 0) }) { Ok(fd) => return Ok(Socket(fd)), Err(ref e) if e.raw_os_error() == Some(EINVAL) => {/*try without*/} Err(e) => return Err(e), } } let fd = cvt!(unsafe { socket(AF_UNIX, socket_type, 0) })?; let 
socket = Socket(fd); set_cloexec(socket.0, true)?; socket.set_nosigpipe(true)?; if nonblocking { set_nonblocking(socket.0, true)?; } Ok(socket) } pub fn accept_from(fd: RawFd, nonblocking: bool) -> Result<(Self, UnixSocketAddr), io::Error> { unsafe { UnixSocketAddr::new_from_ffi(|addr_ptr, len_ptr| { #[cfg(any( target_os="linux", target_os="android", target_os="freebsd", target_os="dragonfly", target_os="openbsd" ))] { let flags = SOCK_CLOEXEC | if nonblocking {SOCK_NONBLOCK} else {0}; match cvt_r!(accept4(fd, addr_ptr, len_ptr, flags)) { Ok(fd) => return Ok(Socket(fd)), Err(ref e) if e.raw_os_error() == Some(ENOSYS) => {/*try normal accept()*/}, Err(e) => return Err(e), } } let fd = cvt_r!(accept(fd, addr_ptr, len_ptr))?; let socket = Socket(fd); set_cloexec(socket.0, true)?; socket.set_nosigpipe(true)?; if nonblocking { set_nonblocking(socket.0, true)?; } Ok(socket) }) } } pub fn start_listening(&self) -> Result<(), io::Error> { cvt!(unsafe { listen(self.0, LISTEN_BACKLOG) }).map(|_| () ) } pub fn try_clone_from(fd: RawFd) -> Result<Self, io::Error> { match cvt!(unsafe { fcntl(fd, F_DUPFD_CLOEXEC, 0) }) { Ok(cloned) => { let socket = Socket(cloned); socket.set_nosigpipe(true)?; return Ok(socket); }, Err(ref e) if e.raw_os_error() == Some(EINVAL) => {/*try dup() instead*/} Err(e) => return Err(e), } let cloned = cvt!(unsafe { dup(fd) })?; let socket = Socket(cloned); set_cloexec(socket.0, true)?; socket.set_nosigpipe(true)?; Ok(socket) } pub fn pair(socket_type: c_int, nonblocking: bool) -> Result<(Self, Self), io::Error> { let mut fd_buf = [-1; 2]; #[cfg(not(any(target_vendor="apple", target_os="illumos", target_os="solaris")))] { let type_flags = socket_type | SOCK_CLOEXEC | if nonblocking {SOCK_NONBLOCK} else {0}; match cvt!(unsafe { socketpair(AF_UNIX, type_flags, 0, fd_buf[..].as_mut_ptr()) }) { Ok(_) => return Ok((Socket(fd_buf[0]), Socket(fd_buf[1]))), Err(ref e) if e.raw_os_error() == Some(EINVAL) => {/*try without*/} Err(e) => return Err(e), } } 
cvt!(unsafe { socketpair(AF_UNIX, socket_type, 0, fd_buf[..].as_mut_ptr()) })?; let a = Socket(fd_buf[0]); let b = Socket(fd_buf[1]); set_cloexec(a.0, true)?; set_cloexec(b.0, true)?; a.set_nosigpipe(true)?; b.set_nosigpipe(true)?; if nonblocking { set_nonblocking(a.0, true)?; set_nonblocking(b.0, true)?; } Ok((a, b)) } }
let err = io::Error::last_os_error(); if err.kind() != ErrorKind::Interrupted { break Err(err); } } }
function_block-function_prefix_line
[ { "content": "pub fn peer_credentials(_: RawFd) -> Result<ConnCredentials, io::Error> {\n\n Err(io::Error::new(Other, \"not available\"))\n\n}\n\n\n\n\n\n\n\n#[cfg(any(target_os=\"linux\", target_os=\"android\"))]\n\npub type RawReceivedCredentials = libc::ucred;\n\n\n\n\n\n/// Process credentials received t...
Rust
cs39/src/compile.rs
gretchenfrage/CS639S20_Demos
d75a16cf66b6b95eb74fc8f101020672b62e5a90
use crate::navigate::{find_demo, DemoLookup}; use std::{ path::{Path, PathBuf}, ffi::{OsStr, OsString}, fs::{ self, read_to_string, read_dir, create_dir_all, FileType }, env, collections::HashMap, process::{ Command, ExitStatus, } }; use rand::prelude::*; #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct Compiled { pub workdir: PathBuf, pub binary: PathBuf, } pub fn cpp_files<P: AsRef<Path>>(path: P) -> impl Iterator<Item=OsString> { read_dir(path).unwrap() .filter_map(Result::ok) .filter_map(|f| f.file_type().ok() .filter(FileType::is_file) .map(|_| f.path())) .filter_map(|p| p.extension() .filter(|e| *e == "cpp" || *e == "h") .and_then(|_| p.file_name().map(OsStr::to_owned))) } #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] #[allow(dead_code)] pub enum Compiler { ClangPp, Gcc9, Gcc, GccMkl, } impl Compiler { fn compile<P>(self, path: P) -> ExitStatus where P: AsRef<Path>, { match self { Compiler::ClangPp => Command::new("clang++") .args("-std=c++11 -stdlib=libc++ -w -O3".split_whitespace()) .args(cpp_files(&path)) .current_dir(&path) .status().unwrap(), Compiler::Gcc9 => Command::new("gcc-9") .args("-x c++ -fopenmp -w -O3 ".split_whitespace()) .args(cpp_files(&path)) .arg("-lstdc++") .current_dir(&path) .status().unwrap(), Compiler::Gcc => Command::new("gcc") .args("-x c++ -fopenmp -w -O3 ".split_whitespace()) .args(cpp_files(&path)) .arg("-lstdc++") .current_dir(&path) .status().unwrap(), Compiler::GccMkl => { let mklroot = env::var("MKLROOT") .expect("missing required env var MKLROOT"); Command::new("gcc") .args(format!( "-x c++ -fopenmp -w -O3 -m64 -I{}/include", mklroot).split_whitespace()) .args(cpp_files(&path)) .args(format!( " -lstdc++ -L${}/lib/intel64 -Wl,--no-as-needed -lmkl_intel_ilp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lm -ldl", mklroot).split_whitespace()) .current_dir(&path) .status().unwrap() }, } /* gcc -x c++ -fopenmp -w -O3 -m64 -I${MKLROOT}/include *.cpp *.h -lstdc++ 
-L${MKLROOT}/lib/intel64 -Wl,--no-as-needed -lmkl_intel_ilp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lm -ldl */ } } pub fn compile(lookup: &DemoLookup, major: u32, minor: u32) -> Result<Compiled, ()> { let path = find_demo(lookup, major, minor)?; let compiler = Compiler::GccMkl; println!("[INFO] compiling with {:?}", compiler); println!(); let status = compiler.compile(&path); if !status.success() { eprintln!(); eprintln!("[ERROR] compile failure {}", status.code().unwrap()); return Err(()); } Ok(Compiled { workdir: path.clone(), binary: path.join("a.out") }) } pub fn modify_compile<P, F>( repo: P, lookup: &DemoLookup, major: u32, minor: u32, edit: F ) -> Result<Compiled, ()> where P: AsRef<Path>, F: FnOnce(&mut HashMap<OsString, String>), { let path = find_demo(lookup, major, minor)?; let mut code: HashMap<OsString, String> = cpp_files(&path) .map(|file| ( file.clone(), read_to_string(path.join(file)).unwrap() )) .collect(); edit(&mut code); let temp = repo.as_ref().join("tmp").join(format!("rng-{}", random::<u16>())); println!("[INFO] building code in {:?}", temp); create_dir_all(&temp).unwrap(); for (file, content) in code { let path = temp.join(file); fs::write(path, content).unwrap(); } let compiler = Compiler::Gcc9; println!("[INFO] compiling with {:?}", compiler); println!(); let status = compiler.compile(&temp); if !status.success() { eprintln!(); eprintln!("[ERROR] compile failure {}", status.code().unwrap()); return Err(()); } Ok(Compiled { workdir: temp.clone(), binary: temp.join("a.out") }) }
use crate::navigate::{find_demo, DemoLookup}; use std::{ path::{Path, PathBuf}, ffi::{OsStr, OsString}, fs::{ self, read_to_string, read_dir, create_dir_all, FileType }, env, collections::HashMap, process::{ Command, ExitStatus, } }; use rand::prelude::*; #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct Compiled { pub workdir: PathBuf, pub binary: PathBuf, } pub fn cpp_files<P: AsRef<Path>>(path: P) -> impl Iterator<Item=OsString> { read_dir(path).unwrap() .filter_map(Result::ok) .filter_map(|f| f.file_type().ok() .filter(FileType::is_file) .map(|_| f.path())) .filter_map(|p| p.extension() .filter(|e| *e == "cpp" || *e == "h") .and_then(|_| p.file_name().map(OsStr::to_owned))) } #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] #[allow(dead_code)] pub enum Compiler { ClangPp, Gcc9, Gcc, GccMkl, } impl Compiler { fn compile<P>(self, path: P) -> ExitStatus where P: AsRef<Path>, { match self { Compiler::ClangPp => Command::new("clang++") .args("-std=c++11 -stdlib=libc++ -w -O3".split_whitespace()) .args(cpp_files(&path)) .current_dir(&path) .status().unwrap(), Compiler::Gcc9 => Command::new("gcc-9") .args("-x c++ -fopenmp -w -O3 ".split_whitespace()) .args(cpp_files(&path)) .arg("-lstdc++") .current_dir(&path) .status().unwrap(), Compiler::Gcc => Command::new("gcc") .args("-x c++ -fopenmp -w -O3 ".split_whitespace()) .args(cpp_files(&path)) .arg("-lstdc++") .current_dir(&path) .status().unwrap(), Compiler::GccMkl => { let mklroot = env::var("MKLROOT") .expect("missing required env var MKLROOT"); Command::new("gcc") .args(format!( "-x c++ -fopenmp -w -O3 -m64 -I{}/include", mklroot).split_whitespace()) .args(cpp_files(&path)) .args(format!( " -lstdc++ -L${}/lib/intel64 -Wl,--no-as-needed -lmkl_intel_ilp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lm -ldl", mklroot).split_whitespace()) .current_dir(&path) .status().unwrap() }, } /* gcc -x c++ -fopenmp -w -O3 -m64 -I${MKLROOT}/include *.cpp *.h -lstdc++ 
-L${MKLROOT}/lib/intel64 -Wl,--no-as-needed -lmkl_intel_ilp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lm -ldl */ } } pub fn compile(lookup: &DemoLookup, major: u32, minor: u32) -> Result<Compiled, ()> { let path = find_demo(lookup, major, minor)?; let compiler = Compiler::GccMkl; println!("[INFO] compiling with {:?}", compiler); println!(); let status = compiler.compile(&path); if !status.success() { eprintln!(); eprintln!("[ERROR] compile failure {}", status.code().unwrap()); return Err(()); } Ok(Compiled { workdir: path.clone(), binary: path.join("a.out") }) } pub fn modify_compile<P, F>( repo: P, lookup: &DemoLookup, major: u32, minor: u32, edit: F ) -> Result<Compiled, ()> where P: AsRef<Path>, F: FnOnce(&mut HashMap<OsString, String>), { let path = find_demo(lookup, major, minor)?;
edit(&mut code); let temp = repo.as_ref().join("tmp").join(format!("rng-{}", random::<u16>())); println!("[INFO] building code in {:?}", temp); create_dir_all(&temp).unwrap(); for (file, content) in code { let path = temp.join(file); fs::write(path, content).unwrap(); } let compiler = Compiler::Gcc9; println!("[INFO] compiling with {:?}", compiler); println!(); let status = compiler.compile(&temp); if !status.success() { eprintln!(); eprintln!("[ERROR] compile failure {}", status.code().unwrap()); return Err(()); } Ok(Compiled { workdir: temp.clone(), binary: temp.join("a.out") }) }
let mut code: HashMap<OsString, String> = cpp_files(&path) .map(|file| ( file.clone(), read_to_string(path.join(file)).unwrap() )) .collect();
assignment_statement
[ { "content": "/// `run` task.\n\npub fn run_demo(lookup: &DemoLookup, major: u32, minor: u32) -> Result<(), ()> {\n\n let Compiled { workdir, binary } = compile(lookup, major, minor)?;\n\n \n\n println!(\"[INFO] running\");\n\n println!();\n\n let status = Command::new(&binary)\n\n .curren...
Rust
src/model/readout/xi.rs
merfishtools/merfishtools
3a9d5e749a9f5d8df7a15936beb8cf34ff135bda
use std::mem; use itertools::Itertools; use ndarray::prelude::*; use bio::stats::{LogProb, Prob}; use crate::io::codebook::Codeword; pub struct Xi { p0: Vec<LogProb>, p1: Vec<LogProb>, } impl Xi { pub fn new(p0: &[Prob], p1: &[Prob]) -> Self { let tolog = |&p| LogProb::from(p); Xi { p0: p0.iter().map(&tolog).collect_vec(), p1: p1.iter().map(&tolog).collect_vec(), } } pub fn prob(&self, source: &Codeword, target: &Codeword) -> [LogProb; 2] { let mut curr = Array1::from_elem(3, LogProb::ln_zero()); let mut prev = Array1::from_elem(3, LogProb::ln_zero()); prev[1] = LogProb::ln_one(); for k in 0..source.len() { for d in 1..curr.shape()[0] { let (d_0, d_1, p_0) = match (source.get(k).unwrap(), target.get(k).unwrap()) { (true, true) => (d - 1, d, self.p1[k]), (false, false) => (d - 1, d, self.p0[k]), (false, true) => (d, d - 1, self.p0[k]), (true, false) => (d, d - 1, self.p1[k]), }; let p = (p_0 + prev[d_0]).ln_add_exp(p_0.ln_one_minus_exp() + prev[d_1]); curr[d] = p; } mem::swap(&mut prev, &mut curr); } [prev[1], prev[2]] } } #[cfg(test)] mod tests { use super::*; use bio::stats::{LogProb, Prob}; use bit_vec::BitVec; #[test] fn test_xi_exact_or_mismatch() { let a = BitVec::from_bytes(&[0b10101101]); let b = BitVec::from_bytes(&[0b10101011]); let p0 = LogProb::from(Prob(0.005)); let p1 = LogProb::from(Prob(0.01)); let xi = Xi::new(&[Prob::from(p0); 8], &[Prob::from(p1); 8]); let p = xi.prob(&a, &b); let truth = LogProb(*p1.ln_one_minus_exp() * 4.0) + LogProb(*p0.ln_one_minus_exp() * 2.0) + p1 + p0; assert_relative_eq!(*p[0], *truth); let truth_mismatch = LogProb::ln_sum_exp(&[ LogProb::from(Prob(*Prob::from(truth - p1.ln_one_minus_exp() + p1) * 4.0)), LogProb::from(Prob(*Prob::from(truth - p0.ln_one_minus_exp() + p0) * 2.0)), truth - p1 + p1.ln_one_minus_exp(), truth - p0 + p0.ln_one_minus_exp(), ]); assert_relative_eq!(*p[1], *truth_mismatch, epsilon = 0.001); } #[test] fn test_xi_exact_total() { let p0 = LogProb::from(Prob(0.005)); let p1 = LogProb::from(Prob(0.01)); 
let xi = Xi::new(&[Prob::from(p0); 8], &[Prob::from(p1); 8]); let a = BitVec::from_bytes(&[0b10101101]); let mut probs = Vec::new(); for b in 0..=255 { let b = BitVec::from_bytes(&[b]); probs.push(xi.prob(&a, &b)[0]); } let p = LogProb::ln_sum_exp(&probs); assert_relative_eq!(*p, *LogProb::ln_one()); } #[test] fn test_xi_noise() { let p0 = LogProb::from(Prob(0.04)); let p1 = LogProb::from(Prob(0.1)); let xi = Xi::new(&[Prob::from(p0); 16], &[Prob::from(p1); 16]); let noise = BitVec::from_elem(16, false); let target = BitVec::from_bytes(&[0b10001100, 0b00000010]); let p = xi.prob(&noise, &target); let truth_exact = LogProb(*p0 * 4.0) + LogProb(*p0.ln_one_minus_exp() * 12.0); let truth_mismatch = LogProb::ln_sum_exp(&[ LogProb::from(Prob( *Prob::from(truth_exact - p0.ln_one_minus_exp() + p0) * 12.0, )), LogProb::from(Prob( *Prob::from(truth_exact - p0 + p0.ln_one_minus_exp()) * 4.0, )), ]); assert_relative_eq!(*p[0], *truth_exact, epsilon = 0.0001); assert_relative_eq!(*p[1], *truth_mismatch); } }
use std::mem; use itertools::Itertools; use ndarray::prelude::*; use bio::stats::{LogProb, Prob}; use crate::io::codebook::Codeword; pub struct Xi { p0: Vec<LogProb>, p1: Vec<LogProb>, } impl Xi { pub fn new(p0: &[Prob], p1: &[Prob]) -> Self { let tolog = |&p| LogProb::from(p); Xi { p0: p0.iter().map(&tolog).collect_vec(), p1: p1.iter().map(&tolog).collect_vec(), } } pub fn prob(&self, source: &Codeword, target: &Codeword) -> [LogProb; 2] { let mut curr = Array1::from_elem(3, LogProb::ln_zero()); let mut prev = Array1::from_elem(3, LogProb::ln_zero()); prev[1] = LogProb::ln_one(); for k in 0..source.len() { for d in 1..curr.shape()[0] { let (d_0, d_1, p_0) = match (source.get(k).unwrap(), target.get(k).unwrap()) { (true, true) => (d - 1, d, self.p1[k]), (false, false) => (d - 1, d, self.p0[k]), (false, true) => (d, d - 1, self.p0[k]), (true, false) => (d, d - 1, self.p1[k]), }; let p = (p_0 + prev[d_0]).ln_add_exp(p_0.ln_one_minus_exp() + prev[d_1]); curr[d] = p; } mem::swap(&mut prev, &mut curr); } [prev[1], prev[2]] } } #[cfg(test)] mod tests { use super::*; use bio::stats::{LogProb, Prob}; use bit_vec::BitVec; #[test] fn test_xi_exact_or_mismatch() { let a = BitVec::from_bytes(&[0b10101101]); let b = BitVec::from_bytes(&[0b10101011]); let p0 = LogProb::from(Prob(0.005)); let p1 = LogProb::from(Prob(0.01)); let xi = Xi::new(&[Prob::from(p0); 8], &[Prob::from(p1); 8]); let p = xi.prob(&a, &b); let truth = LogProb(*p1.ln_one_minus_exp() * 4.0) + LogProb(*p0.ln_one_minus_exp() * 2.0) + p1 + p0; assert_relative_eq!(*p[0], *truth); let truth_mismatch = LogProb::ln_sum_exp(&[ LogProb::from(Prob(*Prob::from(truth - p1.ln_one_minus_exp() + p1) * 4.0)), LogProb::from(Prob(*Prob::from(truth - p0.ln_one_minus_exp() + p0) * 2.0)),
truth_mismatch); } }
truth - p1 + p1.ln_one_minus_exp(), truth - p0 + p0.ln_one_minus_exp(), ]); assert_relative_eq!(*p[1], *truth_mismatch, epsilon = 0.001); } #[test] fn test_xi_exact_total() { let p0 = LogProb::from(Prob(0.005)); let p1 = LogProb::from(Prob(0.01)); let xi = Xi::new(&[Prob::from(p0); 8], &[Prob::from(p1); 8]); let a = BitVec::from_bytes(&[0b10101101]); let mut probs = Vec::new(); for b in 0..=255 { let b = BitVec::from_bytes(&[b]); probs.push(xi.prob(&a, &b)[0]); } let p = LogProb::ln_sum_exp(&probs); assert_relative_eq!(*p, *LogProb::ln_one()); } #[test] fn test_xi_noise() { let p0 = LogProb::from(Prob(0.04)); let p1 = LogProb::from(Prob(0.1)); let xi = Xi::new(&[Prob::from(p0); 16], &[Prob::from(p1); 16]); let noise = BitVec::from_elem(16, false); let target = BitVec::from_bytes(&[0b10001100, 0b00000010]); let p = xi.prob(&noise, &target); let truth_exact = LogProb(*p0 * 4.0) + LogProb(*p0.ln_one_minus_exp() * 12.0); let truth_mismatch = LogProb::ln_sum_exp(&[ LogProb::from(Prob( *Prob::from(truth_exact - p0.ln_one_minus_exp() + p0) * 12.0, )), LogProb::from(Prob( *Prob::from(truth_exact - p0 + p0.ln_one_minus_exp()) * 4.0, )), ]); assert_relative_eq!(*p[0], *truth_exact, epsilon = 0.0001); assert_relative_eq!(*p[1], *
random
[ { "content": "pub fn parse_codeword(codeword: &[u8]) -> Codeword {\n\n let mut _codeword = BitVec::with_capacity(codeword.len());\n\n for &b in codeword {\n\n if b == b'1' {\n\n _codeword.push(true);\n\n } else if b == b'0' {\n\n _codeword.push(false)\n\n } else ...
Rust
src/jot.rs
brian-dawn/jot
411acce3d7dd7c72954adf162d59fc3a15fb0184
use anyhow::Result; use chrono::prelude::*; use std::fs::File; use std::io::prelude::*; use std::path::Path; use std::path::PathBuf; use colorful::Colorful; use itertools::Itertools; use regex::Regex; use std::collections::HashSet; use crate::config; use crate::constants::*; use crate::utils; use crate::utils::{count_real_chars, pluralize_time_unit, pretty_duration}; #[derive(Debug, PartialEq, Eq, Clone)] pub struct Jot { pub datetime: DateTime<Local>, pub message: String, pub msg_type: MessageType, pub id: usize, pub uuid: Option<String>, pub tags: HashSet<String>, pub path: PathBuf, } #[derive(Debug, Eq, PartialEq, Clone)] pub enum MessageType { Note, Todo(Option<DateTime<Local>>), } impl Jot { pub fn new( path: &Path, message: &str, message_type: MessageType, previous_uuids: &HashSet<String>, ) -> Jot { let local: DateTime<Local> = Local::now().with_nanosecond(0).unwrap(); Jot { datetime: local, message: message.trim().to_string(), msg_type: message_type, id: 0, uuid: Some(utils::generate_new_uuid(previous_uuids)), tags: HashSet::new(), path: path.to_owned(), } } pub fn pprint(&self) { let msg = crate::utils::break_apart_long_string(&self.message.clone()); self.pprint_with_custom_msg(Some(&msg)); } pub fn pprint_with_custom_msg(&self, msg_override: Option<&str>) { let now: DateTime<Local> = Local::now().with_nanosecond(0).unwrap(); let time_difference = now - self.datetime; let (amount, amount_unit) = pretty_duration(time_difference); let plural_amount_unit = pluralize_time_unit(amount, amount_unit); let header_string = match self.msg_type { MessageType::Todo(None) => format!( "{} {} {} ago", TODO.magenta().bold(), amount.to_string().bold().blue(), plural_amount_unit ), MessageType::Todo(Some(completed_date)) => { let time_difference = now - completed_date; let (amount, amount_unit) = pretty_duration(time_difference); let plural_amount_unit = pluralize_time_unit(amount, amount_unit); format!( "{} completed {} {} ago", TODO.green().bold(), 
amount.to_string().bold().blue(), plural_amount_unit ) } MessageType::Note => format!( "{} {} {} ago", NOTE.blue().bold(), amount.to_string().bold().blue(), plural_amount_unit ), }; let msg = msg_override.unwrap_or(&self.message).trim(); let header = format!( "{} [{}]", header_string, self.uuid .clone() .unwrap_or(self.id.to_string()) .cyan() .bold() ); let bar_length = std::cmp::max( msg.lines() .map(|line| count_real_chars(line).unwrap_or(0)) .max() .unwrap_or(0), count_real_chars(header.trim()).unwrap_or(0), ); let header_chars = count_real_chars(&header).unwrap_or(0); let s_header = std::iter::repeat("─") .take(std::cmp::max(0, bar_length as i64 - header_chars as i64 - 2) as usize) .collect::<String>(); let s = std::iter::repeat("─") .take(count_real_chars(&s_header).unwrap_or(0) + header_chars) .collect::<String>(); let mut tag_msg = msg.to_string(); let found = TAG_RE.find_iter(&msg).collect::<Vec<_>>().into_iter().rev(); for m in found { let highlighted = &tag_msg[m.start()..m.end()].to_string().bold(); tag_msg.replace_range(m.start()..m.end(), &highlighted.to_string()); } println!("{}{}{}{}", "┌─", header, s_header, "─┐"); println!("{}", tag_msg); println!("{}{}{}", "└─", s, "─┘"); } fn write_to_header_string(&self) -> String { let date_str = self.datetime.to_rfc3339(); match self.msg_type { MessageType::Note => { if let Some(uuid) = &self.uuid { format!("[{} id={}]", date_str, uuid) } else { format!("[{}]", date_str) } } MessageType::Todo(maybe_completed_date) => { let completed_str = maybe_completed_date .map(|date| date.to_rfc3339()) .unwrap_or(TODO_NOT_DONE_PLACEHOLDER.to_string()); if let Some(uuid) = &self.uuid { format!( "[{} {} {} id={}]", date_str, TODO_HEADER, completed_str, uuid ) } else { format!("[{} {} {}]", date_str, TODO_HEADER, completed_str) } } } } } impl std::fmt::Display for Jot { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let st = format!("{}\n{}", self.write_to_header_string(), self.message); write!(f, 
"{}\n\n", &st.trim()) } } impl MessageType { fn from_string(i: &str) -> Option<(Option<String>, MessageType)> { let parts: Vec<&str> = i.split_whitespace().collect(); let id_part = parts .iter() .find(|p| p.starts_with("id=")) .map(|id_part| id_part.split("=").last().unwrap_or("").to_string()); match *parts.get(0)? { TODO_HEADER => { let date = parts.get(1)?.trim(); if date == TODO_NOT_DONE_PLACEHOLDER { Some((id_part, MessageType::Todo(None))) } else { let parsed_date: DateTime<FixedOffset> = DateTime::parse_from_rfc3339(&date).ok()?; Some(( id_part, MessageType::Todo(Some(DateTime::from(parsed_date))), )) } } _ => Some((id_part, MessageType::Note)), } } } pub fn stream_jots(config: config::Config, reversed: bool) -> Result<impl Iterator<Item = Jot>> { assert!(config.journal_path.is_dir()); let mut dirs = std::fs::read_dir(config.journal_path)? .map(|entry| Ok(entry?.path())) .collect::<Result<Vec<_>>>()?; if reversed { dirs.sort_by(|a, b| b.cmp(a)); } else { dirs.sort(); } let jot_stream = dirs .into_iter() .filter(|entry| entry.is_file()) .filter_map(|file_path| { let mut file = File::open(&file_path).ok()?; let mut contents = String::new(); file.read_to_string(&mut contents).ok()?; let lines = contents.lines().collect::<Vec<_>>(); let header_line = lines.first()?; let message = lines.iter().skip(1).join("\n"); Some(parse_jot(header_line, &message, &file_path)?) }) .zip(1..) .map(|(mut jot, index)| { jot.id = index; jot }); Ok(jot_stream) } lazy_static! { static ref TAG_RE: Regex = Regex::new(r"@[a-zA-Z][0-9a-zA-Z_]*").unwrap(); } fn parse_jot(header_line: &str, message: &str, path: &Path) -> Option<Jot> { lazy_static! 
{ static ref RE: Regex = Regex::new(r"\[(\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d-\d\d:\d\d)(.*?)\].*").unwrap(); } let caps = RE.captures(header_line)?; let date = caps.get(1)?.as_str().trim().to_owned(); let message_type = caps.get(2).map(|m| m.as_str()).unwrap_or("").trim(); let tags = TAG_RE .find_iter(message) .map(|tag| tag.as_str().to_owned()) .collect(); let parsed_date: DateTime<FixedOffset> = DateTime::parse_from_rfc3339(&date).ok()?; let (id, msg_type) = MessageType::from_string(&message_type).unwrap_or((None, MessageType::Note)); Some(Jot { datetime: DateTime::from(parsed_date), message: message.trim().to_string(), tags, id: 0, uuid: id, msg_type: msg_type, path: path.to_owned(), }) }
use anyhow::Result; use chrono::prelude::*; use std::fs::File; use std::io::prelude::*; use std::path::Path; use std::path::PathBuf; use colorful::Colorful; use itertools::Itertools; use regex::Regex; use std::collections::HashSet; use crate::config; use crate::constants::*; use crate::utils; use crate::utils::{count_real_chars, pluralize_time_unit, pretty_duration}; #[derive(Debug, PartialEq, Eq, Clone)] pub struct Jot { pub datetime: DateTime<Local>, pub message: String, pub msg_type: MessageType, pub id: usize, pub uuid: Option<String>, pub tags: HashSet<String>, pub path: PathBuf, } #[derive(Debug, Eq, PartialEq, Clone)] pub enum MessageType { Note, Todo(Option<DateTime<Local>>), } impl Jot { pub fn new( path: &Path, message: &str, message_type: MessageType, previous_uuids: &HashSet<String>, ) -> Jot { let local: DateTime<Local> = Local::now().with_nanosecond(0).unwrap(); Jot { datetime: local, message: message.trim().to_string(), msg_type: message_type, id: 0, uuid: Some(utils::generate_new_uuid(previous_uuids)), tags: HashSet::new(), path: path.to_owned(), } } pub fn pprint(&self) { let msg = crate::utils::break_apart_long_string(&self.message.clone()); self.pprint_with_custom_msg(Some(&msg)); } pub fn pprint_with_custom_msg(&self, msg_override: Option<&str>) { let now: DateTime<Local> = Local::now().with_nanosecond(0).unwrap(); let time_difference = now - self.datetime; let (amount, amount_unit) = pretty_duration(time_difference); let plural_amount_unit = pluralize_time_unit(amount, amount_unit); let header_string = match self.msg_type { MessageType::Todo(None) => format!( "{} {} {} ago", TODO.magenta().bold(), amount.to_string().bold().blue(), plural_amount_unit ), MessageType::Todo(Some(completed_date)) => { let time_difference = now - completed_date; let (amount, amount_unit) = pretty_duration(time_difference); let plural_amount_unit = pluralize_time_unit(amount, amount_unit); format!( "{} completed {} {} ago", TODO.green().bold(), 
amount.to_string().bold().blue(), plural_amount_unit ) } MessageType::Note => format!( "{} {} {} ago", NOTE.blue().bold(), amount.to_string().bold().blue(), plural_amount_unit ), }; let msg = msg_override.unwrap_or(&self.message).trim(); let header = format!( "{} [{}]", header_string, self.uuid .clone() .unwrap_or(self.id.to_string()) .cyan() .bold() ); let bar_length = std::cmp::max( msg.lines() .map(|line| count_real_chars(line).unwrap_or(0)) .max() .unwrap_or(0), count_real_chars(header.trim()).unwrap_or(0), ); let header_chars = count_real_chars(&header).unwrap_or(0); let s_header = std::iter::repeat("─") .take(std::cmp::max(0, bar_length as i64 - header_chars as i64 - 2) as usize) .collect::<String>(); let s = std::iter::repeat("─") .take(count_real_chars(&s_header).unwrap_or(0) + header_chars) .collect::<String>(); let mut tag_msg = msg.to_string(); let found = TAG_RE.find_iter(&msg).collect::<Vec<_>>().into_iter().rev(); for m in found { let highlighted = &tag_msg[m.start()..m.end()].to_string().bold(); tag_msg.replace_range(m.start()..m.end(), &highlighted.to_string()); } println!("{}{}{}{}", "┌─", header, s_header, "─┐"); println!("{}", tag_msg); println!("{}{}{}", "└─", s, "─┘"); } fn write_to_header_string(&self) -> String { let date_str = self.datetime.to_rfc3339(); match self.msg_type { MessageType::Note => { if let Some(uuid) = &self.uuid { format!("[{} id={}]", date_str, uuid) } else { format!("[{}]", date_str) } } MessageType::Todo(maybe_completed_date) => { let completed_str = maybe_completed_date .map(|date| date.to_rfc3339()) .unwrap_or(TODO_NOT_DONE_PLACEHOLDER.to_string()); if let Some(uuid) = &self.uuid { format!( "[{} {} {} id={}]", date_str, TODO_HEADER, completed_str, uuid ) } else { format!("[{} {} {}]", date_str, TODO_HEADER, completed_str) } } } } } impl std::fmt::Display for Jot { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let st = format!("{}\n{}", self.write_to_header_string(), self.message); write!(f, 
"{}\n\n", &st.trim()) } } impl MessageType { fn from_string(i: &str) -> Option<(Option<String>, MessageType)> { let parts: Vec<&str> = i.split_whitespace().collect(); let id_part = parts .iter() .find(|p| p.starts_with("id=")) .map(|id_part| id_part.split("=").last().unwrap_or("").to_string()); match *parts.get(0)? { TODO_HEADER => { let date = parts.get(1)?.trim(); if date == TODO_NOT_DONE_PLACEHOLDER { Some((id_part, MessageType::Todo(None))) } else { let parsed_date: DateTime<FixedOffset> = DateTime::parse_from_rfc3339(&date).ok()?; Some(( id_part, MessageType::Todo(Some(DateTime::from(parsed_dat
} pub fn stream_jots(config: config::Config, reversed: bool) -> Result<impl Iterator<Item = Jot>> { assert!(config.journal_path.is_dir()); let mut dirs = std::fs::read_dir(config.journal_path)? .map(|entry| Ok(entry?.path())) .collect::<Result<Vec<_>>>()?; if reversed { dirs.sort_by(|a, b| b.cmp(a)); } else { dirs.sort(); } let jot_stream = dirs .into_iter() .filter(|entry| entry.is_file()) .filter_map(|file_path| { let mut file = File::open(&file_path).ok()?; let mut contents = String::new(); file.read_to_string(&mut contents).ok()?; let lines = contents.lines().collect::<Vec<_>>(); let header_line = lines.first()?; let message = lines.iter().skip(1).join("\n"); Some(parse_jot(header_line, &message, &file_path)?) }) .zip(1..) .map(|(mut jot, index)| { jot.id = index; jot }); Ok(jot_stream) } lazy_static! { static ref TAG_RE: Regex = Regex::new(r"@[a-zA-Z][0-9a-zA-Z_]*").unwrap(); } fn parse_jot(header_line: &str, message: &str, path: &Path) -> Option<Jot> { lazy_static! { static ref RE: Regex = Regex::new(r"\[(\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d-\d\d:\d\d)(.*?)\].*").unwrap(); } let caps = RE.captures(header_line)?; let date = caps.get(1)?.as_str().trim().to_owned(); let message_type = caps.get(2).map(|m| m.as_str()).unwrap_or("").trim(); let tags = TAG_RE .find_iter(message) .map(|tag| tag.as_str().to_owned()) .collect(); let parsed_date: DateTime<FixedOffset> = DateTime::parse_from_rfc3339(&date).ok()?; let (id, msg_type) = MessageType::from_string(&message_type).unwrap_or((None, MessageType::Note)); Some(Jot { datetime: DateTime::from(parsed_date), message: message.trim().to_string(), tags, id: 0, uuid: id, msg_type: msg_type, path: path.to_owned(), }) }
e))), )) } } _ => Some((id_part, MessageType::Note)), } }
function_block-function_prefixed
[ { "content": "/// Pluralize words e.g. Hour => Hours, etc.\n\npub fn pluralize_time_unit(amount: i64, time_unit: &str) -> String {\n\n if amount == 1 {\n\n return time_unit.to_string();\n\n }\n\n return format!(\"{}s\", time_unit);\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 0, ...
Rust
libs/bart_derive/src/parser.rs
maghoff/stache
9f947b550b48427f3e207491c64cba4457b105ff
use ast::Ast; use std::iter::*; use token::*; #[derive(Debug)] pub enum Error<'a> { Mismatch { expected: &'static str, found: Option<Token<'a>> } } fn section<'a, T>(token_stream: &mut Peekable<T>) -> Result<Ast<'a>, Error<'a>> where T: Iterator<Item=Token<'a>> { let (section_type, name) = match token_stream.next() { Some(Token::SectionOpener(section_type, name)) => Ok((section_type, name)), x => Err(Error::Mismatch { expected: "section opener", found: x }) }?; let nested = Box::new(sequence(token_stream)?); match token_stream.next() { Some(Token::SectionCloser(ref close_name)) if close_name == &name => Ok(()), x => Err(Error::Mismatch { expected: "section closer", found: x }) }?; Ok(match section_type { SectionType::Iteration => Ast::Iteration { name: name, nested: nested }, SectionType::NegativeIteration => Ast::NegativeIteration { name: name, nested: nested }, SectionType::Conditional => Ast::Conditional { name: name, nested: nested }, SectionType::NegativeConditional => Ast::NegativeConditional { name: name, nested: nested }, SectionType::Scope => Ast::Scope { name: name, nested: nested }, }) } fn sequence<'a, T>(token_stream: &mut Peekable<T>) -> Result<Ast<'a>, Error<'a>> where T: Iterator<Item=Token<'a>> { let mut seq: Vec<Ast> = vec![]; loop { seq.push( match token_stream.peek() { Some(&Token::Literal(text)) => { token_stream.next(); Ast::Literal(text) }, Some(&Token::Interpolation(_)) => { match token_stream.next() { Some(Token::Interpolation(name)) => Ast::Interpolation(name), _ => panic!("Outer match should guarantee match in inner match"), } }, Some(&Token::UnescapedInterpolation(_)) => { match token_stream.next() { Some(Token::UnescapedInterpolation(name)) => Ast::UnescapedInterpolation(name), _ => panic!("Outer match should guarantee match in inner match"), } }, Some(&Token::SectionOpener(..)) => section(token_stream)?, Some(&Token::PartialInclude(..)) => { match token_stream.next() { Some(Token::PartialInclude(partial_name, root)) => { 
Ast::PartialInclude { partial_name, root } }, _ => panic!("Outer match should guarantee match in inner match"), } }, _ => break } ) } Ok(Ast::Sequence(seq)) } fn parse_impl<'a, T>(mut token_stream: Peekable<T>) -> Result<Ast<'a>, Error<'a>> where T: Iterator<Item=Token<'a>> { let seq = sequence(&mut token_stream)?; if let Some(x) = token_stream.next() { return Err(Error::Mismatch { expected: "EOF", found: Some(x) }); } Ok(seq) } pub fn parse<'a, T>(token_stream: T) -> Result<Ast<'a>, Error<'a>> where T: IntoIterator<Item=Token<'a>> { parse_impl(token_stream.into_iter().peekable()) } #[cfg(test)] mod test { use super::*; #[test] fn it_works() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text"), ]), parse(vec![ Token::Literal("text") ]).unwrap() ) } #[test] fn simple_iteration() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text a"), Ast::Iteration { name: simple_name("x"), nested: Box::new(Ast::Sequence(vec![ Ast::Literal("text b"), ])) }, Ast::Literal("text c"), ]), parse(vec![ Token::Literal("text a"), Token::SectionOpener(SectionType::Iteration, simple_name("x")), Token::Literal("text b"), Token::SectionCloser(simple_name("x")), Token::Literal("text c"), ]).unwrap() ) } #[test] fn simple_negative_iteration() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text a"), Ast::NegativeIteration { name: simple_name("x"), nested: Box::new(Ast::Sequence(vec![ Ast::Literal("text b"), ])) }, Ast::Literal("text c"), ]), parse(vec![ Token::Literal("text a"), Token::SectionOpener(SectionType::NegativeIteration, simple_name("x")), Token::Literal("text b"), Token::SectionCloser(simple_name("x")), Token::Literal("text c"), ]).unwrap() ) } #[test] fn simple_conditional() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text a"), Ast::Conditional { name: simple_name("x"), nested: Box::new(Ast::Sequence(vec![ Ast::Literal("text b"), ])) }, Ast::Literal("text c"), ]), parse(vec![ Token::Literal("text a"), Token::SectionOpener(SectionType::Conditional, simple_name("x")), 
Token::Literal("text b"), Token::SectionCloser(simple_name("x")), Token::Literal("text c"), ]).unwrap() ) } #[test] fn simple_negative_conditional() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text a"), Ast::NegativeConditional { name: simple_name("x"), nested: Box::new(Ast::Sequence(vec![ Ast::Literal("text b"), ])) }, Ast::Literal("text c"), ]), parse(vec![ Token::Literal("text a"), Token::SectionOpener(SectionType::NegativeConditional, simple_name("x")), Token::Literal("text b"), Token::SectionCloser(simple_name("x")), Token::Literal("text c"), ]).unwrap() ) } #[test] fn simple_scope() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text a"), Ast::Scope { name: simple_name("x"), nested: Box::new(Ast::Sequence(vec![ Ast::Literal("text b"), ])) }, Ast::Literal("text c"), ]), parse(vec![ Token::Literal("text a"), Token::SectionOpener(SectionType::Scope, simple_name("x")), Token::Literal("text b"), Token::SectionCloser(simple_name("x")), Token::Literal("text c"), ]).unwrap() ) } #[test] fn section_closer_mismatch() { let res = parse(vec![ Token::SectionOpener(SectionType::Iteration, simple_name("x")), Token::SectionCloser(simple_name("y")), ]); assert!(res.is_err()) } #[test] fn understands_unescaped_interpolation() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("a"), Ast::UnescapedInterpolation(simple_name("b")), Ast::Literal("c"), ]), parse(vec![ Token::Literal("a"), Token::UnescapedInterpolation(simple_name("b")), Token::Literal("c"), ]).unwrap() ) } #[test] fn partials() { assert_eq!( Ast::Sequence(vec![ Ast::PartialInclude { partial_name: "partial", root: simple_name("a") }, ]), parse(vec![ Token::PartialInclude("partial", simple_name("a")) ]).unwrap() ) } }
use ast::Ast; use std::iter::*; use token::*; #[derive(Debug)] pub enum Error<'a> { Mismatch { expected: &'static str, found: Option<Token<'a>> } } fn section<'a, T>(token_stream: &mut Peekable<T>) -> Result<Ast<'a>, Error<'a>> where T: Iterator<Item=Token<'a>> { let (section_type, name) = match token_stream.next() { Some(Token::SectionOpener(section_type, name)) => Ok((section_type, name)), x => Err(Error::Mismatch { expected: "section opener", found: x }) }?; let nested = Box::new(sequence(token_stream)?); match token_stream.next() { Some(Token::SectionCloser(ref close_name)) if close_name == &name => Ok(()), x => Err(Error::Mismatch { expected: "section closer", found: x }) }?; Ok(match section_type { SectionType::Iteration => Ast::Iteration { name: name, nested: nested }, SectionType::NegativeIteration => Ast::NegativeIteration { name: name, nested: nested }, SectionType::Conditional => Ast::Conditional { name: name, nested: nested }, SectionType::NegativeConditional => Ast::NegativeConditional { name: name, nested: nested }, SectionType::Scope => Ast::Scope { name: name, nested: nested }, }) } fn sequence<'a, T>(token_stream: &mut Peekable<T>) -> Result<Ast<'a>, Error<'a>> where T: Iterator<Item=Token<'a>> { let mut seq: Vec<Ast> = vec![]; loop { seq.push( match token_stream.peek() { Some(&Token::Literal(text)) => { token_stream.next(); Ast::Literal(text) }, Some(&Token::Interpolation(_)) => { match token_stream.next() { Some(Token::Interpolation(name)) => Ast::Interpolation(name), _ => panic!("Outer match should guarantee match in inner match"), } }, Some(&Token::UnescapedInterpolation(_)) => { match token_stream.next() { Some(Token::UnescapedInterpolation(name)) => Ast::UnescapedInterpolation(name), _ => panic!("Outer match should guarantee match in inner match"), } }, Some(&Token::SectionOpener(..)) => section(token_stream)?, Some(&Token::PartialInclude(..)) => { match token_stream.next() { Some(Token::PartialInclude(partial_name, root)) => { 
Ast::PartialInclude { partial_name, root } }, _ => panic!("Outer match should guarantee match in inner match"), } }, _ => break } ) } Ok(Ast::Sequence(seq)) } fn parse_impl<'a, T>(mut token_stream: Peekable<T>) -> Result<Ast<'a>, Error<'a>> where T: Iterator<Item=Token<'a>> { let seq = sequence(&mut token_stream)?; if let Some(x) = token_stream.next() { return Err(Error::Mismatch { expected: "EOF", found: Some(x) }); } Ok(seq) } pub fn parse<'a, T>(token_stream: T) -> Result<Ast<'a>, Error<'a>> where T: IntoIterator<Item=Token<'a>> { parse_impl(token_stream.into_iter().peekable()) } #[cfg(test)] mod test { use super::*; #[test] fn it_works() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text"), ]), parse(vec![ Token::Literal("text") ]).unwrap() ) } #[test] fn simple_iteration() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text a"), Ast::Iteration { name: simple_name("x"), nested: Box::new(Ast::Sequence(vec![ Ast::Literal("text b"), ])) }, Ast::Literal("text c"), ]), parse(vec![ Token::Literal("text a"), Token::SectionOpener(SectionType::Iteration, simple_name("x")), Token::Literal("text b"), Token::SectionCloser(simple_name("x")), Token::Literal("text c"), ]).unwrap() ) } #[test] fn simple_negative_iteration() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text a"), Ast::NegativeIteration { name: simple_name("x"), nested: Box::new(Ast::Sequence(vec![ Ast::Literal("text b"), ])) }, Ast::Literal("text c"), ]), parse(vec![ Token::Literal("text a"), Token::SectionOpener(SectionType::NegativeIteration, simple_name("x")), Token::Literal("text b"), Token::SectionCloser(simple_name("x")), Token::Literal("text c"), ]).unwrap() ) } #[test] fn simple_conditional() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text a"), Ast::Conditional { name: simple_name("x"), nested: Box::new(Ast::Sequence(vec![ Ast::Literal("text b"), ])) }, Ast::Literal("text c"), ]), parse(vec![ Token::Literal("text a"), Token::SectionOpener(SectionType::Conditional, simple_name("x")), 
Token::Literal("text b"), Token::SectionCloser(simple_name("x")), Token::Literal("text c"), ]).unwrap() ) } #[test]
#[test] fn simple_scope() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text a"), Ast::Scope { name: simple_name("x"), nested: Box::new(Ast::Sequence(vec![ Ast::Literal("text b"), ])) }, Ast::Literal("text c"), ]), parse(vec![ Token::Literal("text a"), Token::SectionOpener(SectionType::Scope, simple_name("x")), Token::Literal("text b"), Token::SectionCloser(simple_name("x")), Token::Literal("text c"), ]).unwrap() ) } #[test] fn section_closer_mismatch() { let res = parse(vec![ Token::SectionOpener(SectionType::Iteration, simple_name("x")), Token::SectionCloser(simple_name("y")), ]); assert!(res.is_err()) } #[test] fn understands_unescaped_interpolation() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("a"), Ast::UnescapedInterpolation(simple_name("b")), Ast::Literal("c"), ]), parse(vec![ Token::Literal("a"), Token::UnescapedInterpolation(simple_name("b")), Token::Literal("c"), ]).unwrap() ) } #[test] fn partials() { assert_eq!( Ast::Sequence(vec![ Ast::PartialInclude { partial_name: "partial", root: simple_name("a") }, ]), parse(vec![ Token::PartialInclude("partial", simple_name("a")) ]).unwrap() ) } }
fn simple_negative_conditional() { assert_eq!( Ast::Sequence(vec![ Ast::Literal("text a"), Ast::NegativeConditional { name: simple_name("x"), nested: Box::new(Ast::Sequence(vec![ Ast::Literal("text b"), ])) }, Ast::Literal("text c"), ]), parse(vec![ Token::Literal("text a"), Token::SectionOpener(SectionType::NegativeConditional, simple_name("x")), Token::Literal("text b"), Token::SectionCloser(simple_name("x")), Token::Literal("text c"), ]).unwrap() ) }
function_block-full_function
[ { "content": "#[cfg(test)]\n\npub fn simple_name(name: &'static str) -> Name<'static> {\n\n Name {\n\n leading_dots: 0,\n\n segments: vec![name],\n\n function_call: false,\n\n }\n\n}\n", "file_path": "libs/bart_derive/src/token.rs", "rank": 0, "score": 232040.98408252635 ...
Rust
cli/tools/vendor/mod.rs
divy-work/deno
5bde4c7ecae35cb29a003504eafa03f1776ee3a0
use std::path::Path; use std::path::PathBuf; use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::resolve_url_or_path; use deno_runtime::permissions::Permissions; use log::warn; use crate::config_file::FmtOptionsConfig; use crate::flags::VendorFlags; use crate::fs_util; use crate::fs_util::relative_specifier; use crate::fs_util::specifier_to_file_path; use crate::lockfile; use crate::proc_state::ProcState; use crate::resolver::ImportMapResolver; use crate::resolver::JsxResolver; use crate::tools::fmt::format_json; mod analyze; mod build; mod import_map; mod mappings; mod specifiers; #[cfg(test)] mod test; pub async fn vendor(ps: ProcState, flags: VendorFlags) -> Result<(), AnyError> { let raw_output_dir = match &flags.output_path { Some(output_path) => output_path.to_owned(), None => PathBuf::from("vendor/"), }; let output_dir = fs_util::resolve_from_cwd(&raw_output_dir)?; validate_output_dir(&output_dir, &flags, &ps)?; let graph = create_graph(&ps, &flags).await?; let vendored_count = build::build( graph, &output_dir, ps.maybe_import_map.as_deref(), &build::RealVendorEnvironment, )?; eprintln!( concat!("Vendored {} {} into {} directory.",), vendored_count, if vendored_count == 1 { "module" } else { "modules" }, raw_output_dir.display(), ); if vendored_count > 0 { let import_map_path = raw_output_dir.join("import_map.json"); if maybe_update_config_file(&output_dir, &ps) { eprintln!( concat!( "\nUpdated your local Deno configuration file with a reference to the ", "new vendored import map at {}. Invoking Deno subcommands will now ", "automatically resolve using the vendored modules. 
You may override ", "this by providing the `--import-map <other-import-map>` flag or by ", "manually editing your Deno configuration file.", ), import_map_path.display(), ); } else { eprintln!( concat!( "\nTo use vendored modules, specify the `--import-map {}` flag when ", r#"invoking Deno subcommands or add an `"importMap": "<path_to_vendored_import_map>"` "#, "entry to a deno.json file.", ), import_map_path.display(), ); } } Ok(()) } fn validate_output_dir( output_dir: &Path, flags: &VendorFlags, ps: &ProcState, ) -> Result<(), AnyError> { if !flags.force && !is_dir_empty(output_dir)? { bail!(concat!( "Output directory was not empty. Please specify an empty directory or use ", "--force to ignore this error and potentially overwrite its contents.", )); } if let Some(import_map_path) = ps .maybe_import_map .as_ref() .and_then(|m| specifier_to_file_path(m.base_url()).ok()) .and_then(|p| fs_util::canonicalize_path(&p).ok()) { std::fs::create_dir_all(&output_dir)?; let output_dir = fs_util::canonicalize_path(output_dir).with_context(|| { format!("Failed to canonicalize: {}", output_dir.display()) })?; if import_map_path.starts_with(&output_dir) { let cwd = fs_util::canonicalize_path(&std::env::current_dir()?)?; bail!( concat!( "Specifying an import map file ({}) in the deno vendor output ", "directory is not supported. Please specify no import map or one ", "located outside this directory." 
), import_map_path .strip_prefix(&cwd) .unwrap_or(&import_map_path) .display() .to_string(), ); } } Ok(()) } fn maybe_update_config_file(output_dir: &Path, ps: &ProcState) -> bool { assert!(output_dir.is_absolute()); let config_file = match &ps.maybe_config_file { Some(f) => f, None => return false, }; let fmt_config = config_file .to_fmt_config() .unwrap_or_default() .unwrap_or_default(); let result = update_config_file( &config_file.specifier, &ModuleSpecifier::from_file_path(output_dir.join("import_map.json")) .unwrap(), &fmt_config.options, ); match result { Ok(()) => true, Err(err) => { warn!("Error updating config file. {:#}", err); false } } } fn update_config_file( config_specifier: &ModuleSpecifier, import_map_specifier: &ModuleSpecifier, fmt_options: &FmtOptionsConfig, ) -> Result<(), AnyError> { if config_specifier.scheme() != "file" { return Ok(()); } let config_path = specifier_to_file_path(config_specifier)?; let config_text = std::fs::read_to_string(&config_path)?; let relative_text = match relative_specifier(config_specifier, import_map_specifier) { Some(text) => text, None => return Ok(()), }; if let Some(new_text) = update_config_text(&config_text, &relative_text, fmt_options) { std::fs::write(config_path, new_text)?; } Ok(()) } fn update_config_text( text: &str, import_map_specifier: &str, fmt_options: &FmtOptionsConfig, ) -> Option<String> { use jsonc_parser::ast::ObjectProp; use jsonc_parser::ast::Value; let ast = jsonc_parser::parse_to_ast(text, &Default::default()).ok()?; let obj = match ast.value { Some(Value::Object(obj)) => obj, _ => return None, }; let import_map_specifier = import_map_specifier.replace('\"', "\\\""); match obj.get("importMap") { Some(ObjectProp { value: Value::StringLit(lit), .. 
}) => Some(format!( "{}{}{}", &text[..lit.range.start + 1], import_map_specifier, &text[lit.range.end - 1..], )), None => { let insert_position = obj.range.end - 1; let insert_text = format!( r#"{}"importMap": "{}""#, if obj.properties.is_empty() { "" } else { "," }, import_map_specifier ); let new_text = format!( "{}{}{}", &text[..insert_position], insert_text, &text[insert_position..], ); format_json(&new_text, fmt_options) .ok() .map(|formatted_text| formatted_text.unwrap_or(new_text)) } Some(_) => None, } } fn is_dir_empty(dir_path: &Path) -> Result<bool, AnyError> { match std::fs::read_dir(&dir_path) { Ok(mut dir) => Ok(dir.next().is_none()), Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(true), Err(err) => { bail!("Error reading directory {}: {}", dir_path.display(), err) } } } async fn create_graph( ps: &ProcState, flags: &VendorFlags, ) -> Result<deno_graph::ModuleGraph, AnyError> { let entry_points = flags .specifiers .iter() .map(|p| { let url = resolve_url_or_path(p)?; Ok((url, deno_graph::ModuleKind::Esm)) }) .collect::<Result<Vec<_>, AnyError>>()?; let mut cache = crate::cache::FetchCacher::new( ps.dir.gen_cache.clone(), ps.file_fetcher.clone(), Permissions::allow_all(), Permissions::allow_all(), ); let maybe_locker = lockfile::as_maybe_locker(ps.lockfile.clone()); let maybe_imports = if let Some(config_file) = &ps.maybe_config_file { config_file.to_maybe_imports()? 
} else { None }; let maybe_import_map_resolver = ps.maybe_import_map.clone().map(ImportMapResolver::new); let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| { cf.to_maybe_jsx_import_source_module() .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) }); let maybe_resolver = if maybe_jsx_resolver.is_some() { maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver()) } else { maybe_import_map_resolver .as_ref() .map(|im| im.as_resolver()) }; Ok( deno_graph::create_graph( entry_points, false, maybe_imports, &mut cache, maybe_resolver, maybe_locker, None, None, ) .await, ) } #[cfg(test)] mod internal_test { use super::*; use pretty_assertions::assert_eq; #[test] fn update_config_text_no_existing_props_add_prop() { let text = update_config_text( "{\n}", "./vendor/import_map.json", &Default::default(), ) .unwrap(); assert_eq!( text, r#"{ "importMap": "./vendor/import_map.json" } "# ); } #[test] fn update_config_text_existing_props_add_prop() { let text = update_config_text( r#"{ "tasks": { "task1": "other" } } "#, "./vendor/import_map.json", &Default::default(), ) .unwrap(); assert_eq!( text, r#"{ "tasks": { "task1": "other" }, "importMap": "./vendor/import_map.json" } "# ); } #[test] fn update_config_text_update_prop() { let text = update_config_text( r#"{ "importMap": "./local.json" } "#, "./vendor/import_map.json", &Default::default(), ) .unwrap(); assert_eq!( text, r#"{ "importMap": "./vendor/import_map.json" } "# ); } }
use std::path::Path; use std::path::PathBuf; use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::resolve_url_or_path; use deno_runtime::permissions::Permissions; use log::warn; use crate::config_file::FmtOptionsConfig; use crate::flags::VendorFlags; use crate::fs_util; use crate::fs_util::relative_specifier; use crate::fs_util::specifier_to_file_path; use crate::lockfile; use crate::proc_state::ProcState; use crate::resolver::ImportMapResolver; use crate::resolver::JsxResolver; use crate::tools::fmt::format_json; mod analyze; mod build; mod import_map; mod mappings; mod specifiers; #[cfg(test)] mod test; pub async fn vendor(ps: ProcState, flags: VendorFlags) -> Result<(), AnyError> { let raw_output_dir = match &flags.output_path { Some(output_path) => output_path.to_owned(), None => PathBuf::from("vendor/"), }; let output_dir = fs_util::resolve_from_cwd(&raw_output_dir)?; validate_output_dir(&output_dir, &flags, &ps)?; let graph = create_graph(&ps, &flags).await?; let vendored_count = build::build( graph, &output_dir, ps.maybe_import_map.as_deref(), &build::RealVendorEnvironment, )?; eprintln!( concat!("Vendored {} {} into {} directory.",), vendored_count, if vendored_count == 1 { "module" } else { "modules" }, raw_output_dir.display(), ); if vendored_count > 0 { let import_map_path = raw_output_dir.join("import_map.json"); if maybe_update_config_file(&output_dir, &ps) { eprintln!( concat!( "\nUpdated your local Deno configuration file with a reference to the ", "new vendored import map at {}. Invoking Deno subcommands will now ", "automatically resolve using the vendored modules. 
You may override ", "this by providing the `--import-map <other-import-map>` flag or by ", "manually editing your Deno configuration file.", ), import_map_path.display(), ); } else { eprintln!( concat!( "\nTo use vendored modules, specify the `--import-map {}` flag when ", r#"invoking Deno subcommands or add an `"importMap": "<path_to_vendored_import_map>"` "#, "entry to a deno.json file.", ), import_map_path.display(), ); } } Ok(()) } fn validate_output_dir( output_dir: &Path, flags: &VendorFlags, ps: &ProcState, ) -> Result<(), AnyError> { if !flags.force && !is_dir_empty(output_dir)? { bail!(concat!( "Output directory was not empty. Please specify an empty directory or use ", "--force to ignore this error and potentially overwrite its contents.", )); } if let Some(import_map_path) = ps .maybe_import_map .as_ref() .and_then(|m| specifier_to_file_path(m.base_url()).ok()) .and_then(|p| fs_util::canonicalize_path(&p).ok()) { std::fs::create_dir_all(&output_dir)?; let output_dir = fs_util::canonicalize_path(output_dir).with_context(|| { format!("Failed to canonicalize: {}", output_dir.display()) })?; if import_map_path.starts_with(&output_dir) { let cwd = fs_util::canonicalize_path(&std::env::current_dir()?)?; bail!( concat!( "Specifying an import map file ({}) in the deno vendor output ", "directory is not supported. Please specify no import map or one ", "located outside this directory." 
), import_map_path .strip_prefix(&cwd) .unwrap_or(&import_map_path) .display() .to_string(), ); } } Ok(()) } fn maybe_update_config_file(output_dir: &Path, ps: &ProcState) -> bool { assert!(output_dir.is_absolute()); let config_file = match &ps.maybe_config_file { Some(f) => f, None => return false, }; let fmt_config = config_file .to_fmt_config() .unwrap_or_default() .unwrap_or_default(); let result = update_config_file( &config_file.specifier, &ModuleSpecifier::from_file_path(output_dir.join("import_map.json")) .unwrap(), &fmt_config.options, ); match result { Ok(()) => true, Err(err) => { warn!("Error updating config file. {:#}", err); false } } } fn update_config_file( config_specifier: &ModuleSpecifier, import_map_specifier: &ModuleSpecifier, fmt_options: &FmtOptionsConfig, ) -> Result<(), AnyError> { if config_specifier.scheme() != "file" { return Ok(()); } let config_path = specifier_to_file_path(config_specifier)?; let config_text = std::fs::read_to_string(&config_path)?; let relative_text = match relative_specifier(config_specifier, import_map_specifier) { Some(text) => text, None => return Ok(()), }; if let Some(new_text) = update_config_text(&config_text, &relative_text, fmt_options) { std::fs::write(config_path, new_text)?; } Ok(()) } fn update_config_text( text: &str, import_map_specifier: &str, fmt_options: &FmtOptionsConfig, ) -> Option<String> { use jsonc_parser::ast::ObjectProp; use jsonc_parser::ast::Value; let ast = jsonc_parser::parse_to_ast(text, &Default::default()).ok()?; let obj = match ast.value { Some(Value::Object(obj)) => obj, _ => return None, }; let import_map_specifier = import_map_specifier.replace('\"', "\\\""); match obj.get("importMap") { Some(ObjectProp { value: Value::StringLit(lit), .. 
}) => Some(format!( "{}{}{}", &text[..lit.range.start + 1], import_map_specifier, &text[lit.range.end - 1..], )), None => { let insert_position = obj.range.end - 1; let insert_text = format!( r#"{}"importMap": "{}""#, if obj.properties.is_empty() { "" } else { "," }, import_map_specifier ); let new_text = format!( "{}{}{}", &text[..insert_position], insert_text, &text[insert_position..], ); format_json(&new_text, fmt_options) .ok() .map(|formatted_text| formatted_text.unwrap_or(new_text)) } Some(_) => None, } } fn is_dir_empty(dir_path: &Path) -> Result<bool, AnyError> { match std::fs::read_dir(&dir_path) { Ok(mut dir) => Ok(dir.next().is_none()), Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(true), Err(err) => { bail!("Error reading directory {}: {}", dir_path.display(), err) } } } async fn create_graph( ps: &ProcState, flags: &VendorFlags, ) -> Result<deno_graph::ModuleGraph, AnyError> { let entry_points = flags .specifiers .iter() .map(|p| { let url = resolve_url_or_path(p)?; Ok((url, deno_graph::ModuleKind::Esm)) }) .collect::<Result<Vec<_>, AnyError>>()?; let mut cache = crate::cache::FetchCacher::new( ps.dir.gen_cache.clone(), ps.file_fetcher.clone(), Permissions::allow_all(), Permissions::allow_all(), ); let maybe_locker = lockfile::as_maybe_locker(ps.lockfile.clone()); let maybe_imports = if let Some(config_file) = &ps.maybe_config_file { config_file.to_maybe_imports()? 
} else { None }; let maybe_import_map_resolver = ps.maybe_import_map.clone().map(ImportMapResolver::new); let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| { cf.to_maybe_jsx_import_source_module() .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) }); let maybe_resolver = if maybe_jsx_resolver.is_some() { maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver()) } else { maybe_import_map_resolver .as_ref() .map(|im| im.as_resolver()) }; Ok( deno_graph::create_graph( entry_points, false, maybe_imports, &mut cache, maybe_resolver, maybe_locker, None, None, ) .await, ) } #[cfg(test)] mod internal_test { use super::*; use pretty_assertions::assert_eq; #[test] fn update_config_text_no_existing_props_add_prop() { let text = update_config_text( "{\n}", "./vendor/import_map.json", &Default::default(), ) .unwrap(); assert_eq!( text, r#"{ "importMap": "./vendor/import_map.json" } "# ); } #[test] fn update_config_text_existing_props_add_prop() { let text =
.unwrap(); assert_eq!( text, r#"{ "tasks": { "task1": "other" }, "importMap": "./vendor/import_map.json" } "# ); } #[test] fn update_config_text_update_prop() { let text = update_config_text( r#"{ "importMap": "./local.json" } "#, "./vendor/import_map.json", &Default::default(), ) .unwrap(); assert_eq!( text, r#"{ "importMap": "./vendor/import_map.json" } "# ); } }
update_config_text( r#"{ "tasks": { "task1": "other" } } "#, "./vendor/import_map.json", &Default::default(), )
call_expression
[ { "content": "fn success_text(module_count: &str, dir: &str, has_import_map: bool) -> String {\n\n let mut text = format!(\"Vendored {} into {} directory.\", module_count, dir);\n\n if has_import_map {\n\n text.push_str(&\n\n format!(\n\n concat!(\n\n \"\\n\\nTo use vendored modules, s...
Rust
bn-expression/src/basic.rs
Bindernews/minblur
7915e7d8765eb3785da4fabda38e744702ec5985
use std::collections::HashMap; use std::{cmp::Ordering, fmt}; use nom::InputTake; use crate::{enum_to_from_str, error::EvalError, expression::EvalContext, AValue, ExpressionOp}; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serialize", derive(serde::Serialize))] pub enum BasicOp { Add, Sub, Mul, Div, IDiv, Mod, LAnd, LOr, LXor, BAnd, BOr, BXor, Not, Equal, NotEqual, LessThan, LessThanEq, GreaterThan, GreaterThanEq, Shl, Shr, } impl BasicOp { enum_to_from_str!( BasicOp; pub fn as_expr_str(); pub fn from_expr_str(); { BasicOp::Add => "+", BasicOp::Sub => "-", BasicOp::Mul => "*", BasicOp::Div => "/", BasicOp::IDiv => "//", BasicOp::Mod => "%", BasicOp::LAnd => "&&", BasicOp::LOr => "||", BasicOp::LXor => "^^", BasicOp::BAnd => "&", BasicOp::BOr => "|", BasicOp::BXor => "^", BasicOp::Not => "!", BasicOp::Equal => "==", BasicOp::NotEqual => "!=", BasicOp::LessThan => "<", BasicOp::LessThanEq => "<=", BasicOp::GreaterThan => ">", BasicOp::GreaterThanEq => ">=", BasicOp::Shl => "<<", BasicOp::Shr => ">>", } ); } impl fmt::Display for BasicOp { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(self.as_expr_str()) } } impl ExpressionOp for BasicOp { fn eval(&self, left: &AValue, right: &AValue) -> Result<AValue, EvalError> { fn helper_cmp( left: &AValue, right: &AValue, test: impl Fn(Ordering) -> bool, ) -> Result<AValue, EvalError> { Ok(AValue::from(left.op_cmp(right).map(test).unwrap_or(false))) } match self { Self::Add => left.apply_math(right, "+", |lh, rh| lh + rh), Self::Sub => left.apply_math(right, "-", |lh, rh| lh - rh), Self::Mul => left.apply_math(right, "*", |lh, rh| lh * rh), Self::Div => left.apply_math(right, "/", |lh, rh| lh / rh), Self::IDiv => left.apply_int_math(right, "//", |lh, rh| lh / rh), Self::Mod => left.apply_int_math(right, "%", |lh, rh| lh % rh), Self::LAnd => Ok((left.is_truthy() && right.is_truthy()).into()), Self::LOr => Ok((left.is_truthy() || right.is_truthy()).into()), Self::LXor => 
Ok((left.is_truthy() ^ right.is_truthy()).into()), Self::BAnd => left.apply_int_math(right, "&&", |lh, rh| lh & rh), Self::BOr => left.apply_int_math(right, "||", |lh, rh| lh | rh), Self::BXor => left.apply_int_math(right, "^^", |lh, rh| lh ^ rh), Self::Not => Ok((!left.is_truthy()).into()), Self::Equal => helper_cmp(left, right, |f| f.is_eq()), Self::NotEqual => helper_cmp(left, right, |f| f.is_ne()), Self::LessThan => helper_cmp(left, right, |f| f.is_lt()), Self::LessThanEq => helper_cmp(left, right, |f| f.is_le()), Self::GreaterThan => helper_cmp(left, right, |f| f.is_gt()), Self::GreaterThanEq => helper_cmp(left, right, |f| f.is_ge()), Self::Shl => left.apply_int_math(right, "<<", |lh, rh| lh << rh), Self::Shr => left.apply_int_math(right, ">>", |lh, rh| lh >> rh), } } fn precedence(&self) -> usize { match self { Self::Mul | Self::Div | Self::Mod | Self::IDiv => 1, Self::Add | Self::Sub => 2, Self::BAnd | Self::BOr | Self::BXor => 4, Self::LAnd | Self::LOr | Self::LXor => 5, Self::Shl | Self::Shr => 6, Self::LessThan | Self::LessThanEq | Self::GreaterThan | Self::GreaterThanEq | Self::Equal | Self::NotEqual => 7, Self::Not => 8, } } fn is_unary(&self) -> bool { matches!(self, Self::Not) } fn match_op(input: &str) -> Result<(&str, Self), &'static str> { for op_length in [2, 1] { if input.len() >= op_length { let (remain, used) = input.take_split(op_length); if let Some(op) = Self::from_expr_str(used) { return Ok((remain, op)); } } } Err("failed to parse op") } } pub struct BasicEvaluator { pub variables: HashMap<String, AValue>, pub partial: bool, } impl BasicEvaluator { pub fn new(partial: bool) -> Self { Self { variables: HashMap::new(), partial, } } } impl<Op: ExpressionOp> EvalContext<Op> for BasicEvaluator { fn get_variable(&mut self, name: &str) -> Result<AValue, EvalError> { self.variables .get(name) .map(Clone::clone) .or_else(|| { if self.partial { Some(AValue::name(name)) } else { None } }) .ok_or_else(|| EvalError::unknown_variable(name)) } } pub mod 
functions { use crate::{AValue, EvalError, Expression, ExpressionOp}; pub fn call_max<Op: ExpressionOp>( name: &str, args: Vec<AValue>, ) -> Result<Expression<Op>, EvalError> { let result = args .iter() .map(|v| v.as_f64()) .reduce(|a, b| a.zip(b).map(|x| f64::max(x.0, x.1))) .flatten() .map(|v| AValue::from(v).into()) .unwrap_or_else(|| Expression::<Op>::call_from_values(name, args)); Ok(result) } pub fn call_min<Op: ExpressionOp>( name: &str, args: Vec<AValue>, ) -> Result<Expression<Op>, EvalError> { let result = args .iter() .map(|v| v.as_f64()) .reduce(|a, b| a.zip(b).map(|x| f64::min(x.0, x.1))) .flatten() .map(|v| AValue::from(v).into()) .unwrap_or_else(|| Expression::<Op>::call_from_values(name, args)); Ok(result) } }
use std::collections::HashMap; use std::{cmp::Ordering, fmt}; use nom::InputTake; use crate::{enum_to_from_str, error::EvalError, expression::EvalContext, AValue, ExpressionOp}; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serialize", derive(serde::Serialize))] pub enum BasicOp { Add, Sub, Mul, Div, IDiv, Mod, LAnd, LOr, LXor, BAnd, BOr, BXor, Not, Equal, NotEqual, LessThan, LessThanEq, GreaterThan, GreaterThanEq, Shl, Shr, } impl BasicOp { enum_to_from_str!( BasicOp; pub fn as_expr_str(); pub fn from_expr_str(); { BasicOp::Add => "+", BasicOp::Sub => "-", BasicOp::Mul => "*", BasicOp::Div => "/", BasicOp::IDiv => "//", BasicOp::Mod => "%", BasicOp::LAnd => "&&", BasicOp::LOr => "||", BasicOp::LXor => "^^", BasicOp::BAnd => "&", BasicOp::BOr => "|", BasicOp::BXor => "^", BasicOp::Not => "!", BasicOp::Equal => "==", BasicOp::NotEqual => "!=", BasicOp::LessThan => "<", BasicOp::LessThanEq => "<=", BasicOp::GreaterThan => ">", BasicOp::GreaterThanEq => ">=", BasicOp::Shl => "<<", BasicOp::Shr => ">>", } ); } impl fmt::Display for BasicOp { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(self.as_expr_str()) } } impl ExpressionOp for BasicOp { fn eval(&self, left: &AValue, right: &AValue) -> Result<AValue, EvalError> { fn helper_cmp( left: &AValue, right: &AValue, test: impl Fn(Ordering) -> bool, ) -> Result<AValue, EvalError> { Ok(AValue::from(left.op_cmp(right).map(test).unwrap_or(false))) } match self { Self::Add => left.apply_math(right, "+", |lh, rh| lh + rh), Self::Sub => left.apply_math(right, "-", |lh, rh| lh - rh), Self::Mul => left.apply_math(right, "*", |lh, rh| lh * rh), Self::Div => left.apply_math(right, "/", |lh, rh| lh / rh), Self::IDiv => left.apply_int_math(right, "//", |lh, rh| lh / rh), Self::Mod => left.apply_int_math(right, "%", |lh, rh| lh % rh), Self::LAnd => Ok((left.is_truthy() && right.is_truthy()).into()), Self::LOr => Ok((left.is_truthy() || right.is_truthy()).into()), Self::LXor => 
Ok((left.is_truthy() ^ right.is_truthy()).into()), Self::BAnd => left.apply_int_math(right, "&&", |lh, rh| lh & rh),
fn precedence(&self) -> usize { match self { Self::Mul | Self::Div | Self::Mod | Self::IDiv => 1, Self::Add | Self::Sub => 2, Self::BAnd | Self::BOr | Self::BXor => 4, Self::LAnd | Self::LOr | Self::LXor => 5, Self::Shl | Self::Shr => 6, Self::LessThan | Self::LessThanEq | Self::GreaterThan | Self::GreaterThanEq | Self::Equal | Self::NotEqual => 7, Self::Not => 8, } } fn is_unary(&self) -> bool { matches!(self, Self::Not) } fn match_op(input: &str) -> Result<(&str, Self), &'static str> { for op_length in [2, 1] { if input.len() >= op_length { let (remain, used) = input.take_split(op_length); if let Some(op) = Self::from_expr_str(used) { return Ok((remain, op)); } } } Err("failed to parse op") } } pub struct BasicEvaluator { pub variables: HashMap<String, AValue>, pub partial: bool, } impl BasicEvaluator { pub fn new(partial: bool) -> Self { Self { variables: HashMap::new(), partial, } } } impl<Op: ExpressionOp> EvalContext<Op> for BasicEvaluator { fn get_variable(&mut self, name: &str) -> Result<AValue, EvalError> { self.variables .get(name) .map(Clone::clone) .or_else(|| { if self.partial { Some(AValue::name(name)) } else { None } }) .ok_or_else(|| EvalError::unknown_variable(name)) } } pub mod functions { use crate::{AValue, EvalError, Expression, ExpressionOp}; pub fn call_max<Op: ExpressionOp>( name: &str, args: Vec<AValue>, ) -> Result<Expression<Op>, EvalError> { let result = args .iter() .map(|v| v.as_f64()) .reduce(|a, b| a.zip(b).map(|x| f64::max(x.0, x.1))) .flatten() .map(|v| AValue::from(v).into()) .unwrap_or_else(|| Expression::<Op>::call_from_values(name, args)); Ok(result) } pub fn call_min<Op: ExpressionOp>( name: &str, args: Vec<AValue>, ) -> Result<Expression<Op>, EvalError> { let result = args .iter() .map(|v| v.as_f64()) .reduce(|a, b| a.zip(b).map(|x| f64::min(x.0, x.1))) .flatten() .map(|v| AValue::from(v).into()) .unwrap_or_else(|| Expression::<Op>::call_from_values(name, args)); Ok(result) } }
Self::BOr => left.apply_int_math(right, "||", |lh, rh| lh | rh), Self::BXor => left.apply_int_math(right, "^^", |lh, rh| lh ^ rh), Self::Not => Ok((!left.is_truthy()).into()), Self::Equal => helper_cmp(left, right, |f| f.is_eq()), Self::NotEqual => helper_cmp(left, right, |f| f.is_ne()), Self::LessThan => helper_cmp(left, right, |f| f.is_lt()), Self::LessThanEq => helper_cmp(left, right, |f| f.is_le()), Self::GreaterThan => helper_cmp(left, right, |f| f.is_gt()), Self::GreaterThanEq => helper_cmp(left, right, |f| f.is_ge()), Self::Shl => left.apply_int_math(right, "<<", |lh, rh| lh << rh), Self::Shr => left.apply_int_math(right, ">>", |lh, rh| lh >> rh), } }
function_block-function_prefix_line
[ { "content": "/// Returns a nom error if the input string isn't empty\n\n///\n\n/// Param `f` should generate an approriate error given the input\n\npub fn assert_input_consumed<'a, F, O>(mut f: F) -> impl FnMut(Span<'a>) -> MyResult<'a, ()>\n\nwhere\n\n F: Parser<Span<'a>, O, ErrType<'a>>,\n\n{\n\n move ...
Rust
examples/stm32l0x2/main.rs
lulf/rust-sx12xx
b055776e945041d91a6b52430c702aaa228031b7
#![cfg_attr(not(test), no_std)] #![no_main] extern crate nb; extern crate panic_rtt_target; use log::LevelFilter; use panic_rtt_target as _; use rtt_logger::RTTLogger; use rtt_target::rtt_init_print; use core::fmt::Write; use lorawan_crypto::LorawanCrypto as Crypto; use lorawan_device::{ radio, Device as LorawanDevice, Error as LorawanError, Event as LorawanEvent, Response as LorawanResponse, }; use rtic::app; use stm32l0xx_hal::{ exti::Exti, exti::{ExtiLine, GpioLine, TriggerEdge}, gpio::{gpiob::PB2, Input, PullUp}, pac, pac::Interrupt, prelude::*, rcc, rng, rng::Rng, syscfg, timer::Timer, }; use sx12xx::{self, LorawanRadio, Sx12xx}; mod bindings; pub use bindings::initialize_irq as initialize_radio_irq; pub use bindings::RadioIRQ; pub use bindings::TcxoEn; static mut RNG: Option<rng::Rng> = None; fn get_random_u32() -> u32 { unsafe { if let Some(rng) = &mut RNG { rng.enable(); rng.wait(); let val = rng.take_result(); rng.disable(); val } else { panic!("No Rng exists!"); } } } static LOGGER: RTTLogger = RTTLogger::new(LevelFilter::Trace); pub struct TimerContext { pub target: u16, pub count: u16, pub enable: bool, pub armed: bool, } #[app(device = stm32l0xx_hal::pac, peripherals = true)] const APP: () = { struct Resources { int: Exti, radio_irq: RadioIRQ, timer: Timer<pac::TIM2>, button: stm32l0xx_hal::gpio::gpiob::PB2<Input<PullUp>>, #[init([0;512])] buffer: [u8; 512], #[init(false)] ready_to_send: bool, lorawan: Option<LorawanDevice<LorawanRadio, Crypto>>, #[init(TimerContext { target: 0, count: 0, enable: false, armed: false, })] timer_context: TimerContext, } #[init(spawn = [send_ping, lorawan_event], resources = [buffer])] fn init(ctx: init::Context) -> init::LateResources { rtt_init_print!(); unsafe { log::set_logger_racy(&LOGGER).unwrap(); } log::set_max_level(log::LevelFilter::Trace); log::info!("INITIALIZED!?"); let device = ctx.device; let mut rcc = device.RCC.freeze(rcc::Config::hsi16()); let mut syscfg = syscfg::SYSCFG::new(device.SYSCFG, &mut rcc); 
let gpioa = device.GPIOA.split(&mut rcc); let gpiob = device.GPIOB.split(&mut rcc); let gpioc = device.GPIOC.split(&mut rcc); log::info!("INITIALIZING"); let mut button = gpiob.pb2.into_pull_up_input(); log::info!("LongFi Device Test"); let mut exti = Exti::new(device.EXTI); log::info!("Pin number: {}", button.pin_number()); let line = GpioLine::from_raw_line(button.pin_number()).unwrap(); exti.listen_gpio(&mut syscfg, button.port(), line, TriggerEdge::Falling); let hsi48 = rcc.enable_hsi48(&mut syscfg, device.CRS); unsafe { RNG = Some(Rng::new(device.RNG, &mut rcc, hsi48)) }; let radio_irq = initialize_radio_irq(gpiob.pb4, &mut syscfg, &mut exti); let timer = device.TIM2.timer(1.khz(), &mut rcc); let bindings = bindings::new( device.SPI1, &mut rcc, gpiob.pb3, gpioa.pa6, gpioa.pa7, gpioa.pa15, gpioc.pc0, gpioa.pa1, gpioc.pc2, gpioc.pc1, None, ); let mut sx12xx = Sx12xx::new(sx12xx::Radio::sx1276(), bindings); sx12xx.set_public_network(true); let lorawan = LorawanDevice::new( LorawanRadio::new(sx12xx), [0x00, 0x3C, 0xC5, 0x37, 0x1E, 0xB6, 0x6C, 0x55], [0xEC, 0xD6, 0x03, 0xD0, 0x7E, 0xD5, 0xB3, 0x70], [ 0x6B, 0xC0, 0x5D, 0x69, 0x90, 0x37, 0x13, 0x42, 0x3E, 0xB2, 0xB2, 0x16, 0x63, 0x86, 0xDF, 0x49, ], /* [ 0x49, 0xDF, 0x86, 0x63, 0x16, 0xB2, 0xB2, 0x3E, 0x42, 0x13, 0x37, 0x90, 0x69, 0x5D, 0xC0, 0x6B, ],*/ get_random_u32, ); ctx.spawn .lorawan_event(LorawanEvent::NewSessionRequest) .unwrap(); log::info!("Going to main loop"); init::LateResources { int: exti, button, radio_irq, lorawan: Some(lorawan), timer, } } #[idle(resources = [button], spawn = [send_ping])] fn idle(ctx: idle::Context) -> ! 
{ let mut pressed = true; loop { if ctx.resources.button.is_low().unwrap() { if !pressed { log::info!("BUTTON PRESSSSSS"); pressed = true; ctx.spawn.send_ping().unwrap(); } } else { pressed = false; } } } #[task(capacity = 4, priority = 2, resources = [buffer, lorawan], spawn = [lorawan_response])] fn lorawan_event(ctx: lorawan_event::Context, event: LorawanEvent<'static, LorawanRadio>) { if let Some(lorawan) = ctx.resources.lorawan.take() { match &event { LorawanEvent::NewSessionRequest => { log::info!("New Session Request"); } LorawanEvent::RadioEvent(e) => match e { radio::Event::TxRequest(_, _) => (), radio::Event::RxRequest(_) => (), radio::Event::CancelRx => (), radio::Event::PhyEvent(phy) => { let event = phy as &sx12xx::Event; match event { sx12xx::Event::DIO0(t) => { log::info!("Radio Rx/Tx (DIO0) Interrupt at {} ms", t); } _ => log::info!(""), } } }, LorawanEvent::TimeoutFired => (), LorawanEvent::SendDataRequest(_e) => { log::info!("SendData"); } } let (new_state, response) = lorawan.handle_event(event); ctx.spawn.lorawan_response(response).unwrap(); *ctx.resources.lorawan = Some(new_state); } } #[task(capacity = 4, priority = 2, resources = [timer_context, lorawan], spawn = [lorawan_event])] fn lorawan_response( mut ctx: lorawan_response::Context, response: Result<LorawanResponse, LorawanError<LorawanRadio>>, ) { match response { Ok(response) => match response { LorawanResponse::TimeoutRequest(ms) => { ctx.resources.timer_context.lock(|context| { context.target = ms as u16; context.armed = true; }); log::info!("TimeoutRequest: {:?}", ms); } LorawanResponse::JoinSuccess => { if let Some(lorawan) = ctx.resources.lorawan.take() { log::info!("Join Success: {:?}", lorawan.get_session_keys().unwrap()); *ctx.resources.lorawan = Some(lorawan); } ctx.resources.timer_context.lock(|context| { context.enable = false; }); } LorawanResponse::ReadyToSend => { log::info!("RxWindow expired but no ACK expected. 
Ready to Send"); ctx.resources.timer_context.lock(|context| { context.enable = false; }); } LorawanResponse::DownlinkReceived(fcnt_down) => { if let Some(mut lorawan) = ctx.resources.lorawan.take() { if let Some(downlink) = lorawan.take_data_downlink() { let fhdr = downlink.fhdr(); let fopts = fhdr.fopts(); use lorawan_encoding::parser::{DataHeader, FRMPayload}; if let Ok(FRMPayload::Data(data)) = downlink.frm_payload() { log::info!( "Downlink received \t\t(FCntDown={}\tFRM: {:?})", fcnt_down, data, ); } else { log::info!("Downlink received \t\t(FcntDown={})", fcnt_down); } let mut mac_commands_len = 0; for mac_command in fopts { if mac_commands_len == 0 { log::info!("\tFOpts: "); } log::info!("{:?},", mac_command); mac_commands_len += 1; } } *ctx.resources.lorawan = Some(lorawan); } } LorawanResponse::NoAck => { log::info!("RxWindow expired, expected ACK to confirmed uplink not received"); ctx.resources.timer_context.lock(|context| { context.enable = false; }); } LorawanResponse::NoJoinAccept => { log::info!("No Join Accept Received"); ctx.spawn .lorawan_event(LorawanEvent::NewSessionRequest) .unwrap(); ctx.resources.timer_context.lock(|context| { context.enable = false; }); } LorawanResponse::SessionExpired => { log::info!("SessionExpired. 
Created new Session"); ctx.spawn .lorawan_event(LorawanEvent::NewSessionRequest) .unwrap(); ctx.resources.timer_context.lock(|context| { context.enable = false; }); } LorawanResponse::NoUpdate => (), LorawanResponse::UplinkSending(fcnt_up) => { log::info!("Uplink with FCnt {}", fcnt_up); } LorawanResponse::JoinRequestSending => { log::info!("Join Request Sending"); } }, Err(err) => match err { LorawanError::Radio(_) => log::info!("Radio"), LorawanError::Session(e) => log::info!("Session {:?}", e), LorawanError::NoSession(_) => log::info!("NoSession"), }, } } #[task(capacity = 4, priority = 2, resources = [lorawan], spawn = [lorawan_response])] fn send_ping(ctx: send_ping::Context) { if let Some(lorawan) = ctx.resources.lorawan.take() { let ready_to_send = lorawan.ready_to_send_data(); *ctx.resources.lorawan = Some(if ready_to_send { let fcnt_up = if let Some(fcnt) = lorawan.get_fcnt_up() { fcnt } else { 0 }; let data: [u8; 5] = [0xDE, 0xAD, 0xBE, 0xEF, fcnt_up as u8]; let confirmed = if fcnt_up % 4 == 0 { log::info!("Requesting Confirmed Uplink"); true } else { log::info!("Requesting Unconfirmed Uplink"); false }; let (new_state, response) = lorawan.send(&data, 1, confirmed); ctx.spawn.lorawan_response(response).unwrap(); new_state } else { log::info!("Suppressing Send Request"); lorawan }); } } #[task(binds = EXTI4_15, priority = 3, resources = [radio_irq, int, timer_context], spawn = [lorawan_event])] fn EXTI4_15(ctx: EXTI4_15::Context) { Exti::unpend(GpioLine::from_raw_line(ctx.resources.radio_irq.pin_number()).unwrap()); let context = ctx.resources.timer_context; let mut count = 0; if context.enable { count = context.count as u32; context.enable = false; } else { context.target = 0xFFFF as u16; context.count = 0; context.enable = true; } rtic::pend(Interrupt::TIM2); ctx.spawn .lorawan_event(lorawan_device::Event::RadioEvent( lorawan_device::radio::Event::PhyEvent(sx12xx::Event::DIO0(count)), )) .unwrap(); } #[task(binds = EXTI0_1)] fn EXTI0_1(ctx: 
EXTI0_1::Context) { log::info!("BUTTON PRESSED"); Exti::unpend(GpioLine::from_raw_line(2).unwrap()); } #[task(binds = TIM2, priority = 3, resources = [timer, timer_context], spawn = [lorawan_event])] fn TIM2(ctx: TIM2::Context) { let context = ctx.resources.timer_context; let timer = ctx.resources.timer; let spawn = ctx.spawn; timer.clear_irq(); if !context.enable { context.target = 0; context.count = 0; context.armed = true; timer.unlisten(); } else { if context.count == 0 { timer.reset(); timer.listen(); } context.count += 1; if context.count >= context.target && context.armed { spawn.lorawan_event(LorawanEvent::TimeoutFired).unwrap(); context.armed = false; } } } extern "C" { fn USART4_USART5(); } };
#![cfg_attr(not(test), no_std)] #![no_main] extern crate nb; extern crate panic_rtt_target; use log::LevelFilter; use panic_rtt_target as _; use rtt_logger::RTTLogger; use rtt_target::rtt_init_print; use core::fmt::Write; use lorawan_crypto::LorawanCrypto as Crypto; use lorawan_device::{ radio, Device as LorawanDevice, Error as LorawanError, Event as LorawanEvent, Response as LorawanResponse, }; use rtic::app; use stm32l0xx_hal::{ exti::Exti, exti::{ExtiLine, GpioLine, TriggerEdge}, gpio::{gpiob::PB2, Input, PullUp}, pac, pac::Interrupt, prelude::*, rcc, rng, rng::Rng, syscfg, timer::Timer, }; use sx12xx::{self, LorawanRadio, Sx12xx}; mod bindings; pub use bindings::initialize_irq as initialize_radio_irq; pub use bindings::RadioIRQ; pub use bindings::TcxoEn; static mut RNG: Option<rng::Rng> = None; fn get_random_u32() -> u32 { unsafe { if let Some(rng) = &mut RNG { rng.enable(); rng.wait(); let val = rng.take_result(); rng.disable(); val } else { panic!("No Rng exists!"); } } } static LOGGER: RTTLogger = RTTLogger::new(LevelFilter::Trace); pub struct TimerContext { pub target: u16, pub count: u16, pub enable: bool, pub armed: bool, } #[app(device = stm32l0xx_hal::pac, peripherals = true)] const APP: () = { struct Resources { int: Exti, radio_irq: RadioIRQ, timer: Timer<pac::TIM2>, button: stm32l0xx_hal::gpio::gpiob::PB2<Input<PullUp>>, #[init([0;512])] buffer: [u8; 512], #[init(false)] ready_to_send: bool, lorawan: Option<LorawanDevice<LorawanRadio, Crypto>>, #[init(TimerContext { target: 0, count: 0, enable: false, armed: false, })] timer_context: TimerContext, } #[init(spawn = [send_ping, lorawan_event], resources = [buffer])] fn init(ctx: init::Context) -> init::LateResources { rtt_init_print!(); unsafe { log::set_logger_racy(&LOGGER).unwrap(); } log::set_max_level(log::LevelFilter::Trace); log::info!("INITIALIZED!?"); let device = ctx.device; let mut rcc = device.RCC.freeze(rcc::Config::hsi16()); let mut syscfg = syscfg::SYSCFG::new(device.SYSCFG, &mut rcc); 
let gpioa = device.GPIOA.split(&mut rcc); let gpiob = device.GPIOB.split(&mut rcc); let gpioc = device.GPIOC.split(&mut rcc); log::info!("INITIALIZING"); let mut button = gpiob.pb2.into_pull_up_input(); log::info!("LongFi Device Test"); let mut exti = Exti::new(device.EXTI); log::info!("Pin number: {}", button.pin_number()); let line = GpioLine::from_raw_line(button.pin_number()).unwrap(); exti.listen_gpio(&mut syscfg, button.port(), line, TriggerEdge::Falling); let hsi48 = rcc.enable_hsi48(&mut syscfg, device.CRS); unsafe { RNG = Some(Rng::new(device.RNG, &mut rcc, hsi48)) }; let radio_irq = initialize_radio_irq(gpiob.pb4, &mut syscfg, &mut exti); let timer = device.TIM2.timer(1.khz(), &mut rcc); let bindings = bindings::new( device.SPI1, &mut rcc, gpiob.pb3, gpioa.pa6, gpioa.pa7, gpioa.pa15, gpioc.pc0, gpioa.pa1, gpioc.pc2, gpioc.pc1, None, ); let mut sx12xx = Sx12xx::new(sx12xx::Radio::sx1276(), bindings); sx12xx.set_public_network(true); let lorawan = LorawanDevice::new( LorawanRadio::new(sx12xx), [0x00, 0x3C, 0xC5, 0x37, 0x1E, 0xB6, 0x6C, 0x55], [0xEC, 0xD6, 0x03, 0xD0, 0x7E, 0xD5, 0xB3, 0x70], [ 0x6B, 0xC0, 0x5D, 0x69, 0x90, 0x37, 0x13, 0x42, 0x3E, 0xB2, 0xB2, 0x16, 0x63, 0x86, 0xDF, 0x49, ], /* [ 0x49, 0xDF, 0x86, 0x63, 0x16, 0xB2, 0xB2, 0x3E, 0x42, 0x13, 0x37, 0x90, 0x69, 0x5D, 0xC0, 0x6B, ],*/ get_random_u32, ); ctx.spawn .lorawan_event(LorawanEvent::NewSessionRequest) .unwrap(); log::info!("Going to main loop"); init::LateResources {
resources.timer_context.lock(|context| { context.enable = false; }); } LorawanResponse::NoJoinAccept => { log::info!("No Join Accept Received"); ctx.spawn .lorawan_event(LorawanEvent::NewSessionRequest) .unwrap(); ctx.resources.timer_context.lock(|context| { context.enable = false; }); } LorawanResponse::SessionExpired => { log::info!("SessionExpired. Created new Session"); ctx.spawn .lorawan_event(LorawanEvent::NewSessionRequest) .unwrap(); ctx.resources.timer_context.lock(|context| { context.enable = false; }); } LorawanResponse::NoUpdate => (), LorawanResponse::UplinkSending(fcnt_up) => { log::info!("Uplink with FCnt {}", fcnt_up); } LorawanResponse::JoinRequestSending => { log::info!("Join Request Sending"); } }, Err(err) => match err { LorawanError::Radio(_) => log::info!("Radio"), LorawanError::Session(e) => log::info!("Session {:?}", e), LorawanError::NoSession(_) => log::info!("NoSession"), }, } } #[task(capacity = 4, priority = 2, resources = [lorawan], spawn = [lorawan_response])] fn send_ping(ctx: send_ping::Context) { if let Some(lorawan) = ctx.resources.lorawan.take() { let ready_to_send = lorawan.ready_to_send_data(); *ctx.resources.lorawan = Some(if ready_to_send { let fcnt_up = if let Some(fcnt) = lorawan.get_fcnt_up() { fcnt } else { 0 }; let data: [u8; 5] = [0xDE, 0xAD, 0xBE, 0xEF, fcnt_up as u8]; let confirmed = if fcnt_up % 4 == 0 { log::info!("Requesting Confirmed Uplink"); true } else { log::info!("Requesting Unconfirmed Uplink"); false }; let (new_state, response) = lorawan.send(&data, 1, confirmed); ctx.spawn.lorawan_response(response).unwrap(); new_state } else { log::info!("Suppressing Send Request"); lorawan }); } } #[task(binds = EXTI4_15, priority = 3, resources = [radio_irq, int, timer_context], spawn = [lorawan_event])] fn EXTI4_15(ctx: EXTI4_15::Context) { Exti::unpend(GpioLine::from_raw_line(ctx.resources.radio_irq.pin_number()).unwrap()); let context = ctx.resources.timer_context; let mut count = 0; if context.enable { count = 
context.count as u32; context.enable = false; } else { context.target = 0xFFFF as u16; context.count = 0; context.enable = true; } rtic::pend(Interrupt::TIM2); ctx.spawn .lorawan_event(lorawan_device::Event::RadioEvent( lorawan_device::radio::Event::PhyEvent(sx12xx::Event::DIO0(count)), )) .unwrap(); } #[task(binds = EXTI0_1)] fn EXTI0_1(ctx: EXTI0_1::Context) { log::info!("BUTTON PRESSED"); Exti::unpend(GpioLine::from_raw_line(2).unwrap()); } #[task(binds = TIM2, priority = 3, resources = [timer, timer_context], spawn = [lorawan_event])] fn TIM2(ctx: TIM2::Context) { let context = ctx.resources.timer_context; let timer = ctx.resources.timer; let spawn = ctx.spawn; timer.clear_irq(); if !context.enable { context.target = 0; context.count = 0; context.armed = true; timer.unlisten(); } else { if context.count == 0 { timer.reset(); timer.listen(); } context.count += 1; if context.count >= context.target && context.armed { spawn.lorawan_event(LorawanEvent::TimeoutFired).unwrap(); context.armed = false; } } } extern "C" { fn USART4_USART5(); } };
int: exti, button, radio_irq, lorawan: Some(lorawan), timer, } } #[idle(resources = [button], spawn = [send_ping])] fn idle(ctx: idle::Context) -> ! { let mut pressed = true; loop { if ctx.resources.button.is_low().unwrap() { if !pressed { log::info!("BUTTON PRESSSSSS"); pressed = true; ctx.spawn.send_ping().unwrap(); } } else { pressed = false; } } } #[task(capacity = 4, priority = 2, resources = [buffer, lorawan], spawn = [lorawan_response])] fn lorawan_event(ctx: lorawan_event::Context, event: LorawanEvent<'static, LorawanRadio>) { if let Some(lorawan) = ctx.resources.lorawan.take() { match &event { LorawanEvent::NewSessionRequest => { log::info!("New Session Request"); } LorawanEvent::RadioEvent(e) => match e { radio::Event::TxRequest(_, _) => (), radio::Event::RxRequest(_) => (), radio::Event::CancelRx => (), radio::Event::PhyEvent(phy) => { let event = phy as &sx12xx::Event; match event { sx12xx::Event::DIO0(t) => { log::info!("Radio Rx/Tx (DIO0) Interrupt at {} ms", t); } _ => log::info!(""), } } }, LorawanEvent::TimeoutFired => (), LorawanEvent::SendDataRequest(_e) => { log::info!("SendData"); } } let (new_state, response) = lorawan.handle_event(event); ctx.spawn.lorawan_response(response).unwrap(); *ctx.resources.lorawan = Some(new_state); } } #[task(capacity = 4, priority = 2, resources = [timer_context, lorawan], spawn = [lorawan_event])] fn lorawan_response( mut ctx: lorawan_response::Context, response: Result<LorawanResponse, LorawanError<LorawanRadio>>, ) { match response { Ok(response) => match response { LorawanResponse::TimeoutRequest(ms) => { ctx.resources.timer_context.lock(|context| { context.target = ms as u16; context.armed = true; }); log::info!("TimeoutRequest: {:?}", ms); } LorawanResponse::JoinSuccess => { if let Some(lorawan) = ctx.resources.lorawan.take() { log::info!("Join Success: {:?}", lorawan.get_session_keys().unwrap()); *ctx.resources.lorawan = Some(lorawan); } ctx.resources.timer_context.lock(|context| { context.enable = false; 
}); } LorawanResponse::ReadyToSend => { log::info!("RxWindow expired but no ACK expected. Ready to Send"); ctx.resources.timer_context.lock(|context| { context.enable = false; }); } LorawanResponse::DownlinkReceived(fcnt_down) => { if let Some(mut lorawan) = ctx.resources.lorawan.take() { if let Some(downlink) = lorawan.take_data_downlink() { let fhdr = downlink.fhdr(); let fopts = fhdr.fopts(); use lorawan_encoding::parser::{DataHeader, FRMPayload}; if let Ok(FRMPayload::Data(data)) = downlink.frm_payload() { log::info!( "Downlink received \t\t(FCntDown={}\tFRM: {:?})", fcnt_down, data, ); } else { log::info!("Downlink received \t\t(FcntDown={})", fcnt_down); } let mut mac_commands_len = 0; for mac_command in fopts { if mac_commands_len == 0 { log::info!("\tFOpts: "); } log::info!("{:?},", mac_command); mac_commands_len += 1; } } *ctx.resources.lorawan = Some(lorawan); } } LorawanResponse::NoAck => { log::info!("RxWindow expired, expected ACK to confirmed uplink not received"); ctx.
random
[ { "content": "pub fn new(\n\n spi_peripheral: pac::SPI1,\n\n rcc: &mut Rcc,\n\n spi_sck: gpiob::PB3<Uninitialized>,\n\n spi_miso: gpioa::PA6<Uninitialized>,\n\n spi_mosi: gpioa::PA7<Uninitialized>,\n\n spi_nss_pin: gpioa::PA15<Uninitialized>,\n\n reset: gpioc::PC0<Uninitialized>,\n\n rx:...
Rust
src/parser.rs
jackmott/evolution
a88c4b2501bf9fb9bdb00eb9bbd2e85720d715d4
use std::sync::mpsc::*; #[derive(Debug, PartialEq)] pub enum Token<'a> { OpenParen(usize), CloseParen(usize), Operation(&'a str, usize), Constant(&'a str, usize), } struct StateFunction(fn(&mut Lexer) -> Option<StateFunction>); pub struct Lexer<'a> { input: &'a str, start: usize, pos: usize, width: usize, token_sender: Sender<Token<'a>>, current_line: usize, } impl<'a> Lexer<'a> { pub fn begin_lexing(s: &'a str, sender: Sender<Token<'a>>) { let mut lexer = Lexer::<'a> { input: s, start: 0, pos: 0, width: 0, token_sender: sender, current_line: 0, }; lexer.run(); } fn run(&mut self) { let mut state = Some(StateFunction(Lexer::determine_token)); while let Some(next_state) = state { state = next_state.0(self) } } fn next(&mut self) -> Option<char> { if self.pos >= self.input.len() { self.width = 0; None } else { self.width = 1; let c = self.input[self.pos..].chars().next().unwrap(); if Lexer::is_linebreak(c) { self.current_line += 1; } self.pos += self.width; Some(c) } } fn backup(&mut self) { self.pos -= 1; } fn ignore(&mut self) { self.start = self.pos; } fn emit(&mut self, token: Token<'a>) { println!("token:{:?}", token); self.token_sender.send(token).expect("token send failure"); self.start = self.pos; } fn accept(&mut self, valid: &str) -> bool { if let Some(n) = self.next() { if valid.contains(n) { true } else { self.backup(); false } } else { self.backup(); return false; } } fn accept_run(&mut self, valid: &str) { loop { let n = self.next(); if !(n.is_some() && valid.contains(n.unwrap())) { break; } } self.backup(); } fn lex_operation(l: &mut Lexer) -> Option<StateFunction> { l.accept_run("+-/*abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"); l.emit(Token::Operation(&l.input[l.start..l.pos], l.current_line)); return Some(StateFunction(Lexer::determine_token)); } fn lex_number(l: &mut Lexer) -> Option<StateFunction> { l.accept("-"); let digits = "0123456789"; l.accept_run(digits); if l.accept(".") { l.accept_run(digits); } if 
&l.input[l.start..l.pos] == "-" { l.emit(Token::Operation(&l.input[l.start..l.pos], l.current_line)); } else { l.emit(Token::Constant(&l.input[l.start..l.pos], l.current_line)); } return Some(StateFunction(Lexer::determine_token)); } fn determine_token(l: &mut Lexer) -> Option<StateFunction> { loop { match l.next() { Some(c) => { if Lexer::is_white_space(c) { l.ignore(); } else if c == '(' { l.emit(Token::OpenParen(l.current_line)); } else if c == ')' { l.emit(Token::CloseParen(l.current_line)); } else if Lexer::is_start_of_number(c) { return Some(StateFunction(Lexer::lex_number)); } else { return Some(StateFunction(Lexer::lex_operation)); } } None => return None, } } } fn is_start_of_number(c: char) -> bool { (c >= '0' && c <= '9') || c == '-' || c == '.' } fn is_white_space(c: char) -> bool { c == ' ' || c == '\n' || c == '\t' || c == '\r' } fn is_linebreak(c: char) -> bool { c == '\n' } }
use std::sync::mpsc::*; #[derive(Debug, PartialEq)] pub enum Token<'a> { OpenParen(usize), CloseParen(usize), Operation(&'a str, usize), Constant(&'a str, usize), } struct StateFunction(fn(&mut Lexer) -> Option<StateFunction>); pub struct Lexer<'a> { input: &'a str, start: usize, pos: usize, width: usize, token_sender: Sender<Token<'a>>, current_line: usize, } impl<'a> Lexer<'a> { pub fn begin_lexing(s: &'a str, sender: Sender<Token<'a>>) { let mut lexer = Lexer::<'a> { input: s, start: 0, pos: 0, width: 0, token_sender: sender, current_line: 0, }; lexer.run(); } fn run(&mut self) { let mut state = Some(StateFunction(Lexer::determine_token)); while let Some(next_state) = state { state = next_state.0(self) } } fn next(&mut self) -> Option<char> { if self.pos >= self.input.len() { self.width = 0; None } else { self.width = 1; let c = self.input[self.pos..].chars().next().unwrap(); if Lexer::is_linebreak(c) { self.current_line += 1; } self.pos += self.width; Some(c) } } fn backup(&mut self) { self.pos -= 1; } fn ignore(&mut self) { self.start = self.pos; } fn emit(&mut self, token: Token<'a>) { println!("token:{:?}", token); self.token_sender.send(token).expect("token send failure"); self.start = self.pos; } fn accept(&mut self, valid: &str) -> bool { if let Some(n) = self.next() { if valid.contains(n) { true } else { self.backup(); false } } else { self.backup(); return false; } } fn accept_run(&mut self, valid: &str) { loop { let n = self.next(); if !(n.is_some() && valid.contains(n.unwrap())) { break; } } self.backup(); } fn lex_operation(l: &mut Lexer) -> Option<StateFunction> { l.accept_run("+-/*abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"); l.emit(Token::Operation(&l.input[l.start..l.pos], l.current_line)); return Some(StateFunction(Lexer::determine_token)); } fn lex_number(l: &mut Lexer) -> Option<StateFunction> { l.accept("-"); let digits = "0123456789"; l.accept_run(digits); if l.accept(".") { l.accept_run(digits); } if 
&l.input[l.start..l.pos] == "-" { l.emit(Token::Operation(&l.input[l.start..l.pos], l.current_lin
l.emit(Token::OpenParen(l.current_line)); } else if c == ')' { l.emit(Token::CloseParen(l.current_line)); } else if Lexer::is_start_of_number(c) { return Some(StateFunction(Lexer::lex_number)); } else { return Some(StateFunction(Lexer::lex_operation)); } } None => return None, } } } fn is_start_of_number(c: char) -> bool { (c >= '0' && c <= '9') || c == '-' || c == '.' } fn is_white_space(c: char) -> bool { c == ' ' || c == '\n' || c == '\t' || c == '\r' } fn is_linebreak(c: char) -> bool { c == '\n' } }
e)); } else { l.emit(Token::Constant(&l.input[l.start..l.pos], l.current_line)); } return Some(StateFunction(Lexer::determine_token)); } fn determine_token(l: &mut Lexer) -> Option<StateFunction> { loop { match l.next() { Some(c) => { if Lexer::is_white_space(c) { l.ignore(); } else if c == '(' {
random
[ { "content": "pub fn extract_line_number(token: &Token) -> usize {\n\n match token {\n\n Token::OpenParen(ln) | Token::CloseParen(ln) => *ln,\n\n Token::Constant(_, ln) | Token::Operation(_, ln) => *ln,\n\n }\n\n}\n\n\n", "file_path": "src/pic.rs", "rank": 1, "score": 119992.6993...
Rust
pathfinder/src/diff_solver/post_processing_v5/bt/mod.rs
Simula-UiB/CRHS
8f3dd34c8b99680188d9314c6897e0c790f5358f
pub mod bthandler_trait; use std::collections::BTreeMap; use std::collections::hash_map::DefaultHasher; use std::convert::TryFrom; use std::hash::{Hash, Hasher}; use std::io::{Error, ErrorKind}; pub const PROB_FACTOR: usize = 1000; #[derive(Debug, Clone,)] pub struct BaseTable { table: Vec<Vec<usize>>, prob_exponents: BTreeMap<usize, usize>, k: f64, } #[allow(dead_code)] impl BaseTable { pub fn new(table: Vec<Vec<usize>>) -> Result<BaseTable, Error> { Self::try_from(table) } pub fn row(&self, row_nr: usize) -> Option<&Vec<usize>> { self.table.get(row_nr) } pub fn column(&self, col_nr: usize) -> Vec<usize> { (0..self.table.len()) .map(|row| self.get_entry(row as u8, col_nr as u8) .expect("Entry not found: Index out of bounds")) .collect() } pub fn nr_of_rows(&self) -> usize { self.table.len() } pub fn nr_of_columns(&self) -> usize { self.table[0].len() } pub fn get_entry(&self, row: u8, column: u8) -> Option<usize> { self.table.get(row as usize)?.get(column as usize).cloned() } pub fn k(&self) -> f64 { self.k } pub fn prob_exponents(&self) -> &BTreeMap<usize, usize> { &self.prob_exponents } pub fn prob_exponent_for_entry(&self, entry: usize) -> Option<usize> { self.prob_exponents.get(&entry).cloned() } pub fn table_hashed(&self) -> u64 { let mut s = DefaultHasher::new(); self.table.hash(&mut s); s.finish() } fn calculate_k(table: &Vec<Vec<usize>>, probs: &BTreeMap<usize, usize>) -> f64 { let mut counts = BTreeMap::new(); for row in 1..table.len() { for col in 1.. 
table[0].len() { let entry = table[row][col]; let count = counts.entry(entry).or_insert(0); *count += 1; } } let tot_counts = counts.iter() .filter(|(key, _)| key != &&0) .fold(0, |acc, (_, val)| acc + val) as f64; let k = counts.iter() .filter(|(key, _)| key != &&0) .fold(0_f64, |acc, (key, val)| { acc + (*val as f64/tot_counts)*(*probs.get(key).unwrap() as f64 / PROB_FACTOR as f64) }); k } fn calculate_prob_exponents(table: Vec<Vec<usize>>) -> BTreeMap<usize, usize> { let denom = table[0][0]; let mut probs = BTreeMap::new(); for row in 0..table.len(){ for col in 0..table[0].len() { let entry = table[row][col]; if !probs.contains_key(&entry) { let e: f64 = entry as f64; let raw = -(e / denom as f64).log2(); probs.insert(entry, (raw * PROB_FACTOR as f64) as usize ); } } } probs } } impl TryFrom<Vec<Vec<usize>>> for BaseTable { type Error = std::io::Error; fn try_from(table: Vec<Vec<usize>>) -> Result<Self, Self::Error> { if table.is_empty() { return Err(Error::new(ErrorKind::InvalidInput, "The DDT cannot be empty")); } let nr_of_cols = table[0].len(); if nr_of_cols == 0 { return Err(Error::new(ErrorKind::InvalidInput, "The DDT cannot be empty: We have no columns")); } for row in table.iter() { if row.len() != nr_of_cols { return Err(Error::new(ErrorKind::InvalidInput, "The DDT cannot have varying number of columns")); } } let prob_exponents = Self::calculate_prob_exponents(table.clone()); let k = Self::calculate_k(&table, &prob_exponents); Ok( BaseTable { table, prob_exponents, k, } ) } }
pub mod bthandler_trait; use std::collections::BTreeMap; use std::collections::hash_map::DefaultHasher; use std::convert::TryFrom; use std::hash::{Hash, Hasher}; use std::io::{Error, ErrorKind}; pub const PROB_FACTOR: usize = 1000; #[derive(Debug, Clone,)] pub struct BaseTable { table: Vec<Vec<usize>>, prob_exponents: BTreeMap<usize, usize>, k: f64, } #[allow(dead_code)] impl BaseTable { pub fn new(table: Vec<Vec<usize>>) -> Result<BaseTable, Error> { Self::try_from(table) } pub fn row(&self, row_nr: usize) -> Option<&Vec<usize>> { self.table.get(row_nr) } pub fn column(&self, col_nr: usize) -> Vec<usize> { (0..self.table.len()) .map(|row| self.get_entry(row as u8, col_nr as u8)
let mut s = DefaultHasher::new(); self.table.hash(&mut s); s.finish() } fn calculate_k(table: &Vec<Vec<usize>>, probs: &BTreeMap<usize, usize>) -> f64 { let mut counts = BTreeMap::new(); for row in 1..table.len() { for col in 1.. table[0].len() { let entry = table[row][col]; let count = counts.entry(entry).or_insert(0); *count += 1; } } let tot_counts = counts.iter() .filter(|(key, _)| key != &&0) .fold(0, |acc, (_, val)| acc + val) as f64; let k = counts.iter() .filter(|(key, _)| key != &&0) .fold(0_f64, |acc, (key, val)| { acc + (*val as f64/tot_counts)*(*probs.get(key).unwrap() as f64 / PROB_FACTOR as f64) }); k } fn calculate_prob_exponents(table: Vec<Vec<usize>>) -> BTreeMap<usize, usize> { let denom = table[0][0]; let mut probs = BTreeMap::new(); for row in 0..table.len(){ for col in 0..table[0].len() { let entry = table[row][col]; if !probs.contains_key(&entry) { let e: f64 = entry as f64; let raw = -(e / denom as f64).log2(); probs.insert(entry, (raw * PROB_FACTOR as f64) as usize ); } } } probs } } impl TryFrom<Vec<Vec<usize>>> for BaseTable { type Error = std::io::Error; fn try_from(table: Vec<Vec<usize>>) -> Result<Self, Self::Error> { if table.is_empty() { return Err(Error::new(ErrorKind::InvalidInput, "The DDT cannot be empty")); } let nr_of_cols = table[0].len(); if nr_of_cols == 0 { return Err(Error::new(ErrorKind::InvalidInput, "The DDT cannot be empty: We have no columns")); } for row in table.iter() { if row.len() != nr_of_cols { return Err(Error::new(ErrorKind::InvalidInput, "The DDT cannot have varying number of columns")); } } let prob_exponents = Self::calculate_prob_exponents(table.clone()); let k = Self::calculate_k(&table, &prob_exponents); Ok( BaseTable { table, prob_exponents, k, } ) } }
.expect("Entry not found: Index out of bounds")) .collect() } pub fn nr_of_rows(&self) -> usize { self.table.len() } pub fn nr_of_columns(&self) -> usize { self.table[0].len() } pub fn get_entry(&self, row: u8, column: u8) -> Option<usize> { self.table.get(row as usize)?.get(column as usize).cloned() } pub fn k(&self) -> f64 { self.k } pub fn prob_exponents(&self) -> &BTreeMap<usize, usize> { &self.prob_exponents } pub fn prob_exponent_for_entry(&self, entry: usize) -> Option<usize> { self.prob_exponents.get(&entry).cloned() } pub fn table_hashed(&self) -> u64 {
random
[ { "content": "/// bools to less than 8. Takes 'chunk_size' number of bits from 'bits' and turns them into an u8.\n\n /// Returns a vector with the u8's.\n\npub fn bools_to_lt8(bits: &[bool], chunk_size: usize) -> Vec<u8> {\n\n if chunk_size > 8 {\n\n panic!(\"Chunk-sizes above 8 won't fit in an u8!...
Rust
providers/sgx/sgx-app/src/sgx_app.rs
chatchai-hub/tmkms-light
972a739277002704308bfc4e75a0cfa79f62bbb6
pub(crate) mod keypair_seal; mod state; mod cloud; use ed25519_dalek::Keypair; use rand::rngs::OsRng; use sgx_isa::{Report, Targetinfo}; use std::{io, net::TcpStream, thread, time::Duration}; use subtle::ConstantTimeEq; use tendermint_p2p::secret_connection::{self, PublicKey, SecretConnection}; use tmkms_light::{ connection::{Connection, PlainConnection}, utils::write_u16_payload, }; use tmkms_light_sgx_runner::{ RemoteConnectionConfig, {SgxInitRequest, SgxInitResponse}, }; use tracing::{debug, error, info, warn}; fn get_secret_connection(config: &RemoteConnectionConfig) -> io::Result<Box<dyn Connection>> { let RemoteConnectionConfig { peer_id, host, port, sealed_key, } = config; let socket = TcpStream::connect(format!("{}:{}", host, port))?; if let Ok(identity_key) = keypair_seal::unseal(&sealed_key) { info!("KMS node ID: {}", PublicKey::from(&identity_key)); let connection = SecretConnection::new(socket, identity_key, secret_connection::Version::V0_34) .map_err(|e| { error!("secret connection failed: {}", e); io::Error::from(io::ErrorKind::Other) })?; let actual_peer_id = connection.remote_pubkey().peer_id(); if let Some(expected_peer_id) = peer_id { if expected_peer_id.ct_eq(&actual_peer_id).unwrap_u8() == 0 { error!( "{}:{}: validator peer ID mismatch! (expected {}, got {})", host, port, expected_peer_id, actual_peer_id ); return Err(io::Error::from(io::ErrorKind::Other)); } } info!("connected to validator successfully"); if peer_id.is_none() { warn!( "unverified validator peer ID! 
({})", connection.remote_pubkey().peer_id() ); } Ok(Box::new(connection)) } else { error!("unsealing failed"); Err(io::ErrorKind::Other.into()) } } pub fn get_connection(secret_connection: Option<&RemoteConnectionConfig>) -> Box<dyn Connection> { loop { let conn: io::Result<Box<dyn Connection>> = if let Some(config) = secret_connection { get_secret_connection(config) } else { TcpStream::connect("tendermint").map(|socket| { let plain_conn = PlainConnection::new(socket); Box::new(plain_conn) as Box<dyn Connection> }) }; if let Err(e) = conn { error!("tendermint connection error {:?}", e); thread::sleep(Duration::new(1, 0)); } else { return conn.unwrap(); } } } pub fn entry(mut host_response: TcpStream, request: SgxInitRequest) -> io::Result<()> { let mut csprng = OsRng {}; match request { SgxInitRequest::GenWrapKey { targetinfo } => { let targetinfo = targetinfo.unwrap_or_else(|| Targetinfo::from(Report::for_self())); let rsa_kp = cloud::generate_keypair(&mut csprng, targetinfo); if let Ok((wrap_pub_key, wrap_key_sealed, pub_key_report)) = rsa_kp { let response = SgxInitResponse::WrapKey { wrap_key_sealed, wrap_pub_key, pub_key_report, }; match serde_json::to_vec(&response) { Ok(v) => { debug!("writing response"); write_u16_payload(&mut host_response, &v)?; } Err(e) => { error!("keygen error: {}", e); } } } else { error!("sealing failed"); } } SgxInitRequest::KeyGen { cloud_backup } => { let kp = Keypair::generate(&mut csprng); let cloud_backup_key_data = cloud_backup.and_then(|key| cloud::cloud_backup(&mut csprng, key, &kp).ok()); if let Ok(sealed_key_data) = keypair_seal::seal(&mut csprng, &kp) { let response = SgxInitResponse::GenOrRecover { sealed_key_data, cloud_backup_key_data, }; match serde_json::to_vec(&response) { Ok(v) => { debug!("writing response"); write_u16_payload(&mut host_response, &v)?; } Err(e) => { error!("keygen error: {}", e); } } } else { error!("sealing failed"); } } SgxInitRequest::CloudRecover { cloud_backup, key_data, } => { if let 
Ok(sealed_key_data) = cloud::reseal_recover_cloud(&mut csprng, cloud_backup, key_data) { let response = SgxInitResponse::GenOrRecover { sealed_key_data, cloud_backup_key_data: None, }; match serde_json::to_vec(&response) { Ok(v) => { debug!("writing response"); write_u16_payload(&mut host_response, &v)?; } Err(e) => { error!("recovery error: {}", e); } } } else { error!("recovery failed"); } } SgxInitRequest::Start { sealed_key, config, secret_connection, initial_state, } => { let state_holder = state::StateHolder::new()?; if let Ok(keypair) = keypair_seal::unseal(&sealed_key) { let conn: Box<dyn Connection> = get_connection(secret_connection.as_ref()); let mut session = tmkms_light::session::Session::new( config, conn, keypair, initial_state.into(), state_holder, ); loop { if let Err(e) = session.request_loop() { error!("request error: {}", e); } let conn: Box<dyn Connection> = get_connection(secret_connection.as_ref()); session.reset_connection(conn); } } else { error!("unsealing failed"); return Err(io::ErrorKind::Other.into()); } } } Ok(()) } #[cfg(test)] mod tests { use super::*; use std::net::{TcpListener, TcpStream}; use tmkms_light::utils::read_u16_payload; use tmkms_light_sgx_runner::CloudBackupKey; #[test] fn test_recover_flow() { let listener = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = listener.local_addr().unwrap(); let (sender, receiver) = std::sync::mpsc::channel(); let handler = std::thread::spawn(move || { while let Ok(Some(req)) = receiver.recv() { entry(TcpStream::connect(addr).unwrap(), req).expect("ok entry"); } }); sender .send(Some(SgxInitRequest::GenWrapKey { targetinfo: None })) .expect("send request0"); let (mut stream_signer, _) = listener.accept().unwrap(); let resp1 = read_u16_payload(&mut stream_signer).expect("response0"); let response1: SgxInitResponse = serde_json::from_slice(&resp1).expect("response0"); let (sealed_rsa_key, pubkey) = match response1 { SgxInitResponse::WrapKey { wrap_key_sealed, wrap_pub_key, .. 
} => (wrap_key_sealed, wrap_pub_key), _ => panic!("wrong response"), }; let mut csprng = OsRng {}; let backup_key = cloud::tests::get_wrapped_key(&mut csprng, pubkey); let cloud_backup = CloudBackupKey { sealed_rsa_key, backup_key, }; let cloud_backup2 = cloud_backup.clone(); sender .send(Some(SgxInitRequest::KeyGen { cloud_backup: Some(cloud_backup), })) .expect("send request1"); let (mut stream_signer, _) = listener.accept().unwrap(); let resp1 = read_u16_payload(&mut stream_signer).expect("response1"); let response1: SgxInitResponse = serde_json::from_slice(&resp1).expect("response1"); let (seal_key_request, cloud_backup_key_data) = response1.get_gen_response().expect("response1"); sender .send(Some(SgxInitRequest::CloudRecover { cloud_backup: cloud_backup2, key_data: cloud_backup_key_data.expect("backup"), })) .expect("send request2"); let (mut stream_signer, _) = listener.accept().unwrap(); let resp2 = read_u16_payload(&mut stream_signer).expect("response2"); let response2: SgxInitResponse = serde_json::from_slice(&resp2).expect("response2"); let (seal_key_request2, _) = response2.get_gen_response().expect("response2"); sender.send(None).expect("send request3"); let _ = handler.join(); assert_eq!( seal_key_request.seal_key_request.keyid, seal_key_request2.seal_key_request.keyid ); } #[test] fn test_unseal() { let mut csprng = OsRng {}; let kp = Keypair::generate(&mut csprng); let sealed_data = keypair_seal::seal(&mut csprng, &kp).unwrap(); let mut mangled_sealed_data = sealed_data.clone(); mangled_sealed_data.nonce[0] ^= 1; assert!(keypair_seal::unseal(&mangled_sealed_data).is_err()); assert_eq!( keypair_seal::unseal(&sealed_data).unwrap().public, kp.public ); } }
pub(crate) mod keypair_seal; mod state; mod cloud; use ed25519_dalek::Keypair; use rand::rngs::OsRng; use sgx_isa::{Report, Targetinfo}; use std::{io, net::TcpStream, thread, time::Duration}; use subtle::ConstantTimeEq; use tendermint_p2p::secret_connection::{self, PublicKey, SecretConnection}; use tmkms_light::{ connection::{Connection, PlainConnection}, utils::write_u16_payload, }; use tmkms_light_sgx_runner::{ RemoteConnectionConfig, {SgxInitRequest, SgxInitResponse}, }; use tracing::{debug, error, info, warn}; fn get_secret_connection(config: &RemoteConnectionConfig) -> io::Result<Box<dyn Connection>> { let RemoteConnectionConfig { peer_id, host, port, sealed_key, } = config; let socket = TcpStream::connect(format!("{}:{}", host, port))?; if let Ok(identity_key) = keypair_seal::unseal(&sealed_key) { info!("KMS node ID: {}", PublicKey::from(&identity_key)); let connection = SecretConnection::new(socket, identity_key, secret_connection::Version::V0_34) .map_err(|e| { error!("secret connection failed: {}", e); io::Error::from(io::ErrorKind::Other) })?; let actual_peer_id = connection.remote_pubkey().peer_id(); if let Some(expected_peer_id) = peer_id { if expected_peer_id.ct_eq(&actual_peer_id).unwrap_u8() == 0 { error!( "{}:{}: validator peer ID mismatch! (expected {}, got {})", host, port, expected_peer_id, actual_peer_id ); return Err(io::Error::from(io::ErrorKind::Other)); } } info!("connected to validator successfully"); if peer_id.is_none() { warn!( "unverified validator peer ID! 
({})", connection.remote_pubkey().peer_id() ); } Ok(Box::new(connection)) } else { error!("unsealing failed"); Err(io::ErrorKind::Other.into()) } } pub fn get_connection(secret_connection: Option<&RemoteConnectionConfig>) -> Box<dyn Connection> { loop { let conn: io::Result<Box<dyn Connection>> = if let Some(config) = secret_connection { get_secret_connection(config) } else { TcpStream::connect("tendermint").map(|socket| { let plain_conn = PlainConnection::new(socket); Box::new(plain_conn) as Box<dyn Connection> }) }; if let Err(e) = conn { error!("tendermint connection error {:?}", e); thread::sleep(Duration::new(1, 0)); } else { return conn.unwrap(); } } } pub fn entry(mut host_response: TcpStream, request: SgxInitRequest) -> io::Result<()> { let mut csprng = OsRng {}; match request { SgxInitRequest::GenWrapKey { targetinfo } => { let targetinfo = targetinfo.unwrap_or_else(|| Targetinfo::from(Report::for_self())); let rsa_kp = cloud::generate_keypair(&mut csprng, targetinfo); if let Ok((wrap_pub_key, wrap_key_sealed, pub_key_report)) = rsa_kp { let response = SgxInitResponse::WrapKey { wrap_key_sealed, wrap_pub_key, pub_key_report, };
} else { error!("sealing failed"); } } SgxInitRequest::KeyGen { cloud_backup } => { let kp = Keypair::generate(&mut csprng); let cloud_backup_key_data = cloud_backup.and_then(|key| cloud::cloud_backup(&mut csprng, key, &kp).ok()); if let Ok(sealed_key_data) = keypair_seal::seal(&mut csprng, &kp) { let response = SgxInitResponse::GenOrRecover { sealed_key_data, cloud_backup_key_data, }; match serde_json::to_vec(&response) { Ok(v) => { debug!("writing response"); write_u16_payload(&mut host_response, &v)?; } Err(e) => { error!("keygen error: {}", e); } } } else { error!("sealing failed"); } } SgxInitRequest::CloudRecover { cloud_backup, key_data, } => { if let Ok(sealed_key_data) = cloud::reseal_recover_cloud(&mut csprng, cloud_backup, key_data) { let response = SgxInitResponse::GenOrRecover { sealed_key_data, cloud_backup_key_data: None, }; match serde_json::to_vec(&response) { Ok(v) => { debug!("writing response"); write_u16_payload(&mut host_response, &v)?; } Err(e) => { error!("recovery error: {}", e); } } } else { error!("recovery failed"); } } SgxInitRequest::Start { sealed_key, config, secret_connection, initial_state, } => { let state_holder = state::StateHolder::new()?; if let Ok(keypair) = keypair_seal::unseal(&sealed_key) { let conn: Box<dyn Connection> = get_connection(secret_connection.as_ref()); let mut session = tmkms_light::session::Session::new( config, conn, keypair, initial_state.into(), state_holder, ); loop { if let Err(e) = session.request_loop() { error!("request error: {}", e); } let conn: Box<dyn Connection> = get_connection(secret_connection.as_ref()); session.reset_connection(conn); } } else { error!("unsealing failed"); return Err(io::ErrorKind::Other.into()); } } } Ok(()) } #[cfg(test)] mod tests { use super::*; use std::net::{TcpListener, TcpStream}; use tmkms_light::utils::read_u16_payload; use tmkms_light_sgx_runner::CloudBackupKey; #[test] fn test_recover_flow() { let listener = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = 
listener.local_addr().unwrap(); let (sender, receiver) = std::sync::mpsc::channel(); let handler = std::thread::spawn(move || { while let Ok(Some(req)) = receiver.recv() { entry(TcpStream::connect(addr).unwrap(), req).expect("ok entry"); } }); sender .send(Some(SgxInitRequest::GenWrapKey { targetinfo: None })) .expect("send request0"); let (mut stream_signer, _) = listener.accept().unwrap(); let resp1 = read_u16_payload(&mut stream_signer).expect("response0"); let response1: SgxInitResponse = serde_json::from_slice(&resp1).expect("response0"); let (sealed_rsa_key, pubkey) = match response1 { SgxInitResponse::WrapKey { wrap_key_sealed, wrap_pub_key, .. } => (wrap_key_sealed, wrap_pub_key), _ => panic!("wrong response"), }; let mut csprng = OsRng {}; let backup_key = cloud::tests::get_wrapped_key(&mut csprng, pubkey); let cloud_backup = CloudBackupKey { sealed_rsa_key, backup_key, }; let cloud_backup2 = cloud_backup.clone(); sender .send(Some(SgxInitRequest::KeyGen { cloud_backup: Some(cloud_backup), })) .expect("send request1"); let (mut stream_signer, _) = listener.accept().unwrap(); let resp1 = read_u16_payload(&mut stream_signer).expect("response1"); let response1: SgxInitResponse = serde_json::from_slice(&resp1).expect("response1"); let (seal_key_request, cloud_backup_key_data) = response1.get_gen_response().expect("response1"); sender .send(Some(SgxInitRequest::CloudRecover { cloud_backup: cloud_backup2, key_data: cloud_backup_key_data.expect("backup"), })) .expect("send request2"); let (mut stream_signer, _) = listener.accept().unwrap(); let resp2 = read_u16_payload(&mut stream_signer).expect("response2"); let response2: SgxInitResponse = serde_json::from_slice(&resp2).expect("response2"); let (seal_key_request2, _) = response2.get_gen_response().expect("response2"); sender.send(None).expect("send request3"); let _ = handler.join(); assert_eq!( seal_key_request.seal_key_request.keyid, seal_key_request2.seal_key_request.keyid ); } #[test] fn test_unseal() { let 
mut csprng = OsRng {}; let kp = Keypair::generate(&mut csprng); let sealed_data = keypair_seal::seal(&mut csprng, &kp).unwrap(); let mut mangled_sealed_data = sealed_data.clone(); mangled_sealed_data.nonce[0] ^= 1; assert!(keypair_seal::unseal(&mangled_sealed_data).is_err()); assert_eq!( keypair_seal::unseal(&sealed_data).unwrap().public, kp.public ); } }
match serde_json::to_vec(&response) { Ok(v) => { debug!("writing response"); write_u16_payload(&mut host_response, &v)?; } Err(e) => { error!("keygen error: {}", e); } }
if_condition
[]
Rust
src/app/components/now_playing/now_playing_model.rs
bertob/spot
ef3309cecf3050ea46a2b714aa36b7ce0e8de549
use gettextrs::gettext; use gio::prelude::*; use gio::SimpleActionGroup; use std::ops::Deref; use std::rc::Rc; use crate::app::components::SimpleHeaderBarModel; use crate::app::components::{labels, PlaylistModel}; use crate::app::models::SongDescription; use crate::app::models::SongListModel; use crate::app::state::SelectionContext; use crate::app::state::{PlaybackAction, PlaybackState, SelectionAction, SelectionState}; use crate::app::{ActionDispatcher, AppAction, AppEvent, AppModel}; pub struct NowPlayingModel { app_model: Rc<AppModel>, dispatcher: Box<dyn ActionDispatcher>, } impl NowPlayingModel { pub fn new(app_model: Rc<AppModel>, dispatcher: Box<dyn ActionDispatcher>) -> Self { Self { app_model, dispatcher, } } fn queue(&self) -> impl Deref<Target = PlaybackState> + '_ { self.app_model.map_state(|s| &s.playback) } pub fn load_more(&self) -> Option<()> { let queue = self.queue(); let loader = self.app_model.get_batch_loader(); let query = queue.next_query()?; debug!("next_query = {:?}", &query); self.dispatcher.dispatch_async(Box::pin(async move { let source = query.source.clone(); let action = loader .query(query, |song_batch| { PlaybackAction::LoadPagedSongs(source, song_batch).into() }) .await; Some(action) })); Some(()) } } impl PlaylistModel for NowPlayingModel { fn song_list_model(&self) -> SongListModel { self.queue().songs().clone() } fn current_song_id(&self) -> Option<String> { self.queue().current_song_id() } fn play_song_at(&self, _pos: usize, id: &str) { self.dispatcher .dispatch(PlaybackAction::Load(id.to_string()).into()); } fn autoscroll_to_playing(&self) -> bool { false } fn actions_for(&self, id: &str) -> Option<gio::ActionGroup> { let queue = self.queue(); let song = queue.songs().get(id)?; let song = song.description(); let group = SimpleActionGroup::new(); for view_artist in song.make_artist_actions(self.dispatcher.box_clone(), None) { group.add_action(&view_artist); } group.add_action(&song.make_album_action(self.dispatcher.box_clone(), 
None)); group.add_action(&song.make_link_action(None)); group.add_action(&song.make_dequeue_action(self.dispatcher.box_clone(), None)); Some(group.upcast()) } fn menu_for(&self, id: &str) -> Option<gio::MenuModel> { let queue = self.queue(); let song = queue.songs().get(id)?; let song = song.description(); let menu = gio::Menu::new(); menu.append(Some(&*labels::VIEW_ALBUM), Some("song.view_album")); for artist in song.artists.iter() { menu.append( Some(&labels::more_from_label(&artist.name)), Some(&format!("song.view_artist_{}", artist.id)), ); } menu.append(Some(&*labels::COPY_LINK), Some("song.copy_link")); menu.append(Some(&*labels::REMOVE_FROM_QUEUE), Some("song.dequeue")); Some(menu.upcast()) } fn select_song(&self, id: &str) { let queue = self.queue(); if let Some(song) = queue.songs().get(id) { let song = song.description().clone(); self.dispatcher .dispatch(SelectionAction::Select(vec![song]).into()); } } fn deselect_song(&self, id: &str) { self.dispatcher .dispatch(SelectionAction::Deselect(vec![id.to_string()]).into()); } fn enable_selection(&self) -> bool { self.dispatcher .dispatch(AppAction::EnableSelection(SelectionContext::Queue)); true } fn selection(&self) -> Option<Box<dyn Deref<Target = SelectionState> + '_>> { let selection = self.app_model.map_state(|s| &s.selection); Some(Box::new(selection)) } } impl SimpleHeaderBarModel for NowPlayingModel { fn title(&self) -> Option<String> { Some(gettext("Now playing")) } fn title_updated(&self, _: &AppEvent) -> bool { false } fn selection_context(&self) -> Option<&SelectionContext> { Some(&SelectionContext::Queue) } fn select_all(&self) { let songs: Vec<SongDescription> = self.queue().songs().collect(); self.dispatcher .dispatch(SelectionAction::Select(songs).into()); } }
use gettextrs::gettext; use gio::prelude::*; use gio::SimpleActionGroup; use std::ops::Deref; use std::rc::Rc; use crate::app::components::SimpleHeaderBarModel; use crate::app::components::{labels, PlaylistModel}; use crate::app::models::SongDescription; use crate::app::models::SongListModel; use crate::app::state::SelectionContext; use crate::app::state::{PlaybackAction, PlaybackState, SelectionAction, SelectionState}; use crate::app::{ActionDispatcher, AppAction, AppEvent, AppModel}; pub struct NowPlayingModel { app_model: Rc<AppModel>, dispatcher: Box<dyn ActionDispatcher>, } impl NowPlayingModel { pub fn new(app_model: Rc<AppModel>, dispatcher: Box<dyn ActionDispatcher>) -> Self { Self { app_model, dispatcher, } } fn queue(&self) -> impl Deref<Target = PlaybackState> + '_ { self.app_model.map_state(|s| &s.playback) } pub fn load_more(&self) -> Option<()> { let queue = self.queue(); let loader = self.app_model.get_batch_loader(); let query = queue.next_query()?; debug!("next_query = {:?}", &query); self.dispatcher.dispatch_async(Box::pin(async move { let source = query.source.clone(); let action = loader .query(query, |song_batch| { PlaybackAction::LoadPagedSongs(source, song_batch).into() }) .await; Some(action) })); Some(()) } } impl PlaylistModel for NowPlayingModel { fn song_list_model(&self) -> SongListModel { self.queue().songs().clone() } fn current_song_id(&self) -> Option<String> { self.queue().current_song_id() } fn play_song_at(&self, _pos: usize, id: &str) { self.dispatcher .dispatch(PlaybackAction::Load(id.to_string()).into()); } fn autoscroll_to_playing(&self) -> bool { false } fn actions_for(&self, id: &str) -> Option<gio::ActionGroup> { let queue = self.queue(); let song = queue.songs().get(id)?; let song = song.description(); let group = SimpleActionGroup::new(); for view_artist in song.make_artist_actions(self.dispatcher.box_clone(), None) { group.add_action(&view_artist); } group.add_action(&song.make_album_action(self.dispatcher.box_clone(), 
None)); group.add_action(&song.make_link_action(None)); group.add_action(&song.make_dequeue_action(self.dispatcher.box_clone(), None)); Some(group.upcast()) } fn menu_for(&self, id: &str) -> Option<gio::MenuModel> { let queue = self.queue(); let song = queue.songs().get(id)?; let song = song.description(); let menu = gio::Menu::new();
fn select_song(&self, id: &str) { let queue = self.queue(); if let Some(song) = queue.songs().get(id) { let song = song.description().clone(); self.dispatcher .dispatch(SelectionAction::Select(vec![song]).into()); } } fn deselect_song(&self, id: &str) { self.dispatcher .dispatch(SelectionAction::Deselect(vec![id.to_string()]).into()); } fn enable_selection(&self) -> bool { self.dispatcher .dispatch(AppAction::EnableSelection(SelectionContext::Queue)); true } fn selection(&self) -> Option<Box<dyn Deref<Target = SelectionState> + '_>> { let selection = self.app_model.map_state(|s| &s.selection); Some(Box::new(selection)) } } impl SimpleHeaderBarModel for NowPlayingModel { fn title(&self) -> Option<String> { Some(gettext("Now playing")) } fn title_updated(&self, _: &AppEvent) -> bool { false } fn selection_context(&self) -> Option<&SelectionContext> { Some(&SelectionContext::Queue) } fn select_all(&self) { let songs: Vec<SongDescription> = self.queue().songs().collect(); self.dispatcher .dispatch(SelectionAction::Select(songs).into()); } }
menu.append(Some(&*labels::VIEW_ALBUM), Some("song.view_album")); for artist in song.artists.iter() { menu.append( Some(&labels::more_from_label(&artist.name)), Some(&format!("song.view_artist_{}", artist.id)), ); } menu.append(Some(&*labels::COPY_LINK), Some("song.copy_link")); menu.append(Some(&*labels::REMOVE_FROM_QUEUE), Some("song.dequeue")); Some(menu.upcast()) }
function_block-function_prefix_line
[ { "content": "pub fn n_songs_selected_label(n: usize) -> String {\n\n // this is just to fool xgettext, it doesn't like macros (or rust for that matter) :(\n\n if cfg!(debug_assertions) {\n\n // translators: This shows up when in selection mode. This text should be as short as possible.\n\n ...
Rust
src/interchange/src/json.rs
josharenberg/materialize
afbd0685b98a06ba1603274d4550d0606b955284
use std::collections::HashSet; use repr::adt::numeric::{NUMERIC_AGG_MAX_PRECISION, NUMERIC_DATUM_MAX_PRECISION}; use repr::{ColumnName, ColumnType, ScalarType}; use serde_json::json; fn build_row_schema_field<F: FnMut() -> String>( namer: &mut F, names_seen: &mut HashSet<String>, typ: &ColumnType, ) -> serde_json::value::Value { let mut field_type = match &typ.scalar_type { ScalarType::Bool => json!("boolean"), ScalarType::Int16 | ScalarType::Int32 | ScalarType::Oid => json!("int"), ScalarType::Int64 => json!("long"), ScalarType::Float32 => json!("float"), ScalarType::Float64 => json!("double"), ScalarType::Date => json!({ "type": "int", "logicalType": "date", }), ScalarType::Time => json!({ "type": "long", "logicalType": "time-micros", }), ScalarType::Timestamp | ScalarType::TimestampTz => json!({ "type": "long", "logicalType": "timestamp-micros" }), ScalarType::Interval => json!({ "type": "fixed", "size": 12, "logicalType": "duration" }), ScalarType::Bytes => json!("bytes"), ScalarType::String => json!("string"), ScalarType::Jsonb => json!({ "type": "string", "connect.name": "io.debezium.data.Json", }), ScalarType::Uuid => json!({ "type": "string", "logicalType": "uuid", }), ScalarType::Array(element_type) | ScalarType::List { element_type, .. } => { let inner = build_row_schema_field( namer, names_seen, &ColumnType { nullable: true, scalar_type: (**element_type).clone(), }, ); json!({ "type": "array", "items": inner }) } ScalarType::Map { value_type, .. } => { let inner = build_row_schema_field( namer, names_seen, &ColumnType { nullable: true, scalar_type: (**value_type).clone(), }, ); json!({ "type": "map", "values": inner }) } ScalarType::Record { fields, custom_name, .. 
} => { let (name, name_seen) = match custom_name { Some(name) => (name.clone(), !names_seen.insert(name.clone())), None => (namer(), false), }; if name_seen { json!(name) } else { let fields = fields.to_vec(); let json_fields = build_row_schema_fields(&fields, names_seen, namer); json!({ "type": "record", "name": name, "fields": json_fields }) } } ScalarType::Numeric { scale } => { let (p, s) = match scale { Some(scale) => (NUMERIC_DATUM_MAX_PRECISION, usize::from(*scale)), None => (NUMERIC_AGG_MAX_PRECISION, NUMERIC_DATUM_MAX_PRECISION), }; json!({ "type": "bytes", "logicalType": "decimal", "precision": p, "scale": s, }) } }; if typ.nullable { field_type = json!(["null", field_type]); } field_type } pub(super) fn build_row_schema_fields<F: FnMut() -> String>( columns: &[(ColumnName, ColumnType)], names_seen: &mut HashSet<String>, namer: &mut F, ) -> Vec<serde_json::value::Value> { let mut fields = Vec::new(); for (name, typ) in columns.iter() { let field_type = build_row_schema_field(namer, names_seen, typ); fields.push(json!({ "name": name, "type": field_type, })); } fields } pub fn build_row_schema_json( columns: &[(ColumnName, ColumnType)], name: &str, ) -> serde_json::value::Value { let mut name_idx = 0; let fields = build_row_schema_fields(columns, &mut Default::default(), &mut move || { let ret = format!("com.materialize.sink.record{}", name_idx); name_idx += 1; ret }); json!({ "type": "record", "fields": fields, "name": name }) }
use std::collections::HashSet; use repr::adt::numeric::{NUMERIC_AGG_MAX_PRECISION, NUMERIC_DATUM_MAX_PRECISION}; use repr::{ColumnName, ColumnType, ScalarType}; use serde_json::json; fn build_row_schema_field<F: FnMut() -> String>( namer: &mut F, names_seen: &mut HashSet<String>, typ: &ColumnType, ) -> serde_json::value::Value { let mut field_type = match &typ.scalar_type { ScalarType::Bool => json!("boolean"), ScalarType::Int16 | ScalarType::Int32 | ScalarType::Oid => json!("int"), ScalarType::Int64 => json!("long"), ScalarType::Float32 => json!("float"), ScalarType::Float64 => json!("double"), ScalarType::Date => json!({ "type": "int", "logicalType": "date", }), ScalarType::Time => json!({ "type": "long", "logicalType": "time-micros", }), ScalarType::Timestamp | ScalarType::TimestampTz => json!({ "type": "long", "logicalType": "timestamp-micros" }), ScalarType::Interval => json!({ "type": "fixed", "size": 12, "logicalType": "duration" }), ScalarType::Bytes => json!("bytes"), ScalarType::String => json!("string"), ScalarType::Jsonb => json!({ "type": "string", "connect.name": "io.debezium.data.Json", }), ScalarType::Uuid => json!({ "type": "string", "logicalType": "uuid", }), ScalarType::Array(element_type) | ScalarType::List { element_type, .. } => { let inner = build_row_schema_field( namer, names_seen, &ColumnType { nullable: true, scalar_type: (**element_type).clone(), }, ); json!({ "type": "array", "items": inner }) } ScalarType::Map { value_type, .. } => { let inner = build_row_schema_field( namer, names_seen, &ColumnType { nullable: true, scalar_type: (**value_type).clone(), }, ); json!({ "type": "map", "values": inner }) } ScalarType::Record { fields, custom_name, .. 
} => { let (name, name_seen) = match custom_name { Some(name) => (name.clone(), !names_seen.insert(name.clone())), None => (namer(), false), }; if name_seen { json!(name) } else { let fields = fields.to_vec(); let json_fields = build_row_schema_fields(&fields, names_seen, namer); json!({ "type": "record", "name": name, "fields": json_fields }) } } ScalarType::Numeric { scale } => { let (p, s) = match scale { Some(scale) => (NUMERIC_DATUM_MAX_PRECISION, usize::from(*scale)), None => (NUMERIC_AGG_MAX_PRECISION, NUMERIC_DATUM_MAX_PRECISION), }; json!({ "type": "bytes", "logicalType": "decimal", "precision": p, "scale": s, }) } }; if typ.nullable { field_type = json!(["null", field_type]); } field_type } pub(super) fn build_row_schema_fields<F: FnMut() -> String>( columns: &[(ColumnName, ColumnType)], names_seen: &mut HashSet<String>, namer: &mut F, ) -> Vec<serde_json::value::Value> { let mut fields = Vec::new(); for (name, typ) in columns.iter() { let field_type = build_row_schema_field(namer, names_seen, typ); fields.push(json!({ "name": name, "type": field_type, })); } fields } pub fn build_row_schema_json( columns: &[(ColumnName, ColumnType)], name: &str, ) -> serde_json::value::Value { let mut name_idx = 0; let fields =
; json!({ "type": "record", "fields": fields, "name": name }) }
build_row_schema_fields(columns, &mut Default::default(), &mut move || { let ret = format!("com.materialize.sink.record{}", name_idx); name_idx += 1; ret })
call_expression
[ { "content": "pub fn format_string<F>(buf: &mut F, s: &str) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n buf.write_str(s);\n\n Nestable::MayNeedEscaping\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 1, "score": 458584.64644809946 }, { "content": "pub fn format_u...
Rust
src/lib.rs
alexsnaps/cachers
89853ee9191570f7c15b85ef585a91b6412140dc
#![cfg_attr(feature = "unstable", feature(test))] pub mod asynchronous; mod eviction; mod segment; mod segment2; use std::ops::Fn; use std::sync::{Arc, RwLock}; use crate::segment::Segment; pub struct CacheThrough<K, V> { data: RwLock<Segment<K, V>>, } impl<K, V> CacheThrough<K, V> where K: std::cmp::Eq + std::hash::Hash + Copy, { pub fn new(capacity: usize) -> CacheThrough<K, V> { CacheThrough { data: RwLock::new(Segment::new(capacity)), } } pub fn get<F>(&self, key: K, populating_fn: F) -> Option<Arc<V>> where F: Fn(&K) -> Option<V>, { if let Some(value) = self.data.read().unwrap().get(&key) { return Some(value); } let option = self.data.write(); if option.is_ok() { let mut guard = option.unwrap(); return guard.get_or_populate(key, populating_fn); } None } pub fn update<F>(&self, key: K, updating_fn: F) -> Option<Arc<V>> where F: Fn(&K, Option<Arc<V>>) -> Option<V>, { self.data.write().unwrap().update(key, updating_fn) } pub fn remove(&self, key: K) { self.data.write().unwrap().update(key, |_, _| None); } #[cfg(test)] fn len(&self) -> usize { self.data.read().unwrap().len() } } #[cfg(test)] mod tests { use super::CacheThrough; use std::sync::Arc; fn test_cache() -> CacheThrough<i32, String> { CacheThrough::new(3) } #[test] fn hit_populates() { let cache: CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.get(our_key, populate); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } { let value = cache.get(our_key, do_not_invoke); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } } #[test] fn miss_populates_not() { let cache: CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.get(our_key, miss); assert_eq!(value, None); assert_eq!(cache.len(), 0); } { let value = cache.get(our_key, populate); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); cache.get(2, populate); cache.get(3, populate); cache.get(4, populate); } } #[test] fn update_populates() { let cache: 
CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.update(our_key, upsert); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } { let value = cache.get(our_key, do_not_invoke); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } } #[test] fn update_updates() { let cache: CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.get(our_key, populate); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } { let value = cache.update(our_key, update); assert_eq!(*value.unwrap(), "42 updated!"); assert_eq!(cache.len(), 1); } { let value = cache.get(our_key, do_not_invoke); assert_eq!(*value.unwrap(), "42 updated!"); assert_eq!(cache.len(), 1); } } #[test] fn update_removes() { let cache: CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.get(our_key, populate); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } { let value = cache.update(our_key, updel); assert_eq!(value, None); assert_eq!(cache.len(), 0); } { let value = cache.get(our_key, miss); assert_eq!(value, None); assert_eq!(cache.len(), 0); } } #[test] fn remove_removes() { let cache: CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.get(our_key, populate); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } { cache.remove(our_key); assert_eq!(cache.len(), 0); } } #[test] fn evicts() { let cache: CacheThrough<i32, String> = test_cache(); { assert_eq!(*cache.get(1, populate).unwrap(), "1"); assert_eq!(cache.len(), 1); assert_eq!(*cache.get(2, populate).unwrap(), "2"); assert_eq!(cache.len(), 2); assert_eq!(*cache.get(3, populate).unwrap(), "3"); assert_eq!(cache.len(), 3); } { assert_eq!(*cache.get(4, populate).unwrap(), "4"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(2, do_not_invoke).unwrap(), "2"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(3, do_not_invoke).unwrap(), "3"); assert_eq!(cache.len(), 3); } { 
assert_eq!(*cache.get(5, populate).unwrap(), "5"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(2, do_not_invoke).unwrap(), "2"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(4, do_not_invoke).unwrap(), "4"); assert_eq!(cache.len(), 3); } { assert_eq!(*cache.get(6, populate).unwrap(), "6"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(5, do_not_invoke).unwrap(), "5"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(2, do_not_invoke).unwrap(), "2"); assert_eq!(cache.len(), 3); } } fn miss(_key: &i32) -> Option<String> { None } fn populate(key: &i32) -> Option<String> { Some(key.to_string()) } fn upsert(key: &i32, value: Option<Arc<String>>) -> Option<String> { assert_eq!(value, None); populate(key) } fn update(_key: &i32, value: Option<Arc<String>>) -> Option<String> { let previous = &*value.unwrap(); Some(previous.clone() + " updated!") } fn updel(_key: &i32, value: Option<Arc<String>>) -> Option<String> { assert!(value.is_some()); None } fn do_not_invoke(_key: &i32) -> Option<String> { assert_eq!("", "I shall not be invoked!"); None } } #[cfg(all(feature = "unstable", test))] mod bench { extern crate test; use std::sync::{Arc, Barrier}; use std::thread; use test::Bencher; use crate::CacheThrough; #[bench] fn get_100_times_no_eviction_two_threads(b: &mut Bencher) { let cache_size: i32 = 1000; let cache: Arc<CacheThrough<i32, String>> = Arc::new(CacheThrough::new(cache_size as usize)); let our_key = 42; let barrier = Arc::new(Barrier::new(2)); let other_cache = cache.clone(); let other_barrier = barrier.clone(); let t = thread::spawn(move || { for warmup in 0..our_key { other_cache .get(warmup, |key| Some(key.to_string())) .expect("We had a miss?!"); } let _value = other_cache.get(our_key, |key| Some(key.to_string())); for iteration in 0..10000 { { other_cache.get(our_key, |_| unimplemented!()).expect("We had a miss?!"); if iteration % 4 == 0 { other_cache .update(iteration, |key, _| Some(key.to_string())) .expect("We had a miss?!"); } else { 
other_cache .get(iteration as i32, |key| Some(key.to_string())) .expect("We had a miss?!"); } if iteration == cache_size / 100 { barrier.wait(); } } } }); other_barrier.wait(); b.iter(|| { for _ in 0..100 { cache.get(our_key, |_| unimplemented!()).expect("We had a miss?!"); } }); t.join().unwrap(); } }
#![cfg_attr(feature = "unstable", feature(test))] pub mod asynchronous; mod eviction; mod segment; mod segment2; use std::ops::Fn; use std::sync::{Arc, RwLock}; use crate::segment::Segment; pub struct CacheThrough<K, V> { data: RwLock<Segment<K, V>>, } impl<K, V> CacheThrough<K, V> where K: std::cmp::Eq + std::hash::Hash + Copy, { pub fn new(capacity: usize) -> CacheThrough<K, V> { CacheThrough { data: RwLock::new(Segment::new(capacity)), } } pub fn get<F>(&self, key: K, populating_fn: F) -> Option<Arc<V>> where F: Fn(&K) -> Option<V>, {
pub fn update<F>(&self, key: K, updating_fn: F) -> Option<Arc<V>> where F: Fn(&K, Option<Arc<V>>) -> Option<V>, { self.data.write().unwrap().update(key, updating_fn) } pub fn remove(&self, key: K) { self.data.write().unwrap().update(key, |_, _| None); } #[cfg(test)] fn len(&self) -> usize { self.data.read().unwrap().len() } } #[cfg(test)] mod tests { use super::CacheThrough; use std::sync::Arc; fn test_cache() -> CacheThrough<i32, String> { CacheThrough::new(3) } #[test] fn hit_populates() { let cache: CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.get(our_key, populate); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } { let value = cache.get(our_key, do_not_invoke); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } } #[test] fn miss_populates_not() { let cache: CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.get(our_key, miss); assert_eq!(value, None); assert_eq!(cache.len(), 0); } { let value = cache.get(our_key, populate); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); cache.get(2, populate); cache.get(3, populate); cache.get(4, populate); } } #[test] fn update_populates() { let cache: CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.update(our_key, upsert); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } { let value = cache.get(our_key, do_not_invoke); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } } #[test] fn update_updates() { let cache: CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.get(our_key, populate); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } { let value = cache.update(our_key, update); assert_eq!(*value.unwrap(), "42 updated!"); assert_eq!(cache.len(), 1); } { let value = cache.get(our_key, do_not_invoke); assert_eq!(*value.unwrap(), "42 updated!"); assert_eq!(cache.len(), 1); } } #[test] fn update_removes() { let cache: 
CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.get(our_key, populate); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } { let value = cache.update(our_key, updel); assert_eq!(value, None); assert_eq!(cache.len(), 0); } { let value = cache.get(our_key, miss); assert_eq!(value, None); assert_eq!(cache.len(), 0); } } #[test] fn remove_removes() { let cache: CacheThrough<i32, String> = test_cache(); let our_key = 42; { let value = cache.get(our_key, populate); assert_eq!(*value.unwrap(), "42"); assert_eq!(cache.len(), 1); } { cache.remove(our_key); assert_eq!(cache.len(), 0); } } #[test] fn evicts() { let cache: CacheThrough<i32, String> = test_cache(); { assert_eq!(*cache.get(1, populate).unwrap(), "1"); assert_eq!(cache.len(), 1); assert_eq!(*cache.get(2, populate).unwrap(), "2"); assert_eq!(cache.len(), 2); assert_eq!(*cache.get(3, populate).unwrap(), "3"); assert_eq!(cache.len(), 3); } { assert_eq!(*cache.get(4, populate).unwrap(), "4"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(2, do_not_invoke).unwrap(), "2"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(3, do_not_invoke).unwrap(), "3"); assert_eq!(cache.len(), 3); } { assert_eq!(*cache.get(5, populate).unwrap(), "5"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(2, do_not_invoke).unwrap(), "2"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(4, do_not_invoke).unwrap(), "4"); assert_eq!(cache.len(), 3); } { assert_eq!(*cache.get(6, populate).unwrap(), "6"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(5, do_not_invoke).unwrap(), "5"); assert_eq!(cache.len(), 3); assert_eq!(*cache.get(2, do_not_invoke).unwrap(), "2"); assert_eq!(cache.len(), 3); } } fn miss(_key: &i32) -> Option<String> { None } fn populate(key: &i32) -> Option<String> { Some(key.to_string()) } fn upsert(key: &i32, value: Option<Arc<String>>) -> Option<String> { assert_eq!(value, None); populate(key) } fn update(_key: &i32, value: Option<Arc<String>>) -> Option<String> { 
let previous = &*value.unwrap(); Some(previous.clone() + " updated!") } fn updel(_key: &i32, value: Option<Arc<String>>) -> Option<String> { assert!(value.is_some()); None } fn do_not_invoke(_key: &i32) -> Option<String> { assert_eq!("", "I shall not be invoked!"); None } } #[cfg(all(feature = "unstable", test))] mod bench { extern crate test; use std::sync::{Arc, Barrier}; use std::thread; use test::Bencher; use crate::CacheThrough; #[bench] fn get_100_times_no_eviction_two_threads(b: &mut Bencher) { let cache_size: i32 = 1000; let cache: Arc<CacheThrough<i32, String>> = Arc::new(CacheThrough::new(cache_size as usize)); let our_key = 42; let barrier = Arc::new(Barrier::new(2)); let other_cache = cache.clone(); let other_barrier = barrier.clone(); let t = thread::spawn(move || { for warmup in 0..our_key { other_cache .get(warmup, |key| Some(key.to_string())) .expect("We had a miss?!"); } let _value = other_cache.get(our_key, |key| Some(key.to_string())); for iteration in 0..10000 { { other_cache.get(our_key, |_| unimplemented!()).expect("We had a miss?!"); if iteration % 4 == 0 { other_cache .update(iteration, |key, _| Some(key.to_string())) .expect("We had a miss?!"); } else { other_cache .get(iteration as i32, |key| Some(key.to_string())) .expect("We had a miss?!"); } if iteration == cache_size / 100 { barrier.wait(); } } } }); other_barrier.wait(); b.iter(|| { for _ in 0..100 { cache.get(our_key, |_| unimplemented!()).expect("We had a miss?!"); } }); t.join().unwrap(); } }
if let Some(value) = self.data.read().unwrap().get(&key) { return Some(value); } let option = self.data.write(); if option.is_ok() { let mut guard = option.unwrap(); return guard.get_or_populate(key, populating_fn); } None }
function_block-function_prefix_line
[ { "content": "pub trait Evictor<K> {\n\n fn add(&mut self, key: K) -> (usize, Option<K>);\n\n fn touch(&self, index: usize);\n\n}\n\n\n\npub struct ClockEvictor<K> {\n\n capacity: usize,\n\n current_pos: usize,\n\n clock: RwLock<Vec<bool>>,\n\n mapping: HashMap<usize, K>,\n\n}\n\n\n\nimpl<K> ClockEvictor<...
Rust
model/src/gateway/payload/incoming/thread_members_update.rs
vilgotf/twilight
b2cb7a4f80890e328e50cb1696b477c5c10464e5
use crate::{ channel::thread::{ThreadMember, ThreadMemberIntermediary}, id::{ChannelId, GuildId, UserId}, }; use serde::{ de::{value::MapAccessDeserializer, MapAccess, Visitor}, Deserialize, Deserializer, Serialize, }; use std::fmt::{Formatter, Result as FmtResult}; #[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize)] pub struct ThreadMembersUpdate { #[serde(default)] pub added_members: Vec<ThreadMember>, pub guild_id: GuildId, pub id: ChannelId, pub member_count: u8, #[serde(default)] pub removed_member_ids: Vec<UserId>, } impl<'de> Deserialize<'de> for ThreadMembersUpdate { fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { deserializer.deserialize_map(ThreadMembersUpdateVisitor) } } #[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq)] struct ThreadMembersUpdateIntermediary { #[serde(default)] pub added_members: Vec<ThreadMemberIntermediary>, pub guild_id: GuildId, pub id: ChannelId, pub member_count: u8, #[serde(default)] pub removed_member_ids: Vec<UserId>, } impl ThreadMembersUpdateIntermediary { fn into_thread_members_update(self) -> ThreadMembersUpdate { let guild_id = self.guild_id; let added_members = self .added_members .into_iter() .map(|tm| tm.into_thread_member(guild_id)) .collect(); ThreadMembersUpdate { added_members, guild_id, id: self.id, member_count: self.member_count, removed_member_ids: self.removed_member_ids, } } } struct ThreadMembersUpdateVisitor; impl<'de> Visitor<'de> for ThreadMembersUpdateVisitor { type Value = ThreadMembersUpdate; fn expecting(&self, f: &mut Formatter<'_>) -> FmtResult { f.write_str("struct ThreadMembersUpdate") } fn visit_map<A: MapAccess<'de>>(self, map: A) -> Result<Self::Value, A::Error> { let deser = MapAccessDeserializer::new(map); let update = ThreadMembersUpdateIntermediary::deserialize(deser)?; Ok(update.into_thread_members_update()) } } #[cfg(test)] mod tests { use super::ThreadMembersUpdate; use crate::{ channel::thread::ThreadMember, datetime::Timestamp, 
gateway::presence::{ Activity, ActivityEmoji, ActivityType, ClientStatus, Presence, Status, UserOrId, }, guild::Member, id::{ChannelId, GuildId, UserId}, user::User, }; use serde_test::Token; use std::str::FromStr; #[allow(clippy::too_many_lines)] #[test] fn test_thread_members_update() { const JOIN_TIMESTAMP: &str = "2015-04-26T06:26:56.936000+00:00"; const PREMIUM_SINCE: &str = "2021-03-16T14:29:19.046000+00:00"; let joined_at = Timestamp::from_str(JOIN_TIMESTAMP).expect("timestamp error"); let premium_since = Timestamp::from_str(PREMIUM_SINCE).expect("timestamp error"); let member = Member { avatar: Some("guild avatar".to_owned()), communication_disabled_until: None, deaf: false, guild_id: GuildId::new(2).expect("non zero"), joined_at, mute: true, nick: Some("twilight".to_owned()), pending: false, premium_since: Some(premium_since), roles: Vec::new(), user: User { accent_color: None, avatar: None, banner: None, bot: false, discriminator: 1, email: None, flags: None, id: UserId::new(3).expect("non zero"), locale: None, mfa_enabled: None, name: "twilight".to_owned(), premium_type: None, public_flags: None, system: None, verified: None, }, }; let activity = Activity { application_id: None, assets: None, buttons: Vec::new(), created_at: Some(1_571_048_061_237), details: None, flags: None, id: Some("aaaaaaaaaaaaaaaa".to_owned()), instance: None, kind: ActivityType::Custom, name: "foo".to_owned(), emoji: Some(ActivityEmoji { name: "Test".to_string(), id: None, animated: None, }), party: None, secrets: None, state: None, timestamps: None, url: None, }; let presence = Presence { activities: vec![activity], client_status: ClientStatus { desktop: Some(Status::Online), mobile: None, web: None, }, guild_id: GuildId::new(2).expect("non zero"), status: Status::Online, user: UserOrId::UserId { id: UserId::new(3).expect("non zero"), }, }; let join_timestamp = Timestamp::from_str(JOIN_TIMESTAMP).expect("timestamp error"); let value = ThreadMembersUpdate { added_members: 
vec![ThreadMember { flags: 1, id: Some(ChannelId::new(123).expect("non zero")), join_timestamp, member: Some(member), presence: Some(presence), user_id: Some(UserId::new(3).expect("non zero")), }], guild_id: GuildId::new(2).expect("non zero"), id: ChannelId::new(4).expect("non zero"), member_count: 8, removed_member_ids: vec![], }; serde_test::assert_de_tokens( &value, &[ Token::Struct { name: "ThreadMemberUpdate", len: 6, }, Token::Str("added_members"), Token::Seq { len: Some(1) }, Token::Struct { name: "ThreadMemberIntermediary", len: 6, }, Token::Str("flags"), Token::U64(1), Token::Str("id"), Token::Some, Token::NewtypeStruct { name: "ChannelId" }, Token::Str("123"), Token::Str("join_timestamp"), Token::Str(JOIN_TIMESTAMP), Token::Str("member"), Token::Some, Token::Struct { name: "MemberIntermediary", len: 11, }, Token::Str("avatar"), Token::Some, Token::Str("guild avatar"), Token::Str("communication_disabled_until"), Token::None, Token::Str("deaf"), Token::Bool(false), Token::Str("guild_id"), Token::NewtypeStruct { name: "GuildId" }, Token::Str("1"), Token::Str("joined_at"), Token::Str(JOIN_TIMESTAMP), Token::Str("mute"), Token::Bool(true), Token::Str("nick"), Token::Some, Token::Str("twilight"), Token::Str("pending"), Token::Bool(false), Token::Str("premium_since"), Token::Some, Token::Str(PREMIUM_SINCE), Token::Str("roles"), Token::Seq { len: Some(0) }, Token::SeqEnd, Token::Str("user"), Token::Struct { name: "User", len: 7, }, Token::Str("accent_color"), Token::None, Token::Str("avatar"), Token::None, Token::Str("banner"), Token::None, Token::Str("bot"), Token::Bool(false), Token::Str("discriminator"), Token::Str("0001"), Token::Str("id"), Token::NewtypeStruct { name: "UserId" }, Token::Str("3"), Token::Str("username"), Token::Str("twilight"), Token::StructEnd, Token::StructEnd, Token::Str("presence"), Token::Some, Token::Struct { name: "PresenceIntermediary", len: 5, }, Token::Str("activities"), Token::Seq { len: Some(1) }, Token::Struct { name: "Activity", 
len: 5, }, Token::Str("created_at"), Token::Some, Token::U64(1_571_048_061_237), Token::Str("emoji"), Token::Some, Token::Struct { name: "ActivityEmoji", len: 1, }, Token::Str("name"), Token::Str("Test"), Token::StructEnd, Token::Str("id"), Token::Some, Token::Str("aaaaaaaaaaaaaaaa"), Token::Str("type"), Token::U8(4), Token::Str("name"), Token::Str("foo"), Token::StructEnd, Token::SeqEnd, Token::Str("client_status"), Token::Struct { name: "ClientStatus", len: 1, }, Token::Str("desktop"), Token::Some, Token::Enum { name: "Status" }, Token::Str("online"), Token::Unit, Token::StructEnd, Token::Str("guild_id"), Token::Some, Token::NewtypeStruct { name: "GuildId" }, Token::Str("2"), Token::Str("status"), Token::Enum { name: "Status" }, Token::Str("online"), Token::Unit, Token::Str("user"), Token::Struct { name: "UserOrId", len: 1, }, Token::Str("id"), Token::NewtypeStruct { name: "UserId" }, Token::Str("3"), Token::StructEnd, Token::StructEnd, Token::Str("user_id"), Token::Some, Token::NewtypeStruct { name: "UserId" }, Token::Str("3"), Token::StructEnd, Token::SeqEnd, Token::Str("guild_id"), Token::NewtypeStruct { name: "GuildId" }, Token::Str("2"), Token::Str("id"), Token::NewtypeStruct { name: "ChannelId" }, Token::Str("4"), Token::Str("member_count"), Token::U8(8), Token::Str("removed_member_ids"), Token::Seq { len: Some(0) }, Token::SeqEnd, Token::StructEnd, ], ); } }
use crate::{ channel::thread::{ThreadMember, ThreadMemberIntermediary}, id::{ChannelId, GuildId, UserId}, }; use serde::{ de::{value::MapAccessDeserializer, MapAccess, Visitor}, Deserialize, Deserializer, Serialize, }; use std::fmt::{Formatter, Result as FmtResult}; #[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize)] pub struct ThreadMembersUpdate { #[serde(default)] pub added_members: Vec<ThreadMember>, pub guild_id: GuildId, pub id: ChannelId, pub member_count: u8, #[serde(default)] pub removed_member_ids: Vec<UserId>, } impl<'de> Deserialize<'de> for ThreadMembersUpdate { fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { deserializer.deserialize_map(ThreadMembersUpdateVisitor) } } #[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq)] struct ThreadMembersUpdateIntermediary { #[serde(default)] pub added_members: Vec<ThreadMemberIntermediary>, pub guild_id: GuildId, pub id: ChannelId, pub member_count: u8, #[serde(default)] pub removed_member_ids: Vec<UserId>, } impl ThreadMembersUpdateIntermediary { fn into_thread_members_update(self) -> ThreadMembersUpdate { let guild_id = self.guild_id; let added_members = self .added_members .into_iter() .map(|tm| tm.into_thread_member(guild_id)) .collect(); ThreadMembersUpdate { added_members, guild_id, id: self.id, member_count: self.member_count, removed_member_ids: self.removed_member_ids, } } } struct ThreadMembersUpdateVisitor; impl<'de> Visitor<'de> for ThreadMembersUpdateVisitor { type Value = ThreadMembersUpdate; fn expecting(&self, f: &mut Formatter<'_>) -> FmtResult { f.write_str("struct ThreadMembersUpdate") } fn visit_map<A: MapAccess<'de>>(self, map: A) -> Result<Self::Value, A::Error> { let deser = MapAccessDeserializer::new(map); let update = ThreadMembersUpdateIntermediary::deserialize(deser)?; Ok(update.into_thread_members_update()) } } #[cfg(test)] mod tests { use super::ThreadMembersUpdate; use crate::{ channel::thread::ThreadMember, datetime::Timestamp, 
gateway::presence::{ Activity, ActivityEmoji, ActivityType, ClientStatus, Presence, Status, UserOrId, }, guild::Member, id::{ChannelId, GuildId, UserId}, user::User, }; use serde_test::Token; use std::str::FromStr; #[allow(clippy::too_many_lines)] #[test] fn test_thread_members_update() { const JOIN_TIMESTAMP: &str = "2015-04-26T06:26:56.936000+00:00"; const PREMIUM_SINCE: &str = "2021-03-16T14:29:19.046000+00:00"; let joined_at = Timestamp::from_str(JOIN_TIMESTAMP).expect("timestamp error"); let premium_since = Timestamp::from_str(PREMIUM_SINCE).expect("timestamp error");
let activity = Activity { application_id: None, assets: None, buttons: Vec::new(), created_at: Some(1_571_048_061_237), details: None, flags: None, id: Some("aaaaaaaaaaaaaaaa".to_owned()), instance: None, kind: ActivityType::Custom, name: "foo".to_owned(), emoji: Some(ActivityEmoji { name: "Test".to_string(), id: None, animated: None, }), party: None, secrets: None, state: None, timestamps: None, url: None, }; let presence = Presence { activities: vec![activity], client_status: ClientStatus { desktop: Some(Status::Online), mobile: None, web: None, }, guild_id: GuildId::new(2).expect("non zero"), status: Status::Online, user: UserOrId::UserId { id: UserId::new(3).expect("non zero"), }, }; let join_timestamp = Timestamp::from_str(JOIN_TIMESTAMP).expect("timestamp error"); let value = ThreadMembersUpdate { added_members: vec![ThreadMember { flags: 1, id: Some(ChannelId::new(123).expect("non zero")), join_timestamp, member: Some(member), presence: Some(presence), user_id: Some(UserId::new(3).expect("non zero")), }], guild_id: GuildId::new(2).expect("non zero"), id: ChannelId::new(4).expect("non zero"), member_count: 8, removed_member_ids: vec![], }; serde_test::assert_de_tokens( &value, &[ Token::Struct { name: "ThreadMemberUpdate", len: 6, }, Token::Str("added_members"), Token::Seq { len: Some(1) }, Token::Struct { name: "ThreadMemberIntermediary", len: 6, }, Token::Str("flags"), Token::U64(1), Token::Str("id"), Token::Some, Token::NewtypeStruct { name: "ChannelId" }, Token::Str("123"), Token::Str("join_timestamp"), Token::Str(JOIN_TIMESTAMP), Token::Str("member"), Token::Some, Token::Struct { name: "MemberIntermediary", len: 11, }, Token::Str("avatar"), Token::Some, Token::Str("guild avatar"), Token::Str("communication_disabled_until"), Token::None, Token::Str("deaf"), Token::Bool(false), Token::Str("guild_id"), Token::NewtypeStruct { name: "GuildId" }, Token::Str("1"), Token::Str("joined_at"), Token::Str(JOIN_TIMESTAMP), Token::Str("mute"), Token::Bool(true), 
Token::Str("nick"), Token::Some, Token::Str("twilight"), Token::Str("pending"), Token::Bool(false), Token::Str("premium_since"), Token::Some, Token::Str(PREMIUM_SINCE), Token::Str("roles"), Token::Seq { len: Some(0) }, Token::SeqEnd, Token::Str("user"), Token::Struct { name: "User", len: 7, }, Token::Str("accent_color"), Token::None, Token::Str("avatar"), Token::None, Token::Str("banner"), Token::None, Token::Str("bot"), Token::Bool(false), Token::Str("discriminator"), Token::Str("0001"), Token::Str("id"), Token::NewtypeStruct { name: "UserId" }, Token::Str("3"), Token::Str("username"), Token::Str("twilight"), Token::StructEnd, Token::StructEnd, Token::Str("presence"), Token::Some, Token::Struct { name: "PresenceIntermediary", len: 5, }, Token::Str("activities"), Token::Seq { len: Some(1) }, Token::Struct { name: "Activity", len: 5, }, Token::Str("created_at"), Token::Some, Token::U64(1_571_048_061_237), Token::Str("emoji"), Token::Some, Token::Struct { name: "ActivityEmoji", len: 1, }, Token::Str("name"), Token::Str("Test"), Token::StructEnd, Token::Str("id"), Token::Some, Token::Str("aaaaaaaaaaaaaaaa"), Token::Str("type"), Token::U8(4), Token::Str("name"), Token::Str("foo"), Token::StructEnd, Token::SeqEnd, Token::Str("client_status"), Token::Struct { name: "ClientStatus", len: 1, }, Token::Str("desktop"), Token::Some, Token::Enum { name: "Status" }, Token::Str("online"), Token::Unit, Token::StructEnd, Token::Str("guild_id"), Token::Some, Token::NewtypeStruct { name: "GuildId" }, Token::Str("2"), Token::Str("status"), Token::Enum { name: "Status" }, Token::Str("online"), Token::Unit, Token::Str("user"), Token::Struct { name: "UserOrId", len: 1, }, Token::Str("id"), Token::NewtypeStruct { name: "UserId" }, Token::Str("3"), Token::StructEnd, Token::StructEnd, Token::Str("user_id"), Token::Some, Token::NewtypeStruct { name: "UserId" }, Token::Str("3"), Token::StructEnd, Token::SeqEnd, Token::Str("guild_id"), Token::NewtypeStruct { name: "GuildId" }, Token::Str("2"), 
Token::Str("id"), Token::NewtypeStruct { name: "ChannelId" }, Token::Str("4"), Token::Str("member_count"), Token::U8(8), Token::Str("removed_member_ids"), Token::Seq { len: Some(0) }, Token::SeqEnd, Token::StructEnd, ], ); } }
let member = Member { avatar: Some("guild avatar".to_owned()), communication_disabled_until: None, deaf: false, guild_id: GuildId::new(2).expect("non zero"), joined_at, mute: true, nick: Some("twilight".to_owned()), pending: false, premium_since: Some(premium_since), roles: Vec::new(), user: User { accent_color: None, avatar: None, banner: None, bot: false, discriminator: 1, email: None, flags: None, id: UserId::new(3).expect("non zero"), locale: None, mfa_enabled: None, name: "twilight".to_owned(), premium_type: None, public_flags: None, system: None, verified: None, }, };
assignment_statement
[]
Rust
conmon-rs/server/src/lib.rs
wgahnagl/conmon-rs
a3849a79c82b1992974f25677c9949b56dfbf263
use anyhow::{Context, Result}; use capnp_rpc::{rpc_twoparty_capnp::Side, twoparty, RpcSystem}; use conmon_common::conmon_capnp::conmon; use futures::{AsyncReadExt, FutureExt}; use getset::{Getters, MutGetters}; use log::{debug, info}; use nix::{ libc::_exit, unistd::{fork, ForkResult}, }; use std::{fs::File, io::Write, path::Path}; use tokio::{ fs, net::UnixListener, runtime, signal::unix::{signal, SignalKind}, sync::oneshot, task::{self, LocalSet}, }; use tokio_util::compat::TokioAsyncReadCompatExt; use twoparty::VatNetwork; mod config; mod init; mod rpc; #[derive(Debug, Default, Getters, MutGetters)] pub struct Server { #[doc = "The main conmon configuration."] #[getset(get, get_mut)] config: config::Config, } impl Server { pub fn new() -> Result<Self> { let server = Self::default(); server.init_logging().context("set log verbosity")?; server.config().validate().context("validate config")?; server.init_self()?; Ok(server) } pub fn start(self) -> Result<()> { match unsafe { fork()? } { ForkResult::Parent { child, .. 
} => { if let Some(path) = &self.config().conmon_pidfile() { let child_str = format!("{}", child); File::create(path)?.write_all(child_str.as_bytes())?; } unsafe { _exit(0) }; } ForkResult::Child => (), } let rt = runtime::Builder::new_current_thread().enable_io().build()?; rt.block_on(self.spawn_tasks())?; Ok(()) } fn init_self(&self) -> Result<()> { init::unset_locale(); init::set_oom("-1000")?; Ok(()) } fn init_logging(&self) -> Result<()> { if let Some(level) = self.config().log_level().to_level() { simple_logger::init_with_level(level).context("init logger")?; info!("Set log level to {}", level); } Ok(()) } async fn spawn_tasks(self) -> Result<()> { let (shutdown_tx, shutdown_rx) = oneshot::channel(); let socket = self.config().socket().to_path_buf(); tokio::spawn(Self::start_sigterm_handler(socket, shutdown_tx)); task::spawn_blocking(move || { let rt = runtime::Handle::current(); rt.block_on(async { LocalSet::new() .run_until(self.start_backend(shutdown_rx)) .await }) }) .await? } async fn start_sigterm_handler<T: AsRef<Path>>( socket: T, shutdown_tx: oneshot::Sender<()>, ) -> Result<()> { let mut sigterm = signal(SignalKind::terminate())?; let mut sigint = signal(SignalKind::interrupt())?; tokio::select! { _ = sigterm.recv() => { info!("Received SIGTERM"); } _ = sigint.recv() => { info!("Received SIGINT"); } }; let _ = shutdown_tx.send(()); debug!("Removing socket file {}", socket.as_ref().display()); fs::remove_file(socket) .await .context("remove existing socket file")?; Ok(()) } async fn start_backend(self, mut shutdown_rx: oneshot::Receiver<()>) -> Result<()> { let listener = UnixListener::bind(&self.config().socket()).context("bind server socket")?; let client: conmon::Client = capnp_rpc::new_client(self); loop { let stream = tokio::select! 
{ _ = &mut shutdown_rx => { return Ok(()) } stream = listener.accept() => { stream?.0 }, }; let (reader, writer) = TokioAsyncReadCompatExt::compat(stream).split(); let network = Box::new(VatNetwork::new( reader, writer, Side::Server, Default::default(), )); let rpc_system = RpcSystem::new(network, Some(client.clone().client)); task::spawn_local(Box::pin(rpc_system.map(|_| ()))); } } }
use anyhow::{Context, Result}; use capnp_rpc::{rpc_twoparty_capnp::Side, twoparty, RpcSystem}; use conmon_common::conmon_capnp::conmon; use futures::{AsyncReadExt, FutureExt}; use getset::{Getters, MutGetters}; use log::{debug, info}; use nix::{ libc::_exit, unistd::{fork, ForkResult}, }; use std::{fs::File, io::Write, path::Path}; use tokio::{ fs, net::UnixListener, runtime, signal::unix::{signal, SignalKind}, sync::oneshot, task::{self, LocalSet}, }; use tokio_util::compat::TokioAsyncReadCompatExt; use twoparty::VatNetwork; mod config; mod init; mod rpc; #[derive(Debug, Default, Getters, MutGetters)] pub struct Server { #[doc = "The main conmon configuration."] #[getset(get, get_mut)] config: config::Config, } impl Server { pub fn new() -> Result<Self> { let server = Self::default(); server.init_logging().context("set log verbosity")?; server.config().validate().context("validate config")?; server.init_self()?; Ok(server) } pub fn start(self) -> Result<()> { match unsafe { fork()? } { ForkResult::Parent { child, .. 
} => { if let Some(path) = &self.config().conmon_pidfile() { let child_str = format!("{}", child); File::create(path)?.write_all(child_str.as_bytes())?; } unsafe { _exit(0) }; } ForkResult::Child => (), } let rt = runtime::Builder::new_current_thread().enable_io().build()?; rt.block_on(self.spawn_tasks())?; Ok(()) } fn init_self(&self) -> Result<()> { init::unset_locale(); init::set_oom("-1000")?; Ok(()) } fn init_logging(&self) -> Result<()> { if let Some(level) = self.config().log_level().to_level() { simple_logger::init_with_level(level).context("init logger")?; info!("Set log level to {}", level); } Ok(()) } async fn spawn_tasks(self) -> Result<()> { let (shutdown_tx, shutdown_rx) = oneshot::channel(); let socket = self.config().socket().to_path_buf(); tokio::spawn(Self::start_sigterm_handler(socket, shutdown_tx)); task::spawn_blocking(move || { let rt = runtime::Handle::current(); rt.block_on(async { LocalSet::new() .run_until(self.start_backend(shutdown_rx)) .await }) }) .await? } async fn start_sigterm_handler<T: AsRef<Path>>( socket: T, shutdown_tx: oneshot::Sender<()>, ) -> Result<()> { let mut sigterm = signal(SignalKind::terminate())?; let mut sigint = signal(SignalKind::interrupt())?; tokio::select! { _ = sigterm.recv() => { info!("Received SIGTERM"); } _ = sigint.recv() => { info!("Received SIGINT"); } }; let _ = shutdown_tx.send(()); debug!("Removing socket file {}", socket.as_ref().display()); fs::remove_file(socket) .await .context("remove existing socket file")?; Ok(()) } async fn start_backend(self, mut shutdown_rx: oneshot::Receiver<()>) -> Result<()> { let listener = UnixListener::bind(&self.config().socket()).context("bind server socket")?; let client: conmon::Client = capnp_rpc::new_client(self); loop { let stream = tokio::select! { _ = &mut shutdown_rx => { return Ok(()) } stream = listener.accept() => { stream?.0 }, }; let (reader, writer) = TokioAsyncReadCompatExt::compat(stream).split(); let network = Bo
}
x::new(VatNetwork::new( reader, writer, Side::Server, Default::default(), )); let rpc_system = RpcSystem::new(network, Some(client.clone().client)); task::spawn_local(Box::pin(rpc_system.map(|_| ()))); } }
function_block-function_prefixed
[ { "content": "fn main() -> Result<()> {\n\n Server::new()\n\n .context(\"create server\")?\n\n .start()\n\n .context(\"start server\")\n\n}\n", "file_path": "conmon-rs/server/src/main.rs", "rank": 0, "score": 148350.74080412355 }, { "content": "/// Helper to adjust th...
Rust
core/src/mbc/mbc5.rs
LukasKalbertodt/mahboi
61990b4ccab688a62172ef7836ea15b60f1cc942
use crate::{ log::*, cartridge::{RamSize, RomSize}, primitives::{Byte, Word}, }; use super::Mbc; pub(crate) struct Mbc5 { rom: Box<[Byte]>, ram: Box<[Byte]>, rom_bank: u16, ram_bank: u8, ram_enabled: bool, } impl Mbc5 { pub(crate) fn new(data: &[u8], rom_size: RomSize, ram_size: RamSize) -> Self { assert!(rom_size <= RomSize::Banks512, "More than 128 banks, but only MBC5!"); assert!( rom_size.len() == data.len(), "Length of cartridge doesn't match length specified in ROM size header", ); assert!( [RamSize::None, RamSize::Kb8, RamSize::Kb32, RamSize::Kb128].contains(&ram_size), "Illegal ram size {:?} for MBC5", ram_size, ); let rom: Vec<_> = data.iter().cloned().map(Byte::new).collect(); let ram = vec![Byte::zero(); ram_size.len()]; Self { rom: rom.into_boxed_slice(), ram: ram.into_boxed_slice(), rom_bank: 0, ram_bank: 0, ram_enabled: false, } } } impl Mbc for Mbc5 { fn load_rom_byte(&self, addr: Word) -> Byte { match addr.get() { 0x0000..=0x3FFF => self.rom[addr.get() as usize], 0x4000..=0x7FFF => { let bank_offset = self.rom_bank as usize * 0x4000; let relative_addr = addr.get() as usize - 0x4000; self.rom.get(bank_offset + relative_addr) .cloned() .unwrap_or(Byte::new(0xFF)) } _ => unreachable!(), } } fn store_rom_byte(&mut self, addr: Word, byte: Byte) { match addr.get() { 0x0000..=0x1FFF => self.ram_enabled = byte.get() & 0x0F == 0x0A, 0x2000..=0x2FFF => { self.rom_bank = (self.rom_bank & 0xFF00) | byte.get() as u16; } 0x3000..=0x3FFF => { self.rom_bank = (self.rom_bank & 0xFF) | (byte.get() as u16 & 1); } 0x4000..=0x5FFF => { self.ram_bank = byte.get() & 0x0F; } 0x6000..=0x7FFF => {} _ => unreachable!(), } } fn load_ram_byte(&self, addr: Word) -> Byte { if !self.ram_enabled { return Byte::new(0xFF); } self.ram.get(self.ram_bank as usize * 0x2000 + addr.get() as usize) .cloned() .unwrap_or(Byte::new(0xFF)) } fn store_ram_byte(&mut self, addr: Word, byte: Byte) { if !self.ram_enabled { return; } let idx = self.ram_bank as usize * 0x2000 + addr.get() as usize; if 
idx < self.ram.len() { self.ram[idx] = byte; } else { warn!( "[mbc5] write outside of valid RAM (bank {}, address {})", self.ram_bank, addr, ); } } }
use crate::{ log::*, cartridge::{RamSize, RomSize}, primitives::{Byte, Word}, }; use super::Mbc; pub(crate) struct Mbc5 { rom: Box<[Byte]>, ram: Box<[Byte]>, rom_bank: u16, ram_bank: u8, ram_enabled: bool, } impl Mbc5 { pub(crate) fn new(data: &[u8], rom_size: RomSize, ram_size: RamSize) -> Self { assert!(rom_size <= RomSize::Banks512, "More than 128 banks, but only MBC5!"); assert!( rom_size.len() == data.len(), "Length of cartridge doesn't match length specified in ROM size header", ); assert!( [RamSize::None, RamSize::Kb8, RamSize::Kb32, RamSize::Kb128].contains(&ram_size), "Illegal ram size {:?} for MBC5", ram_size, ); let rom: Vec<_> = data.iter().cloned().map(Byte::new).collect(); let ram = vec![Byte::zero(); ram_size.len()]; Self { rom: rom.into_boxed_slice(), ram: ram.into_boxed_slice(), rom_bank: 0, ram_bank: 0, ram_enabled: false, } } } impl Mbc for Mbc5 { fn load_rom_byte(&self, addr: Word) -> Byte { match addr.get() { 0x0000..=0x3FFF => self.rom[addr.get() as usize], 0x4000..=0x7FFF => { let bank_offset = self.rom_bank as usize * 0x4000; let relative_addr = addr.get() as usize - 0x4000; self.rom.get(bank_offset + relative_addr) .cloned() .unwrap_or(Byte::new(0xFF)) } _ => unreachable!(), } } fn store_rom_byte(&mut self, addr: Word, byte: Byte) { match addr.get() { 0x0000..=0x1FFF => self.ram_enabled = byte.get() & 0x0F == 0x0A, 0x2000..=0x2FFF => { self.rom_bank = (self.rom_bank & 0xFF00) | byte.get() as u16; } 0x3000..=0x3FFF => { self.rom_bank = (self.rom_bank & 0xFF) | (byte.get() as u16 & 1); } 0x4000..=0x5FFF => { self.ram_bank = byte.get() & 0x0F; } 0x6000..=0x7FFF => {} _ => unreachable!(), } } f
fn store_ram_byte(&mut self, addr: Word, byte: Byte) { if !self.ram_enabled { return; } let idx = self.ram_bank as usize * 0x2000 + addr.get() as usize; if idx < self.ram.len() { self.ram[idx] = byte; } else { warn!( "[mbc5] write outside of valid RAM (bank {}, address {})", self.ram_bank, addr, ); } } }
n load_ram_byte(&self, addr: Word) -> Byte { if !self.ram_enabled { return Byte::new(0xFF); } self.ram.get(self.ram_bank as usize * 0x2000 + addr.get() as usize) .cloned() .unwrap_or(Byte::new(0xFF)) }
function_block-function_prefixed
[ { "content": "/// Creates a comment string for the given instruction.\n\n///\n\n/// The comment can hold any potentially useful informtion.\n\nfn comment_for(instr: &DecodedInstr, addr: Word) -> String {\n\n fn comment_sep(s: &mut String) {\n\n if !s.is_empty() {\n\n *s += \", \";\n\n ...
Rust
cli/src/main.rs
nical/etagere
1a38a0235070e56a7dd6d21e22cbf7230d26cea8
extern crate etagere; #[macro_use] extern crate serde; use etagere::*; use etagere::euclid::size2; use clap::*; use std::io::prelude::*; use std::fs::{File, OpenOptions}; #[derive(Serialize, Deserialize)] struct Session { atlas: BucketedAtlasAllocator, names: std::collections::HashMap<String, Allocation>, next_id: u32, } fn main() { let matches = App::new("Étagère command-line interface") .version("0.1") .author("Nicolas Silva <nical@fastmail.com>") .about("Dynamic texture atlas allocator.") .subcommand( SubCommand::with_name("init") .about("Initialize the atlas") .arg(Arg::with_name("WIDTH") .help("Rectangle width.") .value_name("WIDTH") .takes_value(true) .required(true) ) .arg(Arg::with_name("HEIGHT") .help("Rectangle height.") .value_name("HEIGHT") .takes_value(true) .required(true) ) .arg(Arg::with_name("COLUMNS") .long("columns") .help("Split the allocator into multiple columns.") .value_name("COLUMNS") .takes_value(true) .required(false) ) .arg(Arg::with_name("VERTICAL_SHELVES") .long("vertical-shelves") .help("Use vertical instead of horizontal shelves.") .value_name("VERTICAL_SHELVES") .takes_value(false) .required(false) ) .arg(Arg::with_name("ALIGN_X") .long("align-x") .help("Round up the width of the allocated rectangle to a multiple of the provided value.") .value_name("ALIGN_X") .takes_value(true) .required(false) ) .arg(Arg::with_name("ALIGN_Y") .long("align-y") .help("Round up the width of the allocated rectangle to a multiple of the provided value.") .value_name("ALIGN_Y") .takes_value(true) .required(false) ) .arg(Arg::with_name("ATLAS") .short("a") .long("atlas") .help("Sets the output atlas file to use") .value_name("FILE") .takes_value(true) .required(false) ) .arg(Arg::with_name("SVG_OUTPUT") .long("svg") .help("Dump the atlas in an SVG file") .value_name("SVG_OUTPUT") .takes_value(true) .required(false) ) ) .subcommand( SubCommand::with_name("allocate") .about("Allocate a rectangle") .arg(Arg::with_name("WIDTH") .help("Rectangle width.") 
.value_name("WIDTH") .takes_value(true) .required(true) ) .arg(Arg::with_name("HEIGHT") .help("Rectangle height.") .value_name("HEIGHT") .takes_value(true) .required(true) ) .arg(Arg::with_name("NAME") .short("-n") .long("name") .help("Set a name to identify the rectangle.") .value_name("NAME") .takes_value(true) .required(false) ) .arg(Arg::with_name("ATLAS") .short("a") .long("atlas") .help("Sets the output atlas file to use") .value_name("FILE") .takes_value(true) .required(false) ) .arg(Arg::with_name("SVG_OUTPUT") .long("svg") .help("Dump the atlas in an SVG file") .value_name("SVG_OUTPUT") .takes_value(true) .required(false) ) ) .subcommand( SubCommand::with_name("deallocate") .about("De-allocate a rectangle") .arg(Arg::with_name("NAME") .help("Name of the rectangle to remove.") .value_name("NAME") .takes_value(true) .required(true) ) .arg(Arg::with_name("ATLAS") .short("a") .long("atlas") .help("Sets the output file to use") .value_name("FILE") .takes_value(true) .required(false) ) .arg(Arg::with_name("SVG_OUTPUT") .long("svg") .help("Dump the atlas in an SVG file") .value_name("SVG_OUTPUT") .takes_value(true) .required(false) ) ) .subcommand( SubCommand::with_name("svg") .about("Dump the atlas as SVG") .arg(Arg::with_name("ATLAS") .short("-a") .long("atlas") .help("Input texture atlas file.") .value_name("ATLAS") .takes_value(true) ) .arg(Arg::with_name("SVG_OUTPUT") .help("Output SVG file to use") .value_name("FILE") .takes_value(true) .required(false) ) ) .subcommand( SubCommand::with_name("list") .about("List the allocations and free rectangles in the atlas") .arg(Arg::with_name("ATLAS") .short("-a") .long("atlas") .help("Input texture atlas file.") .value_name("ATLAS") .takes_value(true) ) ) .get_matches(); if let Some(cmd) = matches.subcommand_matches("init") { init(&cmd); } else if let Some(cmd) = matches.subcommand_matches("allocate") { allocate(&cmd); } else if let Some(cmd) = matches.subcommand_matches("deallocate") { deallocate(&cmd); } else if 
let Some(cmd) = matches.subcommand_matches("svg") { svg(&cmd); } else if let Some(cmd) = matches.subcommand_matches("list") { list(&cmd); } } fn read_atlas(args: &ArgMatches) -> Session { let atlas_file_name = args.value_of("ATLAS").unwrap_or("atlas.ron"); let file = OpenOptions::new() .read(true) .write(true) .open(atlas_file_name) .expect( "Failed to open the atlas file." ); ron::de::from_reader(file).expect("Failed to parse the atlas") } fn write_atlas(session: &Session, args: &ArgMatches) { let serialized: String = ron::ser::to_string_pretty( &session, ron::ser::PrettyConfig::default(), ).unwrap(); let atlas_file_name = args.value_of("ATLAS").unwrap_or("atlas.ron"); let mut atlas_file = std::fs::File::create(atlas_file_name).expect( "Failed to open the atlas file." ); atlas_file.write_all(serialized.as_bytes()).expect( "Failed to write into the atlas file." ); } fn init(args: &ArgMatches) { let w = args.value_of("WIDTH").expect("Missing width.").parse::<i32>().unwrap(); let h = args.value_of("HEIGHT").expect("Missing height.").parse::<i32>().unwrap(); let default_options = etagere::DEFAULT_OPTIONS; let options = etagere::AllocatorOptions { alignment: size2( args.value_of("ALIGN_X") .map(|s| s.parse::<i32>().unwrap()) .unwrap_or(default_options.alignment.width), args.value_of("ALIGN_Y") .map(|s| s.parse::<i32>().unwrap()) .unwrap_or(default_options.alignment.width), ), vertical_shelves: args.is_present("VERTICAL_SHELVES"), num_columns: args.value_of("ALIGN_X") .map(|s| s.parse::<i32>().unwrap()) .unwrap_or(default_options.num_columns), }; let session = Session { atlas: BucketedAtlasAllocator::with_options(size2(w, h), &options), names: std::collections::HashMap::default(), next_id: 0, }; write_atlas(&session, &args); if args.is_present("SVG_OUTPUT") { svg(args); } } fn allocate(args: &ArgMatches) { let mut session = read_atlas(args); let w = args.value_of("WIDTH").expect("Missing width.").parse::<i32>().unwrap(); let h = args.value_of("HEIGHT").expect("Missing 
height.").parse::<i32>().unwrap(); let alloc = session.atlas.allocate(size2(w, h)); if alloc.is_none() { eprintln!("Allocation of size {}x{} failed.", w, h); return; } let alloc = alloc.unwrap(); let name = args.value_of("NAME").map(|name| name.to_string()).unwrap_or_else(|| { session.next_id += 1; format!("#{}", session.next_id) }); let r = alloc.rectangle; println!( "Allocated rectangle {} of size {}x{} at origin [{}, {}]", name, r.size().width, r.size().height, r.min.x, r.min.y, ); if let Some(old) = session.names.insert(name.clone(), alloc) { println!("Previous allocation with name {:?} was deallocated.", name); session.atlas.deallocate(old.id); } write_atlas(&session, args); if args.is_present("SVG_OUTPUT") { svg(args); } } fn deallocate(args: &ArgMatches) { let mut session = read_atlas(args); let name = args.value_of("NAME").expect("Need a rectangle name"); let id = session.names.remove(name).unwrap().id; session.atlas.deallocate(id); write_atlas(&session, args); if args.is_present("SVG_OUTPUT") { svg(args); } } fn svg(args: &ArgMatches) { let session = read_atlas(args); let svg_file_name = args.value_of("SVG_OUTPUT").unwrap_or("atlas.svg"); let mut svg_file = File::create(svg_file_name).expect( "Failed to open the SVG file." ); session.atlas.dump_svg(&mut svg_file).expect( "Failed to write into the SVG file." ); } fn list(args: &ArgMatches) { let session = read_atlas(args); println!("# Allocated rectangles"); for (name, &alloc) in &session.names { let rect = alloc.rectangle; println!( " - {}: size {}x{} at origin [{}, {}]", name, rect.size().width, rect.size().height, rect.min.x, rect.min.y ); break; } }
extern crate etagere; #[macro_use] extern crate serde; use etagere::*; use etagere::euclid::size2; use clap::*; use std::io::prelude::*; use std::fs::{File, OpenOptions}; #[derive(Serialize, Deserialize)] struct Session { atlas: BucketedAtlasAllocator, names: std::collections::HashMap<String, Allocation>, next_id: u32, } fn main() { let matches = App::new("Étagère command-line interface") .version("0.1") .author("Nicolas Silva <nical@fastmail.com>") .about("Dynamic texture atlas allocator.") .subcommand( SubCommand::with_name("init") .about("Initialize the atlas") .arg(Arg::with_name("WIDTH") .help("Rectangle width.") .value_name("WIDTH") .takes_value(true) .required(true) ) .arg(Arg::with_name("HEIGHT") .help("Rectangle height.") .value_name("HEIGHT") .takes_value(true) .required(true) ) .arg(Arg::with_name("COLUMNS") .long("columns") .help("Split the allocator into multiple columns.") .value_name("COLUMNS") .takes_value(true) .required(false) ) .arg(Arg::with_name("VERTICAL_SHELVES") .long("vertical-shelves") .help("Use vertical instead of horizontal shelves.") .value_name("VERTICAL_SHELVES") .takes_value(false) .required(false) ) .arg(Arg::with_name("ALIGN_X") .long("align-x") .help("Round up the width of the allocated rectangle to a multiple of the provided value.") .value_name("ALIGN_X") .takes_value(true) .required(false) ) .arg(Arg::with_name("ALIGN_Y") .long("align-y") .help("Round up the width of the allocated rectangle to a multiple of the provided value.") .value_name("ALIGN_Y") .takes_value(true) .required(false) ) .arg(Arg::with_name("ATLAS") .short("a") .long("atlas") .help("Sets the output atlas file to use") .value_name("FILE") .takes_value(true) .required(false) ) .arg(Arg::with_name("SVG_OUTPUT") .long("svg") .help("Dump the atlas in an SVG file") .value_name("SVG_OUTPUT") .takes_value(true) .required(false) ) ) .subcommand( SubCommand::with_name("allocate") .about("Allocate a rectangle") .arg(Arg::with_name("WIDTH") .help("Rectangle width.") 
.value_name("WIDTH") .takes_value(true) .required(true) ) .arg(Arg::with_name("HEIGHT") .help("Rectangle height.") .value_name("HEIGHT") .takes_value(true) .required(true) ) .arg(Arg::with_name("NAME") .short("-n") .long("name") .help("Set a name to identify the rectangle.") .value_name("NAME") .takes_value(true) .required(false) ) .arg(Arg::with_name("ATLAS") .short("a") .long("atlas") .help("Sets the output atlas file to use") .value_name("FILE") .takes_value(true) .required(false) ) .arg(Arg::with_name("SVG_OUTPUT") .long("svg") .help("Dump the atlas in an SVG file") .value_name("SVG_OUTPUT") .takes_value(true) .required(false) ) ) .subcommand( SubCommand::with_name("deallocate") .about("De-allocate a rectangle") .arg(Arg::with_name("NAME") .help("Name of the rectangle to remove.") .value_name("NAME") .takes_value(true) .required(true) ) .arg(Arg::with_name("ATLAS") .short("a") .long("atlas") .help("Sets the output file to use") .value_name("FILE") .takes_value(true) .required(false) ) .arg(Arg::with_name("SVG_OUTPUT") .long("svg") .help("Dump the atlas in an SVG file") .value_name("SVG_OUTPUT") .takes_value(true) .required(false) ) ) .subcommand( SubCommand::with_name("svg") .about("Dump the atlas as SVG") .arg(Arg::with_name("ATLAS") .short("-a") .long("atlas") .help("Input texture atlas file.") .value_name("ATLAS") .takes_value(true) ) .arg(Arg::with_name("SVG_OUTPUT") .help("Output SVG file to use") .value_name("FILE") .takes_value(true) .required(false) ) ) .subcommand( SubCommand::with_name("list") .about("List the allocations and free rectangles in the atlas") .arg(Arg::with_name("ATLAS") .short("-a") .long("atlas") .help("Input texture atlas file.") .value_name("ATLAS") .takes_value(true) ) ) .get_matches(); if let Some(cmd) = matches.subcommand_matches("init") { init(&cmd); } else if let Some(cmd) = matches.subcommand_matches("allocate") { allocate(&cmd); } else if let Some(cmd) = matches.subcommand_matches("deallocate") { deallocate(&cmd); } else if 
let Some(cmd) = matches.subcommand_matches("svg") { svg(&cmd); } else if let Some(cmd) = matches.subcommand_matches("list") { list(&cmd); } } fn read_atlas(args: &ArgMatches) -> Session { let atlas_file_name = args.value_of("ATLAS").unwrap_or("atlas.ron"); let file = OpenOptions::new() .read(true) .write(true) .open(atlas_file_name) .expect( "Failed to open the atlas file." ); ron::de::from_reader(file).expect("Failed to parse the atlas") } fn write_atlas(session: &Session, args: &ArgMatches) { let serialized: String = ron::ser::to_string_pretty( &session, ron::ser::PrettyConfig::default(), ).unwrap(); let atlas_file_name = args.value_of("ATLAS").unwrap_or("atlas.ron"); let mut atlas_file = std::fs::File::create(atlas_file_name).expect( "Failed to open the atlas file." ); atlas_file.write_all(serialized.as_bytes()).expect( "Failed to write into the atlas file." ); } fn init(args: &ArgMatches) { let w = args.value_of("WIDTH").expect("Missing width.").parse::<i32>().unwrap(); let h = args.value_of("HEIGHT").expect("Missing height.").parse::<i32>().unwrap(); let default_options = etagere::DEFAULT_OPTIONS; let options = etagere::AllocatorOptions { alignment:
, vertical_shelves: args.is_present("VERTICAL_SHELVES"), num_columns: args.value_of("ALIGN_X") .map(|s| s.parse::<i32>().unwrap()) .unwrap_or(default_options.num_columns), }; let session = Session { atlas: BucketedAtlasAllocator::with_options(size2(w, h), &options), names: std::collections::HashMap::default(), next_id: 0, }; write_atlas(&session, &args); if args.is_present("SVG_OUTPUT") { svg(args); } } fn allocate(args: &ArgMatches) { let mut session = read_atlas(args); let w = args.value_of("WIDTH").expect("Missing width.").parse::<i32>().unwrap(); let h = args.value_of("HEIGHT").expect("Missing height.").parse::<i32>().unwrap(); let alloc = session.atlas.allocate(size2(w, h)); if alloc.is_none() { eprintln!("Allocation of size {}x{} failed.", w, h); return; } let alloc = alloc.unwrap(); let name = args.value_of("NAME").map(|name| name.to_string()).unwrap_or_else(|| { session.next_id += 1; format!("#{}", session.next_id) }); let r = alloc.rectangle; println!( "Allocated rectangle {} of size {}x{} at origin [{}, {}]", name, r.size().width, r.size().height, r.min.x, r.min.y, ); if let Some(old) = session.names.insert(name.clone(), alloc) { println!("Previous allocation with name {:?} was deallocated.", name); session.atlas.deallocate(old.id); } write_atlas(&session, args); if args.is_present("SVG_OUTPUT") { svg(args); } } fn deallocate(args: &ArgMatches) { let mut session = read_atlas(args); let name = args.value_of("NAME").expect("Need a rectangle name"); let id = session.names.remove(name).unwrap().id; session.atlas.deallocate(id); write_atlas(&session, args); if args.is_present("SVG_OUTPUT") { svg(args); } } fn svg(args: &ArgMatches) { let session = read_atlas(args); let svg_file_name = args.value_of("SVG_OUTPUT").unwrap_or("atlas.svg"); let mut svg_file = File::create(svg_file_name).expect( "Failed to open the SVG file." ); session.atlas.dump_svg(&mut svg_file).expect( "Failed to write into the SVG file." 
); } fn list(args: &ArgMatches) { let session = read_atlas(args); println!("# Allocated rectangles"); for (name, &alloc) in &session.names { let rect = alloc.rectangle; println!( " - {}: size {}x{} at origin [{}, {}]", name, rect.size().width, rect.size().height, rect.min.x, rect.min.y ); break; } }
size2( args.value_of("ALIGN_X") .map(|s| s.parse::<i32>().unwrap()) .unwrap_or(default_options.alignment.width), args.value_of("ALIGN_Y") .map(|s| s.parse::<i32>().unwrap()) .unwrap_or(default_options.alignment.width), )
call_expression
[ { "content": "fn shelf_height(mut size: i32) -> i32 {\n\n let alignment = match size {\n\n 0 ..= 31 => 8,\n\n 32 ..= 127 => 16,\n\n 128 ..= 511 => 32,\n\n _ => 64,\n\n };\n\n\n\n let rem = size % alignment;\n\n if rem > 0 {\n\n size += alignment - rem;\n\n }\n\n...
Rust
cprover_bindings/src/irep/serialize.rs
celinval/kani-dev
677bacdb9ad7ce74dd20bbf6ba3cc6b87a6d7d16
use crate::irep::{Irep, IrepId, Symbol, SymbolTable}; use crate::InternedString; use serde::ser::{SerializeMap, Serializer}; use serde::Serialize; impl Serialize for Irep { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut obj = serializer.serialize_map(None)?; obj.serialize_entry("id", &self.id)?; if !self.sub.is_empty() { obj.serialize_entry("sub", &self.sub)?; } if !self.named_sub.is_empty() { obj.serialize_entry("namedSub", &self.named_sub)?; } obj.end() } } impl Serialize for IrepId { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.to_string().serialize(serializer) } } impl Serialize for SymbolTable { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut obj = serializer.serialize_map(None)?; obj.serialize_entry("symbolTable", &self.symbol_table)?; obj.end() } } impl Serialize for crate::goto_program::SymbolTable { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut obj = serializer.serialize_map(None)?; obj.serialize_entry("symbolTable", &StreamingSymbols(self))?; obj.end() } } struct StreamingSymbols<'a>(&'a crate::goto_program::SymbolTable); impl<'a> Serialize for StreamingSymbols<'a> { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mm = self.0.machine_model(); let mut obj = serializer.serialize_map(None)?; for (k, v) in self.0.iter() { obj.serialize_entry(k, &v.to_irep(mm))?; } obj.end() } } impl Serialize for InternedString { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.to_string().serialize(serializer) } } struct InternedStringVisitor; impl<'de> serde::Deserialize<'de> for InternedString { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { deserializer.deserialize_str(InternedStringVisitor) } } impl<'de> serde::de::Visitor<'de> for 
InternedStringVisitor { type Value = InternedString; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { formatter.write_str("a String like thing") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(v.into()) } } impl Serialize for Symbol { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut obj = serializer.serialize_map(None)?; obj.serialize_entry(&IrepId::Type.to_string(), &self.typ)?; obj.serialize_entry(&IrepId::Value.to_string(), &self.value)?; obj.serialize_entry("location", &self.location)?; obj.serialize_entry(&IrepId::Name.to_string(), &self.name)?; obj.serialize_entry(&IrepId::Module.to_string(), &self.module)?; obj.serialize_entry("baseName", &self.base_name)?; obj.serialize_entry("prettyName", &self.pretty_name)?; obj.serialize_entry(&IrepId::Mode.to_string(), &self.mode)?; obj.serialize_entry("isType", &self.is_type)?; obj.serialize_entry("isMacro", &self.is_macro)?; obj.serialize_entry("isExported", &self.is_exported)?; obj.serialize_entry("isInput", &self.is_input)?; obj.serialize_entry("isOutput", &self.is_output)?; obj.serialize_entry("isStateVar", &self.is_state_var)?; obj.serialize_entry("isProperty", &self.is_property)?; obj.serialize_entry("isStaticLifetime", &self.is_static_lifetime)?; obj.serialize_entry("isThreadLocal", &self.is_thread_local)?; obj.serialize_entry("isLvalue", &self.is_lvalue)?; obj.serialize_entry("isFileLocal", &self.is_file_local)?; obj.serialize_entry("isExtern", &self.is_extern)?; obj.serialize_entry("isVolatile", &self.is_volatile)?; obj.serialize_entry("isParameter", &self.is_parameter)?; obj.serialize_entry("isAuxiliary", &self.is_auxiliary)?; obj.serialize_entry("isWeak", &self.is_weak)?; obj.end() } } #[cfg(test)] mod test { use super::*; use serde_test::{assert_ser_tokens, Token}; #[test] fn serialize_irep() { let irep = Irep::empty(); assert_ser_tokens( &irep, &[Token::Map { len: None }, 
Token::String("id"), Token::String("empty"), Token::MapEnd], ); } #[test] fn serialize_sym_table() { let mut sym_table = SymbolTable::new(); let symbol = Symbol { typ: Irep::empty(), value: Irep::empty(), location: Irep::empty(), name: "my_name".into(), module: "".into(), base_name: "".into(), pretty_name: "".into(), mode: "".into(), is_type: false, is_macro: false, is_exported: false, is_input: false, is_output: false, is_state_var: false, is_property: false, is_static_lifetime: false, is_thread_local: false, is_lvalue: false, is_file_local: false, is_extern: false, is_volatile: false, is_parameter: false, is_auxiliary: false, is_weak: false, }; sym_table.insert(symbol.clone()); assert_ser_tokens( &sym_table, &[ Token::Map { len: None }, Token::String("symbolTable"), Token::Map { len: Some(1) }, Token::String("my_name"), Token::Map { len: None }, Token::String("type"), Token::Map { len: None }, Token::String("id"), Token::String("empty"), Token::MapEnd, Token::String("value"), Token::Map { len: None }, Token::String("id"), Token::String("empty"), Token::MapEnd, Token::String("location"), Token::Map { len: None }, Token::String("id"), Token::String("empty"), Token::MapEnd, Token::String("name"), Token::String("my_name"), Token::String("module"), Token::String(""), Token::String("baseName"), Token::String(""), Token::String("prettyName"), Token::String(""), Token::String("mode"), Token::String(""), Token::String("isType"), Token::Bool(false), Token::String("isMacro"), Token::Bool(false), Token::String("isExported"), Token::Bool(false), Token::String("isInput"), Token::Bool(false), Token::String("isOutput"), Token::Bool(false), Token::String("isStateVar"), Token::Bool(false), Token::String("isProperty"), Token::Bool(false), Token::String("isStaticLifetime"), Token::Bool(false), Token::String("isThreadLocal"), Token::Bool(false), Token::String("isLvalue"), Token::Bool(false), Token::String("isFileLocal"), Token::Bool(false), Token::String("isExtern"), 
Token::Bool(false), Token::String("isVolatile"), Token::Bool(false), Token::String("isParameter"), Token::Bool(false), Token::String("isAuxiliary"), Token::Bool(false), Token::String("isWeak"), Token::Bool(false), Token::MapEnd, Token::MapEnd, Token::MapEnd, ], ); } #[test] fn serialize_irep_sub() { let empty_irep = Irep::empty(); let one_irep = Irep::one(); let sub_irep = Irep::just_sub(vec![empty_irep.clone(), one_irep]); let top_irep = Irep::just_sub(vec![sub_irep, empty_irep]); assert_ser_tokens( &top_irep, &[ Token::Map { len: None }, Token::String("id"), Token::String(""), Token::String("sub"), Token::Seq { len: Some(2) }, Token::Map { len: None }, Token::String("id"), Token::String(""), Token::String("sub"), Token::Seq { len: Some(2) }, Token::Map { len: None }, Token::String("id"), Token::String("empty"), Token::MapEnd, Token::Map { len: None }, Token::String("id"), Token::String("1"), Token::MapEnd, Token::SeqEnd, Token::MapEnd, Token::Map { len: None }, Token::String("id"), Token::String("empty"), Token::MapEnd, Token::SeqEnd, Token::MapEnd, ], ); } }
use crate::irep::{Irep, IrepId, Symbol, SymbolTable}; use crate::InternedString; use serde::ser::{SerializeMap, Serializer}; use serde::Serialize; impl Serialize for Irep { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut obj = serializer.serialize_map(None)?; obj.serialize_entry("id", &self.id)?; if !self.sub.is_empty() { obj.serialize_entry("sub", &self.sub)?; } if !self.named_sub.is_empty() { obj.serialize_entry("namedSub", &self.named_sub)?; } obj.end() } } impl Serialize for IrepId { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.to_string().serialize(serializer) } } impl Serialize for SymbolTable { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut obj = serializer.serialize_map(None)?; obj.serialize_entry("symbolTable", &self.symbol_table)?; obj.end() } } impl Serialize for crate::goto_program::SymbolTable { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut obj = serializer.serialize_map(None)?; obj.serialize_entry("symbolTable", &StreamingSymbols(self))?; obj.end() } } struct StreamingSymbols<'a>(&'a crate::goto_program::SymbolTable); impl<'a> Serialize for StreamingSymbols<'a> { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mm = self.0.machine_model(); let mut obj = serializer.serialize_map(None)?; for (k, v) in self.0.iter() { obj
alse, is_thread_local: false, is_lvalue: false, is_file_local: false, is_extern: false, is_volatile: false, is_parameter: false, is_auxiliary: false, is_weak: false, }; sym_table.insert(symbol.clone()); assert_ser_tokens( &sym_table, &[ Token::Map { len: None }, Token::String("symbolTable"), Token::Map { len: Some(1) }, Token::String("my_name"), Token::Map { len: None }, Token::String("type"), Token::Map { len: None }, Token::String("id"), Token::String("empty"), Token::MapEnd, Token::String("value"), Token::Map { len: None }, Token::String("id"), Token::String("empty"), Token::MapEnd, Token::String("location"), Token::Map { len: None }, Token::String("id"), Token::String("empty"), Token::MapEnd, Token::String("name"), Token::String("my_name"), Token::String("module"), Token::String(""), Token::String("baseName"), Token::String(""), Token::String("prettyName"), Token::String(""), Token::String("mode"), Token::String(""), Token::String("isType"), Token::Bool(false), Token::String("isMacro"), Token::Bool(false), Token::String("isExported"), Token::Bool(false), Token::String("isInput"), Token::Bool(false), Token::String("isOutput"), Token::Bool(false), Token::String("isStateVar"), Token::Bool(false), Token::String("isProperty"), Token::Bool(false), Token::String("isStaticLifetime"), Token::Bool(false), Token::String("isThreadLocal"), Token::Bool(false), Token::String("isLvalue"), Token::Bool(false), Token::String("isFileLocal"), Token::Bool(false), Token::String("isExtern"), Token::Bool(false), Token::String("isVolatile"), Token::Bool(false), Token::String("isParameter"), Token::Bool(false), Token::String("isAuxiliary"), Token::Bool(false), Token::String("isWeak"), Token::Bool(false), Token::MapEnd, Token::MapEnd, Token::MapEnd, ], ); } #[test] fn serialize_irep_sub() { let empty_irep = Irep::empty(); let one_irep = Irep::one(); let sub_irep = Irep::just_sub(vec![empty_irep.clone(), one_irep]); let top_irep = Irep::just_sub(vec![sub_irep, empty_irep]); 
assert_ser_tokens( &top_irep, &[ Token::Map { len: None }, Token::String("id"), Token::String(""), Token::String("sub"), Token::Seq { len: Some(2) }, Token::Map { len: None }, Token::String("id"), Token::String(""), Token::String("sub"), Token::Seq { len: Some(2) }, Token::Map { len: None }, Token::String("id"), Token::String("empty"), Token::MapEnd, Token::Map { len: None }, Token::String("id"), Token::String("1"), Token::MapEnd, Token::SeqEnd, Token::MapEnd, Token::Map { len: None }, Token::String("id"), Token::String("empty"), Token::MapEnd, Token::SeqEnd, Token::MapEnd, ], ); } }
.serialize_entry(k, &v.to_irep(mm))?; } obj.end() } } impl Serialize for InternedString { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.to_string().serialize(serializer) } } struct InternedStringVisitor; impl<'de> serde::Deserialize<'de> for InternedString { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { deserializer.deserialize_str(InternedStringVisitor) } } impl<'de> serde::de::Visitor<'de> for InternedStringVisitor { type Value = InternedString; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { formatter.write_str("a String like thing") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(v.into()) } } impl Serialize for Symbol { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut obj = serializer.serialize_map(None)?; obj.serialize_entry(&IrepId::Type.to_string(), &self.typ)?; obj.serialize_entry(&IrepId::Value.to_string(), &self.value)?; obj.serialize_entry("location", &self.location)?; obj.serialize_entry(&IrepId::Name.to_string(), &self.name)?; obj.serialize_entry(&IrepId::Module.to_string(), &self.module)?; obj.serialize_entry("baseName", &self.base_name)?; obj.serialize_entry("prettyName", &self.pretty_name)?; obj.serialize_entry(&IrepId::Mode.to_string(), &self.mode)?; obj.serialize_entry("isType", &self.is_type)?; obj.serialize_entry("isMacro", &self.is_macro)?; obj.serialize_entry("isExported", &self.is_exported)?; obj.serialize_entry("isInput", &self.is_input)?; obj.serialize_entry("isOutput", &self.is_output)?; obj.serialize_entry("isStateVar", &self.is_state_var)?; obj.serialize_entry("isProperty", &self.is_property)?; obj.serialize_entry("isStaticLifetime", &self.is_static_lifetime)?; obj.serialize_entry("isThreadLocal", &self.is_thread_local)?; obj.serialize_entry("isLvalue", &self.is_lvalue)?; obj.serialize_entry("isFileLocal", 
&self.is_file_local)?; obj.serialize_entry("isExtern", &self.is_extern)?; obj.serialize_entry("isVolatile", &self.is_volatile)?; obj.serialize_entry("isParameter", &self.is_parameter)?; obj.serialize_entry("isAuxiliary", &self.is_auxiliary)?; obj.serialize_entry("isWeak", &self.is_weak)?; obj.end() } } #[cfg(test)] mod test { use super::*; use serde_test::{assert_ser_tokens, Token}; #[test] fn serialize_irep() { let irep = Irep::empty(); assert_ser_tokens( &irep, &[Token::Map { len: None }, Token::String("id"), Token::String("empty"), Token::MapEnd], ); } #[test] fn serialize_sym_table() { let mut sym_table = SymbolTable::new(); let symbol = Symbol { typ: Irep::empty(), value: Irep::empty(), location: Irep::empty(), name: "my_name".into(), module: "".into(), base_name: "".into(), pretty_name: "".into(), mode: "".into(), is_type: false, is_macro: false, is_exported: false, is_input: false, is_output: false, is_state_var: false, is_property: false, is_static_lifetime: f
random
[ { "content": "/// Utility functions\n\nfn arguments_irep<'a>(arguments: impl Iterator<Item = &'a Expr>, mm: &MachineModel) -> Irep {\n\n Irep {\n\n id: IrepId::Arguments,\n\n sub: arguments.map(|x| x.to_irep(mm)).collect(),\n\n named_sub: linear_map![],\n\n }\n\n}\n", "file_path":...
Rust
xtask/src/coverage.rs
koutheir/selinux
a23b6acc513cfa4cf20baa6a12aafe00a2e04c7d
use std::ffi::OsStr; use std::fs::File; use std::path::{Path, PathBuf}; use std::{fs, process}; use log::{debug, info}; use crate::errors::{Error, Result}; use crate::utils::*; use crate::{Config, NIGHTLY_TOOLCHAIN}; #[derive(Debug, serde_derive::Deserialize)] struct CargoTestMessageProfile { test: bool, } #[derive(Debug, serde_derive::Deserialize)] struct CargoTestMessage { profile: CargoTestMessageProfile, filenames: Vec<PathBuf>, } pub(crate) fn coverage(config: &Config) -> Result<()> { let coverage_dir = config .coverage_dir .to_str() .expect("Path is not valid UTF-8"); let llvm_cov_common_args: [&str; 10] = [ "--Xdemangler", "rustfilt", "--ignore-filename-regex", r#"/\.cargo/registry/"#, "--ignore-filename-regex", r#"/rustc/"#, "--ignore-filename-regex", r#"/tests.rs$$"#, "--ignore-filename-regex", &format!("^{}/", coverage_dir), ]; let coverage_common_env: [(&str, &OsStr); 4] = [ ("RUST_BACKTRACE", OsStr::new("1")), ("CARGO_INCREMENTAL", OsStr::new("0")), ("RUSTFLAGS", OsStr::new("-Zinstrument-coverage")), ("RUSTDOCFLAGS", OsStr::new("-Zinstrument-coverage")), ]; let coverage_common_args: [&str; 6] = [ &format!("+{}", NIGHTLY_TOOLCHAIN), "test", "--workspace", "--tests", "--target-dir", coverage_dir, ]; rustfilt_version(config)?; let sys_root = sys_root_of_nightly_toolchain(config)?; let mut result = find_executable_file(&sys_root, "llvm-profdata"); if result.is_err() { info!("Installing component 'llvm-tools-preview'..."); let args = [ "--quiet", "component", "add", "--toolchain", NIGHTLY_TOOLCHAIN, "llvm-tools-preview", ]; rustup(config, &args)?; result = find_executable_file(&sys_root, "llvm-profdata"); } let llvm_profdata = result?; let llvm_cov = find_executable_file(&sys_root, "llvm-cov")?; fs::create_dir_all(&config.coverage_dir) .map_err(|r| Error::from_io_path("std::fs::create_dir_all", &config.coverage_dir, r))?; info!("Cleaning up old coverage files..."); let profraw_files = list_files(&config.coverage_dir, "profraw")?; 
profraw_files.into_iter().for_each(|p| { let _ignored = fs::remove_file(&p); }); let tests_paths = build_coverage_binaries(config, &coverage_common_env, &coverage_common_args)?; run_coverage_binaries(config, &coverage_common_env, &coverage_common_args)?; merge_coverage_profraw_files(config, &llvm_profdata)?; export_coverage_lcov(config, &llvm_cov, &llvm_cov_common_args, &tests_paths)?; export_coverage_html(config, &llvm_cov, &llvm_cov_common_args, &tests_paths) } fn rustfilt_version(config: &Config) -> Result<()> { let mut cmd = process::Command::new("rustfilt"); cmd.stdout(process::Stdio::null()).arg("--version"); let mut result = run_cmd(cmd, "rustfilt"); if result.is_err() { info!("Installing 'rustfilt'..."); cargo_command(config, "", &["--quiet", "install", "rustfilt"])?; let mut cmd = process::Command::new("rustfilt"); cmd.stdout(process::Stdio::null()).arg("--version"); result = run_cmd(cmd, "rustfilt"); } result } fn build_coverage_binaries( config: &Config, common_env: &[(&str, &OsStr)], common_args: &[&str], ) -> Result<Vec<PathBuf>> { info!("Building coverage binaries..."); let mut cmd = process::Command::new("cargo"); cmd.current_dir(&config.workspace_dir) .stdout(process::Stdio::piped()) .envs(common_env.iter().map(|(k, v)| (k, v))) .env("LLVM_PROFILE_FILE", "/dev/null") .args(common_args) .args(&["--no-run", "--message-format=json"]); debug!("Running: {:?}", cmd); let output = cmd .spawn() .map_err(|r| Error::from_io_path("std::process::Command::spawn", "cargo", r))? 
.wait_with_output() .map_err(|r| Error::from_io_path("std::process::Child::wait_with_output", "cargo", r))?; if output.status.success() { Ok(test_binaries_from_cargo_test_messages(&output.stdout)) } else { Err(Error::CommandFailed { name: "cargo" }) } } fn run_coverage_binaries( config: &Config, common_env: &[(&str, &OsStr)], common_args: &[&str], ) -> Result<()> { info!("Running coverage binaries..."); let mut cmd = process::Command::new("cargo"); cmd.current_dir(&config.workspace_dir) .envs(common_env.iter().map(|(k, v)| (k, v))) .env("LLVM_PROFILE_FILE", &config.coverage_dir.join("%m.profraw")) .args(common_args); run_cmd(cmd, "cargo") } fn merge_coverage_profraw_files(config: &Config, llvm_profdata: &Path) -> Result<()> { info!("Merging coverage data..."); let profraw_files = list_files(&config.coverage_dir, "profraw")?; let mut cmd = process::Command::new(llvm_profdata); cmd.args(&["merge", "--sparse", "--output"]) .arg(&config.coverage_profdata) .args(&profraw_files); run_cmd(cmd, "llvm-profdata") } fn export_coverage_lcov( config: &Config, llvm_cov: &Path, llvm_cov_common_args: &[&str], tests_paths: &[PathBuf], ) -> Result<()> { info!("Exporting coverage LCOV..."); let lcov_path = config.coverage_dir.join("lcov.info"); let lcov_info = File::create(&lcov_path) .map_err(|r| Error::from_io_path("std::fs::File::create", &lcov_path, r))?; let mut cmd = process::Command::new(llvm_cov); cmd.stdout(lcov_info) .args(&["export", "--format", "lcov"]) .args(llvm_cov_common_args) .arg("--instr-profile") .arg(&config.coverage_profdata); for path in tests_paths { cmd.arg("--object").arg(path); } run_cmd(cmd, "llvm-cov") } fn export_coverage_html( config: &Config, llvm_cov: &Path, llvm_cov_common_args: &[&str], tests_paths: &[PathBuf], ) -> Result<()> { info!("Exporting coverage HTML..."); let mut cmd = process::Command::new(llvm_cov); cmd.args(&["show", "--format", "html"]) .args(&["--show-line-counts-or-regions", "--show-instantiations"]) .args(llvm_cov_common_args) 
.arg("--instr-profile") .arg(&config.coverage_profdata) .arg("--output-dir") .arg(&config.coverage_dir); for path in tests_paths { cmd.arg("--object").arg(path); } run_cmd(cmd, "llvm-cov")?; let mut cmd = process::Command::new("patch"); cmd.current_dir(&config.coverage_dir) .arg("--input") .arg(&config.workspace_dir.join("coverage-style.css.patch")); run_cmd(cmd, "patch") } fn rustc_print_sysroot(config: &Config) -> Result<Vec<u8>> { let name = "rustc --print sysroot"; let mut cmd = process::Command::new("rustc"); cmd.current_dir(&config.workspace_dir) .stdout(process::Stdio::piped()) .arg(&format!("+{}", NIGHTLY_TOOLCHAIN)) .args(&["--print", "sysroot"]); debug!("Running: {:?}", cmd); let output = cmd .spawn() .map_err(|r| Error::from_io_path("std::process::Command::spawn", name, r))? .wait_with_output() .map_err(|r| Error::from_io_path("std::process::Child::wait_with_output", name, r))?; if output.status.success() { Ok(output.stdout) } else { Err(Error::CommandFailed { name }) } } fn sys_root_of_nightly_toolchain(config: &Config) -> Result<PathBuf> { let mut result = rustc_print_sysroot(config); if result.is_err() { info!("Installing toolchain '{}'...", NIGHTLY_TOOLCHAIN); let args = ["--quiet", "toolchain", "install", NIGHTLY_TOOLCHAIN]; rustup(config, &args)?; result = rustc_print_sysroot(config); } let mut bytes = result?; if let Some(line_len) = bytes .as_slice() .split(|&c| c == b'\n' || c == b'\r') .next() .map(|s| s.len()) { bytes.resize(line_len, 0); } Ok(pathbuf_from_vec(bytes)) } fn test_binaries_from_cargo_test_messages(bytes: &[u8]) -> Vec<PathBuf> { bytes .split(|&c| c == b'\r' || c == b'\n') .map(|line| serde_json::from_slice::<CargoTestMessage>(line)) .filter_map(std::result::Result::ok) .filter(|obj| obj.profile.test) .map(|obj| obj.filenames) .flatten() .collect() }
use std::ffi::OsStr; use std::fs::File; use std::path::{Path, PathBuf}; use std::{fs, process}; use log::{debug, info}; use crate::errors::{Error, Result}; use crate::utils::*; use crate::{Config, NIGHTLY_TOOLCHAIN}; #[derive(Debug, serde_derive::Deserialize)] struct CargoTestMessageProfile { test: bool, } #[derive(Debug, serde_derive::Deserialize)] struct CargoTestMessage { profile: CargoTestMessageProfile, filenames: Vec<PathBuf>, } pub(crate) fn coverage(config: &Config) -> Result<()> { let coverage_dir = config .coverage_dir .to_str() .expect("Path is not valid UTF-8"); let llvm_cov_common_args: [&str; 10] = [ "--Xdemangler", "rustfilt", "--ignore-filename-regex", r#"/\.cargo/registry/"#, "--ignore-filename-regex", r#"/rustc/"#, "--ignore-filename-regex", r#"/tests.rs$$"#, "--ignore-filename-regex", &format!("^{}/", coverage_dir), ]; let coverage_common_env: [(&str, &OsStr); 4] = [ ("RUST_BACKTRACE", OsStr::new("1")), ("CARGO_INCREMENTAL", OsStr::new("0")), ("RUSTFLAGS", OsStr::new("-Zinstrument-coverage")), ("RUSTDOCFLAGS", OsStr::new("-Zinstrument-coverage")), ]; let coverage_common_args: [&str; 6] = [ &format!("+{}", NIGHTLY_TOOLCHAIN), "test", "--workspace", "--tests", "--target-dir", coverage_dir, ]; rustfilt_version(config)?; let sys_root = sys_root_of_nightly_toolchain(config)?; let mut result = find_executable_file(&sys_root, "llvm-profdata"); if result.is_err() { info!("Installing component 'llvm-tools-preview'...");
rustup(config, &args)?; result = find_executable_file(&sys_root, "llvm-profdata"); } let llvm_profdata = result?; let llvm_cov = find_executable_file(&sys_root, "llvm-cov")?; fs::create_dir_all(&config.coverage_dir) .map_err(|r| Error::from_io_path("std::fs::create_dir_all", &config.coverage_dir, r))?; info!("Cleaning up old coverage files..."); let profraw_files = list_files(&config.coverage_dir, "profraw")?; profraw_files.into_iter().for_each(|p| { let _ignored = fs::remove_file(&p); }); let tests_paths = build_coverage_binaries(config, &coverage_common_env, &coverage_common_args)?; run_coverage_binaries(config, &coverage_common_env, &coverage_common_args)?; merge_coverage_profraw_files(config, &llvm_profdata)?; export_coverage_lcov(config, &llvm_cov, &llvm_cov_common_args, &tests_paths)?; export_coverage_html(config, &llvm_cov, &llvm_cov_common_args, &tests_paths) } fn rustfilt_version(config: &Config) -> Result<()> { let mut cmd = process::Command::new("rustfilt"); cmd.stdout(process::Stdio::null()).arg("--version"); let mut result = run_cmd(cmd, "rustfilt"); if result.is_err() { info!("Installing 'rustfilt'..."); cargo_command(config, "", &["--quiet", "install", "rustfilt"])?; let mut cmd = process::Command::new("rustfilt"); cmd.stdout(process::Stdio::null()).arg("--version"); result = run_cmd(cmd, "rustfilt"); } result } fn build_coverage_binaries( config: &Config, common_env: &[(&str, &OsStr)], common_args: &[&str], ) -> Result<Vec<PathBuf>> { info!("Building coverage binaries..."); let mut cmd = process::Command::new("cargo"); cmd.current_dir(&config.workspace_dir) .stdout(process::Stdio::piped()) .envs(common_env.iter().map(|(k, v)| (k, v))) .env("LLVM_PROFILE_FILE", "/dev/null") .args(common_args) .args(&["--no-run", "--message-format=json"]); debug!("Running: {:?}", cmd); let output = cmd .spawn() .map_err(|r| Error::from_io_path("std::process::Command::spawn", "cargo", r))? 
.wait_with_output() .map_err(|r| Error::from_io_path("std::process::Child::wait_with_output", "cargo", r))?; if output.status.success() { Ok(test_binaries_from_cargo_test_messages(&output.stdout)) } else { Err(Error::CommandFailed { name: "cargo" }) } } fn run_coverage_binaries( config: &Config, common_env: &[(&str, &OsStr)], common_args: &[&str], ) -> Result<()> { info!("Running coverage binaries..."); let mut cmd = process::Command::new("cargo"); cmd.current_dir(&config.workspace_dir) .envs(common_env.iter().map(|(k, v)| (k, v))) .env("LLVM_PROFILE_FILE", &config.coverage_dir.join("%m.profraw")) .args(common_args); run_cmd(cmd, "cargo") } fn merge_coverage_profraw_files(config: &Config, llvm_profdata: &Path) -> Result<()> { info!("Merging coverage data..."); let profraw_files = list_files(&config.coverage_dir, "profraw")?; let mut cmd = process::Command::new(llvm_profdata); cmd.args(&["merge", "--sparse", "--output"]) .arg(&config.coverage_profdata) .args(&profraw_files); run_cmd(cmd, "llvm-profdata") } fn export_coverage_lcov( config: &Config, llvm_cov: &Path, llvm_cov_common_args: &[&str], tests_paths: &[PathBuf], ) -> Result<()> { info!("Exporting coverage LCOV..."); let lcov_path = config.coverage_dir.join("lcov.info"); let lcov_info = File::create(&lcov_path) .map_err(|r| Error::from_io_path("std::fs::File::create", &lcov_path, r))?; let mut cmd = process::Command::new(llvm_cov); cmd.stdout(lcov_info) .args(&["export", "--format", "lcov"]) .args(llvm_cov_common_args) .arg("--instr-profile") .arg(&config.coverage_profdata); for path in tests_paths { cmd.arg("--object").arg(path); } run_cmd(cmd, "llvm-cov") } fn export_coverage_html( config: &Config, llvm_cov: &Path, llvm_cov_common_args: &[&str], tests_paths: &[PathBuf], ) -> Result<()> { info!("Exporting coverage HTML..."); let mut cmd = process::Command::new(llvm_cov); cmd.args(&["show", "--format", "html"]) .args(&["--show-line-counts-or-regions", "--show-instantiations"]) .args(llvm_cov_common_args) 
.arg("--instr-profile") .arg(&config.coverage_profdata) .arg("--output-dir") .arg(&config.coverage_dir); for path in tests_paths { cmd.arg("--object").arg(path); } run_cmd(cmd, "llvm-cov")?; let mut cmd = process::Command::new("patch"); cmd.current_dir(&config.coverage_dir) .arg("--input") .arg(&config.workspace_dir.join("coverage-style.css.patch")); run_cmd(cmd, "patch") } fn rustc_print_sysroot(config: &Config) -> Result<Vec<u8>> { let name = "rustc --print sysroot"; let mut cmd = process::Command::new("rustc"); cmd.current_dir(&config.workspace_dir) .stdout(process::Stdio::piped()) .arg(&format!("+{}", NIGHTLY_TOOLCHAIN)) .args(&["--print", "sysroot"]); debug!("Running: {:?}", cmd); let output = cmd .spawn() .map_err(|r| Error::from_io_path("std::process::Command::spawn", name, r))? .wait_with_output() .map_err(|r| Error::from_io_path("std::process::Child::wait_with_output", name, r))?; if output.status.success() { Ok(output.stdout) } else { Err(Error::CommandFailed { name }) } } fn sys_root_of_nightly_toolchain(config: &Config) -> Result<PathBuf> { let mut result = rustc_print_sysroot(config); if result.is_err() { info!("Installing toolchain '{}'...", NIGHTLY_TOOLCHAIN); let args = ["--quiet", "toolchain", "install", NIGHTLY_TOOLCHAIN]; rustup(config, &args)?; result = rustc_print_sysroot(config); } let mut bytes = result?; if let Some(line_len) = bytes .as_slice() .split(|&c| c == b'\n' || c == b'\r') .next() .map(|s| s.len()) { bytes.resize(line_len, 0); } Ok(pathbuf_from_vec(bytes)) } fn test_binaries_from_cargo_test_messages(bytes: &[u8]) -> Vec<PathBuf> { bytes .split(|&c| c == b'\r' || c == b'\n') .map(|line| serde_json::from_slice::<CargoTestMessage>(line)) .filter_map(std::result::Result::ok) .filter(|obj| obj.profile.test) .map(|obj| obj.filenames) .flatten() .collect() }
let args = [ "--quiet", "component", "add", "--toolchain", NIGHTLY_TOOLCHAIN, "llvm-tools-preview", ];
assignment_statement
[ { "content": "fn run_target(config: &Config, target: &str) -> Result<()> {\n\n match target {\n\n \"coverage\" => coverage(config),\n\n\n\n _ => usage(),\n\n }\n\n}\n", "file_path": "xtask/src/main.rs", "rank": 0, "score": 180495.35324010975 }, { "content": "fn cargo_vers...
Rust
client/src/gui/mod.rs
ed-jones/voxel-rs
f3b43867c8b704d84de87611c00bdd246ea301f7
use crate::ui::PrimitiveBuffer; pub mod experiments; pub struct Gui { pub(self) mouse_x: i32, pub(self) mouse_y: i32, pub(self) mouse_down: bool, pub(self) hot_item: u32, pub(self) active_item: u32, pub(self) primitives: PrimitiveBuffer, } impl Gui { pub fn new() -> Self { Self { mouse_x: 0, mouse_y: 0, mouse_down: false, hot_item: 0, active_item: 0, primitives: Default::default(), } } pub fn update_mouse_position(&mut self, new_x: i32, new_y: i32) { self.mouse_x = new_x; self.mouse_y = new_y; } pub fn update_mouse_button(&mut self, is_down: bool) { self.mouse_down = is_down; } pub fn drain_primitives(&mut self) -> PrimitiveBuffer { std::mem::replace(&mut self.primitives, PrimitiveBuffer::default()) } pub fn prepare(&mut self) { self.hot_item = 0; } pub fn finish(&mut self) { if !self.mouse_down { self.active_item = 0; } else { if self.active_item == 0 { self.active_item = 1; } } } pub fn is_mouse_inside(&self, x: i32, y: i32, w: i32, h: i32) -> bool { x <= self.mouse_x && self.mouse_x < x + w && y <= self.mouse_y && self.mouse_y < y + h } pub fn button(&mut self, id: u32, x: i32, y: i32, w: i32, h: i32) -> ButtonBuilder { ButtonBuilder { gui: self, id: id + 2, x, y, w, h, text: None, } } pub fn text(&mut self, x: i32, y: i32, h: i32, text: String, color: [f32; 4], z: f32) { self.primitives.draw_text_simple(x, y, h, text, color, z); } } #[must_use] pub struct ButtonBuilder<'a> { gui: &'a mut Gui, id: u32, x: i32, y: i32, w: i32, h: i32, text: Option<(String, [f32; 4])>, } impl<'a> ButtonBuilder<'a> { pub fn build(self) -> bool { let Self { gui, id, x, y, w, h, text, } = self; if gui.is_mouse_inside(x, y, w, h) { gui.hot_item = id; if gui.active_item == 0 && gui.mouse_down { gui.active_item = id; } } gui.primitives .draw_rect(x + 3, y + 3, w, h, [0.0, 0.0, 0.0, 1.0], 0.02); let draw_pos; let button_color; if gui.hot_item == id { if gui.active_item == id { draw_pos = (x + 2, y + 2); button_color = [0.4, 0.4, 0.5, 1.0]; } else { draw_pos = (x, y); button_color = [0.5, 
0.5, 0.6, 1.0]; } } else { draw_pos = (x, y); button_color = [0.6, 0.6, 0.7, 1.0]; } gui.primitives .draw_rect(draw_pos.0, draw_pos.1, w, h, button_color, 0.01); if let Some((text, color)) = text { gui.text(draw_pos.0, draw_pos.1, h, text, color, 0.005); } if !gui.mouse_down && gui.active_item == id && gui.hot_item == id { return true; } false } pub fn text(mut self, text: String, color: [f32; 4]) -> Self { self.text = Some((text, color)); self } }
use crate::ui::PrimitiveBuffer; pub mod experiments; pub struct Gui { pub(self) mouse_x: i32, pub(self) mouse_y: i32, pub(self) mouse_down: bool, pub(self) hot_item: u32, pub(self) active_item: u32, pub(self) primitives: PrimitiveBuffer, } impl Gui { pub fn new() -> Self { Self { mouse_x: 0, mouse_y: 0, mouse_down: false, hot_item: 0, active_item: 0, primitives: Default::default(), } } pub fn update_mouse_position(&mut self, new_x: i32, new_y: i32) { self.mouse_x = new_x; self.mouse_y = new_y; } pub fn update_mouse_button(&mut self, is_down: bool) { self.mouse_down = is_down; } pub fn drain_primitives(&mut self) -> PrimitiveBuffer { std::mem::replace(&mut self.primitives, PrimitiveBuffer::default()) } pub fn prepare(&mut self) { self.hot_item = 0; } pub fn finish(&mut self) { if !self.mouse_down { self.active_item = 0; } else { if self.active_item == 0 { self.active_item = 1; } } } pub fn is_mouse_inside(&self, x: i32, y: i32, w: i32, h: i32) -> bool { x <= self.mouse_x && self.mouse_x < x + w && y <= self.mouse_y && self.mouse_y < y + h }
pub fn text(&mut self, x: i32, y: i32, h: i32, text: String, color: [f32; 4], z: f32) { self.primitives.draw_text_simple(x, y, h, text, color, z); } } #[must_use] pub struct ButtonBuilder<'a> { gui: &'a mut Gui, id: u32, x: i32, y: i32, w: i32, h: i32, text: Option<(String, [f32; 4])>, } impl<'a> ButtonBuilder<'a> { pub fn build(self) -> bool { let Self { gui, id, x, y, w, h, text, } = self; if gui.is_mouse_inside(x, y, w, h) { gui.hot_item = id; if gui.active_item == 0 && gui.mouse_down { gui.active_item = id; } } gui.primitives .draw_rect(x + 3, y + 3, w, h, [0.0, 0.0, 0.0, 1.0], 0.02); let draw_pos; let button_color; if gui.hot_item == id { if gui.active_item == id { draw_pos = (x + 2, y + 2); button_color = [0.4, 0.4, 0.5, 1.0]; } else { draw_pos = (x, y); button_color = [0.5, 0.5, 0.6, 1.0]; } } else { draw_pos = (x, y); button_color = [0.6, 0.6, 0.7, 1.0]; } gui.primitives .draw_rect(draw_pos.0, draw_pos.1, w, h, button_color, 0.01); if let Some((text, color)) = text { gui.text(draw_pos.0, draw_pos.1, h, text, color, 0.005); } if !gui.mouse_down && gui.active_item == id && gui.hot_item == id { return true; } false } pub fn text(mut self, text: String, color: [f32; 4]) -> Self { self.text = Some((text, color)); self } }
pub fn button(&mut self, id: u32, x: i32, y: i32, w: i32, h: i32) -> ButtonBuilder { ButtonBuilder { gui: self, id: id + 2, x, y, w, h, text: None, } }
function_block-full_function
[ { "content": "fn four_bytes_to_u32(bytes: &[u8], big_endian: bool) -> u32 {\n\n if big_endian {\n\n return ((bytes[0] as u32) << 24)\n\n + ((bytes[1] as u32) << 16)\n\n + ((bytes[2] as u32) << 8)\n\n + (bytes[3] as u32);\n\n } else {\n\n return ((bytes[3] as ...
Rust
jl_compiler/src/cli.rs
vain0x/jacco-lang
f1982710a8b9a6198ebb14692570e83774d3659b
use crate::{ clang::clang_dump, cps::*, front::name_resolution::NameSymbols, logs::{DocLogs, Logs}, parse::{parse_tokens, PTree}, source::{Doc, TRange}, token::tokenize, utils::VecArena, }; use log::error; use std::{ collections::{HashMap, HashSet}, io, mem::take, path::{Component, Path, PathBuf}, }; pub(crate) type DocTag = (); pub(crate) type DocArena = VecArena<DocTag, DocData>; pub(crate) struct DocData { name: String, path: PathBuf, text: String, } type SyntaxArena = VecArena<DocTag, SyntaxData>; struct SyntaxData { tree: PTree, mod_names: Vec<String>, logs: DocLogs, } type SemanticsArena = VecArena<DocTag, SemanticsData>; struct SemanticsData { #[allow(unused)] k_mod: KMod, name_symbols: NameSymbols, } #[derive(Default)] pub struct Project { docs: DocArena, doc_name_map: HashMap<String, Doc>, syntaxes: SyntaxArena, semantics: SemanticsArena, mod_outline: KModOutline, mod_data: KModData, } impl Project { pub fn new() -> Self { Project::default() } pub fn insert(&mut self, name: String, path: PathBuf, text: String) -> Result<Doc, (Doc, Doc)> { let id = self.docs.alloc(DocData { name: name.clone(), path, text, }); let doc = Doc::from(id.to_index()); let duplicated_doc_opt = self.doc_name_map.insert(name, doc); if let Some(other) = duplicated_doc_opt { return Err((doc, other)); } Ok(doc) } fn logs_into_errors(&mut self, logs: Logs, errors: &mut Vec<(Doc, PathBuf, TRange, String)>) { for item in logs.finish() { let (doc, loc) = match item.loc().inner() { Ok(it) => it, Err(hint) => { error!("'{}' {}", hint, item.message()); continue; } }; let mut message = item.message().to_string(); let tree = &self.syntaxes[doc.inner()].tree; let range = match loc.range(tree) { Ok(it) => it, Err(hint) => { message += &format!(" loc={}", hint); TRange::ZERO } }; let path = self.docs[doc.inner()].path.to_path_buf(); errors.push((doc, path, range, message)); } } pub fn parse(&mut self, unresolved_mod_names: &mut Vec<String>) { let mut mod_names = vec![]; let offset = 
self.syntaxes.len(); let additional = self.docs.len().saturating_sub(offset); self.syntaxes.reserve(additional); for (id, doc_data) in self.docs.enumerate().skip(offset) { let logs = DocLogs::new(); let tokens = tokenize(doc_data.text.clone().into()); let tree = parse_tokens(tokens, logs.logger()); tree.write_trace(); tree.collect_used_mod_names(&mut mod_names); let id2 = self.syntaxes.alloc(SyntaxData { tree, mod_names: mod_names.split_off(0), logs, }); assert_eq!(id2, id); } let mut mod_names = HashSet::new(); for syntax_data in self.syntaxes.iter() { for mod_name in &syntax_data.mod_names { if !self.doc_name_map.contains_key(mod_name) { mod_names.insert(mod_name); } } } unresolved_mod_names.extend(mod_names.iter().map(ToString::to_string)); } pub fn compile_v2(&mut self) -> Result<String, Vec<(Doc, PathBuf, TRange, String)>> { let logs = Logs::new(); self.semantics.reserve(self.syntaxes.len()); for (id, syntax) in self.syntaxes.enumerate_mut() { let doc = Doc::from(id.to_index()); let doc_name = self.docs[id].name.to_string(); let doc_logs = take(&mut syntax.logs); let k_mod = self.mod_outline.mods.alloc(KModInfo { name: doc_name }); let name_symbols = super::front::generate_outline( doc, &syntax.tree, &mut self.mod_outline, &doc_logs.logger(), ); syntax.logs = doc_logs; let id2 = self.semantics.alloc(SemanticsData { k_mod, name_symbols, }); assert_eq!(id2, id); } let mut aliases = take(&mut self.mod_outline.aliases); resolve_aliases(&mut aliases, &self.mod_outline, logs.logger()); self.mod_outline.aliases = aliases; for ((id, syntax), semantics) in self.syntaxes.enumerate_mut().zip(self.semantics.iter_mut()) { let doc = Doc::from(id.to_index()); let doc_logs = take(&mut syntax.logs); super::front::convert_to_cps( doc, &syntax.tree, &mut semantics.name_symbols, &self.mod_outline, &mut self.mod_data, &doc_logs.logger(), ); logs.logger().extend_from_doc_logs(doc, doc_logs); } if logs.is_fatal() { let mut errors = vec![]; self.logs_into_errors(logs, &mut errors); 
return Err(errors); } let mut mod_data = take(&mut self.mod_data); resolve_types(&self.mod_outline, &mut mod_data, logs.logger()); self.mod_data = mod_data; super::cps::eval_cps(&mut self.mod_outline, &mut self.mod_data, &logs.logger()); if logs.is_fatal() { let mut errors = vec![]; self.logs_into_errors(logs, &mut errors); return Err(errors); } KConstEnumOutline::determine_tags( &mut self.mod_outline.consts, &self.mod_outline.const_enums, ); eliminate_unit(&mut self.mod_outline, &mut self.mod_data); Ok(clang_dump(&self.mod_outline, &self.mod_data)) } } pub struct SyntaxDump { pub tree: String, pub errors: Vec<(TRange, String)>, } pub fn dump_syntax(source_path: &Path, source_code: &str) -> SyntaxDump { let mut project = Project::new(); let name = source_path .file_stem() .and_then(|stem| stem.to_str()) .unwrap_or("<main>") .to_string(); let source_path = make_path_relative_to_manifest_dir(source_path); let text = source_code.to_string(); project.insert(name, source_path, text).ok().unwrap(); let mut unresolved_doc_names = vec![]; project.parse(&mut unresolved_doc_names); debug_assert_eq!(project.syntaxes.len(), 1); let mut tree = String::new(); let mut errors = vec![]; let logs = Logs::new(); if let Some((doc, syntax)) = project.syntaxes.enumerate_mut().next() { let doc_logs = take(&mut syntax.logs); logs.logger() .extend_from_doc_logs(Doc::from(doc.to_index()), doc_logs); tree = format!("{:#?}", syntax.tree); } project.logs_into_errors(logs, &mut errors); let errors = errors .into_iter() .map(|(_, _, range, message)| (range, message)) .collect(); SyntaxDump { tree, errors } } pub fn compile_v2(source_path: &Path, source_code: &str) -> Option<String> { let project_dir = source_path.parent(); let mut project = Project::new(); let name = source_path .file_stem() .and_then(|stem| stem.to_str()) .unwrap_or("<main>") .to_string(); let source_path = make_path_relative_to_manifest_dir(source_path); let text = source_code.to_string(); project.insert(name, source_path, 
text).ok().unwrap(); let mut missed_files = HashSet::new(); let mut unresolved_doc_names = vec![]; loop { assert!(unresolved_doc_names.is_empty()); project.parse(&mut unresolved_doc_names); if unresolved_doc_names.is_empty() { break; } let project_dir = project_dir.unwrap(); let mut stuck = true; for doc_name in unresolved_doc_names.drain(..) { let file_path = project_dir.join(format!("{}.jacco", doc_name)); if missed_files.contains(&file_path) { continue; } let source_code = match std::fs::read_to_string(&file_path) { Ok(text) => text, Err(_) => { missed_files.insert(file_path.to_path_buf()); continue; } }; project .insert(doc_name, file_path.to_path_buf(), source_code) .ok(); stuck = false; } if stuck { break; } } if !missed_files.is_empty() { for path in missed_files { error!( "このファイルが use 文で必要とされていますが、見つかりませんでした {:?}", path.to_string_lossy(), ); } return None; } match project.compile_v2() { Ok(code) => Some(code), Err(errors) => { if errors.is_empty() { unreachable!("エラーがないのにコンパイルエラーが発生しています。"); } for (_, path, range, message) in errors { error!("{}:{} {}", path.to_string_lossy(), range, message); } None } } } fn make_path_relative_to_manifest_dir(path: &Path) -> PathBuf { fn segments(path: &Path) -> io::Result<Vec<String>> { Ok(path .canonicalize()? .components() .filter_map(|c| match c { Component::Normal(name) => Some(name.to_string_lossy().to_string()), _ => None, }) .collect()) } fn make_relative_path(dest_path: &Path, base_path: &Path) -> io::Result<PathBuf> { let dest = segments(dest_path)?; let base = segments(base_path)?; let common_prefix_len = dest .iter() .zip(base.iter()) .take_while(|(dest_name, base_name)| dest_name == base_name) .count(); let mut out = PathBuf::new(); for _ in 0..base.len() - common_prefix_len { out.push("..".to_string()); } for name in &dest[common_prefix_len..] 
{ out.push(name); } Ok(out) } let manifest_dir: &str = env!("CARGO_MANIFEST_DIR"); let mut base_dir = PathBuf::from(manifest_dir); base_dir.pop(); make_relative_path(path, &base_dir).unwrap_or_default() }
use crate::{ clang::clang_dump, cps::*, front::name_resolution::NameSymbols, logs::{DocLogs, Logs}, parse::{parse_tokens, PTree}, source::{Doc, TRange}, token::tokenize, utils::VecArena, }; use log::error; use std::{ collections::{HashMap, HashSet}, io, mem::take, path::{Component, Path, PathBuf}, }; pub(crate) type DocTag = (); pub(crate) type DocArena = VecArena<DocTag, DocData>; pub(crate) struct DocData { name: String, path: PathBuf, text: String, } type SyntaxArena = VecArena<DocTag, SyntaxData>; struct SyntaxData { tree: PTree, mod_names: Vec<String>, logs: DocLogs, } type SemanticsArena = VecArena<DocTag, SemanticsData>; struct SemanticsData { #[allow(unused)] k_mod: KMod, name_symbols: NameSymbols, } #[derive(Default)] pub struct Project { docs: DocArena, doc_name_map: HashMap<String, Doc>, syntaxes: SyntaxArena, semantics: SemanticsArena, mod_outline: KModOutline, mod_data: KModData, } impl Project { pub fn new() -> Self { Project::default() } pub fn insert(&mut self, name: String, path: PathBuf, text: String) -> Result<Doc, (Doc, Doc)> { let id = self.docs.alloc(DocData { name: name.clone(), path, text, }); let doc = Doc::from(id.to_index()); let duplicated_doc_opt = self.doc_name_map.insert(name, doc); if let Some(other) = duplicated_doc_opt { return Err((doc, other)); } Ok(doc) } fn logs_into_errors(&mut self, logs: Logs, errors: &mut Vec<(Doc, PathBuf, TRange, String)>) { for item in logs.finish() { let (doc, loc) = match item.loc().inner() { Ok(it) => it, Err(hint) => { error!("'{}' {}", hint, item.message()); continue; } }; let mut message = item.message().to_string(); let tree = &self.syntaxes[doc.inner()].tree; let range = match loc.range(tree) { Ok(it) => it, Err(hint) => { message += &format!(" loc={}", hint); TRange::ZERO } }; let path = self.docs[doc.inner()].path.to_path_buf(); errors.push((doc, path, range, message)); } } pub fn parse(&mut self, unresolved_mod_names: &mut Vec<String>) { let mut mod_names = vec![]; let offset = 
self.syntaxes.len(); let additional = self.docs.len().saturating_sub(offset); self.syntaxes.reserve(additional); for (id, doc_data) in self.docs.enumerate().skip(offset) { let logs = DocLogs::new(); let tokens = tokenize(doc_data.text.clone().into()); let tree = parse_tokens(tokens, logs.logger()); tree.write_trace(); tree.collect_used_mod_names(&mut mod_names); let id2 = self.syntaxes.alloc(SyntaxData { tree, mod_names: mod_names.split_off(0), logs, }); assert_eq!(id2, id); } let mut mod_names = HashSet::new(); for syntax_data in self.syntaxes.iter() { for mod_name in &syntax_data.mod_names { if !self.doc_name_map.contains_key(mod_name) { mod_names.insert(mod_name); } } } unresolved_mod_names.extend(mod_names.iter().map(ToString::to_string)); } pub fn compile_v2(&mut self) -> Result<String, Vec<(Doc, PathBuf, TRange, String)>> { let logs = Logs::new(); self.semantics.reserve(self.syntaxes.len()); for (id, syntax) in self.syntaxes.enumerate_mut() { let doc = Doc::from(id.to_index()); let doc_name = self.docs[id].name.to_string(); let doc_logs = take(&mut syntax.logs); let k_mod = self.mod_outline.mods.alloc(KModInfo { name: doc_name }); let name_symbols = super::front::generate_outline( doc, &syntax.tree, &mut self.mod_outline, &doc_logs.logger(), ); syntax.logs = doc_logs; let id2 = self.semantics.alloc(SemanticsData { k_mod, name_symbols, }); assert_eq!(id2, id); } let mut aliases = take(&mut self.mod_outline.aliases); resolve_aliases(&mut aliases, &self.mod_outline, logs.logger()); self.mod_outline.aliases = aliases; for ((id, syntax), semantics) in self.syntaxes.enumerate_mut().zip(self.semantics.iter_mut()) { let doc = Doc::from(id.to_index()); let doc_logs = take(&mut syntax.logs); super::front::convert_to_cps( doc, &syntax.tree, &mut semantics.name_symbols, &self.mod_outline, &mut self.mod_data, &doc_logs.logger(), ); logs.logger().extend_from_doc_logs(doc, doc_logs); } if logs.is_fatal() { let mut errors = vec![]; self.logs_into_errors(logs, &mut errors); 
return Err(errors); } let mut mod_data = take(&mut self.mod_data); resolve_types(&self.mod_outline, &mut mod_data, logs.logger()); self.mod_data = mod_data; super::cps::eval_cps(&mut self.mod_outline, &mut self.mod_data, &logs.logger()); if logs.is_fatal() { let mut errors = vec![]; self.logs_into_errors(logs, &mut errors); return Err(errors); } KConstEnumOutline::determine_tags( &mut self.mod_outline.consts, &self.mod_outline.const_enums, ); eliminate_unit(&mut self.mod_outline, &mut self.mod_data); Ok(clang_dump(&self.mod_outline, &self.mod_data)) } } pub struct SyntaxDump { pub tree: String, pub errors: Vec<(TRange, String)>, } pub fn dump_syntax(source_path: &Path, source_code: &str) -> SyntaxDump { let mut project = Project::new(); let name = source_path .file_stem() .and_then(|stem| stem.to_str()) .unwrap_or("<main>") .to_string(); let source_path = make_path_relative_to_manifest_dir(source_path); let text = source_code.to_string(); project.insert(name, source_path, text).ok().unwrap(); let mut unresolved_doc_names = vec![]; project.parse(&mut unresolved_doc_names); debug_assert_eq!(project.syntaxes.len(), 1); let mut tree = String::new(); let mut errors = vec![]; let logs = Logs::new(); if let Some((doc, syntax)) = project.syntaxes.enumerate_mut().next() { let doc_logs = take(&mut syntax.logs); logs.logger() .extend_from_doc_logs(Doc::from(doc.to_index()), doc_logs); tree = format!("{:#?}", syntax.tree); } project.logs_into_errors(logs, &mut errors); let errors = errors .into_iter() .map(|(_, _, range, message)| (range, message)) .collect(); SyntaxDump { tree, errors } } pub fn compile_v2(source_path: &Path, source_code: &str) -> Option<String> { let project_dir = source_path.parent(); let mut project = Project::new(); let name = source_path .file_stem() .and_then(|stem| stem.to_str()) .unwrap_or("<main>") .to_string(); let source_path = make_path_relative_to_manifest_dir(source_path); let text = source_code.to_string(); project.insert(name, source_path, 
text).ok().unwrap(); let mut missed_files = HashSet::new(); let mut unresolved_doc_names = vec![]; loop { assert!(unresolved_doc_names.is_empty()); project.parse(&mut unresolved_doc_names); if unresolved_doc_names.is_empty() { break; } let project_dir = project_dir.unwrap(); let mut stuck = true; for doc_name in unresolved_doc_names.drain(..) { let file_path = project_dir.join(format!("{}.jacco", doc_name)); if missed_files.contains(&file_path) { continue; } let source_code = match std::fs::read_to_string(&file_path) { Ok(text) => text, Err(_) => { missed_files.insert(file_path.to_path_buf()); continue; } }; project .insert(doc_name, file_path.to_path_buf(), source_code) .ok(); stuck = false; } if stuck { break; } } if !missed_files.is_empty() { for path in missed_files { error!( "このファイルが use 文で必要とされていますが、見つかりませんでした {:?}",
one } } } fn make_path_relative_to_manifest_dir(path: &Path) -> PathBuf { fn segments(path: &Path) -> io::Result<Vec<String>> { Ok(path .canonicalize()? .components() .filter_map(|c| match c { Component::Normal(name) => Some(name.to_string_lossy().to_string()), _ => None, }) .collect()) } fn make_relative_path(dest_path: &Path, base_path: &Path) -> io::Result<PathBuf> { let dest = segments(dest_path)?; let base = segments(base_path)?; let common_prefix_len = dest .iter() .zip(base.iter()) .take_while(|(dest_name, base_name)| dest_name == base_name) .count(); let mut out = PathBuf::new(); for _ in 0..base.len() - common_prefix_len { out.push("..".to_string()); } for name in &dest[common_prefix_len..] { out.push(name); } Ok(out) } let manifest_dir: &str = env!("CARGO_MANIFEST_DIR"); let mut base_dir = PathBuf::from(manifest_dir); base_dir.pop(); make_relative_path(path, &base_dir).unwrap_or_default() }
path.to_string_lossy(), ); } return None; } match project.compile_v2() { Ok(code) => Some(code), Err(errors) => { if errors.is_empty() { unreachable!("エラーがないのにコンパイルエラーが発生しています。"); } for (_, path, range, message) in errors { error!("{}:{} {}", path.to_string_lossy(), range, message); } N
function_block-random_span
[ { "content": "fn logs_into_errors(logs: Logs, tree: &PTree) -> Vec<(TRange, String)> {\n\n logs.finish()\n\n .into_iter()\n\n .map(|item| {\n\n let mut message = item.message().to_string();\n\n let range = match item.loc().inner().and_then(|(_, loc)| loc.range(tree)) {\n\n...
Rust
src/main.rs
shoyo/sudoku
ceda5bb09a9df776b28c4a48e9d9359ce5400776
use std::fmt::{Display, Error, Formatter}; mod boards; const ROWS: usize = 9; const COLS: usize = 9; const CAGE_ROWS: usize = 3; const CAGE_COLS: usize = 3; struct Sudoku { board: Vec<Vec<Option<u8>>>, } impl Sudoku { fn new(initial: Vec<(usize, usize, u8)>) -> Result<Self, String> { let mut board = Vec::with_capacity(ROWS); for _ in 0..ROWS { let mut row = Vec::with_capacity(COLS); for _ in 0..COLS { row.push(None); } board.push(row); } for (row, col, val) in initial { if row >= ROWS || col >= COLS || val == 0 || val > 9 { return Err(format!( "Value: {} at position ({}, {}) is invalid.", val, row, col )); } if board[row][col] != None { return Err(format!( "Value already exists at position ({}, {}).", row, col )); } board[row][col] = Some(val); } Ok(Self { board: board }) } fn solve(&mut self) -> Result<(), ()> { let (row, col) = match self.find_open_cell_() { Some(cell) => cell, None => return Ok(()), }; for val in 1..10 { if self.valid_insert(row, col, val) { self.board[row][col] = Some(val); match self.solve() { Ok(_) => return Ok(()), Err(_) => self.board[row][col] = None, } } } Err(()) } fn verify(&self) -> bool { for i in 0..ROWS { if !self.verify_row_(i) { return false; } } for j in 0..COLS { if !self.verify_col_(j) { return false; } } for ci in 0..CAGE_ROWS { for cj in 0..CAGE_COLS { if !self.verify_cage_(ci, cj) { return false; } } } true } fn verify_row_(&self, row: usize) -> bool { let mut seen = [false; 10]; for col in 0..COLS { let val = match self.board[row][col] { Some(val) => val as usize, None => return false, }; if seen[val] || val > 9 { return false; } seen[val] = true; } true } fn verify_col_(&self, col: usize) -> bool { let mut seen = [false; 10]; for row in 0..ROWS { let val = match self.board[row][col] { Some(val) => val as usize, None => return false, }; if seen[val] || val > 9 { return false; } seen[val] = true; } true } fn verify_cage_(&self, cage_row: usize, cage_col: usize) -> bool { let mut seen = [false; 10]; for i in 0..CAGE_ROWS { 
for j in 0..CAGE_COLS { let val = match self.board[cage_row * CAGE_ROWS + i][cage_col * CAGE_COLS + j] { Some(val) => val as usize, None => return false, }; if seen[val] || val > 9 { return false; } seen[val] = true; } } true } fn find_open_cell_(&self) -> Option<(usize, usize)> { for i in 0..ROWS { for j in 0..COLS { if self.board[i][j] == None { return Some((i, j)); } } } None } fn valid_insert(&self, row: usize, col: usize, val: u8) -> bool { self.board[row][col] == None && self.valid_row_insert_(row, val) && self.valid_col_insert_(col, val) && self.valid_cage_insert_(row / CAGE_ROWS, col / CAGE_COLS, val) } fn valid_row_insert_(&self, row: usize, val: u8) -> bool { for col in 0..COLS { if let Some(v) = self.board[row][col] { if v == val { return false; } } } true } fn valid_col_insert_(&self, col: usize, val: u8) -> bool { for row in 0..ROWS { if let Some(v) = self.board[row][col] { if v == val { return false; } } } true } fn valid_cage_insert_(&self, cage_row: usize, cage_col: usize, val: u8) -> bool { for i in 0..CAGE_ROWS { for j in 0..CAGE_COLS { if let Some(v) = self.board[cage_row * CAGE_ROWS + i][cage_col * CAGE_COLS + j] { if v == val { return false; } } } } true } } impl Display for Sudoku { fn fmt(&self, _fmt: &mut Formatter<'_>) -> Result<(), Error> { for i in 0..ROWS { for j in 0..COLS { match self.board[i][j] { Some(num) => print!(" {} ", num), None => print!(" - "), } } println!(); } Ok(()) } } fn main() { let board = boards::VALID_PUZZLE_1.to_vec(); let mut puzzle = Sudoku::new(board).unwrap(); println!("BEFORE:"); println!("{}", puzzle); match puzzle.solve() { Ok(_) => { println!("AFTER:"); println!("{}", puzzle); } Err(_) => { println!("Invalid puzzle."); } } } #[cfg(test)] mod tests { use super::*; #[test] fn create_puzzle() { let puzzle = Sudoku::new(vec![(0, 1, 3), (5, 3, 8), (8, 8, 4)]); assert!(puzzle.is_ok()); } #[test] fn create_invalid_puzzle() { let puzzle = Sudoku::new(vec![(0, 0, 10)]); assert!(puzzle.is_err()); } #[test] fn 
verify_valid_solution() { let puzzle = Sudoku::new(boards::VALID_SOLUTION.to_vec()).unwrap(); assert_eq!(puzzle.verify(), true); } #[test] fn verify_invalid_solution() { let puzzle = Sudoku::new(boards::INVALID_SOLUTION.to_vec()).unwrap(); assert_eq!(puzzle.verify(), false); } #[test] fn verify_valid_row() { let puzzle = Sudoku::new(boards::VALID_ROW.to_vec()).unwrap(); assert_eq!(puzzle.verify_row_(4), true); } #[test] fn verify_invalid_row() { let puzzle = Sudoku::new(boards::INVALID_ROW.to_vec()).unwrap(); assert_eq!(puzzle.verify_row_(4), false); } #[test] fn verify_valid_col() { let puzzle = Sudoku::new(boards::VALID_COL.to_vec()).unwrap(); assert_eq!(puzzle.verify_col_(4), true); } #[test] fn verify_invalid_col() { let puzzle = Sudoku::new(boards::INVALID_COL.to_vec()).unwrap(); assert_eq!(puzzle.verify_col_(4), false); } #[test] fn verify_valid_cage() { let puzzle = Sudoku::new(boards::VALID_CAGE.to_vec()).unwrap(); assert_eq!(puzzle.verify_cage_(0, 0), true); } #[test] fn verify_invalid_cage() { let puzzle = Sudoku::new(boards::INVALID_CAGE.to_vec()).unwrap(); assert_eq!(puzzle.verify_cage_(0, 0), false); } #[test] fn try_valid_row_insert() { let puzzle = Sudoku::new(Vec::new()).unwrap(); assert_eq!(puzzle.valid_row_insert_(0, 1), true); } #[test] fn try_invalid_row_insert() { let puzzle = Sudoku::new(boards::VALID_ROW.to_vec()).unwrap(); assert_eq!(puzzle.valid_row_insert_(4, 1), false); } #[test] fn try_valid_col_insert() { let puzzle = Sudoku::new(Vec::new()).unwrap(); assert_eq!(puzzle.valid_col_insert_(0, 1), true); } #[test] fn try_invalid_col_insert() { let puzzle = Sudoku::new(boards::VALID_COL.to_vec()).unwrap(); assert_eq!(puzzle.valid_col_insert_(4, 1), false); } #[test] fn try_valid_cage_insert() { let puzzle = Sudoku::new(Vec::new()).unwrap(); assert_eq!(puzzle.valid_cage_insert_(0, 0, 1), true); } #[test] fn try_invalid_cage_insert() { let puzzle = Sudoku::new(boards::VALID_CAGE.to_vec()).unwrap(); assert_eq!(puzzle.valid_cage_insert_(0, 0, 
1), false); } #[test] fn solve_valid_puzzle_1() { let mut puzzle = Sudoku::new(boards::VALID_PUZZLE_1.to_vec()).unwrap(); assert_eq!(puzzle.verify(), false); let _ = puzzle.solve(); assert_eq!(puzzle.verify(), true); } #[test] fn solve_valid_puzzle_2() { let mut puzzle = Sudoku::new(boards::VALID_PUZZLE_2.to_vec()).unwrap(); assert_eq!(puzzle.verify(), false); let _ = puzzle.solve(); assert_eq!(puzzle.verify(), true); } #[test] fn solve_valid_puzzle_3() { let mut puzzle = Sudoku::new(boards::VALID_PUZZLE_3.to_vec()).unwrap(); assert_eq!(puzzle.verify(), false); let _ = puzzle.solve(); assert_eq!(puzzle.verify(), true); } }
use std::fmt::{Display, Error, Formatter}; mod boards; const ROWS: usize = 9; const COLS: usize = 9; const CAGE_ROWS: usize = 3; const CAGE_COLS: usize = 3; struct Sudoku { board: Vec<Vec<Option<u8>>>, } impl Sudoku { fn new(initial: Vec<(usize, usize, u8)>) -> Result<Self, String> { let mut board = Vec::with_capacity(ROWS); for _ in 0..ROWS { let mut row = Vec::with_capacity(COLS); for _ in 0..COLS { row.push(None); } board.push(row); } for (row, col, val) in initial { if row >= ROWS || col >= COLS || val == 0 || val > 9 { return Err(format!( "Value: {} at position ({}, {}) is invalid.", val, row, col )); } if board[row][col] != None { return Err(format!( "Value already exists at position ({}, {}).", row, col )); } board[row][col] = Some(val); } Ok(Self { board: board }) } fn solve(&mut self) -> Result<(), ()> { let (row, col) = match self.find_open_cell_() { Some(cell) => cell, None => return Ok(()), }; for val in 1..10 { if self.valid_insert(row, col, val) { self.board[row][col] = Some(val); match self.solve() { Ok(_) => return Ok(()), Err(_) => self.board[row][col] = None, } } } Err(()) } fn verify(&self) -> bool { for i in 0..ROWS { if !self.verify_row_(i) { return false; } } for j in 0..COLS { if !self.verify_col_(j) { return false; } } for ci in 0..CAGE_ROWS { for cj in 0..CAGE_COLS { if !self.verify_cage_(ci, cj) { return false; } } } true } fn verify_row_(&self, row: usize) -> bool { let mut seen = [false; 10]; for col in 0..COLS { let val = match self.board[row][col] { Some(val) => val as usize, None => return false, }; if seen[val] || val > 9 { return false; } seen[val] = true; } true } fn verify_col_(&self, col: usize) -> bool { let mut seen = [false; 10]; for row in 0..ROWS { let val = match self.board[row][col] { Some(val) => val as usize, None => return false, }; if seen[val] || val > 9 { return false; } seen[val] = true; } true } fn verify_cage_(&self, cage_row: usize, cage_col: usize) -> bool { let mut seen = [false; 10]; for i in 0..CAGE_ROWS { 
for j in 0..CAGE_COLS { let val = match self.board[cage_row * CAGE_ROWS + i][cage_col * CAGE_COLS + j] { Some(val) => val as usize, None => return false, }; if seen[val] || val > 9 { return false; } seen[val] = true; } } true } fn find_open_cell_(&self) -> Option<(usize, usize)> { for i in 0..ROWS { for j in 0..COLS { if self.board[i][j] == None { return Some((i, j)); } } } None } fn valid_insert(&self, row: usize, col: usize, val: u8) -> bool { self.board[row][col] == None && self.valid_row_insert_(row, val) && self.valid_col_insert_(col, val) && self.valid_cage_insert_(row / CAGE_ROWS, col / CAGE_COLS, val) } fn valid_row_insert_(&self, row: usize, val: u8) -> bool { for col in 0..COLS { if let Some(v) = self.board[row][col] { if v == val { return false; } } } true } fn valid_col_insert_(&self, col: usize, val: u8) -> bool { for row in 0..ROWS { if let Some(v) = self.board[row][col] { if v == val { return false; } } } true } fn valid_cage_insert_(&self, cage_row: usize, cage_col: usize, val: u8) -> bool { for i in 0..CAGE_ROWS { for j in 0..CAGE_COLS { if let Some(v) = self.board[cage_row * CAGE_ROWS + i][cage_col * CAGE_COLS + j] { if v == val { return false; } } } } true } } impl Display for Sudoku {
} fn main() { let board = boards::VALID_PUZZLE_1.to_vec(); let mut puzzle = Sudoku::new(board).unwrap(); println!("BEFORE:"); println!("{}", puzzle); match puzzle.solve() { Ok(_) => { println!("AFTER:"); println!("{}", puzzle); } Err(_) => { println!("Invalid puzzle."); } } } #[cfg(test)] mod tests { use super::*; #[test] fn create_puzzle() { let puzzle = Sudoku::new(vec![(0, 1, 3), (5, 3, 8), (8, 8, 4)]); assert!(puzzle.is_ok()); } #[test] fn create_invalid_puzzle() { let puzzle = Sudoku::new(vec![(0, 0, 10)]); assert!(puzzle.is_err()); } #[test] fn verify_valid_solution() { let puzzle = Sudoku::new(boards::VALID_SOLUTION.to_vec()).unwrap(); assert_eq!(puzzle.verify(), true); } #[test] fn verify_invalid_solution() { let puzzle = Sudoku::new(boards::INVALID_SOLUTION.to_vec()).unwrap(); assert_eq!(puzzle.verify(), false); } #[test] fn verify_valid_row() { let puzzle = Sudoku::new(boards::VALID_ROW.to_vec()).unwrap(); assert_eq!(puzzle.verify_row_(4), true); } #[test] fn verify_invalid_row() { let puzzle = Sudoku::new(boards::INVALID_ROW.to_vec()).unwrap(); assert_eq!(puzzle.verify_row_(4), false); } #[test] fn verify_valid_col() { let puzzle = Sudoku::new(boards::VALID_COL.to_vec()).unwrap(); assert_eq!(puzzle.verify_col_(4), true); } #[test] fn verify_invalid_col() { let puzzle = Sudoku::new(boards::INVALID_COL.to_vec()).unwrap(); assert_eq!(puzzle.verify_col_(4), false); } #[test] fn verify_valid_cage() { let puzzle = Sudoku::new(boards::VALID_CAGE.to_vec()).unwrap(); assert_eq!(puzzle.verify_cage_(0, 0), true); } #[test] fn verify_invalid_cage() { let puzzle = Sudoku::new(boards::INVALID_CAGE.to_vec()).unwrap(); assert_eq!(puzzle.verify_cage_(0, 0), false); } #[test] fn try_valid_row_insert() { let puzzle = Sudoku::new(Vec::new()).unwrap(); assert_eq!(puzzle.valid_row_insert_(0, 1), true); } #[test] fn try_invalid_row_insert() { let puzzle = Sudoku::new(boards::VALID_ROW.to_vec()).unwrap(); assert_eq!(puzzle.valid_row_insert_(4, 1), false); } #[test] fn 
try_valid_col_insert() { let puzzle = Sudoku::new(Vec::new()).unwrap(); assert_eq!(puzzle.valid_col_insert_(0, 1), true); } #[test] fn try_invalid_col_insert() { let puzzle = Sudoku::new(boards::VALID_COL.to_vec()).unwrap(); assert_eq!(puzzle.valid_col_insert_(4, 1), false); } #[test] fn try_valid_cage_insert() { let puzzle = Sudoku::new(Vec::new()).unwrap(); assert_eq!(puzzle.valid_cage_insert_(0, 0, 1), true); } #[test] fn try_invalid_cage_insert() { let puzzle = Sudoku::new(boards::VALID_CAGE.to_vec()).unwrap(); assert_eq!(puzzle.valid_cage_insert_(0, 0, 1), false); } #[test] fn solve_valid_puzzle_1() { let mut puzzle = Sudoku::new(boards::VALID_PUZZLE_1.to_vec()).unwrap(); assert_eq!(puzzle.verify(), false); let _ = puzzle.solve(); assert_eq!(puzzle.verify(), true); } #[test] fn solve_valid_puzzle_2() { let mut puzzle = Sudoku::new(boards::VALID_PUZZLE_2.to_vec()).unwrap(); assert_eq!(puzzle.verify(), false); let _ = puzzle.solve(); assert_eq!(puzzle.verify(), true); } #[test] fn solve_valid_puzzle_3() { let mut puzzle = Sudoku::new(boards::VALID_PUZZLE_3.to_vec()).unwrap(); assert_eq!(puzzle.verify(), false); let _ = puzzle.solve(); assert_eq!(puzzle.verify(), true); } }
fn fmt(&self, _fmt: &mut Formatter<'_>) -> Result<(), Error> { for i in 0..ROWS { for j in 0..COLS { match self.board[i][j] { Some(num) => print!(" {} ", num), None => print!(" - "), } } println!(); } Ok(()) }
function_block-full_function
[]
Rust
native-windows-gui/src/controls/tooltip.rs
cg31/native-windows-gui
19fe8f6fadc3c5cd9365106e78d87e46d9c813dd
use winapi::shared::minwindef::{UINT, LPARAM, WPARAM}; use winapi::um::winnt::WCHAR; use crate::win32::window_helper as wh; use crate::win32::base_helper::{check_hwnd, to_utf16, from_utf16}; use crate::{Icon, NwgError}; use super::{ControlBase, ControlHandle}; use std::{mem, ptr}; const NOT_BOUND: &'static str = "Tooltip is not yet bound to a winapi object"; const BAD_HANDLE: &'static str = "INTERNAL ERROR: Tooltip handle is not HWND!"; #[derive(Copy, Clone, Debug)] pub enum TooltipIcon { None, Info, Warning, Error, InfoLarge, WarningLarge, ErrorLarge } /** Tooltips appear automatically, or pop up, when the user pauses the mouse pointer over a tool or some other UI element. The tooltip appears near the pointer and disappears when the user clicks a mouse button, moves the pointer away from the tool, or simply waits for a few seconds. A tooltip can be applied to multiple controls, each with their own custom text. This is done/undone using the `register`/`unregister` functions. So do not think as Tooltip as a standalone toolip, but more like a manager. A tooltip can support static text using `register` and dynamic text using `register_callback`. 
Tooltip requires the `tooltip` features Example: ```rust use native_windows_gui as nwg; /// Building a tooltip and add tooltips at the same time fn build_tooltip(tt: &mut nwg::Tooltip, btn1: &nwg::Button, btn2: &nwg::Button) { nwg::Tooltip::builder() .register(btn1, "A test button") .register_callback(btn2) .build(tt); } /// Adding/Updating a tooltip after the initial tooltip creation fn add_tooltip(btn: &nwg::Button, tt: &nwg::Tooltip) { tt.register(btn, "This is a button!"); } /// Dynamic tooltip callback setup fn add_dynamic_tooltip(tt: &nwg::Tooltip, btn: &nwg::Button) { tt.register_callback(btn); } struct GuiStruct { // Skipping other members tt: nwg::Tooltip, button: nwg::Button } impl GuiStruct { /// The dynamic tooltip callback, triggered by the event loop fn events_callback(&self, evt: nwg::Event, evt_data: &nwg::EventData, handle: nwg::ControlHandle) { match evt { nwg::Event::OnTooltipText => { // Compare the handle to check which control will display the tooltip if &handle == &self.button { let tooltip_data = evt_data.on_tooltip_text(); tooltip_data.set_text(&format!("Button text: \"{}\"", self.button.text())); } }, _ => {} } } } ``` */ #[derive(Default, PartialEq, Eq)] pub struct Tooltip { pub handle: ControlHandle } impl Tooltip { pub fn builder<'a>() -> TooltipBuilder<'a> { TooltipBuilder { title: None, ico: None, default_ico: None, register: Vec::new(), register_cb: Vec::new() } } /* Work with Comclt32.dll version 6.0. Should be implemented eventually Return the icon if it is a icon defined in TooltipIcon. If not, returns `None`. 
pub fn default_icon(&self) -> Option<TooltipIcon> { use winapi::um::commctrl::{TTGETTITLE, TTM_GETTITLE}; use winapi::um::commctrl::{TTI_NONE, TTI_INFO, TTI_WARNING, TTI_ERROR, TTI_INFO_LARGE, TTI_WARNING_LARGE, TTI_ERROR_LARGE}; if self.handle.blank() { panic!(NOT_BOUND); } let handle = self.handle.hwnd().expect(BAD_HANDLE); let mut tt = TTGETTITLE { dwSize: mem::size_of::<TTGETTITLE>() as DWORD, uTitleBitmap: 0, cch: 0, pszTitle: ptr::null_mut() }; let tt_ptr = &mut tt as *mut TTGETTITLE; wh::send_message(handle, TTM_GETTITLE, 0, tt_ptr as LPARAM); println!("{:?}", tt.uTitleBitmap); match tt.uTitleBitmap as usize { TTI_NONE => Some(TooltipIcon::None), TTI_INFO => Some(TooltipIcon::Info), TTI_WARNING => Some(TooltipIcon::Warning), TTI_ERROR => Some(TooltipIcon::Error), TTI_INFO_LARGE => Some(TooltipIcon::InfoLarge), TTI_WARNING_LARGE => Some(TooltipIcon::WarningLarge), TTI_ERROR_LARGE => Some(TooltipIcon::ErrorLarge), _ => None } } */ pub fn text(&self, owner: &ControlHandle, buffer_size: Option<usize>) -> String { use winapi::um::commctrl::{TTM_GETTEXTW, TTTOOLINFOW, TTF_IDISHWND, TTF_SUBCLASS}; use winapi::shared::{basetsd::UINT_PTR, windef::RECT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let owner_handle = { if owner.blank() { panic!(NOT_BOUND); } owner.hwnd().expect(BAD_HANDLE) }; let buffer_size = buffer_size.unwrap_or(200); let mut text: Vec<WCHAR> = Vec::with_capacity(buffer_size); unsafe { text.set_len(buffer_size); } let mut tool = TTTOOLINFOW { cbSize: mem::size_of::<TTTOOLINFOW>() as UINT, uFlags: TTF_IDISHWND | TTF_SUBCLASS, hwnd: owner_handle, uId: owner_handle as UINT_PTR, rect: RECT { left: 0, top: 0, right: 0, bottom: 0 }, hinst: ptr::null_mut(), lpszText: text.as_mut_ptr(), lParam: 0, lpReserved: ptr::null_mut() }; let tool_ptr = &mut tool as *mut TTTOOLINFOW; wh::send_message(handle, TTM_GETTEXTW, 0, tool_ptr as LPARAM); from_utf16(&text) } pub fn set_text<'a>(&self, owner: &ControlHandle, text: &'a str) { use 
winapi::um::commctrl::{TTM_UPDATETIPTEXTW, TTTOOLINFOW, TTF_IDISHWND, TTF_SUBCLASS}; use winapi::shared::{basetsd::UINT_PTR, windef::RECT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let mut text = to_utf16(text); let owner_handle = { if owner.blank() { panic!(NOT_BOUND); } owner.hwnd().expect(BAD_HANDLE) }; let tool = TTTOOLINFOW { cbSize: mem::size_of::<TTTOOLINFOW>() as UINT, uFlags: TTF_IDISHWND | TTF_SUBCLASS, hwnd: owner_handle, uId: owner_handle as UINT_PTR, rect: RECT { left: 0, top: 0, right: 0, bottom: 0 }, hinst: ptr::null_mut(), lpszText: text.as_mut_ptr(), lParam: 0, lpReserved: ptr::null_mut() }; let tool_ptr = &tool as *const TTTOOLINFOW; wh::send_message(handle, TTM_UPDATETIPTEXTW, 0, tool_ptr as LPARAM); } pub fn set_decoration<'a>(&self, title: &'a str, ico: &Icon) { use winapi::um::commctrl::{TTM_SETTITLEW}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let title = to_utf16(title); wh::send_message(handle, TTM_SETTITLEW, ico.handle as WPARAM, title.as_ptr() as LPARAM); } pub fn set_default_decoration<'a>(&self, title: &'a str, icon: TooltipIcon) { use winapi::um::commctrl::{TTM_SETTITLEW}; use winapi::um::commctrl::{TTI_NONE, TTI_INFO, TTI_WARNING, TTI_ERROR, TTI_INFO_LARGE, TTI_WARNING_LARGE, TTI_ERROR_LARGE}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let bitmap_handle = match icon { TooltipIcon::None => TTI_NONE, TooltipIcon::Info => TTI_INFO, TooltipIcon::Warning => TTI_WARNING, TooltipIcon::Error => TTI_ERROR, TooltipIcon::InfoLarge => TTI_INFO_LARGE, TooltipIcon::WarningLarge => TTI_WARNING_LARGE, TooltipIcon::ErrorLarge => TTI_ERROR_LARGE }; let title = to_utf16(title); wh::send_message(handle, TTM_SETTITLEW, bitmap_handle as WPARAM, title.as_ptr() as LPARAM); } pub fn hide(&self) { use winapi::um::commctrl::{TTM_POP}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); wh::send_message(handle, TTM_POP, 0, 0); } pub fn count(&self) -> usize { use 
winapi::um::commctrl::{TTM_GETTOOLCOUNT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); wh::send_message(handle, TTM_GETTOOLCOUNT, 0, 0) as usize } pub fn set_delay_time(&self, delay: Option<u16>) { use winapi::um::commctrl::{TTDT_INITIAL, TTM_SETDELAYTIME}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let value = match delay { Some(d) => d & 0xFFFF, None => u16::max_value() & 0xFFFF, }; wh::send_message(handle, TTM_SETDELAYTIME, TTDT_INITIAL as WPARAM, value as LPARAM); } pub fn delay_time(&self) -> u16 { use winapi::um::commctrl::{TTDT_INITIAL, TTM_GETDELAYTIME}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); wh::send_message(handle, TTM_GETDELAYTIME, TTDT_INITIAL as WPARAM, 0) as u16 } pub fn set_enabled(&self, v: bool) { use winapi::um::commctrl::TTM_ACTIVATE; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); wh::send_message(handle, TTM_ACTIVATE, v as WPARAM, 0); } pub fn register<'a, W: Into<ControlHandle>>(&self, owner: W, text: &'a str) { use winapi::um::commctrl::{TTM_ADDTOOLW, TTTOOLINFOW, TTF_IDISHWND, TTF_SUBCLASS}; use winapi::shared::{basetsd::UINT_PTR, windef::RECT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let owner = owner.into(); let mut text = to_utf16(text); let owner_handle = { if owner.blank() { panic!(NOT_BOUND); } owner.hwnd().expect(BAD_HANDLE) }; let tool = TTTOOLINFOW { cbSize: mem::size_of::<TTTOOLINFOW>() as UINT, uFlags: TTF_IDISHWND | TTF_SUBCLASS, hwnd: owner_handle, uId: owner_handle as UINT_PTR, rect: RECT { left: 0, top: 0, right: 0, bottom: 0 }, hinst: ptr::null_mut(), lpszText: text.as_mut_ptr(), lParam: 0, lpReserved: ptr::null_mut() }; let tool_ptr = &tool as *const TTTOOLINFOW; wh::send_message(handle, TTM_ADDTOOLW, 0, tool_ptr as LPARAM); } pub fn register_callback<W: Into<ControlHandle>>(&self, owner: W) { use winapi::um::commctrl::{TTM_ADDTOOLW, TTTOOLINFOW, TTF_IDISHWND, TTF_SUBCLASS, LPSTR_TEXTCALLBACKW}; use 
winapi::shared::{basetsd::UINT_PTR, windef::RECT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let owner = owner.into(); let owner_handle = { if owner.blank() { panic!(NOT_BOUND); } owner.hwnd().expect(BAD_HANDLE) }; let tool = TTTOOLINFOW { cbSize: mem::size_of::<TTTOOLINFOW>() as UINT, uFlags: TTF_IDISHWND | TTF_SUBCLASS, hwnd: owner_handle, uId: owner_handle as UINT_PTR, rect: RECT { left: 0, top: 0, right: 0, bottom: 0 }, hinst: ptr::null_mut(), lpszText: LPSTR_TEXTCALLBACKW, lParam: 0, lpReserved: ptr::null_mut() }; let tool_ptr = &tool as *const TTTOOLINFOW; wh::send_message(handle, TTM_ADDTOOLW, 0, tool_ptr as LPARAM); } pub fn unregister<W: Into<ControlHandle>>(&self, owner: W) { use winapi::um::commctrl::{TTM_DELTOOLW, TTTOOLINFOW, TTF_IDISHWND, TTF_SUBCLASS}; use winapi::shared::{basetsd::UINT_PTR, windef::RECT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let owner = owner.into(); let owner_handle = { if owner.blank() { panic!(NOT_BOUND); } owner.hwnd().expect(BAD_HANDLE) }; let tool = TTTOOLINFOW { cbSize: mem::size_of::<TTTOOLINFOW>() as UINT, uFlags: TTF_IDISHWND | TTF_SUBCLASS, hwnd: owner_handle, uId: owner_handle as UINT_PTR, rect: RECT { left: 0, top: 0, right: 0, bottom: 0 }, hinst: ptr::null_mut(), lpszText: ptr::null_mut(), lParam: 0, lpReserved: ptr::null_mut() }; let tool_ptr = &tool as *const TTTOOLINFOW; wh::send_message(handle, TTM_DELTOOLW, 0, tool_ptr as LPARAM); } pub fn class_name(&self) -> &'static str { winapi::um::commctrl::TOOLTIPS_CLASS } pub fn flags(&self) -> u32 { 0 } pub fn forced_flags(&self) -> u32 { use winapi::um::winuser::{WS_POPUP}; use winapi::um::commctrl::{TTS_ALWAYSTIP, TTS_NOPREFIX}; WS_POPUP | TTS_ALWAYSTIP | TTS_NOPREFIX } } impl Drop for Tooltip { fn drop(&mut self) { self.handle.destroy(); } } pub struct TooltipBuilder<'a> { title: Option<&'a str>, ico: Option<&'a Icon>, default_ico: Option<TooltipIcon>, register: Vec<(ControlHandle, &'a str)>, register_cb: Vec<ControlHandle>, 
} impl<'a> TooltipBuilder<'a> { pub fn register<W: Into<ControlHandle>>(mut self, widget: W, text: &'a str) -> TooltipBuilder<'a> { self.register.push((widget.into(), text)); self } pub fn register_callback<W: Into<ControlHandle>>(mut self, widget: W) -> TooltipBuilder<'a> { self.register_cb.push(widget.into()); self } pub fn decoration(mut self, title: Option<&'a str>, ico: Option<&'a Icon>) -> TooltipBuilder<'a> { self.title = title; self.ico = ico; self } pub fn default_decoration(mut self, title: Option<&'a str>, ico: Option<TooltipIcon>) -> TooltipBuilder<'a> { self.title = title; self.default_ico = ico; self } pub fn build(self, tooltip: &mut Tooltip) -> Result<(), NwgError> { *tooltip = Default::default(); tooltip.handle = ControlBase::build_hwnd() .class_name(tooltip.class_name()) .forced_flags(tooltip.forced_flags()) .flags(tooltip.flags()) .build()?; if self.title.is_some() || self.ico.is_some() || self.default_ico.is_some() { let title = self.title.unwrap_or(""); match (self.ico, self.default_ico) { (Some(ico), None) | (Some(ico), _) => tooltip.set_decoration(title, ico), (None, Some(ico)) => tooltip.set_default_decoration(title, ico), (None, None) => tooltip.set_default_decoration(title, TooltipIcon::None), } } for (handle, text) in self.register { tooltip.register(&handle, text); } for handle in self.register_cb { tooltip.register_callback(&handle); } Ok(()) } }
use winapi::shared::minwindef::{UINT, LPARAM, WPARAM}; use winapi::um::winnt::WCHAR; use crate::win32::window_helper as wh; use crate::win32::base_helper::{check_hwnd, to_utf16, from_utf16}; use crate::{Icon, NwgError}; use super::{ControlBase, ControlHandle}; use std::{mem, ptr}; const NOT_BOUND: &'static str = "Tooltip is not yet bound to a winapi object"; const BAD_HANDLE: &'static str = "INTERNAL ERROR: Tooltip handle is not HWND!"; #[derive(Copy, Clone, Debug)] pub enum TooltipIcon { None, Info, Warning, Error, InfoLarge, WarningLarge, ErrorLarge } /** Tooltips appear automatically, or pop up, when the user pauses the mouse pointer over a tool or some other UI element. The tooltip appears near the pointer and disappears when the user clicks a mouse button, moves the pointer away from the tool, or simply waits for a few seconds. A tooltip can be applied to multiple controls, each with their own custom text. This is done/undone using the `register`/`unregister` functions. So do not think as Tooltip as a standalone toolip, but more like a manager. A tooltip can support static text using `register` and dynamic text using `register_callback`. 
Tooltip requires the `tooltip` features Example: ```rust use native_windows_gui as nwg; /// Building a tooltip and add tooltips at the same time fn build_tooltip(tt: &mut nwg::Tooltip, btn1: &nwg::Button, btn2: &nwg::Button) { nwg::Tooltip::builder() .register(btn1, "A test button") .register_callback(btn2) .build(tt); } /// Adding/Updating a tooltip after the initial tooltip creation fn add_tooltip(btn: &nwg::Button, tt: &nwg::Tooltip) { tt.register(btn, "This is a button!"); } /// Dynamic tooltip callback setup fn add_dynamic_tooltip(tt: &nwg::Tooltip, btn: &nwg::Button) { tt.register_callback(btn); } struct GuiStruct { // Skipping other members tt: nwg::Tooltip, button: nwg::Button } impl GuiStruct { /// The dynamic tooltip callback, triggered by the event loop fn events_callback(&self, evt: nwg::Event, evt_data: &nwg::EventData, handle: nwg::ControlHandle) { match evt { nwg::Event::OnTooltipText => { // Compare the handle to check which control will display the tooltip if &handle == &self.button { let tooltip_data = evt_data.on_tooltip_text(); tooltip_data.set_text(&format!("Button text: \"{}\"", self.button.text())); } }, _ => {} } } } ``` */ #[derive(Default, PartialEq, Eq)] pub struct Tooltip { pub handle: ControlHandle } impl Tooltip { pub fn builder<'a>() -> TooltipBuilder<'a> { TooltipBuilder { title: None, ico: None, default_ico: None, register: Vec::new(), register_cb: Vec::new() } } /* Work with Comclt32.dll version 6.0. Should be implemented eventually Return the icon if it is a icon defined in TooltipIcon. If not, returns `None`. 
pub fn default_icon(&self) -> Option<TooltipIcon> { use winapi::um::commctrl::{TTGETTITLE, TTM_GETTITLE}; use winapi::um::commctrl::{TTI_NONE, TTI_INFO, TTI_WARNING, TTI_ERROR, TTI_INFO_LARGE, TTI_WARNING_LARGE, TTI_ERROR_LARGE}; if self.handle.blank() { panic!(NOT_BOUND); } let handle = self.handle.hwnd().expect(BAD_HANDLE); let mut tt = TTGETTITLE { dwSize: mem::size_of::<TTGETTITLE>() as DWORD, uTitleBitmap: 0, cch: 0, pszTitle: ptr::null_mut() }; let tt_ptr = &mut tt as *mut TTGETTITLE; wh::send_message(handle, TTM_GETTITLE, 0, tt_ptr as LPARAM); println!("{:?}", tt.uTitleBitmap); match tt.uTitleBitmap as usize { TTI_NONE => Some(TooltipIcon::None), TTI_INFO => Some(TooltipIcon::Info), TTI_WARNING => Some(TooltipIcon::Warning), TTI_ERROR => Some(TooltipIcon::Error), TTI_INFO_LARGE => Some(TooltipIcon::InfoLarge), TTI_WARNING_LARGE => Some(TooltipIcon::WarningLarge), TTI_ERROR_LARGE => Some(TooltipIcon::ErrorLarge), _ => None } } */ pub fn text(&self, owner: &ControlHandle, buffer_size: Option<usize>) -> String { use winapi::um::commctrl::{TTM_GETTEXTW, TTTOOLINFOW, TTF_IDISHWND, TTF_SUBCLASS}; use winapi::shared::{basetsd::UINT_PTR, windef::RECT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let owner_handle = { if owner.blank() { panic!(NOT_BOUND); } owner.hwnd().expect(BAD_HANDLE) }; let buffer_size = buffer_size.unwrap_or(200); let mut text: Vec<WCHAR> = Vec::with_capacity(buffer_size); unsafe { text.set_len(buffer_size); } let mut tool = TTTOOLINFOW { cbSize: mem::size_of::<TTTOOLINFOW>() as UINT, uFlags: TTF_IDISHWND | TTF_SUBCLASS, hwnd: owner_handle, uId: owner_handle as UINT_PTR, rect: RECT { left: 0, top: 0, right: 0, bottom: 0 }, hinst: ptr::null_mut(), lpszText: text.as_mut_ptr(), lParam: 0, lpReserved: ptr::null_mut() }; let tool_ptr = &mut tool as *mut TTTOOLINFOW; wh::send_message(handle, TTM_GETTEXTW, 0, tool_ptr as LPARAM); from_utf16(&text) } pub fn set_text<'a>(&self, owner: &ControlHandle, text: &'a str) { use 
winapi::um::commctrl::{TTM_UPDATETIPTEXTW, TTTOOLINFOW, TTF_IDISHWND, TTF_SUBCLASS}; use winapi::shared::{basetsd::UINT_PTR, windef::RECT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let mut text = to_utf16(text); let owner_handle = { if owner.blank() { panic!(NOT_BOUND); } owner.hwnd().expect(BAD_HANDLE) }; let tool = TTTOOLINFOW { cbSize: mem::size_of::<TTTOOLINFOW>() as UINT, uFlags: TTF_IDISHWND | TTF_SUBCLASS, hwnd: owner_handle, uId: owner_handle as UINT_PTR, rect: RECT { left: 0, top: 0, right: 0, bottom: 0 }, hinst: ptr::null_mut(), lpszText: text.as_mut_ptr(), lParam: 0, lpReserved: ptr::null_mut() }; let tool_ptr = &tool as *const TTTOOLINFOW; wh::send_message(handle, TTM_UPDATETIPTEXTW, 0, tool_ptr as LPARAM); } pub fn set_decoration<'a>(&self, title: &'a str, ico: &Icon) { use winapi::um::commctrl::{TTM_SETTITLEW}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let title = to_utf16(title); wh::send_message(handle, TTM_SETTITLEW, ico.handle as WPARAM, title.as_ptr() as LPARAM); } pub fn set_default_decoration<'a>(&self, title: &'a str, icon: TooltipIcon) { use winapi::um::commctrl::{TTM_SETTITLEW}; use winapi::um::commctrl::{TTI_NONE, TTI_INFO, TTI_WARNING, TTI_ERROR, TTI_INFO_LARGE, TTI_WARNING_LARGE, TTI_ERROR_LARGE}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let bitmap_handle = match icon { TooltipIcon::None => TTI_NONE, TooltipIcon::Info => TTI_INFO, TooltipIcon::Warning => TTI_WARNING, TooltipIcon::Error => TTI_ERROR, TooltipIcon::InfoLarge => TTI_INFO_LARGE, TooltipIcon::WarningLarge => TTI_WARNING_LARGE, TooltipIcon::ErrorLarge => TTI_ERROR_LARGE }; let title = to_utf16(title); wh::send_message(handle, TTM_SETTITLEW, bitmap_handle as WPARAM, title.as_ptr() as LPARAM); } pub fn hide(&self) { use winapi::um::commctrl::{TTM_POP}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); wh::send_message(handle, TTM_POP, 0, 0); } pub fn count(&self) -> usize { use 
winapi::um::commctrl::{TTM_GETTOOLCOUNT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); wh::send_message(handle, TTM_GETTOOLCOUNT, 0, 0) as usize } pub fn set_delay_time(&self, delay: Option<u16>) { use winapi::um::commctrl::{TTDT_INITIAL, TTM_SETDELAYTIME}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let value = match delay { Some(d) => d & 0xFFFF, None => u16::max_value() & 0xFFFF, }; wh::send_message(handle, TTM_SETDELAYTIME, TTDT_INITIAL as WPARAM, value as LPARAM); } pub fn delay_time(&self) -> u16 { use winapi::um::commctrl::{TTDT_INITIAL, TTM_GETDELAYTIME}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); wh::send_message(handle, TTM_GETDELAYTIME, TTDT_INITIAL as WPARAM, 0) as u16 } pub fn set_enabled(&self, v: bool) { use winapi::um::commctrl::TTM_ACTIVATE; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); wh::send_message(handle, TTM_ACTIVATE, v as WPARAM, 0); } pub fn register<'a, W: Into<ControlHandle>>(&self, owner: W, text: &'a str) { use winapi::um::commctrl::{TTM_ADDTOOLW, TTTOOLINFOW, TTF_IDISHWND, TTF_SUBCLASS}; use winapi::shared::{basetsd::UINT_PTR, windef::RECT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let owner = owner.into(); let mut text = to_utf16(text); let owner_handle = { if owner.blank() { panic!(NOT_BOUND); } owner.hwnd().expect(BAD_HANDLE) }; let tool = TTTOOLINFOW { cbSize: mem::size_of::<TTTOOLINFOW>() as UINT, uFlags: TTF_IDISHWND | TTF_SUBCLASS, hwnd: owner_handle, uId: owner_handle as UINT_PTR, rect: RECT { left: 0, top: 0, right: 0, bottom: 0 }, hinst: ptr::null_mut(), lpszText: text.as_mut_ptr(), lParam: 0, lpReserved: ptr::null_mut() }; let tool_ptr = &tool as *const TTTOOLINFOW; wh::send_message(handle, TTM_ADDTOOLW, 0, tool_ptr as LPARAM); } pub fn register_callback<W: Into<ControlHandle>>(&self, owner: W) { use winapi::um::commctrl::{TTM_ADDTOOLW, TTTOOLINFOW, TTF_IDISHWND, TTF_SUBCLASS, LPSTR_TEXTCALLBACKW}; use 
winapi::shared::{basetsd::UINT_PTR, windef::RECT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let owner = owner.into(); let owner_handle = { if owner.blank() { panic!(NOT_BOUND); } owner.hwnd().expect(BAD_HANDLE) }; let tool = TTTOOLINFOW { cbSize: mem::size_of::<TTTOOLINFOW>() as UINT, uFlags: TTF_IDISHWND | TTF_SUBCLASS, hwnd: owner_handle, uId: owner_handle as UINT_PTR, rect: RECT { left: 0, top: 0, right: 0, bottom: 0 }, hinst: ptr::null_mut(), lpszText: LPSTR_TEXTCALLBACKW, lParam: 0, lpReserved: ptr::null_mut() }; let tool_ptr = &tool as *const TTTOOLINFOW; wh::send_message(handle, TTM_ADDTOOLW, 0, tool_ptr as LPARAM); } pub fn unregister<W: Into<ControlHandle>>(&self, owner: W) { use winapi::um::commctrl::{TTM_DELTOOLW, TTTOOLINFOW, TTF_IDISHWND, TTF_SUBCLASS}; use winapi::shared::{basetsd::UINT_PTR, windef::RECT}; let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE); let owner = owner.into(); let owner_handle = { if owner.blank() { panic!(NOT_BOUND); } owner.hwnd().expect(BAD_HANDLE) }; let tool = TTTOOLINFOW { cbSize: mem::size_of::<TTTOOLINFOW>() as UINT, uFlags: TTF_IDISHWND | TTF_SUBCLASS, hwnd: owner_handle, uId: owner_handle as UINT_PTR, rect: RECT { left: 0, top: 0, right: 0, bottom: 0 }, hinst: ptr::null_mut(), lpszText: ptr::null_mut(), lParam: 0, lpReserved: ptr::null_mut() }; let tool_ptr = &tool as *const TTTOOLINFOW; wh::send_message(handle, TTM_DELTOOLW, 0, tool_ptr as LPARAM); } pub fn class_name(&self) -> &'static str { winapi::um::commctrl::TOOLTIPS_CLASS } pub fn flags(&self) -> u32 { 0 } pub fn forced_flags(&self) -> u32 { use winapi::um::winuser::{WS_POPUP}; use winapi::um::commctrl::{TTS_ALWAYSTIP, TTS_NOPREFIX}; WS_POPUP | TTS_ALWAYSTIP | TTS_NOPREFIX } } impl Drop for Tooltip { fn drop(&mut self) { self.handle.destroy(); } } pub struct TooltipBuilder<'a> { title: Option<&'a str>, ico: Option<&'a Icon>, default_ico: Option<TooltipIcon>, register: Vec<(ControlHandle, &'a str)>, register_cb: Vec<ControlHandle>, 
} impl<'a> TooltipBuilder<'a> { pub fn register<W: Into<ControlHandle>>(mut self, widget: W, text: &'a str) -> TooltipBuilder<'a> { self.register.push((widget.into(), text)); self } pub fn register_callback<W: Into<ControlHandle>>(mut self, widget: W) -> TooltipBuilder<'a> { self.register_cb.push(widget.into()); self } pub fn decoration(mut self, title: Option<&'a str>, ico: Option<&'a Icon>) -> TooltipBuilder<'a> { self.title = title; self.ico = ico; self } pub fn default_decoration(mut self, title: Option<&'a str>, ico: Option<TooltipIcon>) -> TooltipBuilder<'a> { self.title = title; self.default_ico = ico; self } pub fn build(self, tooltip: &mut Tooltip) -> Result<(), NwgError> { *tooltip = Default::default(); tooltip.handle = ControlBase::build_hwnd() .class_name(tooltip.class_name()) .forced_flags(tooltip.forced_flags()) .flags(tooltip.flags()) .build()?; if self.title.is_some() || self.ico.is_some() || self.default_ico.is_some() { let title = self.title.unwrap_or("");
} for (handle, text) in self.register { tooltip.register(&handle, text); } for handle in self.register_cb { tooltip.register_callback(&handle); } Ok(()) } }
match (self.ico, self.default_ico) { (Some(ico), None) | (Some(ico), _) => tooltip.set_decoration(title, ico), (None, Some(ico)) => tooltip.set_default_decoration(title, ico), (None, None) => tooltip.set_default_decoration(title, TooltipIcon::None), }
if_condition
[ { "content": "pub fn check_hwnd(handle: &ControlHandle, not_bound: &str, bad_handle: &str) -> HWND {\n\n use winapi::um::winuser::IsWindow;\n\n\n\n if handle.blank() { panic!(\"{}\", not_bound); }\n\n match handle.hwnd() {\n\n Some(hwnd) => match unsafe { IsWindow(hwnd) } {\n\n 0 => {...
Rust
rust/xaynet-analytics/src/data_combination/data_points/was_active_each_past_period.rs
xaynetwork/xaynet
ae6a7b2127599b8ac9cd663a5259d2ff01392fe0
use chrono::{DateTime, Utc}; use std::collections::BTreeMap; use crate::{ data_combination::data_points::data_point::{ CalcWasActiveEachPastPeriod, CalculateDataPoints, DataPointMetadata, }, database::analytics_event::data_model::AnalyticsEvent, }; impl CalcWasActiveEachPastPeriod { pub fn new( metadata: DataPointMetadata, events: Vec<AnalyticsEvent>, period_thresholds: Vec<DateTime<Utc>>, ) -> Self { Self { metadata, events, period_thresholds, } } fn group_timestamps_by_period_threshold(&self) -> BTreeMap<DateTime<Utc>, Vec<DateTime<Utc>>> { let mut timestamps_by_period_threshold = BTreeMap::new(); for these_thresholds in self.period_thresholds.windows(2) { let newer_threshold = these_thresholds.first().unwrap(); let older_threshold = these_thresholds.last().unwrap(); let timestamps: Vec<DateTime<Utc>> = self .events .iter() .filter(|event| { event.timestamp < *newer_threshold && event.timestamp > *older_threshold }) .map(|event| event.timestamp) .collect(); timestamps_by_period_threshold.insert(*newer_threshold, timestamps); } timestamps_by_period_threshold } } impl CalculateDataPoints for CalcWasActiveEachPastPeriod { fn metadata(&self) -> DataPointMetadata { self.metadata } fn calculate(&self) -> Vec<u32> { let timestamps_by_period_threshold = self.group_timestamps_by_period_threshold(); timestamps_by_period_threshold .values() .rev() .map(|timestamps| !timestamps.is_empty() as u32) .collect::<Vec<u32>>() } } #[cfg(test)] mod tests { use chrono::{DateTime, Duration, Utc}; use super::*; use crate::{ data_combination::data_points::data_point::{Period, PeriodUnit}, database::analytics_event::data_model::AnalyticsEventType, }; #[test] fn test_calculate_no_events_in_a_period() { let end_period = DateTime::parse_from_rfc3339("2021-02-02T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period::new(PeriodUnit::Days, 1), end_period); let period_thresholds = vec![end_period, end_period - Duration::days(1)]; let 
was_active_each_past_period = CalcWasActiveEachPastPeriod::new(metadata, Vec::new(), period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![0]); } #[test] fn test_calculate_one_event_in_a_period() { let end_period = DateTime::parse_from_rfc3339("2021-03-03T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period::new(PeriodUnit::Days, 1), end_period); let events = vec![AnalyticsEvent::new( "test1", AnalyticsEventType::UserAction, end_period - Duration::hours(12), None, )]; let period_thresholds = vec![end_period, end_period - Duration::days(1)]; let was_active_each_past_period = CalcWasActiveEachPastPeriod::new(metadata, events, period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![1]); } #[test] fn test_calculate_no_events_in_two_periods() { let end_period = DateTime::parse_from_rfc3339("2021-04-04T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period::new(PeriodUnit::Days, 2), end_period); let period_thresholds = vec![ end_period, end_period - Duration::days(1), end_period - Duration::days(2), ]; let was_active_each_past_period = CalcWasActiveEachPastPeriod::new(metadata, Vec::new(), period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![0, 0]); } #[test] fn test_calculate_one_event_in_one_period_zero_in_another() { let end_period = DateTime::parse_from_rfc3339("2021-05-05T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period::new(PeriodUnit::Days, 2), end_period); let events = vec![AnalyticsEvent::new( "test1", AnalyticsEventType::UserAction, end_period - Duration::hours(12), None, )]; let period_thresholds = vec![ end_period, end_period - Duration::days(1), end_period - Duration::days(2), ]; let was_active_each_past_period = CalcWasActiveEachPastPeriod::new(metadata, events, period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![1, 0]); } #[test] fn 
test_calculate_two_events_in_one_period_zero_in_another() { let end_period = DateTime::parse_from_rfc3339("2021-06-06T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period::new(PeriodUnit::Days, 2), end_period); let events = vec![ AnalyticsEvent::new( "test1", AnalyticsEventType::UserAction, end_period - Duration::hours(12), None, ), AnalyticsEvent::new( "test2", AnalyticsEventType::AppError, end_period - Duration::hours(15), None, ), ]; let period_thresholds = vec![ end_period, end_period - Duration::days(1), end_period - Duration::days(2), ]; let was_active_each_past_period = CalcWasActiveEachPastPeriod::new(metadata, events, period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![1, 0]); } #[test] fn test_calculate_two_periods_with_one_event_each() { let end_period = DateTime::parse_from_rfc3339("2021-07-07T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period::new(PeriodUnit::Days, 2), end_period); let events = vec![ AnalyticsEvent::new( "test1", AnalyticsEventType::UserAction, end_period - Duration::hours(12), None, ), AnalyticsEvent::new( "test2", AnalyticsEventType::AppError, end_period - Duration::hours(36), None, ), ]; let period_thresholds = vec![ end_period, end_period - Duration::days(1), end_period - Duration::days(2), ]; let was_active_each_past_period = CalcWasActiveEachPastPeriod::new(metadata, events, period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![1, 1]); } }
use chrono::{DateTime, Utc}; use std::collections::BTreeMap; use crate::{ data_combination::data_points::data_point::{ CalcWasActiveEachPastPeriod, CalculateDataPoints, DataPointMetadata, }, database::analytics_event::data_model::AnalyticsEvent, }; impl CalcWasActiveEachPastPeriod { pub fn new( metadata: DataPointMetadata, events: Vec<AnalyticsEvent>, period_thresholds: Vec<DateTime<Utc>>, ) -> Self { Self { metadata, events, period_thresholds, } } fn group_timestamps_by_period_threshold(&self) -> BTreeMap<DateTime<Utc>, Vec<DateTime<Utc>>> { let mut timestamps_by_period_threshold = BTreeMap::new(); for these_thresholds in self.period_thresholds.windows(2) { let newer_threshold = these_thresholds.first().unwrap(); let older_threshold = these_thresholds.last().unwrap(); let timestamps: Vec<DateTime<Utc>> = self .events .iter() .filter(|event| { event.timestamp < *newer_threshold && event.timestamp > *older_threshold }) .map(|event| event.timestamp) .collect(); timestamps_by_period_threshold.insert(*newer_threshold, timestamps); } timestamps_by_period_threshold } } impl CalculateDataPoints for CalcWasActiveEachPastPeriod { fn metadata(&self) -> DataPointMetadata { self.metadata } fn calculate(&self) -> Vec<u32> { let timestamps_by_period_threshold = self.group_timestamps_by_period_threshold(); timestamps_by_period_threshold .values() .rev() .map(|timestamps| !timestamps.is_empty() as u32) .collect::<Vec<u32>>() } } #[cfg(test)] mod tests { use chrono::{DateTime, Duration, Utc}; use super::*; use crate::{ data_combination::data_points::data_point::{Period, PeriodUnit}, database::analytics_event::data_model::AnalyticsEventType, }; #[test] fn test_calculate_no_events_in_a_period() { let end_period = DateTime::parse_from_rfc3339("2021-02-02T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period::new(PeriodUnit::Days, 1), end_period); let period_thresholds = vec![end_period, end_period - Duration::days(1)]; let 
was_active_each_past_period = CalcWasActiveEachPastPeriod::new(metadata, Vec::new(), period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![0]); } #[test] fn test_calculate_one_event_in_a_period() { let end_period = DateTime::parse_from_rfc3339("2021-03-03T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period::new(PeriodUnit::Days, 1), end_period); let events = vec![AnalyticsEvent::new( "test1", AnalyticsEventType::UserAction, end_period - Duration::hours(12), None, )]; let period_thresholds = vec![end_period, end_period - Duration::days(1)]; let was_active_each_past_period = CalcWasActiveEachPastPeriod::new(metadata, events, period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![1]); } #[test] fn test_calculate_no_events_in_two_periods() { let end_period = DateTime::parse_from_rfc3339("2021-04-04T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period
ch_past_period = CalcWasActiveEachPastPeriod::new(metadata, Vec::new(), period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![0, 0]); } #[test] fn test_calculate_one_event_in_one_period_zero_in_another() { let end_period = DateTime::parse_from_rfc3339("2021-05-05T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period::new(PeriodUnit::Days, 2), end_period); let events = vec![AnalyticsEvent::new( "test1", AnalyticsEventType::UserAction, end_period - Duration::hours(12), None, )]; let period_thresholds = vec![ end_period, end_period - Duration::days(1), end_period - Duration::days(2), ]; let was_active_each_past_period = CalcWasActiveEachPastPeriod::new(metadata, events, period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![1, 0]); } #[test] fn test_calculate_two_events_in_one_period_zero_in_another() { let end_period = DateTime::parse_from_rfc3339("2021-06-06T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period::new(PeriodUnit::Days, 2), end_period); let events = vec![ AnalyticsEvent::new( "test1", AnalyticsEventType::UserAction, end_period - Duration::hours(12), None, ), AnalyticsEvent::new( "test2", AnalyticsEventType::AppError, end_period - Duration::hours(15), None, ), ]; let period_thresholds = vec![ end_period, end_period - Duration::days(1), end_period - Duration::days(2), ]; let was_active_each_past_period = CalcWasActiveEachPastPeriod::new(metadata, events, period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![1, 0]); } #[test] fn test_calculate_two_periods_with_one_event_each() { let end_period = DateTime::parse_from_rfc3339("2021-07-07T00:00:00-00:00") .unwrap() .with_timezone(&Utc); let metadata = DataPointMetadata::new(Period::new(PeriodUnit::Days, 2), end_period); let events = vec![ AnalyticsEvent::new( "test1", AnalyticsEventType::UserAction, end_period - Duration::hours(12), None, ), AnalyticsEvent::new( 
"test2", AnalyticsEventType::AppError, end_period - Duration::hours(36), None, ), ]; let period_thresholds = vec![ end_period, end_period - Duration::days(1), end_period - Duration::days(2), ]; let was_active_each_past_period = CalcWasActiveEachPastPeriod::new(metadata, events, period_thresholds); assert_eq!(was_active_each_past_period.calculate(), vec![1, 1]); } }
::new(PeriodUnit::Days, 2), end_period); let period_thresholds = vec![ end_period, end_period - Duration::days(1), end_period - Duration::days(2), ]; let was_active_ea
function_block-random_span
[ { "content": "/// Create an [`EventPublisher`]/[`EventSubscriber`] pair with default\n\n/// values similar to those produced in practice when instantiating a\n\n/// new coordinator.\n\npub fn new_event_channels() -> (EventPublisher, EventSubscriber) {\n\n let keys = EncryptKeyPair::generate();\n\n let par...
Rust
client-core/src/service/sync_state_service.rs
Itchibon777/chain
48ddd62c467795757620a5bda011fe0a287b887d
use chain_core::common::H256; use client_common::tendermint::lite; use client_common::{ErrorKind, Result, ResultExt, Storage}; use parity_scale_codec::{Decode, Encode}; use tendermint::validator; const KEYSPACE: &str = "core_wallet_sync"; #[derive(Debug, Encode, Decode)] pub struct SyncState { pub last_block_height: u64, pub last_app_hash: String, pub trusted_state: lite::TrustedState, pub staking_root: H256, } impl SyncState { pub fn genesis(genesis_validators: Vec<validator::Info>, staking_root: H256) -> SyncState { SyncState { last_block_height: 0, last_app_hash: "".to_owned(), trusted_state: lite::TrustedState::genesis(genesis_validators), staking_root, } } } pub fn load_sync_state<S: Storage>(storage: &S, name: &str) -> Result<Option<SyncState>> { storage.load(KEYSPACE, name) } pub fn save_sync_state<S: Storage>(storage: &S, name: &str, state: &SyncState) -> Result<()> { storage.save(KEYSPACE, name, state) } pub fn delete_sync_state<S: Storage>(storage: &S, name: &str) -> Result<()> { storage.delete(KEYSPACE, name)?; Ok(()) } #[derive(Debug, Default, Clone)] pub struct SyncStateService<S> where S: Storage, { storage: S, } impl<S> SyncStateService<S> where S: Storage, { #[inline] pub fn new(storage: S) -> Self { Self { storage } } pub fn save_global_state(&self, name: &str, state: &SyncState) -> Result<()> { self.storage.set(KEYSPACE, name, state.encode()).map(|_| ()) } #[inline] pub fn delete_global_state(&self, name: &str) -> Result<()> { self.storage.delete(KEYSPACE, name).map(|_| ()) } #[inline] pub fn clear(&self) -> Result<()> { self.storage.clear(KEYSPACE) } pub fn get_global_state(&self, name: &str) -> Result<Option<SyncState>> { if let Some(bytes) = self.storage.get(KEYSPACE, name)? 
{ Ok(Some(SyncState::decode(&mut bytes.as_slice()).chain( || { ( ErrorKind::DeserializationError, format!( "Unable to deserialize global state for wallet with name {}", name ), ) }, )?)) } else { Ok(None) } } } #[cfg(test)] mod tests { use parity_scale_codec::{Decode, Encode}; use tendermint::{block::Height, lite}; use super::{lite::TrustedState, SyncState, SyncStateService}; use client_common::storage::MemoryStorage; use test_common::block_generator::{BlockGenerator, GeneratorClient}; #[test] fn check_flow() { let storage = MemoryStorage::default(); let global_state_service = SyncStateService::new(storage); let name = "name"; assert!(global_state_service .get_global_state(name) .unwrap() .is_none()); assert!(global_state_service .save_global_state( name, &SyncState { last_block_height: 5, last_app_hash: "3891040F29C6A56A5E36B17DCA6992D8F91D1EAAB4439D008D19A9D703271D3C" .to_string(), trusted_state: TrustedState::genesis(vec![]), staking_root: [0u8; 32], } ) .is_ok()); assert_eq!( 5, global_state_service .get_global_state(name) .unwrap() .unwrap() .last_block_height ); assert_eq!( "3891040F29C6A56A5E36B17DCA6992D8F91D1EAAB4439D008D19A9D703271D3C".to_string(), global_state_service .get_global_state(name) .unwrap() .unwrap() .last_app_hash ); assert!(global_state_service.clear().is_ok()); assert!(global_state_service .get_global_state(name) .unwrap() .is_none()); } #[test] fn check_sync_state_serialization() { let c = GeneratorClient::new(BlockGenerator::one_node()); { let mut gen = c.gen.write().unwrap(); gen.gen_block(&[]); gen.gen_block(&[]); } let gen = c.gen.read().unwrap(); let header = gen.signed_header(Height::default()); let trusted_state = lite::TrustedState::new( lite::SignedHeader::new(header.clone(), header.header.clone()), gen.validators.clone(), ) .into(); let mut state = SyncState::genesis(vec![], [0u8; 32]); state.last_block_height = 1; state.last_app_hash = "0F46E113C21F9EACB26D752F9523746CF8D47ECBEA492736D176005911F973A5".to_owned(); 
state.trusted_state = trusted_state; let bytes = state.encode(); let state2 = SyncState::decode(&mut bytes.as_slice()).unwrap(); assert_eq!(bytes, state2.encode()); } }
use chain_core::common::H256; use client_common::tendermint::lite; use client_common::{ErrorKind, Result, ResultExt, Storage}; use parity_scale_codec::{Decode, Encode}; use tendermint::validator; const KEYSPACE: &str = "core_wallet_sync"; #[derive(Debug, Encode, Decode)] pub struct SyncState { pub last_block_height: u64, pub last_app_hash: String, pub trusted_state: lite::TrustedState, pub staking_root: H256, } impl SyncState { pub fn genesis(genesis_validators: Vec<validator::Info>, staking_root: H256) -> SyncState { SyncState { last_block_height: 0, last_app_hash: "".to_owned(), trusted_state: lite::TrustedState::genesis(genesis_validators), staking_root, } } } pub fn load_sync_state<S: Storage>(storage: &S, name: &str) -> Result<Option<SyncState>> { storage.load(KEYSPACE, name) } pub fn save_sync_state<S: Storage>(storage: &S, name: &str, state: &SyncState) -> Result<()> { storage.save(KEYSPACE, name, state) } pub fn delete_sync_state<S: Storage>(storage: &S, name: &str) -> Result<()> { storage.delete(KEYSPACE, name)?; Ok(()) } #[derive(Debug, Default, Clone)] pub struct SyncStateService<S> where S: Storage, { storage: S, } impl<S> SyncStateService<S> where S: Storage, { #[inline] pub fn new(storage: S) -> Self { Self { storage } } pub fn save_global_state(&self, name: &str, state: &SyncState) -> Result<()> { self.storage.set(KEYSPACE, name, state.encode()).map(|_| ()) } #[inline] pub fn delete_global_state(&self, name: &str) -> Result<()> { self.storage.delete(KEYSPACE, name).map(|_| ()) } #[inline] pub fn clear(&self) -> Result<()> { self.storage.clear(KEYSPACE) } pub fn get_global_state(&self, name: &str) -> Result<Option<SyncState>> { if let Some(bytes) = self.storage.get(KEYSPACE, name)? { Ok(
) } else { Ok(None) } } } #[cfg(test)] mod tests { use parity_scale_codec::{Decode, Encode}; use tendermint::{block::Height, lite}; use super::{lite::TrustedState, SyncState, SyncStateService}; use client_common::storage::MemoryStorage; use test_common::block_generator::{BlockGenerator, GeneratorClient}; #[test] fn check_flow() { let storage = MemoryStorage::default(); let global_state_service = SyncStateService::new(storage); let name = "name"; assert!(global_state_service .get_global_state(name) .unwrap() .is_none()); assert!(global_state_service .save_global_state( name, &SyncState { last_block_height: 5, last_app_hash: "3891040F29C6A56A5E36B17DCA6992D8F91D1EAAB4439D008D19A9D703271D3C" .to_string(), trusted_state: TrustedState::genesis(vec![]), staking_root: [0u8; 32], } ) .is_ok()); assert_eq!( 5, global_state_service .get_global_state(name) .unwrap() .unwrap() .last_block_height ); assert_eq!( "3891040F29C6A56A5E36B17DCA6992D8F91D1EAAB4439D008D19A9D703271D3C".to_string(), global_state_service .get_global_state(name) .unwrap() .unwrap() .last_app_hash ); assert!(global_state_service.clear().is_ok()); assert!(global_state_service .get_global_state(name) .unwrap() .is_none()); } #[test] fn check_sync_state_serialization() { let c = GeneratorClient::new(BlockGenerator::one_node()); { let mut gen = c.gen.write().unwrap(); gen.gen_block(&[]); gen.gen_block(&[]); } let gen = c.gen.read().unwrap(); let header = gen.signed_header(Height::default()); let trusted_state = lite::TrustedState::new( lite::SignedHeader::new(header.clone(), header.header.clone()), gen.validators.clone(), ) .into(); let mut state = SyncState::genesis(vec![], [0u8; 32]); state.last_block_height = 1; state.last_app_hash = "0F46E113C21F9EACB26D752F9523746CF8D47ECBEA492736D176005911F973A5".to_owned(); state.trusted_state = trusted_state; let bytes = state.encode(); let state2 = SyncState::decode(&mut bytes.as_slice()).unwrap(); assert_eq!(bytes, state2.encode()); } }
Some(SyncState::decode(&mut bytes.as_slice()).chain( || { ( ErrorKind::DeserializationError, format!( "Unable to deserialize global state for wallet with name {}", name ), ) }, )?)
call_expression
[ { "content": "/// Delete wallet state from storage\n\npub fn delete_wallet_state<S: Storage>(storage: &S, name: &str) -> Result<()> {\n\n storage.delete(KEYSPACE, name)?;\n\n Ok(())\n\n}\n\n\n\n/// Wallet state\n\n#[derive(Debug, Encode, Decode)]\n\npub struct WalletState {\n\n /// UTxO\n\n pub unsp...
Rust
src/postgres_driver/utils.rs
phaer/inspektor
2423173199b233ca8c4d7e6ed89bdf8957b7f432
use crate::postgres_driver::errors::DecoderError; use crate::postgres_driver::message::*; use anyhow::*; use byteorder::{ByteOrder, NetworkEndian}; use bytes::{Buf, BufMut, BytesMut}; use std::collections::HashMap; use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt}; pub async fn decode_init_startup_message<T>(mut conn: T) -> Result<FrontendMessage, DecoderError> where T: AsyncRead + Unpin + AsyncReadExt + AsyncWrite + AsyncWriteExt, { let len = decode_frame_length(&mut conn).await?; let mut buf = BytesMut::new(); buf.resize(len, b'0'); conn.read_exact(&mut buf).await?; let version_number = buf.get_i32(); match version_number { VERSION_SSL => return Ok(FrontendMessage::SslRequest), VERSION_3 => { let mut params = HashMap::new(); while *buf.get(0).unwrap() != 0 { let key = read_cstr(&mut buf).map_err(|_| anyhow!("error while reading key params"))?; let val = read_cstr(&mut buf).map_err(|_| anyhow!("error while reading value params"))?; params.insert(key, val); } return Ok(FrontendMessage::Startup { params: params, version: version_number, }); } _ => { return Err(DecoderError::UnsupporedVersion); } }; } pub async fn decode_frame_length<T>(mut conn: T) -> Result<usize, anyhow::Error> where T: AsyncRead + Unpin, { let mut buf = [0; 4]; conn.read_exact(&mut buf).await?; let frame_len = NetworkEndian::read_u32(&buf) as usize; if frame_len < 4 { return Err(anyhow!("invalid frame length")); } Ok(frame_len - 4) } pub fn read_cstr(buf: &mut BytesMut) -> Result<String, Error> { if let Some(pos) = buf.iter().position(|d| *d == 0) { let str = std::str::from_utf8(&buf[..pos]) .map_err(|_| anyhow!("error while reading cstr"))? 
.to_string(); buf.advance(pos + 1); return Ok(str); } Err(anyhow!("string has not termination deliminiter")) } pub fn write_cstr(buf: &mut BytesMut, val: &[u8]) -> Result<(), anyhow::Error> { if val.contains(&0) { return Err(anyhow!("cstr should not contain 0 value")); } buf.put_slice(val); buf.put_u8(0); Ok(()) } pub async fn decode_password_message<T>(mut conn: T) -> Result<FrontendMessage, anyhow::Error> where T: AsyncRead + AsyncReadExt + Unpin, { let mut buf = [0; 1]; conn.read_exact(&mut buf).await?; if buf[0] != b'p' { return Err(anyhow!("incoming message is not a password message")); } let len = decode_frame_length(&mut conn).await.map_err(|_| { anyhow!("error while decoding frame length while decoding password message") })?; let mut buf = BytesMut::new(); buf.resize(len, b'0'); conn.read_exact(&mut buf).await?; let password = read_cstr(&mut buf).map_err(|err| anyhow!("error while reading password {:?}", err))?; Ok(FrontendMessage::PasswordMessage { password: password }) } #[inline] pub fn write_message<F>(buf: &mut BytesMut, f: F) -> Result<(), anyhow::Error> where F: FnOnce(&mut BytesMut) -> Result<(), anyhow::Error>, { let base = buf.len(); buf.extend_from_slice(&[0; 4]); f(buf)?; let size = (buf.len() - base) as i32; NetworkEndian::write_i32(&mut buf[base..], size); Ok(()) } #[inline] pub fn read_counted_message<F, T>(buf: &mut BytesMut, mut f: F) -> Result<Vec<T>, anyhow::Error> where F: FnMut(&mut BytesMut) -> Result<T, anyhow::Error>, { let len = NetworkEndian::read_i16(buf) as usize; buf.advance(2); let mut result = Vec::with_capacity(len); for _ in 0..len { result.push(f(buf)?); } Ok(result) } #[inline] pub fn write_counted_message<I, T, F>( items: I, mut f: F, buf: &mut BytesMut, ) -> Result<(), anyhow::Error> where I: IntoIterator<Item = T>, F: FnMut(T, &mut BytesMut) -> Result<(), anyhow::Error>, { let base = buf.len(); buf.extend_from_slice(&[0; 2]); let mut count = 0; for item in items { f(item, buf)?; count += 1; } let count = count as i16; 
NetworkEndian::write_i16(&mut buf[base..], count); Ok(()) }
use crate::postgres_driver::errors::DecoderError; use crate::postgres_driver::message::*; use anyhow::*; use byteorder::{ByteOrder, NetworkEndian}; use bytes::{Buf, BufMut, BytesMut}; use std::collections::HashMap; use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt}; pub async fn decode_init_startup_message<T>(mut conn: T) -> Result<FrontendMessage, DecoderError> where T: AsyncRead + Unpin + AsyncReadExt + AsyncWrite + AsyncWriteExt, { let len = decode_frame_length(&mut conn).await?; let mut buf = BytesMut::new(); buf.resize(len, b'0'); conn.read_exact(&mut buf).await?; let version_number = buf.get_i32(); match version_number { VERSION_SSL => return Ok(FrontendMessage::SslRequest), VERSION_3 => { let mut params = HashMap::new(); while *buf.get(0).unwrap() != 0 { let key = read_cstr(&mut buf).map_err(|_| anyhow!("error while reading key params"))?; let val = read_cstr(&mut buf).map_err(|_| anyhow!("error while reading value params"))?; params.insert(key, val); } return Ok(FrontendMessage::Startup { params: params, version: version_number, }); } _ => { return Err(DecoderError::UnsupporedVersion); } }; }
pub fn read_cstr(buf: &mut BytesMut) -> Result<String, Error> { if let Some(pos) = buf.iter().position(|d| *d == 0) { let str = std::str::from_utf8(&buf[..pos]) .map_err(|_| anyhow!("error while reading cstr"))? .to_string(); buf.advance(pos + 1); return Ok(str); } Err(anyhow!("string has not termination deliminiter")) } pub fn write_cstr(buf: &mut BytesMut, val: &[u8]) -> Result<(), anyhow::Error> { if val.contains(&0) { return Err(anyhow!("cstr should not contain 0 value")); } buf.put_slice(val); buf.put_u8(0); Ok(()) } pub async fn decode_password_message<T>(mut conn: T) -> Result<FrontendMessage, anyhow::Error> where T: AsyncRead + AsyncReadExt + Unpin, { let mut buf = [0; 1]; conn.read_exact(&mut buf).await?; if buf[0] != b'p' { return Err(anyhow!("incoming message is not a password message")); } let len = decode_frame_length(&mut conn).await.map_err(|_| { anyhow!("error while decoding frame length while decoding password message") })?; let mut buf = BytesMut::new(); buf.resize(len, b'0'); conn.read_exact(&mut buf).await?; let password = read_cstr(&mut buf).map_err(|err| anyhow!("error while reading password {:?}", err))?; Ok(FrontendMessage::PasswordMessage { password: password }) } #[inline] pub fn write_message<F>(buf: &mut BytesMut, f: F) -> Result<(), anyhow::Error> where F: FnOnce(&mut BytesMut) -> Result<(), anyhow::Error>, { let base = buf.len(); buf.extend_from_slice(&[0; 4]); f(buf)?; let size = (buf.len() - base) as i32; NetworkEndian::write_i32(&mut buf[base..], size); Ok(()) } #[inline] pub fn read_counted_message<F, T>(buf: &mut BytesMut, mut f: F) -> Result<Vec<T>, anyhow::Error> where F: FnMut(&mut BytesMut) -> Result<T, anyhow::Error>, { let len = NetworkEndian::read_i16(buf) as usize; buf.advance(2); let mut result = Vec::with_capacity(len); for _ in 0..len { result.push(f(buf)?); } Ok(result) } #[inline] pub fn write_counted_message<I, T, F>( items: I, mut f: F, buf: &mut BytesMut, ) -> Result<(), anyhow::Error> where I: IntoIterator<Item = 
T>, F: FnMut(T, &mut BytesMut) -> Result<(), anyhow::Error>, { let base = buf.len(); buf.extend_from_slice(&[0; 2]); let mut count = 0; for item in items { f(item, buf)?; count += 1; } let count = count as i16; NetworkEndian::write_i16(&mut buf[base..], count); Ok(()) }
pub async fn decode_frame_length<T>(mut conn: T) -> Result<usize, anyhow::Error> where T: AsyncRead + Unpin, { let mut buf = [0; 4]; conn.read_exact(&mut buf).await?; let frame_len = NetworkEndian::read_u32(&buf) as usize; if frame_len < 4 { return Err(anyhow!("invalid frame length")); } Ok(frame_len - 4) }
function_block-full_function
[ { "content": "pub fn read_config(config_path: &std::path::Path) -> Result<Config, anyhow::Error> {\n\n let config_buf = std::fs::read(config_path)\n\n .map_err(|e| anyhow!(\"error while reading config. err: {:?}\", e))?;\n\n Ok(serde_yaml::from_slice::<Config>(&config_buf[..])?)\n\n}\n\n\n\n#[cfg(t...
Rust
src/chart.rs
longbridgeapp/cli-candlestick-chart
2081638215a330bf1914b008cb94d053a0190390
use colored::Color; use crate::{ chart_data::ChartData, chart_renderer::ChartRenderer, info_bar::InfoBar, volume_pane::VolumePane, y_axis::YAxis, }; use std::cell::RefCell; use std::rc::Rc; #[derive(Debug, Clone)] #[cfg_attr(feature = "serde", derive(serde::Deserialize))] pub struct Candle { pub open: f64, pub high: f64, pub low: f64, pub close: f64, pub volume: Option<f64>, pub timestamp: Option<i64>, } pub(crate) enum CandleType { Bearish, Bullish, } impl Candle { #[allow(dead_code)] pub fn new( open: f64, high: f64, low: f64, close: f64, volume: Option<f64>, timestamp: Option<i64>, ) -> Candle { Candle { open, high, low, close, volume, timestamp, } } pub(crate) fn get_type(&self) -> CandleType { match self.open < self.close { true => CandleType::Bullish, false => CandleType::Bearish, } } } pub struct Chart { pub(crate) renderer: ChartRenderer, pub(crate) y_axis: YAxis, pub(crate) chart_data: Rc<RefCell<ChartData>>, pub(crate) info_bar: InfoBar, pub(crate) volume_pane: VolumePane, } impl Chart { pub fn new(candles: &[Candle]) -> Self { Self::new_with_canvas_size(candles, None) } pub fn new_with_canvas_size(candles: &[Candle], canvas_size: Option<(u16, u16)>) -> Self { let renderer = ChartRenderer::new(); let chart_data = match canvas_size { Some(canvas_size) => Rc::new(RefCell::new(ChartData::new_with_canvas_size( candles.to_vec(), canvas_size, ))), None => Rc::new(RefCell::new(ChartData::new(candles.to_vec()))), }; let y_axis = YAxis::new(chart_data.clone()); let info_bar = InfoBar::new("APPLE".to_string(), chart_data.clone()); let volume_pane = VolumePane::new( chart_data.clone(), (chart_data.borrow().canvas_size.1 / 6) as i64, ); chart_data .borrow_mut() .compute_height(&info_bar, &volume_pane); Chart { renderer, y_axis, chart_data, info_bar, volume_pane, } } pub fn draw(&self) { self.renderer.render(self); } pub fn draw_to_buffer(&self) -> String { self.renderer.render_to_buffer(self) } pub fn set_name(&mut self, name: String) { self.info_bar.name = name; } 
pub fn set_bear_color(&mut self, color: Color) { self.renderer.bearish_color = color; } pub fn set_bull_color(&mut self, color: Color) { self.renderer.bullish_color = color; } pub fn set_vol_bear_color(&mut self, color: Color) { self.volume_pane.bearish_color = color; } pub fn set_vol_bull_color(&mut self, color: Color) { self.volume_pane.bullish_color = color; } pub fn set_volume_pane_enabled(&mut self, enabled: bool) { self.volume_pane.enabled = enabled; } pub fn set_volume_pane_unicode_fill(&mut self, unicode_fill: char) { self.volume_pane.unicode_fill = unicode_fill; } pub fn set_volume_pane_height(&mut self, height: i64) { self.volume_pane.height = height; } pub fn set_info_bar_enabled(&mut self, enabled: bool) { self.info_bar.enabled = enabled; } }
use colored::Color; use crate::{ chart_data::ChartData, chart_renderer::ChartRenderer, info_bar::InfoBar, volume_pane::VolumePane, y_axis::YAxis, }; use std::cell::RefCell; use std::rc::Rc; #[derive(Debug, Clone)] #[cfg_attr(feature = "serde", derive(serde::Deserialize))] pub struct Candle { pub open: f64, pub high: f64, pub low: f64, pub close: f64, pub volume: Option<f64>, pub timestamp: Option<i64>, } pub(crate) enum CandleType { Bearish, Bullish, } impl Candle { #[allow(dead_code)] pub fn new( open: f64, high: f64, low: f64, close: f64, volume: Option<f64>, timestamp: Option<i64>, ) -> Candle { Candle { open, high, low, close, volume, timestamp, } } pub(crate) fn get_type(&self) -> CandleType { match self.open < self.close { true => CandleType::Bullish, false => CandleType::Bearish, } } } pub struct Chart { pub(crate) renderer: ChartRenderer, pub(crate) y_axis: YAxis, pub(crate) chart_data: Rc<RefCell<ChartData>>, pub(crate) info_bar: InfoBar, pub(crate) volume_pane: VolumePane, } impl Chart { pub fn
self.volume_pane.enabled = enabled; } pub fn set_volume_pane_unicode_fill(&mut self, unicode_fill: char) { self.volume_pane.unicode_fill = unicode_fill; } pub fn set_volume_pane_height(&mut self, height: i64) { self.volume_pane.height = height; } pub fn set_info_bar_enabled(&mut self, enabled: bool) { self.info_bar.enabled = enabled; } }
new(candles: &[Candle]) -> Self { Self::new_with_canvas_size(candles, None) } pub fn new_with_canvas_size(candles: &[Candle], canvas_size: Option<(u16, u16)>) -> Self { let renderer = ChartRenderer::new(); let chart_data = match canvas_size { Some(canvas_size) => Rc::new(RefCell::new(ChartData::new_with_canvas_size( candles.to_vec(), canvas_size, ))), None => Rc::new(RefCell::new(ChartData::new(candles.to_vec()))), }; let y_axis = YAxis::new(chart_data.clone()); let info_bar = InfoBar::new("APPLE".to_string(), chart_data.clone()); let volume_pane = VolumePane::new( chart_data.clone(), (chart_data.borrow().canvas_size.1 / 6) as i64, ); chart_data .borrow_mut() .compute_height(&info_bar, &volume_pane); Chart { renderer, y_axis, chart_data, info_bar, volume_pane, } } pub fn draw(&self) { self.renderer.render(self); } pub fn draw_to_buffer(&self) -> String { self.renderer.render_to_buffer(self) } pub fn set_name(&mut self, name: String) { self.info_bar.name = name; } pub fn set_bear_color(&mut self, color: Color) { self.renderer.bearish_color = color; } pub fn set_bull_color(&mut self, color: Color) { self.renderer.bullish_color = color; } pub fn set_vol_bear_color(&mut self, color: Color) { self.volume_pane.bearish_color = color; } pub fn set_vol_bull_color(&mut self, color: Color) { self.volume_pane.bullish_color = color; } pub fn set_volume_pane_enabled(&mut self, enabled: bool) {
random
[ { "content": "\n\n pub fn new() -> ChartRenderer {\n\n #[cfg(target_os = \"windows\")]\n\n control::set_virtual_terminal(true).unwrap();\n\n\n\n ChartRenderer {\n\n bullish_color: Color::Green,\n\n bearish_color: Color::Red,\n\n }\n\n }\n\n\n\n fn color...
Rust
src/tsort.rs
VladimirMarkelov/todo_lib
6cf36ce3c4ff33266533c25fddae6d4eb10a7e24
use std::cmp::Ordering; use crate::timer; use crate::todo; use crate::todotxt; #[derive(Debug, Clone)] pub struct Conf { pub fields: Option<String>, pub rev: bool, } impl Default for Conf { fn default() -> Conf { Conf { fields: None, rev: false } } } pub(crate) fn cmp_opt_dates(d1: Option<chrono::NaiveDate>, d2: Option<chrono::NaiveDate>) -> Ordering { match (&d1, &d2) { (None, None) => Ordering::Equal, (Some(_), None) => Ordering::Less, (None, Some(_)) => Ordering::Greater, (Some(v1), Some(v2)) => v1.cmp(v2), } } pub(crate) fn equal_opt_rec(r1: &Option<todotxt::Recurrence>, r2: &Option<todotxt::Recurrence>) -> bool { match (&r1, &r2) { (None, None) => true, (Some(_), None) | (None, Some(_)) => false, (Some(v1), Some(v2)) => v1 == v2, } } fn cmp_opt_arrays(a1: &[String], a2: &[String]) -> Ordering { if a1.is_empty() && !a2.is_empty() { return Ordering::Greater; } else if !a1.is_empty() && a2.is_empty() { return Ordering::Less; } else if a1.is_empty() && a2.is_empty() { return Ordering::Equal; } let max = if a1.len() > a2.len() { a2.len() } else { a1.len() }; let mut ord = Ordering::Equal; for idx in 0..max { let s1_low = a1[idx].to_lowercase(); let s2_low = a2[idx].to_lowercase(); ord = s1_low.cmp(&s2_low); if ord != Ordering::Equal { break; } } if ord == Ordering::Equal { ord = a1.len().cmp(&a2.len()) } ord } pub fn sort(ids: &mut todo::IDVec, todos: &todo::TaskSlice, c: &Conf) { if c.fields.is_none() && !c.rev { return; } let low: String; let fields: Vec<&str> = match &c.fields { None => Vec::new(), Some(v) => { low = v.trim_start_matches(|c: char| c == ' ' || c == '=').to_lowercase(); low.split(|c: char| c == ',' || c == ':').collect() } }; if !fields.is_empty() { ids.sort_by(|a, b| { if *a >= todos.len() && *b >= todos.len() { return Ordering::Equal; } else if *a >= todos.len() { return Ordering::Greater; } else if *b >= todos.len() { return Ordering::Less; } let mut res: Ordering = Ordering::Equal; for f in &fields { res = match *f { "pri" | "priority" => 
todos[*a].priority.cmp(&todos[*b].priority), "due" => cmp_opt_dates(todos[*a].due_date, todos[*b].due_date), "thr" => cmp_opt_dates(todos[*a].threshold_date, todos[*b].threshold_date), "completed" | "finished" => cmp_opt_dates(todos[*a].finish_date, todos[*b].finish_date), "created" | "create" => cmp_opt_dates(todos[*a].create_date, todos[*b].create_date), "subject" | "text" | "subj" => todos[*a].subject.cmp(&todos[*b].subject), "done" => { let f1 = if timer::is_timer_on(&todos[*a]) { 1 } else if todos[*a].recurrence.is_some() { 2 } else if todos[*a].finished { 3 } else { 0 }; let f2 = if timer::is_timer_on(&todos[*b]) { 1 } else if todos[*b].recurrence.is_some() { 2 } else if todos[*b].finished { 3 } else { 0 }; f1.cmp(&f2) } "proj" | "project" => cmp_opt_arrays(&todos[*a].projects, &todos[*b].projects), "ctx" | "context" => cmp_opt_arrays(&todos[*a].contexts, &todos[*b].contexts), "active" => { let a_act = timer::is_timer_on(&todos[*a]); let b_act = timer::is_timer_on(&todos[*b]); b_act.cmp(&a_act) } _ => Ordering::Equal, }; if res != Ordering::Equal { break; } } res }); } if c.rev { ids.reverse(); } }
use std::cmp::Ordering; use crate::timer; use crate::todo; use crate::todotxt; #[derive(Debug, Clone)] pub struct Conf { pub fields: Option<String>, pub rev: bool, } impl Default for Conf { fn default() -> Conf { Conf { fields: None, rev: false } } } pub(crate) fn cmp_opt_dates(d1: Option<chrono::NaiveDate>, d2: Option<chrono::NaiveDate>) -> Ordering { match (&d1, &d2) { (None, None) => Ordering::Equal, (Some(_), None) => Ordering::Less, (None, Some(_)) => Ordering::Greater, (Some(v1), Some(v2)) => v1.cmp(v2), } } pub(crate) fn equal_opt_rec(r1: &Option<todotxt::Recurrence>, r2: &Option<todotxt::Recurrence>) -> bool { match (&r1, &r2) { (None, None) => true, (Some(_), None) | (None, Some(_)) => false, (Some(v1), Some(v2)) => v1 == v2, } } fn cmp_opt_arrays(a1: &[String], a2: &[String]) -> Ordering { if a1.is_empty() && !a2.is_empty() { return Ordering::Greater; } else if !a1.is_empty() && a2.is_empty() { return Ordering::Less; } else if a1.is_empty() && a2.is_empty() { return Ordering::Equal; } let max = if a1.len() > a2.len() { a2.len() } else { a1.len() }; let mut ord = Ordering::Equal; for idx in 0..max { let s1_l
match *f { "pri" | "priority" => todos[*a].priority.cmp(&todos[*b].priority), "due" => cmp_opt_dates(todos[*a].due_date, todos[*b].due_date), "thr" => cmp_opt_dates(todos[*a].threshold_date, todos[*b].threshold_date), "completed" | "finished" => cmp_opt_dates(todos[*a].finish_date, todos[*b].finish_date), "created" | "create" => cmp_opt_dates(todos[*a].create_date, todos[*b].create_date), "subject" | "text" | "subj" => todos[*a].subject.cmp(&todos[*b].subject), "done" => { let f1 = if timer::is_timer_on(&todos[*a]) { 1 } else if todos[*a].recurrence.is_some() { 2 } else if todos[*a].finished { 3 } else { 0 }; let f2 = if timer::is_timer_on(&todos[*b]) { 1 } else if todos[*b].recurrence.is_some() { 2 } else if todos[*b].finished { 3 } else { 0 }; f1.cmp(&f2) } "proj" | "project" => cmp_opt_arrays(&todos[*a].projects, &todos[*b].projects), "ctx" | "context" => cmp_opt_arrays(&todos[*a].contexts, &todos[*b].contexts), "active" => { let a_act = timer::is_timer_on(&todos[*a]); let b_act = timer::is_timer_on(&todos[*b]); b_act.cmp(&a_act) } _ => Ordering::Equal, }; if res != Ordering::Equal { break; } } res }); } if c.rev { ids.reverse(); } }
ow = a1[idx].to_lowercase(); let s2_low = a2[idx].to_lowercase(); ord = s1_low.cmp(&s2_low); if ord != Ordering::Equal { break; } } if ord == Ordering::Equal { ord = a1.len().cmp(&a2.len()) } ord } pub fn sort(ids: &mut todo::IDVec, todos: &todo::TaskSlice, c: &Conf) { if c.fields.is_none() && !c.rev { return; } let low: String; let fields: Vec<&str> = match &c.fields { None => Vec::new(), Some(v) => { low = v.trim_start_matches(|c: char| c == ' ' || c == '=').to_lowercase(); low.split(|c: char| c == ',' || c == ':').collect() } }; if !fields.is_empty() { ids.sort_by(|a, b| { if *a >= todos.len() && *b >= todos.len() { return Ordering::Equal; } else if *a >= todos.len() { return Ordering::Greater; } else if *b >= todos.len() { return Ordering::Less; } let mut res: Ordering = Ordering::Equal; for f in &fields { res =
random
[ { "content": "fn vec_match(task_list: &[String], filter: &[String]) -> bool {\n\n if filter.is_empty() {\n\n return true;\n\n }\n\n for f in filter.iter() {\n\n if (f == NONE_TITLE && task_list.is_empty()) || (f == ANY_TITLE && !task_list.is_empty()) {\n\n return true;\n\n ...
Rust
src/lib.rs
Flux-Audio/chaos_osc
30fc168cf5738c1cc53a9b63ddc47fc582c4eae7
#[macro_use] extern crate vst; use vst::buffer::AudioBuffer; use vst::plugin::{Category, Info, Plugin, PluginParameters}; use vst::util::AtomicFloat; use std::sync::Arc; mod compute; struct Effect { params: Arc<EffectParameters>, sr: f32, scale: f64, th1: f64, th2: f64, osc1_th: f64, osc2_th: f64, w1: f64, w2: f64, } struct EffectParameters { len_ratio: AtomicFloat, scale: AtomicFloat, o1_amt: AtomicFloat, o2_amt: AtomicFloat, o1_f: AtomicFloat, o2_f: AtomicFloat, o1_fine: AtomicFloat, o2_fine: AtomicFloat, o2_to_o1_mod: AtomicFloat, o1_to_o2_mod: AtomicFloat, } impl Default for Effect { fn default() -> Effect { Effect { params: Arc::new(EffectParameters::default()), sr: 44100.0, scale: 1.0, th1: 3.0, th2: 4.0, osc1_th: 0.0, osc2_th: 0.0, w1: 0.0, w2: 0.0, } } } impl Default for EffectParameters { fn default() -> EffectParameters { EffectParameters { len_ratio: AtomicFloat::new(0.5), scale: AtomicFloat::new(0.5), o1_amt: AtomicFloat::new(0.0), o2_amt: AtomicFloat::new(0.0), o1_f: AtomicFloat::new(0.5), o2_f: AtomicFloat::new(0.5), o1_fine: AtomicFloat::new(0.5), o2_fine: AtomicFloat::new(0.5), o2_to_o1_mod: AtomicFloat::new(0.0), o1_to_o2_mod: AtomicFloat::new(0.0), } } } impl Plugin for Effect { fn get_info(&self) -> Info { Info { name: "CHAOS_OSC".to_string(), vendor: "Flux-Audio".to_string(), unique_id: 40942320, version: 020, inputs: 0, outputs: 2, parameters: 10, category: Category::Generator, ..Default::default() } } fn set_sample_rate(&mut self, rate: f32){ self.sr = rate; self.scale = 44100.0 / rate as f64; } fn init(&mut self) {} fn process(&mut self, buffer: &mut AudioBuffer<f32>) { let (_, outputs) = buffer.split(); let (mut l, mut r) = outputs.split_at_mut(1); let stereo_out = l[0].iter_mut().zip(r[0].iter_mut()); for (left_out, right_out) in stereo_out{ let o1_amt = self.params.o1_amt.get() as f64; let o2_amt = self.params.o2_amt.get() as f64; let o1_f = (self.params.o1_f.get()*8.0 + self.params.o1_fine.get()) as f64; let o2_f = 
(self.params.o2_f.get()*8.0 + self.params.o2_fine.get()) as f64; let o2_to_o1_mod = self.params.o2_to_o1_mod.get() as f64; let o1_to_o2_mod = self.params.o1_to_o2_mod.get() as f64; let mut scale = self.params.scale.get() as f64; scale = scale*scale*8.0; let l2 = (self.params.len_ratio.get()*2.0 + 0.01) as f64; let l1 = 2.03 - l2; let osc1 = compute::oct_to_rad(o1_f + self.osc2_th.sin()*o2_to_o1_mod, self.sr); let osc2 = compute::oct_to_rad(o2_f + self.osc1_th.sin()*o1_to_o2_mod, self.sr); self.osc1_th = compute::wrap(self.osc1_th + osc1); self.osc2_th = compute::wrap(self.osc2_th + osc2); let (dth1, dth2, dw1, dw2) = compute::step( self.th1, self.th2, self.w1, self.w2, l1, l2 ); let sat = 15.0/(scale + 0.01); self.w1 = ((self.w1 + dw1*0.1*self.scale*scale)/sat).tanh()*sat; self.w2 = ((self.w2 + dw2*0.1*self.scale*scale)/sat).tanh()*sat; let dth1 = compute::fade((dth1*0.1*self.scale*scale/sat).tanh()*sat, o1_amt, osc1); let dth2 = compute::fade((dth2*0.1*self.scale*scale/sat).tanh()*sat, o2_amt, osc2); self.th1 = compute::wrap(self.th1 + dth1); self.th2 = compute::wrap(self.th2 + dth2); *left_out = self.th1.sin() as f32; *right_out = self.th2.sin() as f32; } } fn get_parameter_object(&mut self) -> Arc<dyn PluginParameters> { Arc::clone(&self.params) as Arc<dyn PluginParameters> } } impl PluginParameters for EffectParameters { fn get_parameter(&self, index: i32) -> f32 { match index { 0 => self.len_ratio.get(), 1 => self.scale.get(), 2 => self.o1_amt.get(), 3 => self.o2_amt.get(), 4 => self.o1_f.get(), 5 => self.o2_f.get(), 6 => self.o1_fine.get(), 7 => self.o2_fine.get(), 8 => self.o2_to_o1_mod.get(), 9 => self.o1_to_o2_mod.get(), _ => 0.0, } } fn set_parameter(&self, index: i32, val: f32) { #[allow(clippy::single_match)] match index { 0 => self.len_ratio.set(val), 1 => self.scale.set(val), 2 => self.o1_amt.set(val), 3 => self.o2_amt.set(val), 4 => self.o1_f.set(val), 5 => self.o2_f.set(val), 6 => self.o1_fine.set(val), 7 => self.o2_fine.set(val), 8 => 
self.o2_to_o1_mod.set(val), 9 => self.o1_to_o2_mod.set(val), _ => (), } } fn get_parameter_text(&self, index: i32) -> String { match index { 0 => format!("L1: {:.2}, L2: {:.2}", 1.0-self.len_ratio.get(), self.len_ratio.get()), 1 => format!("{:.2}", self.scale.get()), 2 => format!("{:.2}", self.o1_amt.get()), 3 => format!("{:.2}", self.o2_amt.get()), 4 => format!("{:.2}", self.o1_f.get()*8.0), 5 => format!("{:.2}", self.o2_f.get()*8.0), 6 => format!("{:.2}", self.o1_fine.get()), 7 => format!("{:.2}", self.o2_fine.get()), 8 => format!("{:.1}", self.o2_to_o1_mod.get()*200.0), 9 => format!("{:.1}", self.o1_to_o2_mod.get()*200.0), _ => "".to_string(), } } fn get_parameter_name(&self, index: i32) -> String { match index { 0 => "L1 <=> L2", 1 => "- <=> +", 2 => "O1", 3 => "O2", 4 => "F1", 5 => "F2", 6 => "F1.f", 7 => "F2.f", 8 => "M1", 9 => "M2", _ => "", } .to_string() } } plugin_main!(Effect);
#[macro_use] extern crate vst; use vst::buffer::AudioBuffer; use vst::plugin::{Category, Info, Plugin, PluginParameters}; use vst::util::AtomicFloat; use std::sync::Arc; mod compute; struct Effect { params: Arc<EffectParameters>, sr: f32, scale: f64, th1: f64, th2: f64, osc1_th: f64, osc2_th: f64, w1: f64, w2: f64, } struct EffectParameters { len_ratio: AtomicFloat, scale: AtomicFloat, o1_amt: AtomicFloat, o2_amt: AtomicFloat, o1_f: AtomicFloat, o2_f: AtomicFloat, o1_fine: AtomicFloat, o2_fine: AtomicFloat, o2_to_o1_mod: AtomicFloat, o1_to_o2_mod: AtomicFloat, } impl Default for Effect { fn default() -> Effect { Effect { params: Arc::new(EffectParameters::default()), sr: 44100.0, scale: 1.0, th1: 3.0, th2: 4.0, osc1_th: 0.0, osc2_th: 0.0, w1: 0.0, w2: 0.0, } } } impl Default for EffectParameters { fn default() -> EffectParameters { EffectParameters { len_ratio: AtomicFloat::new(0.5), scale: AtomicFloat::new(0.5), o1_amt: AtomicFloat::new(0.0), o2_amt: AtomicFloat::new(0.0), o1_f: AtomicFloat::new(0.5), o2_f: AtomicFloat::new(0.5), o1_fine: AtomicFloat::new(0.5), o2_fine: AtomicFloat::new(0.5), o2_to_o1_mod: AtomicFloat::new(0.0), o1_to_o2_mod: AtomicFloat::new(0.0), } } } impl Plugin for Effect { fn get_info(&self) -> Info { Info { name: "CHAOS_OSC".to_string(), vendor: "Flux-Audio".to_string(), unique_id: 40942320, version: 020, inputs: 0, outputs: 2, parameters: 10, category: Category::Generator, ..Default::default() } } fn set_sample_rate(&mut self, rate: f32){ self.sr = rate; self.scale = 44100.0 / rate as f64; } fn init(&mut self) {} fn process(&mut self, buffer: &mut AudioBuffer<f32>) { let (_, outputs) = buffer.split(); let (mut l, mut r) = outputs.split_at_mut(1); let stereo_out = l[0].iter_mut().zip(r[0].iter_mut()); for (left_out, right_out) in stereo_out{ let o1_amt = self.params.o1_amt.get() as f64; let o2_amt = self.params.o
fn get_parameter_object(&mut self) -> Arc<dyn PluginParameters> { Arc::clone(&self.params) as Arc<dyn PluginParameters> } } impl PluginParameters for EffectParameters { fn get_parameter(&self, index: i32) -> f32 { match index { 0 => self.len_ratio.get(), 1 => self.scale.get(), 2 => self.o1_amt.get(), 3 => self.o2_amt.get(), 4 => self.o1_f.get(), 5 => self.o2_f.get(), 6 => self.o1_fine.get(), 7 => self.o2_fine.get(), 8 => self.o2_to_o1_mod.get(), 9 => self.o1_to_o2_mod.get(), _ => 0.0, } } fn set_parameter(&self, index: i32, val: f32) { #[allow(clippy::single_match)] match index { 0 => self.len_ratio.set(val), 1 => self.scale.set(val), 2 => self.o1_amt.set(val), 3 => self.o2_amt.set(val), 4 => self.o1_f.set(val), 5 => self.o2_f.set(val), 6 => self.o1_fine.set(val), 7 => self.o2_fine.set(val), 8 => self.o2_to_o1_mod.set(val), 9 => self.o1_to_o2_mod.set(val), _ => (), } } fn get_parameter_text(&self, index: i32) -> String { match index { 0 => format!("L1: {:.2}, L2: {:.2}", 1.0-self.len_ratio.get(), self.len_ratio.get()), 1 => format!("{:.2}", self.scale.get()), 2 => format!("{:.2}", self.o1_amt.get()), 3 => format!("{:.2}", self.o2_amt.get()), 4 => format!("{:.2}", self.o1_f.get()*8.0), 5 => format!("{:.2}", self.o2_f.get()*8.0), 6 => format!("{:.2}", self.o1_fine.get()), 7 => format!("{:.2}", self.o2_fine.get()), 8 => format!("{:.1}", self.o2_to_o1_mod.get()*200.0), 9 => format!("{:.1}", self.o1_to_o2_mod.get()*200.0), _ => "".to_string(), } } fn get_parameter_name(&self, index: i32) -> String { match index { 0 => "L1 <=> L2", 1 => "- <=> +", 2 => "O1", 3 => "O2", 4 => "F1", 5 => "F2", 6 => "F1.f", 7 => "F2.f", 8 => "M1", 9 => "M2", _ => "", } .to_string() } } plugin_main!(Effect);
2_amt.get() as f64; let o1_f = (self.params.o1_f.get()*8.0 + self.params.o1_fine.get()) as f64; let o2_f = (self.params.o2_f.get()*8.0 + self.params.o2_fine.get()) as f64; let o2_to_o1_mod = self.params.o2_to_o1_mod.get() as f64; let o1_to_o2_mod = self.params.o1_to_o2_mod.get() as f64; let mut scale = self.params.scale.get() as f64; scale = scale*scale*8.0; let l2 = (self.params.len_ratio.get()*2.0 + 0.01) as f64; let l1 = 2.03 - l2; let osc1 = compute::oct_to_rad(o1_f + self.osc2_th.sin()*o2_to_o1_mod, self.sr); let osc2 = compute::oct_to_rad(o2_f + self.osc1_th.sin()*o1_to_o2_mod, self.sr); self.osc1_th = compute::wrap(self.osc1_th + osc1); self.osc2_th = compute::wrap(self.osc2_th + osc2); let (dth1, dth2, dw1, dw2) = compute::step( self.th1, self.th2, self.w1, self.w2, l1, l2 ); let sat = 15.0/(scale + 0.01); self.w1 = ((self.w1 + dw1*0.1*self.scale*scale)/sat).tanh()*sat; self.w2 = ((self.w2 + dw2*0.1*self.scale*scale)/sat).tanh()*sat; let dth1 = compute::fade((dth1*0.1*self.scale*scale/sat).tanh()*sat, o1_amt, osc1); let dth2 = compute::fade((dth2*0.1*self.scale*scale/sat).tanh()*sat, o2_amt, osc2); self.th1 = compute::wrap(self.th1 + dth1); self.th2 = compute::wrap(self.th2 + dth2); *left_out = self.th1.sin() as f32; *right_out = self.th2.sin() as f32; } }
function_block-function_prefixed
[ { "content": "/// this is the simplified version of the double pendulum differential equation\n\n/// system, the old one was broken\n\npub fn step(th1: f64, th2: f64, w1: f64, w2: f64, l1: f64, l2: f64)\n\n -> (f64, f64, f64, f64){\n\n\n\n let c = (th1 - th2).cos();\n\n let s = (th1 - th2).sin(); \...
Rust
drepr/readers/src/iterators/index/unknown_range_iterator.rs
scorpio975/d-repr
1d08024192642233d42d29e1d05f8713ee265bca
use crate::prelude::{Index, RAReader, Value}; use super::IndexIterator; macro_rules! generate_unknown_range_iter { (get_index_type, $mut_kw:ident) => { &'a mut [Index] }; (get_index_type,) => { Vec<Index> }; ($class:ident $(, $mut_kw:ident )?) => { #[derive(Debug)] pub struct $class<'a> { ra_reader: &'a dyn RAReader, lowerbounds: $(&'a $mut_kw)? Vec<usize>, upperbounds: Vec<usize>, neg_upperbounds: $(&'a $mut_kw)? Vec<usize>, steps: $(&'a $mut_kw)? Vec<usize>, unfrozen_dims: $(&'a $mut_kw)? Vec<usize>, last_unknown_dim: usize, unknown_upperbounds: $(&'a $mut_kw)? Vec<bool>, index: generate_unknown_range_iter!(get_index_type, $($mut_kw)?), has_more: bool, tree_ptrs: Vec<&'a Value>, } impl<'a> $class<'a> { pub fn new( ra_reader: &'a dyn RAReader, index: generate_unknown_range_iter!(get_index_type, $($mut_kw)?), unfrozen_dims: $(&'a $mut_kw)? Vec<usize>, unknown_upperbounds: $(&'a $mut_kw)? Vec<bool>, lowerbounds: $(&'a $mut_kw)? Vec<usize>, mut upperbounds: Vec<usize>, neg_upperbounds: $(&'a $mut_kw)? Vec<usize>, steps: $(&'a $mut_kw)? 
Vec<usize>, ) -> $class<'a> { let mut last_unknown_dim = 0; for (i, &is_unknown) in unknown_upperbounds.iter().enumerate().rev() { if is_unknown { last_unknown_dim = i; break; } } let tree_ptrs = create_tree_ptrs_and_update_unknown_upperbound( ra_reader, index.as_ref(), last_unknown_dim, &unknown_upperbounds, &neg_upperbounds, &mut upperbounds, ); $class { ra_reader, index, unfrozen_dims, unknown_upperbounds, last_unknown_dim, lowerbounds, upperbounds, neg_upperbounds, steps, has_more: true, tree_ptrs, } } } impl<'a> IndexIterator for $class<'a> { fn value(&self) -> &[Index] { &self.index } #[inline] fn mut_value(&mut self) -> &mut [Index] { &mut self.index } fn advance(&mut self) -> bool { if self.has_more { for &dim_pivot in self.unfrozen_dims.iter() { match &mut self.index[dim_pivot] { Index::Idx(idx) => { *idx += self.steps[dim_pivot]; if *idx >= self.upperbounds[dim_pivot] { *idx = self.lowerbounds[dim_pivot] as usize; } else { if dim_pivot < self.last_unknown_dim { update_local_upperbounds( self.ra_reader, &mut self.tree_ptrs, self.index.as_ref(), self.last_unknown_dim, &self.unknown_upperbounds, &self.neg_upperbounds, &mut self.upperbounds, dim_pivot, ); } return true; } } _ => unreachable!(), } } self.has_more = false; } return false; } fn freeze_last_step(&mut self) { if self.unfrozen_dims[0] == self.steps.len() - 1 { self.unfrozen_dims.drain(..1); } self.steps.pop(); self.upperbounds.pop(); self.lowerbounds.pop(); self.neg_upperbounds.pop(); self.unknown_upperbounds.pop(); if self.last_unknown_dim >= self.unknown_upperbounds.len() { for i in (0..self.unknown_upperbounds.len()).rev() { if self.unknown_upperbounds[i] { self.last_unknown_dim = i; break; } self.tree_ptrs.pop(); } } } } } } generate_unknown_range_iter!(UnknownRangeIter); generate_unknown_range_iter!(UnknownRangeRefIter, mut); pub fn create_tree_ptrs_and_update_unknown_upperbound<'a>( ra_reader: &'a dyn RAReader, index: &[Index], last_unknown_dim: usize, unknown_upperbounds: &[bool], 
neg_upperbounds: &[usize], upperbounds: &mut [usize], ) -> Vec<&'a Value> { if unknown_upperbounds[0] { upperbounds[0] = ra_reader.len() - neg_upperbounds[0]; } let mut tree_ptrs = vec![ra_reader.get_value(&index[..1], 0)]; for i in 1..last_unknown_dim { if unknown_upperbounds[i] { upperbounds[i] = tree_ptrs[i - 1].len() - neg_upperbounds[i]; } tree_ptrs.push(tree_ptrs[i - 1].get_child_value(&index[i])); } if last_unknown_dim > 0 { upperbounds[last_unknown_dim] = tree_ptrs[last_unknown_dim - 1].len() - neg_upperbounds[last_unknown_dim]; } tree_ptrs } pub fn update_local_upperbounds<'a>( ra_reader: &'a dyn RAReader, tree_ptrs: &mut [&'a Value], index: &[Index], last_unknown_dim: usize, unknown_upperbounds: &[bool], neg_upperbounds: &[usize], upperbounds: &mut [usize], mut start_idx: usize, ) { if start_idx == 0 { tree_ptrs[0] = ra_reader.get_value(&index[..1], 0); start_idx += 1; } for i in start_idx..last_unknown_dim { if unknown_upperbounds[i] { upperbounds[i] = tree_ptrs[i - 1].len() - neg_upperbounds[i]; } tree_ptrs[i] = tree_ptrs[i - 1].get_child_value(&index[i]); } if last_unknown_dim > 0 { upperbounds[last_unknown_dim] = tree_ptrs[last_unknown_dim - 1].len() - neg_upperbounds[last_unknown_dim]; } }
use crate::prelude::{Index, RAReader, Value}; use super::IndexIterator; macro_rules! generate_unknown_range_iter { (get_index_type, $mut_kw:ident) => { &'a mut [Index] }; (get_index_type,) => { Vec<Index> }; ($class:ident $(, $mut_kw:ident )?) => { #[derive(Debug)] pub struct $class<'a> { ra_reader: &'a dyn RAReader, lowerbounds: $(&'a $mut_kw)? Vec<usize>, upperbounds: Vec<usize>, neg_upperbounds: $(&'a $mut_kw)? Vec<usize>, steps: $(&'a $mut_kw)? Vec<usize>, unfrozen_dims: $(&'a $mut_kw)? Vec<usize>, last_unknown_dim: usize, unknown_upperbounds: $(&'a $mut_kw)? Vec<bool>, index: generate_unknown_range_iter!(get_index_type, $($mut_kw)?), has_more: bool, tree_ptrs: Vec<&'a Value>, } impl<'a> $class<'a> { pub fn new( ra_reader: &'a dyn RAReader, index: generate_unknown_range_iter!(get_index_type, $($mut_kw)?), unfrozen_dims: $(&'a $mut_kw)? Vec<usize>, unknown_upperbounds: $(&'a $mut_kw)? Vec<bool>, lowerbounds: $(&'a $mut_kw)? Vec<usize>, mut upperbounds: Vec<usize>, neg_upperbounds: $(&'a $mut_kw)? Vec<usize>, steps: $(&'a $mut_kw)? 
Vec<usize>, ) -> $class<'a> { let mut last_unknown_dim = 0; for (i, &is_unknown) in unknown_upperbounds.iter().enumerate().rev() { if is_unknown { last_unknown_dim = i; break; } } let tree_ptrs = create_tree_ptrs_and_update_unknown_upperbound( ra_reader, index.as_ref(), last_unknown_dim, &unknown_upperbounds, &neg_upperbounds, &mut upperbounds, ); $class { ra_reader, index, unfrozen_dims, unknown_upperbounds, last_unknown_dim, lowerbounds, upperbounds, neg_upperbounds, steps, has_more: true, tree_ptrs, } } } impl<'a> IndexIterator for $class<'a> { fn value(&self) -> &[Index] { &self.index } #[inline] fn mut_value(&mut self) -> &mut [Index] { &mut self.index } fn advance(&mut self) -> bool { if self.has_more { for &dim_pivot in self.unfrozen_dims.iter() { match &mut self.index[dim_pivot] { Index::Idx(idx) => { *idx += self.steps[dim_pivot]; if *idx >= self.upperbounds[dim_pivot] { *idx = self.lowerbounds[dim_pivot] as usize; } else { if dim_pivot < self.last_unknown_dim { update_local_upperbounds( self.ra_reader, &mut self.tree_ptrs, self.index.as_ref(), self.last_unknown_dim, &self.unknown_upperbounds, &self.neg_upperbounds, &mut self.upperbounds, dim_pivot, ); } return true; } } _ => unreachable!(), } } self.has_more = false; } return false; } fn freeze_last_step(&mut self) { if self.unfrozen_dims[0] == self.steps.len() - 1 { self.unfrozen_dims.drain(..1); } self.steps.pop(); self.upperbounds.pop(); self.lowerbounds.pop(); self.neg_upperbounds.pop(); self.unknown_upperbounds.pop(); if self.last_unknown_dim >= self.unknown_upperbounds.len() { for i in (0..self.unknown_upperbounds.len()).rev() { if self.unknown_upperbounds[i] { self.last_unknown_dim = i; break; } self.tree_ptrs.pop(); } } } } } } generate_unknown_range_iter!(UnknownRangeIter); generate_unknown_range_iter!(UnknownRangeRefIter, mut); pub fn create_tree_ptrs_and_update_unknown_upperbound<'a>( ra_reader: &'a dyn RAReader, index: &[Index], last_unknown_dim: usize, unknown_upperbounds: &[bool], 
neg_upperbounds: &[usize], upperbounds: &mut [usize], ) -> Vec<&'a Value> { if unknown_upperbounds[0] { upperbounds[0] = ra_reader.len() - neg_upperbounds[0]; } let mut tree_ptrs = vec![ra_reader.get_value(&index[..1], 0)]; for i in 1..last_unknown_dim { if unknown_upperbounds[i] { upperbounds[i] = tree_ptrs[i - 1].len() - neg_upperbounds[i]; } tree_ptrs.push(tree_ptrs[i - 1].get_child_value(&index[i])); } if last_unknown_dim > 0 { upperbounds[last_unknown_dim] = tree_ptrs[last_unknown_dim - 1].len() - neg_upperbounds[last_unknown_dim]; } tree_ptrs } pub fn update_local_upperbounds<'a>( ra_reader: &'a dyn RAReader, tree_ptrs: &mut [&'a Value], index: &[Index], last_unknown_dim: usize, unknown_upperbounds: &[bool], neg_upperbounds: &[usize], upperbounds: &mut [usize], mut start_idx: usize, ) { if start_idx == 0 { tree_ptrs[0] = ra_reader.get_value(&index[..1], 0); start_idx += 1; } for i in start_idx..last_unknown_dim { if unknown_upperbounds[i] {
upperbounds[i] = tree_ptrs[i - 1].len() - neg_upperbounds[i]; } tree_ptrs[i] = tree_ptrs[i - 1].get_child_value(&index[i]); } if last_unknown_dim > 0 { upperbounds[last_unknown_dim] = tree_ptrs[last_unknown_dim - 1].len() - neg_upperbounds[last_unknown_dim]; } }
function_block-function_prefix_line
[ { "content": "pub fn generic_optional_oprop_map(readers: &[Box<dyn RAReader>], writer: &mut dyn StreamClassWriter, oplan: &ObjectProp, oalign: &mut AlignmentFunc, subj_id: &str, subj_val: &Value, subj_idx: &[Index], o_idx: &mut [Index], is_subj_blank: bool, is_new_subj: bool) {\n\n match oalign {\n\n Alignm...
Rust
raster-tools/src/bin/raster-diff/args.rs
AspecScire/rasters.rs
28eb0067dd9ffc4d869d2fcf762c328e081446c2
use clap::*; use raster_tools::{utils::*, *}; use rasters::histogram::Config as HistConfig; use std::path::PathBuf; pub struct Args { pub input_a: PathBuf, pub input_b: PathBuf, pub negate: bool, pub hist: Option<(HistConfig, PathBuf)>, pub polygon: Option<geo::MultiPolygon<f64>>, pub output: Option<OutputArgs>, pub output_type: OutputType, pub chunk_size: usize, } pub enum OutputType { Value, Discretized, } pub fn parse_cmd_line() -> Args { use clap::ErrorKind::*; use clap::*; let matches = args_parser!("raster-diff") .about("Compute raster difference stats.") .arg( arg!("input_a") .required(true) .help("First input path (raster dataset)"), ) .arg( arg!("input_b") .required(true) .help("Second input path (raster dataset)"), ) .arg( opt!("negate") .help("Negate order of operands (default: second - first)") .takes_value(false), ) .arg( opt!("hist") .help("Generate histogram (requires min, max, bins|step)") .requires_all(&["min", "max", "binning"]), ) .arg( opt!("min") .allow_hyphen_values(true) .requires("hist") .help("Min value to consider"), ) .arg( opt!("max") .allow_hyphen_values(true) .requires("hist") .help("Max value to consider"), ) .arg(opt!("bins").help("Number of bins (overrides step size)")) .arg(opt!("step").help("Bin size for histogram")) .group( ArgGroup::with_name("binning") .args(&["bins", "step"]) .requires("hist"), ) .arg(opt!("polygon").help("Region to restrict to (Polygon or MultiPolygon WKT)")) .arg( opt!("output type") .help("Output type: discretized or the default, value") .requires("output"), ) .arg(opt!("output").help("Output path (raster dataset)")) .arg( opt!("driver") .requires("output") .help("Output driver (default: GTIFF)"), ) .arg( opt!("chunk size") .short("c") .help("Read chunk size (default: 64k pixels)"), ) .get_matches(); let input_a = value_t!(matches, "input_a", PathBuf).unwrap_or_else(|e| e.exit()); let input_b = value_t!(matches, "input_b", PathBuf).unwrap_or_else(|e| e.exit()); let hist_file = value_t!(matches, "hist", 
PathBuf).ok(); let hist = if let Some(hist_file) = hist_file { let hist = { let min = value_t!(matches, "min", f64).unwrap_or_else(|e| e.exit()); let max = value_t!(matches, "max", f64).unwrap_or_else(|e| e.exit()); let bins = value_t!(matches, "bins", usize).ok(); if let Some(bins) = bins { HistConfig::from_min_max_bins(min, max, bins) } else { HistConfig::from_min_max_step( min, max, value_t!(matches, "step", f64).unwrap_or_else(|e| e.exit()), ) } }; Some((hist, hist_file)) } else { None }; let negate = matches.is_present("negate"); let output = if matches.is_present("output") { let o = value_t!(matches, "output", PathBuf).unwrap_or_else(|e| e.exit()); let driver = value_t!(matches, "driver", String).unwrap_or_else(|_| String::from("GTIFF")); Some(OutputArgs { path: o, driver }) } else { None }; let output_type = { let output_type = value_t!(matches, "output type", String).unwrap_or_else(|_| String::from("value")); if output_type == "value" { OutputType::Value } else if output_type == "discretized" { OutputType::Discretized } else { Error::with_description( &format!("invalid output type: {}", output_type), InvalidValue, ) .exit() } }; if let OutputType::Discretized = output_type { if hist.is_none() { Error::with_description( "`discretized' output requires generating histogram (`--hist')", InvalidValue, ) .exit() } } let chunk_size = value_t!(matches, "chunk size", usize).unwrap_or_else(|_| 0x10000); let polygon = value_t!(matches, "polygon", String).ok().map(|wkt| { let geom = gdal::vector::Geometry::from_wkt(&wkt) .unwrap_or_else(|_| Error::with_description("cannot parse WKT", InvalidValue).exit()) .into(); use geo::Geometry::{MultiPolygon, Polygon}; match geom { Polygon(p) => p.into(), MultiPolygon(p) => p, _ => Error::with_description("WKT is not a (multi)-polygon", InvalidValue).exit(), } }); Args { input_a, input_b, hist, negate, polygon, chunk_size, output, output_type, } }
use clap::*; use raster_tools::{utils::*, *}; use rasters::histogram::Config as HistConfig; use std::path::PathBuf; pub struct Args { pub input_a: PathBuf, pub input_b: PathBuf, pub negate: bool, pub hist: Option<(HistConfig, PathBuf)>, pub polygon: Option<geo::MultiPolygon<f64>>, pub output: Option<OutputArgs>, pub output_type: OutputType, pub chunk_size: usize, } pub enum OutputType { Value, Discretized, } pub fn parse_cmd_line() -> Args { use clap::ErrorKind::*; use clap::*; let matches = args_parser!("raster-diff") .about("Compute raster difference stats.") .arg( arg!("input_a") .required(true) .help("First input path (raster dataset)"), ) .arg( arg!("input_b") .required(true) .help("Second input path (raster dataset)"), ) .arg( opt!("negate") .help("Negate order of operands (default: second - first)") .takes_value(false), ) .arg( opt!("hist") .help("Generate histogram (requires min, max, bins|ste
p)") .requires_all(&["min", "max", "binning"]), ) .arg( opt!("min") .allow_hyphen_values(true) .requires("hist") .help("Min value to consider"), ) .arg( opt!("max") .allow_hyphen_values(true) .requires("hist") .help("Max value to consider"), ) .arg(opt!("bins").help("Number of bins (overrides step size)")) .arg(opt!("step").help("Bin size for histogram")) .group( ArgGroup::with_name("binning") .args(&["bins", "step"]) .requires("hist"), ) .arg(opt!("polygon").help("Region to restrict to (Polygon or MultiPolygon WKT)")) .arg( opt!("output type") .help("Output type: discretized or the default, value") .requires("output"), ) .arg(opt!("output").help("Output path (raster dataset)")) .arg( opt!("driver") .requires("output") .help("Output driver (default: GTIFF)"), ) .arg( opt!("chunk size") .short("c") .help("Read chunk size (default: 64k pixels)"), ) .get_matches(); let input_a = value_t!(matches, "input_a", PathBuf).unwrap_or_else(|e| e.exit()); let input_b = value_t!(matches, "input_b", PathBuf).unwrap_or_else(|e| e.exit()); let hist_file = value_t!(matches, "hist", PathBuf).ok(); let hist = if let Some(hist_file) = hist_file { let hist = { let min = value_t!(matches, "min", f64).unwrap_or_else(|e| e.exit()); let max = value_t!(matches, "max", f64).unwrap_or_else(|e| e.exit()); let bins = value_t!(matches, "bins", usize).ok(); if let Some(bins) = bins { HistConfig::from_min_max_bins(min, max, bins) } else { HistConfig::from_min_max_step( min, max, value_t!(matches, "step", f64).unwrap_or_else(|e| e.exit()), ) } }; Some((hist, hist_file)) } else { None }; let negate = matches.is_present("negate"); let output = if matches.is_present("output") { let o = value_t!(matches, "output", PathBuf).unwrap_or_else(|e| e.exit()); let driver = value_t!(matches, "driver", String).unwrap_or_else(|_| String::from("GTIFF")); Some(OutputArgs { path: o, driver }) } else { None }; let output_type = { let output_type = value_t!(matches, "output type", String).unwrap_or_else(|_| 
String::from("value")); if output_type == "value" { OutputType::Value } else if output_type == "discretized" { OutputType::Discretized } else { Error::with_description( &format!("invalid output type: {}", output_type), InvalidValue, ) .exit() } }; if let OutputType::Discretized = output_type { if hist.is_none() { Error::with_description( "`discretized' output requires generating histogram (`--hist')", InvalidValue, ) .exit() } } let chunk_size = value_t!(matches, "chunk size", usize).unwrap_or_else(|_| 0x10000); let polygon = value_t!(matches, "polygon", String).ok().map(|wkt| { let geom = gdal::vector::Geometry::from_wkt(&wkt) .unwrap_or_else(|_| Error::with_description("cannot parse WKT", InvalidValue).exit()) .into(); use geo::Geometry::{MultiPolygon, Polygon}; match geom { Polygon(p) => p.into(), MultiPolygon(p) => p, _ => Error::with_description("WKT is not a (multi)-polygon", InvalidValue).exit(), } }); Args { input_a, input_b, hist, negate, polygon, chunk_size, output, output_type, } }
function_block-function_prefixed
[ { "content": "pub fn edit_dataset(path: &Path) -> Result<Dataset> {\n\n Ok(Dataset::open_ex(&path, Some(1), None, None, None)\n\n .with_context(|| format!(\"editing dataset {}\", path.display()))?)\n\n}\n\n\n\nuse gdal::raster::GdalType;\n", "file_path": "raster-tools/src/utils.rs", "rank": 0,...
Rust
src/diagnose.rs
Icenowy/ciel-rs
6e0f2305f29505d9c735f234b1f450f193fa2ca7
use anyhow::{anyhow, Result}; use console::style; use dbus::blocking::stdintf::org_freedesktop_dbus::Properties; use dbus::blocking::Connection; use fs3::statvfs; use indicatif::HumanBytes; use std::sync::mpsc::channel; use std::{fs::File, io::BufRead, time::Duration}; use std::{ io::{BufReader, Write}, thread, }; use tempfile::tempfile_in; use which::which; use crate::error; const SYSTEMD1_PATH: &str = "/org/freedesktop/systemd1"; const SYSTEMD1_DEST: &str = "org.freedesktop.systemd1"; const SYSTEMD1_OBJ: &str = "org.freedesktop.systemd1.Manager"; const TEST_TEXT: &[u8] = b"An-An was born a rabbit, but found herself a girl with bunny ears and tails when she woke up one day. She couldn't seem to remember why."; const TEST_PROGRAMS: &[&str] = &["systemd-nspawn", "systemd-run"]; const TEST_CASES: &[&dyn Fn() -> Result<String>] = &[ &test_sd_bus, &test_io_simple, &test_required_binaries, &test_fs_support, &test_vm_container, &test_disk_io, &test_disk_space, ]; fn test_sd_bus() -> Result<String> { let conn = Connection::new_system()?; let proxy = conn.with_proxy(SYSTEMD1_DEST, SYSTEMD1_PATH, Duration::from_secs(10)); let version: String = proxy.get(SYSTEMD1_OBJ, "Version")?; Ok(format!( "Systemd D-Bus (systemd {}) seems to be working", version )) } fn test_io_simple() -> Result<String> { File::open("/proc/1/cmdline")?; Ok("Basic I/O operations seem to be working".to_string()) } fn test_required_binaries() -> Result<String> { for binary in TEST_PROGRAMS { if which(binary).is_err() { return Err(anyhow!("Required program `{}` is not found", binary)); } } Ok("Required binaries are correctly installed".to_string()) } fn test_fs_support() -> Result<String> { let f = File::open("/proc/filesystems")?; let reader = BufReader::new(f); for line in reader.lines() { let line = line?; let mut fs_type = line.splitn(2, '\t'); if let Some(fs_type) = fs_type.nth(1) { if fs_type == "overlay" { return Ok("Filesystem support seems to be sufficient".to_string()); } } } Err(anyhow!( "Kernel 
does not support overlayfs, try `modprobe overlay`" )) } fn test_vm_container() -> Result<String> { let conn = Connection::new_system()?; let proxy = conn.with_proxy(SYSTEMD1_DEST, SYSTEMD1_PATH, Duration::from_secs(10)); let virt: String = proxy.get(SYSTEMD1_OBJ, "Virtualization")?; if virt == "wsl" { return Ok("!WSL is not supported".to_string()); } let virt_msg; if virt.is_empty() { virt_msg = String::new(); } else { virt_msg = format!("(running in {})", virt); } Ok(format!("Environment seems sane {}", virt_msg)) } fn test_disk_io() -> Result<String> { let (tx, rx) = channel(); thread::spawn(move || { let f = tempfile_in("./"); if let Ok(mut f) = f { if let Ok(()) = f.write_all(TEST_TEXT) { tx.send(()).unwrap(); } } }); if rx.recv_timeout(Duration::from_secs(10)).is_ok() { return Ok("Disk I/O seems ok".to_string()); } error!("The test file is taking too long to write, suspecting I/O stuck."); Err(anyhow!("Disk I/O is not working correctly")) } fn test_disk_space() -> Result<String> { let stats = statvfs(std::fs::canonicalize(".")?)?; if stats.available_space() < (10 * 1024 * 1024 * 1024) { Err(anyhow!("Disk space insufficient. 
Need at least 10 GB of free space to do something meaningful (You have {}).", HumanBytes(stats.available_space()))) } else { Ok(format!( "Disk space is sufficient ({} free of {}).", HumanBytes(stats.available_space()), HumanBytes(stats.total_space()) )) } } pub fn run_diagnose() -> Result<()> { let mut lines = vec![]; let mut has_error = false; for test in TEST_CASES { match test() { Ok(msg) => { if msg.starts_with('!') { lines.push(format!( "{} {}", style("!").yellow(), style(msg.strip_prefix('!').unwrap()).yellow().bold() )); continue; } lines.push(format!( "{} {}", style("✓").green(), style(msg).green().bold() )) } Err(err) => { has_error = true; lines.push(format!("{} {}", style("x").red(), style(err).red().bold())); break; } } } for line in lines { println!("{}", line); } if has_error { return Err(anyhow!("Test error detected")); } Ok(()) }
use anyhow::{anyhow, Result}; use console::style; use dbus::blocking::stdintf::org_freedesktop_dbus::Properties; use dbus::blocking::Connection; use fs3::statvfs; use indicatif::HumanBytes; use std::sync::mpsc::channel; use std::{fs::File, io::BufRead, time::Duration}; use std::{ io::{BufReader, Write}, thread, }; use tempfile::tempfile_in; use which::which; use crate::error; const SYSTEMD1_PATH: &str = "/org/freedesktop/systemd1"; const SYSTEMD1_DEST: &str = "org.freedesktop.systemd1"; const SYSTEMD1_OBJ: &str = "org.freedesktop.systemd1.Manager"; const TEST_TEXT: &[u8] = b"An-An was born a rabbit, but found herself a girl with bunny ears and tails when she woke up one day. She couldn't seem to remember why."; const TEST_PROGRAMS: &[&str] = &["systemd-nspawn", "systemd-run"]; const TEST_CASES: &[&dyn Fn() -> Result<String>] = &[ &test_sd_bus, &test_io_simple, &test_required_binaries, &test_fs_support, &test_vm_container, &test_disk_io, &test_disk_space, ]; fn test_sd_bus() -> Result<String> { let conn = Connection::new_system()?; let proxy = conn.with_proxy(SYSTEMD1_DEST, SYSTEMD1_PATH, Duration::from_secs(10)); let version: String = proxy.get(SYSTEMD1_OBJ, "Version")?; Ok(format!( "Systemd D-Bus (systemd {}) seems to be working", version )) } fn test_io_simple() -> Result<String> { File::open("/proc/1/cmdline")?; Ok("Basic I/O operations seem to be working".to_string()) } fn
ing()); } error!("The test file is taking too long to write, suspecting I/O stuck."); Err(anyhow!("Disk I/O is not working correctly")) } fn test_disk_space() -> Result<String> { let stats = statvfs(std::fs::canonicalize(".")?)?; if stats.available_space() < (10 * 1024 * 1024 * 1024) { Err(anyhow!("Disk space insufficient. Need at least 10 GB of free space to do something meaningful (You have {}).", HumanBytes(stats.available_space()))) } else { Ok(format!( "Disk space is sufficient ({} free of {}).", HumanBytes(stats.available_space()), HumanBytes(stats.total_space()) )) } } pub fn run_diagnose() -> Result<()> { let mut lines = vec![]; let mut has_error = false; for test in TEST_CASES { match test() { Ok(msg) => { if msg.starts_with('!') { lines.push(format!( "{} {}", style("!").yellow(), style(msg.strip_prefix('!').unwrap()).yellow().bold() )); continue; } lines.push(format!( "{} {}", style("✓").green(), style(msg).green().bold() )) } Err(err) => { has_error = true; lines.push(format!("{} {}", style("x").red(), style(err).red().bold())); break; } } } for line in lines { println!("{}", line); } if has_error { return Err(anyhow!("Test error detected")); } Ok(()) }
test_required_binaries() -> Result<String> { for binary in TEST_PROGRAMS { if which(binary).is_err() { return Err(anyhow!("Required program `{}` is not found", binary)); } } Ok("Required binaries are correctly installed".to_string()) } fn test_fs_support() -> Result<String> { let f = File::open("/proc/filesystems")?; let reader = BufReader::new(f); for line in reader.lines() { let line = line?; let mut fs_type = line.splitn(2, '\t'); if let Some(fs_type) = fs_type.nth(1) { if fs_type == "overlay" { return Ok("Filesystem support seems to be sufficient".to_string()); } } } Err(anyhow!( "Kernel does not support overlayfs, try `modprobe overlay`" )) } fn test_vm_container() -> Result<String> { let conn = Connection::new_system()?; let proxy = conn.with_proxy(SYSTEMD1_DEST, SYSTEMD1_PATH, Duration::from_secs(10)); let virt: String = proxy.get(SYSTEMD1_OBJ, "Virtualization")?; if virt == "wsl" { return Ok("!WSL is not supported".to_string()); } let virt_msg; if virt.is_empty() { virt_msg = String::new(); } else { virt_msg = format!("(running in {})", virt); } Ok(format!("Environment seems sane {}", virt_msg)) } fn test_disk_io() -> Result<String> { let (tx, rx) = channel(); thread::spawn(move || { let f = tempfile_in("./"); if let Ok(mut f) = f { if let Ok(()) = f.write_all(TEST_TEXT) { tx.send(()).unwrap(); } } }); if rx.recv_timeout(Duration::from_secs(10)).is_ok() { return Ok("Disk I/O seems ok".to_str
random
[ { "content": "/// Setting up cross-namespace bind-mounts for the container using systemd\n\nfn setup_bind_mounts(ns_name: &str, mounts: &[(String, &str)]) -> Result<()> {\n\n let conn = Connection::new_system()?;\n\n let proxy = conn.with_proxy(MACHINE1_DEST, MACHINE1_PATH, Duration::from_secs(10));\n\n ...
Rust
src/day11.rs
Aidiakapi/advent-of-code-2016
f02d8041594ac99a317588ef64866c66141166ef
use crate::prelude::*; trait FacilityBounds = std::array::LengthAtMost32 + Clone + Eq + Ord + std::hash::Hash; fn solve<const N: usize>(input: Vec<Vec<Module>>) -> Result<usize> where [Element; N]: std::array::LengthAtMost32 + Default, [bool; N]: Default, { use std::{collections::VecDeque, rc::Rc}; let facility: Facility<[Element; N]> = Facility::from_input(input)?; struct TrackedFacility<E: FacilityBounds> { parent: Option<Rc<Self>>, current: Facility<E>, } impl<const N: usize> TrackedFacility<[Element; N]> where [Element; N]: std::array::LengthAtMost32 + Default, [bool; N]: Default, { pub fn depth(&self) -> usize { match self.parent.as_ref() { Some(parent) => parent.depth() + 1, None => 0, } } } let mut visited = HashSet::new(); visited.insert(facility.clone()); let mut bfs = VecDeque::new(); bfs.push_back(Rc::new(TrackedFacility { parent: None, current: facility, })); while let Some(tracked) = bfs.pop_front() { for facility in tracked.current.next_configurations() { if !visited.insert(facility.clone()) { continue; } let child = Rc::new(TrackedFacility { parent: Some(Rc::clone(&tracked)), current: facility, }); if child.current.is_solved() { let depth = child.depth(); return Ok(depth); } bfs.push_back(child); } } Err(anyhow!("no solution found")) } pub fn pt1(input: Vec<Vec<Module>>) -> Result<usize> { solve::<5>(input) } pub fn pt2(mut input: Vec<Vec<Module>>) -> Result<usize> { input[0].push(Module::Generator("elerium")); input[0].push(Module::Microchip("elerium")); input[0].push(Module::Generator("dilithium")); input[0].push(Module::Microchip("dilithium")); solve::<7>(input) } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Hash)] #[repr(packed)] struct Element(u8); impl Element { #[inline(always)] fn generator(&self) -> u8 { self.0 >> 4 } #[inline(always)] fn microchip(&self) -> u8 { self.0 & 0x0f } #[inline(always)] fn move_generator(&mut self, new_floor: u8) { debug_assert!(new_floor & 0xf0 == 0); self.0 = (self.0 & 0x0f) | (new_floor << 4); 
} #[inline(always)] fn move_microchip(&mut self, new_floor: u8) { debug_assert!(new_floor & 0xf0 == 0); self.0 = (self.0 & 0xf0) | new_floor; } } const FLOOR_COUNT: u8 = 4; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] struct Facility<E: FacilityBounds> { elements: E, elevator_position: u8, } impl<const N: usize> Facility<[Element; N]> where [Element; N]: std::array::LengthAtMost32 + Default, [bool; N]: Default, { fn from_input(input: Vec<Vec<Module>>) -> Result<Self> { assert!(N <= 8); if input.len() != FLOOR_COUNT as usize { return Err(anyhow!("invalid floor count")); } let mut element_map = HashMap::new(); for (floor_idx, floor) in input.into_iter().rev().enumerate() { for module in floor { let elem_map_len = element_map.len(); let value = element_map .entry(module.name()) .or_insert((elem_map_len, None, None)); let opt = match module { Module::Generator(_) => &mut value.1, Module::Microchip(_) => &mut value.2, }; if let Some(previous_floor) = opt { return Err(anyhow!( "duplicate module {:?} (floors {} and {})", module, FLOOR_COUNT as usize - floor_idx, FLOOR_COUNT as usize - *previous_floor )); } *opt = Some(floor_idx); } } if element_map.len() != N { return Err(anyhow!( "expected {} elements, got {}", N, element_map.len() )); } if !element_map .values() .all(|(_, a, b)| a.is_some() && b.is_some()) { return Err(anyhow!( "modules must be matching generator & microchip pairs" )); } let mut elements: [Element; N] = Default::default(); for (_, (elem_idx, generator_floor, microchip_floor)) in element_map { elements[elem_idx].move_generator(generator_floor.unwrap() as u8); elements[elem_idx].move_microchip(microchip_floor.unwrap() as u8); } let mut facility = Facility { elements, elevator_position: FLOOR_COUNT - 1, }; facility.normalize(); if !facility.is_safe_configuration() { return Err(anyhow!("unsafe starting conditions")); } Ok(facility) } fn normalize(&mut self) { self.elements.sort_unstable() } fn is_solved(&self) -> bool { 
self.elements.iter().all(|e| e.0 == 0) } fn is_safe_configuration(&self) -> bool { let mut has_generator: [bool; FLOOR_COUNT as usize] = Default::default(); let mut has_unpaired_microchip: [bool; FLOOR_COUNT as usize] = Default::default(); for elem in &self.elements { let generator_floor = elem.generator(); let microchip_floor = elem.microchip(); has_generator[elem.generator() as usize] = true; if generator_floor != microchip_floor { has_unpaired_microchip[microchip_floor as usize] = true; } } (0..FLOOR_COUNT as usize).all(|i| !has_generator[i] || !has_unpaired_microchip[i]) } fn next_configurations<'s>(&'s self) -> impl Iterator<Item = Facility<[Element; N]>> + 's { use std::iter::ExactSizeIterator; #[derive(Clone, Copy)] struct NextFloors { a: u8, b: u8, l: u8, c: u8, } impl Iterator for NextFloors { type Item = u8; fn next(&mut self) -> Option<Self::Item> { if self.c >= self.l { return None; } self.c += 1; Some(if self.c == 1 { self.a } else { self.b }) } fn size_hint(&self) -> (usize, Option<usize>) { let l = self.len(); (l, Some(l)) } } impl ExactSizeIterator for NextFloors { fn len(&self) -> usize { (self.l - self.c) as usize } } #[rustfmt::skip] let next_floors = if self.elevator_position == FLOOR_COUNT - 1 { if self.elevator_position == 0 { NextFloors { a: 0, b: 0, l: 0, c: 0 } } else { NextFloors { a: self.elevator_position - 1, b: 0, l: 1, c: 0 } } } else { if self.elevator_position == 0 { NextFloors { a: self.elevator_position + 1, b: 0, l: 1, c: 0 } } else { NextFloors { a: self.elevator_position - 1, b: self.elevator_position + 1, l: 2, c: 0 } } }; #[derive(Clone, Copy, PartialEq, Eq)] enum Moveable { Microchip(u8), Generator(u8), } let elevator_position = self.elevator_position; let moveable_items = self .elements .iter() .enumerate() .flat_map(move |(idx, elem)| { let idx = idx as u8; let mut array: ArrayVec<[Moveable; 2]> = ArrayVec::new(); unsafe { if elem.generator() == elevator_position { array.push_unchecked(Moveable::Generator(idx)); } if 
elem.microchip() == elevator_position { array.push_unchecked(Moveable::Microchip(idx)); } } array.into_iter() }); let moveable_pairs = moveable_items .clone() .enumerate() .flat_map(move |(count, a)| { repeat(None) .take(1) .chain(moveable_items.clone().skip(count + 1).map(Option::Some)) .map(move |b| (a, b)) }); moveable_pairs .flat_map(move |(a, b)| next_floors.map(move |f| (a, b, f))) .filter_map(move |(a, b, next_floor)| { let mut new = self.clone(); new.elevator_position = next_floor; #[rustfmt::skip] match a { Moveable::Generator(idx) => new.elements[idx as usize].move_generator(next_floor), Moveable::Microchip(idx) => new.elements[idx as usize].move_microchip(next_floor), } if let Some(b) = b { #[rustfmt::skip] match b { Moveable::Generator(idx) => new.elements[idx as usize].move_generator(next_floor), Moveable::Microchip(idx) => new.elements[idx as usize].move_microchip(next_floor), } } if new.is_safe_configuration() { new.normalize(); Some(new) } else { None } }) } } impl Debug for Element { fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { write!(fmt, "G({})xM({})", self.generator(), self.microchip()) } } pub fn parse(s: &str) -> IResult<&str, Vec<Vec<Module>>> { use parsers::*; map_res( fold_many1( delimited( tag("The "), pair( terminated( map_res(alpha1, |s: &str| { Ok(match s { "first" => 1, "second" => 2, "third" => 3, "fourth" => 4, _ => return Err(()), }) }), tag(" floor contains "), ), alt(( map(tag("nothing relevant"), |_| Vec::new()), separated_list( alt((tag(", and "), tag(" and "), tag(", "))), alt(( map( delimited(tag("a "), alpha1, tag("-compatible microchip")), Module::Microchip, ), map( delimited(tag("a "), alpha1, tag(" generator")), Module::Generator, ), )), ), )), ), terminated(char('.'), opt(line_ending)), ), HashMap::new(), |mut acc, (floor, modules)| { acc.insert(floor, modules); acc }, ), |mut layout| { if layout.len() != FLOOR_COUNT as usize { return Err(anyhow!("expected {} floors", FLOOR_COUNT)); } let mut res = 
Vec::with_capacity(FLOOR_COUNT as usize); for i in 1..=FLOOR_COUNT as usize { let v = layout .get_mut(&i) .ok_or_else(|| anyhow!("expected floors 1 through {}", FLOOR_COUNT))?; res.push(Vec::new()); std::mem::swap(&mut res[i - 1], v); } Ok(res) }, )(s) } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Hash)] pub enum Module<'s> { Generator(&'s str), Microchip(&'s str), } impl<'s> Module<'s> { fn name(&self) -> &'s str { match self { Module::Generator(s) => s, Module::Microchip(s) => s, } } } #[test] fn day11() -> Result<()> { fn solve_2(input: Vec<Vec<Module>>) -> Result<usize> { solve::<2>(input) } test_part!(parse, solve_2, "\ The first floor contains a hydrogen-compatible microchip and a lithium-compatible microchip. The second floor contains a hydrogen generator. The third floor contains a lithium generator. The fourth floor contains nothing relevant." => 11); Ok(()) }
use crate::prelude::*; trait FacilityBounds = std::array::LengthAtMost32 + Clone + Eq + Ord + std::hash::Hash; fn solve<const N: usize>(input: Vec<Vec<Module>>) -> Result<usize> where [Element; N]: std::array::LengthAtMost32 + Default, [bool; N]: Default, { use std::{collections::VecDeque, rc::Rc}; let facility: Facility<[Element; N]> = Facility::from_input(input)?; struct TrackedFacility<E: FacilityBounds> { parent: Option<Rc<Self>>, current: Facility<E>, } impl<const N: usize> TrackedFacility<[Element; N]> where [Element; N]: std::array::LengthAtMost32 + Default, [bool; N]: Default, { pub fn depth(&self) -> usize { match self.parent.as_ref() { Some(parent) => parent.depth() + 1, None => 0, } } } let mut visited = HashSet::new(); visited.insert(facility.clone()); let mut bfs = VecDeque::new(); bfs.push_back(Rc::new(TrackedFacility { parent: None, current: facility, })); while let Some(tracked) = bfs.pop_front() { for facility in tracked.current.next_configurations() { if !visited.insert(facility.clone()) { continue; } let child = Rc::new(TrackedFacility { parent: Some(Rc::clone(&tracked)), current: facility, }); if child.current.is_solved() { let depth = child.depth(); return Ok(depth); } bfs.push_back(child); } } Err(anyhow!("no solution found")) } pub fn pt1(input: Vec<Vec<Module>>) -> Result<usize> { solve::<5>(input) } pub fn pt2(mut input: Vec<Vec<Module>>) -> Result<usize> { input[0].push(Module::Generator("elerium")); input[0].push(Module::Microchip("elerium")); input[0].push(Module::Generator("dilithium")); input[0].push(Module::Microchip("dilithium")); solve::<7>(input) } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Hash)] #[repr(packed)] struct Element(u8); impl Element { #[inline(always)] fn generator(&self) -> u8 { self.0 >> 4 } #[inline(always)] fn microchip(&self) -> u8 { self.0 & 0x0f } #[inline(always)] fn move_generator(&mut self, new_floor: u8) { debug_assert!(new_floor & 0xf0 == 0); self.0 = (self.0 & 0x0f) | (new_floor << 4); 
} #[inline(always)] fn move_microchip(&mut self, new_floor: u8) { debug_assert!(new_floor & 0xf0 == 0); self.0 = (self.0 & 0xf0) | new_floor; } } const FLOOR_COUNT: u8 = 4; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] struct Facility<E: FacilityBounds> { elements: E, elevator_position: u8, } impl<const N: usize> Facility<[Element; N]> where [Element; N]: std::array::LengthAtMost32 + Default, [bool; N]: Default, { fn from_input(input: Vec<Vec<Module>>) -> Result<Self> { assert!(N <= 8); if input.len() != FLOOR_COUNT as usize { return Err(anyhow!("invalid floor count")); } let mut element_map = HashMap::new(); for (floor_idx, floor) in input.into_iter().rev().enumerate() { for module in floor { let elem_map_len = element_map.len(); let value = element_map .entry(module.name()) .or_insert((elem_map_len, None, None)); let opt = match module { Module::Generator(_) => &mut value.1, Module::Microchip(_) => &mut value.2, }; if let Some(previous_floor) = opt { return Err(anyhow!( "duplicate module {:?} (floors {} and {})", module, FLOOR_COUNT as usize - floor_idx, FLOOR_COUNT as usize - *previous_floor )); } *opt = Some(floor_idx); } } if element_map.len() != N { return Err(anyhow!( "expected {} elements, got {}", N, element_map.len() )); } if !element_map .values() .all(|(_, a, b)| a.is_some() && b.is_some()) { return Err(anyhow!( "modules must be matching generator & microchip pairs" )); } let mut elements: [Element; N] = Default::default(); for (_, (elem_idx, generator_floor, microchip_floor)) in element_map { elements[elem_idx].move_generator(generator_floor.unwrap() as u8); elements[elem_idx].move_microchip(microchip_floor.unwrap() as u8); } let mut facility = Facility { elements, elevator_position: FLOOR_COUNT - 1, }; facility.normalize(); if !facility.is_safe_configuration() { return Err(anyhow!("unsafe starting conditions")); } Ok(facility) } fn normalize(&mut self) { self.elements.sort_unstable() } fn is_solved(&self) -> bool { 
self.elements.iter().all(|e| e.0 == 0) } fn is_safe_configuration(&self) -> bool { let mut has_generator: [bool; FLOOR_COUNT as usize] = Default::default(); let mut has_unpaired_microchip: [bool; FLOOR_COUNT as usize] = Default::default(); for elem in &self.elements { let generator_floor = elem.generator(); let microchip_floor = elem.microchip(); has_generator[elem.generator() as usize] = true; if generator_floor != microchip_floor { has_unpaired_microchip[microchip_floor as usize] = true; } } (0..FLOOR_COUNT as usize).all(|i| !has_generator[i] || !has_unpaired_microchip[i]) } fn next_configurations<'s>(&'s self) -> impl Iterator<Item = Facility<[Element; N]>> + 's { use std::iter::ExactSizeIterator; #[derive(Clone, Copy)] struct NextFloors { a: u8, b: u8, l: u8, c: u8, } impl Iterator for NextFloors { type Item = u8; fn next(&mut self) -> Option<Self::Item> { if self.c >= self.l { return None; } self.c += 1; Some(if self.c == 1 { self.a } else { self.b }) } fn size_hint(&self) -> (usize, Option<usize>) { let l = self.len(); (l, Some(l)) } } impl ExactSizeIterator for NextFloors { fn len(&self) -> usize { (self.l - self.c) as usize } } #[rustfmt::skip] let next_floors = if self.elevator_position == FLOOR_COUNT - 1 { if self.elevator_position == 0 { NextFloors { a: 0, b: 0, l: 0, c: 0 } } else { NextFloors { a: self.elevator_position - 1, b: 0, l: 1, c: 0 } } } else { if self.elevator_position == 0 { NextFloors { a: self.elevator_position + 1, b: 0, l: 1, c: 0 } } else { NextFloors { a: self.elevator_position - 1, b: self.elevator_position + 1, l: 2, c: 0 } } }; #[derive(Clone, Copy, PartialEq, Eq)] enum Moveable { Microchip(u8), Generator(u8), } let elevator_position = self.elevator_position; let moveable_items = self .elements .iter() .enumerate() .flat_map(move |(idx, elem)| { let idx = idx as u8; let mut array: ArrayVec<[Moveable; 2]> = ArrayVec::new(); unsafe {
b, f))) .filter_map(move |(a, b, next_floor)| { let mut new = self.clone(); new.elevator_position = next_floor; #[rustfmt::skip] match a { Moveable::Generator(idx) => new.elements[idx as usize].move_generator(next_floor), Moveable::Microchip(idx) => new.elements[idx as usize].move_microchip(next_floor), } if let Some(b) = b { #[rustfmt::skip] match b { Moveable::Generator(idx) => new.elements[idx as usize].move_generator(next_floor), Moveable::Microchip(idx) => new.elements[idx as usize].move_microchip(next_floor), } } if new.is_safe_configuration() { new.normalize(); Some(new) } else { None } }) } } impl Debug for Element { fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { write!(fmt, "G({})xM({})", self.generator(), self.microchip()) } } pub fn parse(s: &str) -> IResult<&str, Vec<Vec<Module>>> { use parsers::*; map_res( fold_many1( delimited( tag("The "), pair( terminated( map_res(alpha1, |s: &str| { Ok(match s { "first" => 1, "second" => 2, "third" => 3, "fourth" => 4, _ => return Err(()), }) }), tag(" floor contains "), ), alt(( map(tag("nothing relevant"), |_| Vec::new()), separated_list( alt((tag(", and "), tag(" and "), tag(", "))), alt(( map( delimited(tag("a "), alpha1, tag("-compatible microchip")), Module::Microchip, ), map( delimited(tag("a "), alpha1, tag(" generator")), Module::Generator, ), )), ), )), ), terminated(char('.'), opt(line_ending)), ), HashMap::new(), |mut acc, (floor, modules)| { acc.insert(floor, modules); acc }, ), |mut layout| { if layout.len() != FLOOR_COUNT as usize { return Err(anyhow!("expected {} floors", FLOOR_COUNT)); } let mut res = Vec::with_capacity(FLOOR_COUNT as usize); for i in 1..=FLOOR_COUNT as usize { let v = layout .get_mut(&i) .ok_or_else(|| anyhow!("expected floors 1 through {}", FLOOR_COUNT))?; res.push(Vec::new()); std::mem::swap(&mut res[i - 1], v); } Ok(res) }, )(s) } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Hash)] pub enum Module<'s> { Generator(&'s str), Microchip(&'s str), } impl<'s> Module<'s> { 
fn name(&self) -> &'s str { match self { Module::Generator(s) => s, Module::Microchip(s) => s, } } } #[test] fn day11() -> Result<()> { fn solve_2(input: Vec<Vec<Module>>) -> Result<usize> { solve::<2>(input) } test_part!(parse, solve_2, "\ The first floor contains a hydrogen-compatible microchip and a lithium-compatible microchip. The second floor contains a hydrogen generator. The third floor contains a lithium generator. The fourth floor contains nothing relevant." => 11); Ok(()) }
if elem.generator() == elevator_position { array.push_unchecked(Moveable::Generator(idx)); } if elem.microchip() == elevator_position { array.push_unchecked(Moveable::Microchip(idx)); } } array.into_iter() }); let moveable_pairs = moveable_items .clone() .enumerate() .flat_map(move |(count, a)| { repeat(None) .take(1) .chain(moveable_items.clone().skip(count + 1).map(Option::Some)) .map(move |b| (a, b)) }); moveable_pairs .flat_map(move |(a, b)| next_floors.map(move |f| (a,
function_block-random_span
[ { "content": "pub trait Node = Clone + Eq + Hash;\n", "file_path": "src/astar.rs", "rank": 1, "score": 279361.34725252783 }, { "content": "fn is_free_fn(designer_nr: usize) -> impl Fn(&Vec2us) -> bool + Clone + Copy {\n\n move |p| {\n\n (p.x * p.x + 3 * p.x + 2 * p.x * p.y + p.y + ...
Rust
src/statistic/metric/ratio.rs
Uzaaft/barter-rs
08f1ddd9c622d4b7c4cddfc4520242e0c3360c6e
use crate::statistic::summary::pnl::PnLReturnSummary; use serde::{Deserialize, Serialize}; pub trait Ratio { fn init(risk_free_return: f64) -> Self; fn ratio(&self) -> f64; fn trades_per_day(&self) -> f64; fn daily(&self) -> f64 { calculate_daily(self.ratio(), self.trades_per_day()) } fn annual(&self, trading_days: u32) -> f64 { calculate_annual(self.ratio(), self.trades_per_day(), trading_days) } } #[derive(Copy, Clone, PartialEq, PartialOrd, Debug, Deserialize, Serialize)] pub struct SharpeRatio { pub risk_free_return: f64, pub trades_per_day: f64, pub sharpe_ratio_per_trade: f64, } impl Ratio for SharpeRatio { fn init(risk_free_return: f64) -> Self { Self { risk_free_return, sharpe_ratio_per_trade: 0.0, trades_per_day: 0.0, } } fn ratio(&self) -> f64 { self.sharpe_ratio_per_trade } fn trades_per_day(&self) -> f64 { self.trades_per_day } } impl SharpeRatio { pub fn update(&mut self, pnl_returns: &PnLReturnSummary) { self.trades_per_day = pnl_returns.trades_per_day; self.sharpe_ratio_per_trade = match pnl_returns.total.dispersion.std_dev == 0.0 { true => 0.0, false => { (pnl_returns.total.mean - self.risk_free_return) / pnl_returns.total.dispersion.std_dev } }; } } #[derive(Copy, Clone, PartialEq, PartialOrd, Debug, Deserialize, Serialize)] pub struct SortinoRatio { pub risk_free_return: f64, pub trades_per_day: f64, pub sortino_ratio_per_trade: f64, } impl Ratio for SortinoRatio { fn init(risk_free_return: f64) -> Self { Self { risk_free_return, trades_per_day: 0.0, sortino_ratio_per_trade: 0.0, } } fn ratio(&self) -> f64 { self.sortino_ratio_per_trade } fn trades_per_day(&self) -> f64 { self.trades_per_day } } impl SortinoRatio { pub fn update(&mut self, pnl_returns: &PnLReturnSummary) { self.trades_per_day = pnl_returns.trades_per_day; self.sortino_ratio_per_trade = match pnl_returns.losses.dispersion.std_dev == 0.0 { true => 0.0, false => { (pnl_returns.total.mean - self.risk_free_return) / pnl_returns.losses.dispersion.std_dev } }; } } #[derive(Copy, Clone, 
PartialEq, PartialOrd, Debug, Deserialize, Serialize)] pub struct CalmarRatio { pub risk_free_return: f64, pub trades_per_day: f64, pub calmar_ratio_per_trade: f64, } impl Ratio for CalmarRatio { fn init(risk_free_return: f64) -> Self { Self { risk_free_return, trades_per_day: 0.0, calmar_ratio_per_trade: 0.0, } } fn ratio(&self) -> f64 { self.calmar_ratio_per_trade } fn trades_per_day(&self) -> f64 { self.trades_per_day } } impl CalmarRatio { pub fn update(&mut self, pnl_returns: &PnLReturnSummary, max_drawdown: f64) { self.trades_per_day = pnl_returns.trades_per_day; self.calmar_ratio_per_trade = match max_drawdown == 0.0 { true => 0.0, false => (pnl_returns.total.mean - self.risk_free_return) / max_drawdown.abs(), }; } } pub fn calculate_daily(ratio_per_trade: f64, trades_per_day: f64) -> f64 { ratio_per_trade * trades_per_day.sqrt() } pub fn calculate_annual(ratio_per_trade: f64, trades_per_day: f64, trading_days: u32) -> f64 { calculate_daily(ratio_per_trade, trades_per_day) * (trading_days as f64).sqrt() } #[cfg(test)] mod tests { use super::*; use crate::statistic::summary::pnl::PnLReturnSummary; fn sharpe_ratio_input(count: u64, mean: f64, std_dev: f64) -> PnLReturnSummary { let mut pnl_returns = PnLReturnSummary::new(); pnl_returns.total.count = count; pnl_returns.total.mean = mean; pnl_returns.total.dispersion.std_dev = std_dev; pnl_returns } fn sortino_update_input(count: u64, mean: f64, loss_std_dev: f64) -> PnLReturnSummary { let mut pnl_returns = PnLReturnSummary::new(); pnl_returns.total.count = count; pnl_returns.total.mean = mean; pnl_returns.losses.dispersion.std_dev = loss_std_dev; pnl_returns } fn calmar_ratio_returns_input(count: u64, mean: f64) -> PnLReturnSummary { let mut pnl_returns = PnLReturnSummary::new(); pnl_returns.total.count = count; pnl_returns.total.mean = mean; pnl_returns } #[test] fn sharpe_ratio_update() { let mut sharpe = SharpeRatio::init(0.0); struct TestCase { input_return: PnLReturnSummary, expected_sharpe: f64, } let 
test_cases = vec![ TestCase { input_return: sharpe_ratio_input(1, 0.1, 0.0), expected_sharpe: 0.0, }, TestCase { input_return: sharpe_ratio_input(2, 0.15, 0.05), expected_sharpe: 3.0, }, TestCase { input_return: sharpe_ratio_input(3, 0.2, (1.0_f64 / 150.0_f64).sqrt()), expected_sharpe: 6.0_f64.sqrt(), }, TestCase { input_return: sharpe_ratio_input(4, 0.25, (0.0125_f64).sqrt()), expected_sharpe: 5.0_f64.sqrt(), }, TestCase { input_return: sharpe_ratio_input(5, 0.12, (0.388_f64 / 5.0_f64).sqrt()), expected_sharpe: ((3.0 * 194_f64.sqrt()) / 97.0), }, ]; for (index, test) in test_cases.into_iter().enumerate() { sharpe.update(&test.input_return); let sharpe_diff = sharpe.sharpe_ratio_per_trade - test.expected_sharpe; assert!(sharpe_diff < 1e-10, "Test case: {:?}", index); } } #[test] fn sortino_ratio_update() { let mut sortino = SortinoRatio::init(0.0); struct TestCase { input_return: PnLReturnSummary, expected_sortino: f64, } let test_cases = vec![ TestCase { input_return: sortino_update_input(1, 0.1, 0.0), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(2, 0.15, 0.0), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(3, 0.2, 0.0), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(4, 0.25, 0.0), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(5, 0.12, 0.0), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(6, 0.0, 0.1), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(7, -0.1, 0.12472191), expected_sortino: -0.8017837443, }, ]; for (index, test) in test_cases.into_iter().enumerate() { sortino.update(&test.input_return); let sortino_diff = sortino.sortino_ratio_per_trade - test.expected_sortino; assert!(sortino_diff < 1e-10, "Test case: {:?}", index); } } #[test] fn calmar_ratio_update() { let mut calmar = CalmarRatio::init(0.0); struct TestCase { input_return: PnLReturnSummary, input_max_dd: f64, expected_calmar: f64, } let test_cases 
= vec![ TestCase { input_return: calmar_ratio_returns_input(1, 0.5), input_max_dd: 0.0, expected_calmar: 0.0, }, TestCase { input_return: calmar_ratio_returns_input(2, -0.5), input_max_dd: -0.70, expected_calmar: (-0.1 / 0.7), }, TestCase { input_return: calmar_ratio_returns_input(3, 0.2), input_max_dd: -0.7, expected_calmar: (0.2 / 0.7), }, TestCase { input_return: calmar_ratio_returns_input(4, 0.5), input_max_dd: -0.7, expected_calmar: (0.5 / 0.7), }, TestCase { input_return: calmar_ratio_returns_input(5, 0.24), input_max_dd: -0.8, expected_calmar: (0.24 / 0.8), }, ]; for (index, test) in test_cases.into_iter().enumerate() { calmar.update(&test.input_return, test.input_max_dd); let calmar_diff = calmar.calmar_ratio_per_trade - test.expected_calmar; assert!(calmar_diff < 1e-10, "Test case: {:?}", index); } } #[test] fn calculate_daily_ratios() { struct TestCase { ratio_per_trade: f64, trades_per_day: f64, expected_daily: f64, } let test_cases = vec![ TestCase { ratio_per_trade: -1.0, trades_per_day: 0.1, expected_daily: -0.31622776601683794, }, TestCase { ratio_per_trade: -1.0, trades_per_day: 1.0, expected_daily: -1.0, }, TestCase { ratio_per_trade: 0.0, trades_per_day: 0.1, expected_daily: 0.0, }, TestCase { ratio_per_trade: 0.0, trades_per_day: 1.0, expected_daily: 0.0, }, TestCase { ratio_per_trade: 1.0, trades_per_day: 0.1, expected_daily: 0.31622776601683794, }, TestCase { ratio_per_trade: 1.0, trades_per_day: 1.0, expected_daily: 1.0, }, TestCase { ratio_per_trade: 100.0, trades_per_day: 0.1, expected_daily: 31.622776601683793, }, TestCase { ratio_per_trade: 100.0, trades_per_day: 1.0, expected_daily: 100.0, }, ]; for test in test_cases { let actual = calculate_daily(test.ratio_per_trade, test.trades_per_day); assert_eq!(actual, test.expected_daily) } } #[test] fn calculate_annual_ratios() { struct TestCase { ratio_per_trade: f64, trades_per_day: f64, trading_days: u32, expected_annual: f64, } let test_cases = vec![ TestCase { ratio_per_trade: -1.0, 
trades_per_day: 0.1, trading_days: 252, expected_annual: -5.019960159204453, }, TestCase { ratio_per_trade: -1.0, trades_per_day: 1.0, trading_days: 365, expected_annual: -19.1049731745428, }, TestCase { ratio_per_trade: 0.0, trades_per_day: 0.1, trading_days: 252, expected_annual: 0.0, }, TestCase { ratio_per_trade: 0.0, trades_per_day: 1.0, trading_days: 365, expected_annual: 0.0, }, TestCase { ratio_per_trade: 1.0, trades_per_day: 0.1, trading_days: 252, expected_annual: 5.019960159204453, }, TestCase { ratio_per_trade: 1.0, trades_per_day: 1.0, trading_days: 365, expected_annual: 19.1049731745428, }, TestCase { ratio_per_trade: 100.0, trades_per_day: 0.1, trading_days: 252, expected_annual: 501.99601592044536, }, TestCase { ratio_per_trade: 100.0, trades_per_day: 1.0, trading_days: 365, expected_annual: 1910.49731745428, }, ]; for test in test_cases { let actual = calculate_annual(test.ratio_per_trade, test.trades_per_day, test.trading_days); assert_eq!(actual, test.expected_annual) } } }
use crate::statistic::summary::pnl::PnLReturnSummary; use serde::{Deserialize, Serialize}; pub trait Ratio { fn init(risk_free_return: f64) -> Self; fn ratio(&self) -> f64; fn trades_per_day(&self) -> f64; fn daily(&self) -> f64 { calculate_daily(self.ratio(), self.trades_per_day()) } fn annual(&self, trading_days: u32) -> f64 { calculate_annual(self.ratio(), self.trades_per_day(), trading_days) } } #[derive(Copy, Clone, PartialEq, PartialOrd, Debug, Deserialize, Serialize)] pub struct SharpeRatio { pub risk_free_return: f64, pub trades_per_day: f64, pub sharpe_ratio_per_trade: f64, } impl Ratio for SharpeRatio { fn init(risk_free_return: f64) -> Self { Self { risk_free_return, sharpe_ratio_per_trade: 0.0, trades_per_day: 0.0, } } fn ratio(&self) -> f64 { self.sharpe_ratio_per_trade } fn trades_per_day(&self) -> f64 { self.trades_per_day } } impl SharpeRatio { pub fn update(&mut self, pnl_returns: &PnLReturnSummary) { self.trades_per_day = pnl_returns.trades_per_day; self.sharpe_ratio_per_trade = match pnl_returns.total.dispersion.std_dev == 0.0 { true => 0.0, false => { (pnl_returns.total.mean - self.risk_free_return) / pnl_returns.total.dispersion.std_dev } }; } } #[derive(Copy, Clone, PartialEq, PartialOrd, Debug, Deserialize, Serialize)] pub struct SortinoRatio { pub risk_free_return: f64, pub trades_per_day: f64, pub sortino_ratio_per_trade: f64, } impl Ratio for SortinoRatio { fn init(risk_free_return: f64) -> Self { Self { risk_free_return, trades_per_day: 0.0, sortino_ratio_per_trade: 0.0, } } fn ratio(&self) -> f64 { self.sortino_ratio_per_trade } fn trades_per_day(&self) -> f64 { self.trades_per_day } } impl SortinoRatio { pub fn update(&mut self, pnl_returns: &PnLReturnSummary) { self.trades_per_day = pnl_returns.trades_per_day; self.sortino_ratio_per_trade = match pnl_returns.losses.dispersion.std_dev == 0.0 { true => 0.0, false => { (pnl_returns.total.mean - self.risk_free_return) / pnl_returns.losses.dispersion.std_dev } }; } } #[derive(Copy, Clone, 
PartialEq, PartialOrd, Debug, Deserialize, Serialize)] pub struct CalmarRatio { pub risk_free_return: f64, pub trades_per_day: f64, pub calmar_ratio_per_trade: f64, } impl Ratio for CalmarRatio { fn init(risk_free_return: f64) -> Self { Self { risk_free_return, trades_per_day: 0.0, calmar_ratio_per_trade: 0.0, } } fn ratio(&self) -> f64 { self.calmar_ratio_per_trade } fn trades_per_day(&self) -> f64 { self.trades_per_day } } impl CalmarRatio { pub fn update(&mut self, pnl_returns: &PnLReturnSummary, max_drawdown: f64) { self.trades_per_day = pnl_returns.trades_per_day; self.calmar_ratio_per_trade = match max_drawdown == 0.0 { true => 0.0, false => (pnl_returns.total.mean - self.risk_free_return) / max_drawdown.abs(), }; } } pub fn calculate_daily(ratio_per_trade: f64, trades_per_day: f64) -> f64 { ratio_per_trade * trades_per_day.sqrt() } pub fn calculate_annual(ratio_per_trade: f64, trades_per_day: f64, trading_days: u32) -> f64 { calculate_daily(ratio_per_trade, trades_per_day) * (trading_days as f64).sqrt() } #[cfg(test)] mod tests { use super::*; use crate::statistic::summary::pnl::PnLReturnSummary; fn sharpe_ratio_input(count: u64, mean: f64, std_dev: f64) -> PnLReturnSummary { let mut pnl_returns = PnLReturnSummary::new(); pnl_returns.total.count = count; pnl_returns.total.mean = mean; pnl_returns.total.dispersion.std_dev = std_dev; pnl_returns } fn sortino_update_input(count: u64, mean: f64, loss_std_dev: f64) -> PnLReturnSummary { let mut pnl_returns = PnLReturnSummary::new(); pnl_returns.total.count = count; pnl_returns.total.mean = mean; pnl_returns.losses.dispersion.std_dev = loss_std_dev; pnl_returns } fn calmar_ratio_returns_input(count: u64, mean: f64) -> PnLReturnSummary { let mut pnl_returns = PnLReturnSummary::new(); pnl_returns.total.count = count; pnl_returns.total.mean = mean; pnl_returns } #[test] fn sharpe_ratio_update() { let mut sharpe = SharpeRatio::init(0.0); struct TestCase { input_return: PnLReturnSummary, expected_sharpe: f64, } let 
test_cases = vec![ TestCase { input_return: sharpe_ratio_input(1, 0.1, 0.0), expected_sharpe: 0.0, }, TestCase { input_return: sharpe_ratio_input(2, 0.15, 0.05), expected_sharpe: 3.0, }, TestCase { input_return: sharpe_ratio_input(3, 0.2, (1.0_f64 / 150.0_f64).sqrt()), expected_sharpe: 6.0_f64.sqrt(), }, TestCase { input_return: sharpe_ratio_input(4, 0.25, (0.0125_f64).sqrt()), expected_sharpe: 5.0_f64.sqrt(), }, TestCase { input_return: sharpe_ratio_input(5, 0.12, (0.388_f64 / 5.0_f64).sqrt()), expected_sharpe: ((3.0 * 194_f64.sqrt()) / 97.0), }, ]; for (index, test) in test_cases.into_iter().enumerate() { sharpe.update(&test.input_return); let sharpe_diff = sharpe.sharpe_ratio_per_trade - test.expected_sharpe; assert!(sharpe_diff < 1e-10, "Test case: {:?}", index); } } #[test] fn sortino_ratio_update() { let mut sortino = SortinoRatio::init(0.0); struct TestCase { input_return: PnLReturnSummary, expected_sortino: f64, }
#[test] fn calmar_ratio_update() { let mut calmar = CalmarRatio::init(0.0); struct TestCase { input_return: PnLReturnSummary, input_max_dd: f64, expected_calmar: f64, } let test_cases = vec![ TestCase { input_return: calmar_ratio_returns_input(1, 0.5), input_max_dd: 0.0, expected_calmar: 0.0, }, TestCase { input_return: calmar_ratio_returns_input(2, -0.5), input_max_dd: -0.70, expected_calmar: (-0.1 / 0.7), }, TestCase { input_return: calmar_ratio_returns_input(3, 0.2), input_max_dd: -0.7, expected_calmar: (0.2 / 0.7), }, TestCase { input_return: calmar_ratio_returns_input(4, 0.5), input_max_dd: -0.7, expected_calmar: (0.5 / 0.7), }, TestCase { input_return: calmar_ratio_returns_input(5, 0.24), input_max_dd: -0.8, expected_calmar: (0.24 / 0.8), }, ]; for (index, test) in test_cases.into_iter().enumerate() { calmar.update(&test.input_return, test.input_max_dd); let calmar_diff = calmar.calmar_ratio_per_trade - test.expected_calmar; assert!(calmar_diff < 1e-10, "Test case: {:?}", index); } } #[test] fn calculate_daily_ratios() { struct TestCase { ratio_per_trade: f64, trades_per_day: f64, expected_daily: f64, } let test_cases = vec![ TestCase { ratio_per_trade: -1.0, trades_per_day: 0.1, expected_daily: -0.31622776601683794, }, TestCase { ratio_per_trade: -1.0, trades_per_day: 1.0, expected_daily: -1.0, }, TestCase { ratio_per_trade: 0.0, trades_per_day: 0.1, expected_daily: 0.0, }, TestCase { ratio_per_trade: 0.0, trades_per_day: 1.0, expected_daily: 0.0, }, TestCase { ratio_per_trade: 1.0, trades_per_day: 0.1, expected_daily: 0.31622776601683794, }, TestCase { ratio_per_trade: 1.0, trades_per_day: 1.0, expected_daily: 1.0, }, TestCase { ratio_per_trade: 100.0, trades_per_day: 0.1, expected_daily: 31.622776601683793, }, TestCase { ratio_per_trade: 100.0, trades_per_day: 1.0, expected_daily: 100.0, }, ]; for test in test_cases { let actual = calculate_daily(test.ratio_per_trade, test.trades_per_day); assert_eq!(actual, test.expected_daily) } } #[test] fn 
calculate_annual_ratios() { struct TestCase { ratio_per_trade: f64, trades_per_day: f64, trading_days: u32, expected_annual: f64, } let test_cases = vec![ TestCase { ratio_per_trade: -1.0, trades_per_day: 0.1, trading_days: 252, expected_annual: -5.019960159204453, }, TestCase { ratio_per_trade: -1.0, trades_per_day: 1.0, trading_days: 365, expected_annual: -19.1049731745428, }, TestCase { ratio_per_trade: 0.0, trades_per_day: 0.1, trading_days: 252, expected_annual: 0.0, }, TestCase { ratio_per_trade: 0.0, trades_per_day: 1.0, trading_days: 365, expected_annual: 0.0, }, TestCase { ratio_per_trade: 1.0, trades_per_day: 0.1, trading_days: 252, expected_annual: 5.019960159204453, }, TestCase { ratio_per_trade: 1.0, trades_per_day: 1.0, trading_days: 365, expected_annual: 19.1049731745428, }, TestCase { ratio_per_trade: 100.0, trades_per_day: 0.1, trading_days: 252, expected_annual: 501.99601592044536, }, TestCase { ratio_per_trade: 100.0, trades_per_day: 1.0, trading_days: 365, expected_annual: 1910.49731745428, }, ]; for test in test_cases { let actual = calculate_annual(test.ratio_per_trade, test.trades_per_day, test.trading_days); assert_eq!(actual, test.expected_annual) } } }
let test_cases = vec![ TestCase { input_return: sortino_update_input(1, 0.1, 0.0), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(2, 0.15, 0.0), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(3, 0.2, 0.0), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(4, 0.25, 0.0), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(5, 0.12, 0.0), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(6, 0.0, 0.1), expected_sortino: 0.0, }, TestCase { input_return: sortino_update_input(7, -0.1, 0.12472191), expected_sortino: -0.8017837443, }, ]; for (index, test) in test_cases.into_iter().enumerate() { sortino.update(&test.input_return); let sortino_diff = sortino.sortino_ratio_per_trade - test.expected_sortino; assert!(sortino_diff < 1e-10, "Test case: {:?}", index); } }
function_block-function_prefix_line
[]
Rust
bindings/wasm/src/subscriber/subscriberw.rs
Dr-Electron/streams
054bf6d1cd42ba615c43a5ca1e935e942e443f5d
use js_sys::Array; use wasm_bindgen::prelude::*; use crate::{ types::*, user::userw::*, }; use core::cell::RefCell; use iota_streams::{ app::{ futures::executor::block_on, transport::{ tangle::client::Client as ApiClient, TransportOptions, }, }, app_channels::api::{ psk_from_seed, pskid_from_psk, tangle::Subscriber as ApiSubscriber, }, core::{ prelude::{ Rc, String, }, psk::{ pskid_from_hex_str, pskid_to_hex_string, }, }, ddml::types::*, }; #[wasm_bindgen] pub struct Subscriber { subscriber: Rc<RefCell<ApiSubscriber<Rc<RefCell<ApiClient>>>>>, } #[wasm_bindgen] impl Subscriber { #[wasm_bindgen(constructor)] pub fn new(seed: String, options: SendOptions) -> Subscriber { let mut client = ApiClient::new_from_url(&options.url()); client.set_send_options(options.into()); let transport = Rc::new(RefCell::new(client)); let subscriber = Rc::new(RefCell::new(ApiSubscriber::new(&seed, transport))); Subscriber { subscriber } } pub fn from_client(client: Client, seed: String) -> Subscriber { let subscriber = Rc::new(RefCell::new(ApiSubscriber::new(&seed, client.to_inner()))); Subscriber { subscriber } } #[wasm_bindgen(catch)] pub fn import(client: Client, bytes: Vec<u8>, password: &str) -> Result<Subscriber> { block_on(ApiSubscriber::import(&bytes, password, client.to_inner())) .map(|v| Subscriber { subscriber: Rc::new(RefCell::new(v)), }) .into_js_result() } pub async fn recover(seed: String, ann_address: Address, options: SendOptions) -> Result<Subscriber> { let mut client = ApiClient::new_from_url(&options.url()); client.set_send_options(options.into()); let transport = Rc::new(RefCell::new(client)); ApiSubscriber::recover(&seed, ann_address.as_inner(), transport) .await .map(|sub| Subscriber { subscriber: Rc::new(RefCell::new(sub)), }) .into_js_result() } pub fn clone(&self) -> Subscriber { Subscriber { subscriber: self.subscriber.clone(), } } #[wasm_bindgen(catch)] pub fn channel_address(&self) -> Result<String> { self.subscriber .borrow_mut() .channel_address() 
.map(|addr| addr.to_string()) .ok_or("channel not subscribed") .into_js_result() } #[wasm_bindgen(catch)] pub fn get_client(&self) -> Client { Client(self.subscriber.borrow_mut().get_transport().clone()) } #[wasm_bindgen(catch)] pub fn is_multi_branching(&self) -> Result<bool> { Ok(self.subscriber.borrow_mut().is_multi_branching()) } #[wasm_bindgen(catch)] pub fn store_psk(&self, psk_seed_str: String) -> Result<String> { let psk = psk_from_seed(psk_seed_str.as_bytes()); let pskid = pskid_from_psk(&psk); let pskid_str = pskid_to_hex_string(&pskid); self.subscriber.borrow_mut().store_psk(pskid, psk).into_js_result()?; Ok(pskid_str) } #[wasm_bindgen(catch)] pub fn get_public_key(&self) -> Result<String> { Ok(public_key_to_string(self.subscriber.borrow_mut().get_public_key())) } #[wasm_bindgen(catch)] pub fn author_public_key(&self) -> Result<String> { self.subscriber .borrow_mut() .author_public_key() .ok_or("channel not registered, author's public key not found") .map(|author_pk| hex::encode(author_pk.to_bytes())) .into_js_result() } #[wasm_bindgen(catch)] pub fn is_registered(&self) -> Result<bool> { Ok(self.subscriber.borrow_mut().is_registered()) } #[wasm_bindgen(catch)] pub fn unregister(&self) -> Result<()> { self.subscriber.borrow_mut().unregister(); Ok(()) } #[wasm_bindgen(catch)] pub fn export(&self, password: &str) -> Result<Vec<u8>> { block_on(self.subscriber.borrow_mut().export(password)).into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_announcement(self, link: Address) -> Result<()> { self.subscriber .borrow_mut() .receive_announcement(link.as_inner()) .await .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_keyload(self, link: Address) -> Result<bool> { self.subscriber .borrow_mut() .receive_keyload(link.as_inner()) .await .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_tagged_packet(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .receive_tagged_packet(link.as_inner()) .await 
.map(|(pub_bytes, masked_bytes)| { UserResponse::new(link, None, Some(Message::new(None, pub_bytes.0, masked_bytes.0))) }) .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_signed_packet(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .receive_signed_packet(link.as_inner()) .await .map(|(pk, pub_bytes, masked_bytes)| { UserResponse::new( link, None, Some(Message::new( Some(public_key_to_string(&pk)), pub_bytes.0, masked_bytes.0, )), ) }) .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_sequence(self, link: Address) -> Result<Address> { self.subscriber .borrow_mut() .receive_sequence(link.as_inner()) .await .map(Into::into) .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_msg(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .receive_msg(link.as_inner()) .await .map(|msg| { let msgs = vec![msg]; let responses = get_message_contents(msgs); responses[0].copy() }) .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_msg_by_sequence_number(self, anchor_link: Address, msg_num: u32) -> Result<UserResponse> { self.subscriber .borrow_mut() .receive_msg_by_sequence_number(anchor_link.as_inner(), msg_num) .await .map(|msg| { let msgs = vec![msg]; let response = get_message_contents(msgs); response[0].copy() }) .into_js_result() } #[wasm_bindgen(catch)] pub async fn send_subscribe(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .send_subscribe(link.as_inner()) .await .map(|link| UserResponse::new(link.into(), None, None)) .into_js_result() } #[wasm_bindgen(catch)] pub async fn send_unsubscribe(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .send_unsubscribe(link.as_inner()) .await .map(|link| UserResponse::new(link.into(), None, None)) .into_js_result() } #[wasm_bindgen(catch)] pub async fn send_tagged_packet( self, link: Address, public_payload: Vec<u8>, masked_payload: Vec<u8>, ) -> Result<UserResponse> { 
self.subscriber .borrow_mut() .send_tagged_packet(link.as_inner(), &Bytes(public_payload), &Bytes(masked_payload)) .await .map(|(link, seq_link)| UserResponse::new(link.into(), seq_link.map(Into::into), None)) .into_js_result() } #[wasm_bindgen(catch)] pub async fn send_signed_packet( self, link: Address, public_payload: Vec<u8>, masked_payload: Vec<u8>, ) -> Result<UserResponse> { self.subscriber .borrow_mut() .send_signed_packet(link.as_inner(), &Bytes(public_payload), &Bytes(masked_payload)) .await .map(|(link, seq_link)| UserResponse::new(link.into(), seq_link.map(Into::into), None)) .into_js_result() } #[wasm_bindgen(catch)] pub async fn sync_state(self) -> Result<()> { loop { let msgs = self.subscriber.borrow_mut().fetch_next_msgs().await; if msgs.is_empty() { break; } } Ok(()) } #[wasm_bindgen(catch)] pub async fn fetch_next_msgs(self) -> Result<Array> { let msgs = self.subscriber.borrow_mut().fetch_next_msgs().await; let payloads = get_message_contents(msgs); Ok(payloads.into_iter().map(JsValue::from).collect()) } #[wasm_bindgen(catch)] pub async fn fetch_prev_msg(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .fetch_prev_msg(link.as_inner()) .await .map(|msg| { let msgs = vec![msg]; let responses = get_message_contents(msgs); responses[0].copy() }) .into_js_result() } #[wasm_bindgen(catch)] pub async fn fetch_prev_msgs(self, link: Address, num_msgs: usize) -> Result<Array> { self.subscriber .borrow_mut() .fetch_prev_msgs(link.as_inner(), num_msgs) .await .map(|msgs| { let responses = get_message_contents(msgs); responses.into_iter().map(JsValue::from).collect() }) .into_js_result() } #[wasm_bindgen(catch)] pub fn fetch_state(&self) -> Result<Array> { self.subscriber .borrow_mut() .fetch_state() .map(|state_list| { state_list .into_iter() .map(|(id, cursor)| JsValue::from(UserState::new(id, cursor.into()))) .collect() }) .into_js_result() } #[wasm_bindgen(catch)] pub fn reset_state(self) -> Result<()> { 
self.subscriber.borrow_mut().reset_state().into_js_result() } pub fn remove_psk(&self, pskid_str: String) -> Result<()> { pskid_from_hex_str(&pskid_str) .and_then(|pskid| self.subscriber.borrow_mut().remove_psk(pskid).into()) .into_js_result() } }
use js_sys::Array; use wasm_bindgen::prelude::*; use crate::{ types::*, user::userw::*, }; use core::cell::RefCell; use iota_streams::{ app::{ futures::executor::block_on, transport::{ tangle::client::Client as ApiClient, TransportOptions, }, }, app_channels::api::{ psk_from_seed, pskid_from_psk, tangle::Subscriber as ApiSubscriber, }, core::{ prelude::{ Rc, String, }, psk::{ pskid_from_hex_str, pskid_to_hex_string, }, }, ddml::types::*, }; #[wasm_bindgen] pub struct Subscriber { subscriber: Rc<RefCell<ApiSubscriber<Rc<RefCell<ApiClient>>>>>, } #[wasm_bindgen] impl Subscriber { #[wasm_bindgen(constructor)] pub fn new(seed: String, options: SendOptions) -> Subscriber { let mut client = ApiClient::new_from_url(&options.url()); client.set_send_options(options.into()); let transport = Rc::new(RefCell::new(client)); let subscriber = Rc::new(RefCell::new(ApiSubscriber::new(&seed, transport))); Subscriber { subscriber } } pub fn from_client(client: Client, seed: String) -> Subscriber { let subscriber = Rc::new(RefCell::new(ApiSubscriber::new(&seed, client.to_inner()))); Subscriber { subscriber } } #[wasm_bindgen(catch)] pub fn import(client: Client, bytes: Vec<u8>, password: &str) -> Result<Subscriber> { block_on(ApiSubscriber::import(&bytes, password, client.to_inner())) .map(|v| Subscriber { subscriber: Rc::new(RefCell::new(v)), }) .into_js_result() } pub async fn recover(seed: String, ann_address: Address, options: SendOptions) -> Result<Subscriber> { let mut client = ApiClient::new_from_url(&options.url()); client.set_send_options(options.into()); let transport = Rc::new(RefCell::new(client)); ApiSubscriber::recover(&seed, ann_address.as_inner(), transport) .await .map(|sub| Subscriber { subscriber: Rc::new(RefCell::new(sub)), }) .into_js_result() } pub fn clone(&self) -> Subscriber { Subscriber { subscriber: self.subscriber.clone(), } } #[wasm_bindgen(catch)] pub fn channel_address(&self) -> Result<String> { self.subscriber .borrow_mut() .channel_address() 
.map(|addr| addr.to_string()) .ok_or("channel not subscribed") .into_js_result() } #[wasm_bindgen(catch)] pub fn get_client(&self) -> Client { Client(self.subscriber.borrow_mut().get_transport().clone()) } #[wasm_bindgen(catch)] pub fn is_multi_branching(&self) -> Result<bool> { Ok(self.subscriber.borrow_mut().is_multi_branching()) } #[wasm_bindgen(catch)] pub fn store_psk(&self, psk_seed_str: String) -> Result<String> { let psk = psk_from_seed(psk_seed_str.as_bytes()); let pskid = pskid_from_psk(&psk); let pskid_str = pskid_to_hex_string(&pskid); self.subscriber.borrow_mut().store_psk(pskid, psk).into_js_result()?; Ok(pskid_str) } #[wasm_bindgen(catch)] pub fn get_public_key(&self) -> Result<String> { Ok(public_key_to_string(self.subscriber.borrow_mut().get_public_key())) } #[wasm_bindgen(catch)] pub fn author_public_key(&self) -> Result<String> { self.subscriber .borrow_mut() .author_public_key() .ok_or("channel not registered, author's public key not found") .map(|author_pk| hex::encode(author_pk.to_bytes())) .into_js_result() } #[wasm_bindgen(catch)] pub fn is_registered(&self) -> Result<bool> { Ok(self.subscriber.borrow_mut().is_registered()) } #[wasm_bindgen(catch)] pub fn unregister(&self) -> Result<()> { self.subscriber.borrow_mut().unregister(); Ok(()) } #[wasm_bindgen(catch)] pub fn export(&self, password: &str) -> Result<Vec<u8>> { block_on(self.subscriber.borrow_mut().export(password)).into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_announcement(self, link: Address) -> Result<()> { self.subscriber .borrow_mut() .receive_announcement(link.as_inner()) .await .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_keyload(self, link: Address) -> Result<bool> { self.subscriber .borrow_mut() .receive_keyload(link.as_inner()) .await .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_tagged_packet(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .receive_tagged_packet(link.as_inner()) .await 
.map(|(pub_bytes, masked_bytes)| { UserResponse::new(link, None, Some(Message::new(None, pub_bytes.0, masked_bytes.0))) }) .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_signed_packet(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .receive_signed_packet(link.as_inner()) .await .map(|(pk, pub_bytes, masked_bytes)| { UserResponse::new( link, None, Some(Message::new( Some(public_key_to_string(&pk)), pub_bytes.0, masked_bytes.0, )), ) }) .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_sequence(self, link: Address) -> Result<Address> { self.subscriber .borrow_mut() .receive_sequence(link.as_inner()) .await .map(Into::into) .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_msg(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .receive_msg(link.as_inner()) .await .map(|msg| { let msgs = vec![msg]; let responses = get_message_contents(msgs); responses[0].copy() }) .into_js_result() } #[wasm_bindgen(catch)] pub async fn receive_msg_by_sequence_number(self, anchor_link: Address, msg_num: u32) -> Result<UserResponse> { self.subscriber .borrow_mut() .receive_msg_by_sequence_number(anchor_link.as_inner(), msg_num) .await .map(|msg| { let msgs = vec![msg]; let response = get_message_contents(msgs); response[0].copy() }) .into_js_result() } #[wasm_bindgen(catch)] pub async fn send_subscribe(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .send_subscribe(link.as_inner()) .await .map(|link| UserResponse::new(link.into(), None, None)) .into_js_result() } #[wasm_bindgen(catch)] pub async fn send_unsubscribe(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .send_unsubscribe(link.as_inner()) .await .map(|link| UserResponse::new(link.into(), None, None)) .into_js_result() } #[wasm_bindgen(catch)] pub async fn send_tagged_packet( self, link: Address, public_pay
#[wasm_bindgen(catch)] pub async fn send_signed_packet( self, link: Address, public_payload: Vec<u8>, masked_payload: Vec<u8>, ) -> Result<UserResponse> { self.subscriber .borrow_mut() .send_signed_packet(link.as_inner(), &Bytes(public_payload), &Bytes(masked_payload)) .await .map(|(link, seq_link)| UserResponse::new(link.into(), seq_link.map(Into::into), None)) .into_js_result() } #[wasm_bindgen(catch)] pub async fn sync_state(self) -> Result<()> { loop { let msgs = self.subscriber.borrow_mut().fetch_next_msgs().await; if msgs.is_empty() { break; } } Ok(()) } #[wasm_bindgen(catch)] pub async fn fetch_next_msgs(self) -> Result<Array> { let msgs = self.subscriber.borrow_mut().fetch_next_msgs().await; let payloads = get_message_contents(msgs); Ok(payloads.into_iter().map(JsValue::from).collect()) } #[wasm_bindgen(catch)] pub async fn fetch_prev_msg(self, link: Address) -> Result<UserResponse> { self.subscriber .borrow_mut() .fetch_prev_msg(link.as_inner()) .await .map(|msg| { let msgs = vec![msg]; let responses = get_message_contents(msgs); responses[0].copy() }) .into_js_result() } #[wasm_bindgen(catch)] pub async fn fetch_prev_msgs(self, link: Address, num_msgs: usize) -> Result<Array> { self.subscriber .borrow_mut() .fetch_prev_msgs(link.as_inner(), num_msgs) .await .map(|msgs| { let responses = get_message_contents(msgs); responses.into_iter().map(JsValue::from).collect() }) .into_js_result() } #[wasm_bindgen(catch)] pub fn fetch_state(&self) -> Result<Array> { self.subscriber .borrow_mut() .fetch_state() .map(|state_list| { state_list .into_iter() .map(|(id, cursor)| JsValue::from(UserState::new(id, cursor.into()))) .collect() }) .into_js_result() } #[wasm_bindgen(catch)] pub fn reset_state(self) -> Result<()> { self.subscriber.borrow_mut().reset_state().into_js_result() } pub fn remove_psk(&self, pskid_str: String) -> Result<()> { pskid_from_hex_str(&pskid_str) .and_then(|pskid| self.subscriber.borrow_mut().remove_psk(pskid).into()) .into_js_result() } }
load: Vec<u8>, masked_payload: Vec<u8>, ) -> Result<UserResponse> { self.subscriber .borrow_mut() .send_tagged_packet(link.as_inner(), &Bytes(public_payload), &Bytes(masked_payload)) .await .map(|(link, seq_link)| UserResponse::new(link.into(), seq_link.map(Into::into), None)) .into_js_result() }
function_block-function_prefixed
[ { "content": "/// Create a PskId from hex string.\n\npub fn pskid_from_hex_str(hex_str: &str) -> Result<PskId> {\n\n let pskid_bytes =\n\n hex::decode(hex_str).map_err(|e| wrapped_err!(Errors::BadHexFormat(hex_str.into()), WrappedError(e)))?;\n\n try_or!(\n\n PSKID_SIZE == pskid_bytes.len(),...
Rust
src/lib.rs
knoby/printer_operating_box
fe162e9ab06d98999c451feff366f918ad14f623
#![no_std] #![deny(unsafe_code)] #![deny(missing_docs)] #![deny(warnings)] use embedded_hal::digital::v2::{InputPin, OutputPin}; pub struct OpBox<I: InputPin, O: OutputPin> { clk_pin: O, latch_pin: O, output_pin: O, input_pin: I, state_out: [bool; 16], } impl<E, I: InputPin<Error = E>, O: OutputPin<Error = E>> OpBox<I, O> { pub fn new(clk_pin: O, latch_pin: O, output_pin: O, input_pin: I) -> Result<Self, E> { let mut driver = Self { clk_pin, latch_pin, output_pin, input_pin, state_out: [false; 16], }; driver.set_get_shift_registers()?; Ok(driver) } pub fn set_d1(&mut self, state: bool) -> Result<(), E> { self.state_out[0] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d2(&mut self, state: bool) -> Result<(), E> { self.state_out[1] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d3(&mut self, state: bool) -> Result<(), E> { self.state_out[2] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d4(&mut self, state: bool) -> Result<(), E> { self.state_out[3] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d5(&mut self, state: bool) -> Result<(), E> { self.state_out[4] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d6(&mut self, state: bool) -> Result<(), E> { self.state_out[5] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d7(&mut self, state: bool) -> Result<(), E> { self.state_out[6] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d8(&mut self, state: bool) -> Result<(), E> { self.state_out[7] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d9(&mut self, state: bool) -> Result<(), E> { self.state_out[8] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_segment_display(&mut self, display: SegmentDisplay) -> Result<(), E> { use SegmentDisplay::*; let display_state = match display { One => [false, false, false, false, true, true, false], _ => [false; 7], }; self.state_out[8..].clone_from_slice(&display_state); 
self.set_get_shift_registers().map(|_| ()) } pub fn get_s1(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[0]) } pub fn get_s2(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[1]) } pub fn get_s3(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[2]) } pub fn get_s4(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[3]) } pub fn get_s5(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[4]) } pub fn get_s6(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[5]) } pub fn get_s7(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[6]) } pub fn get_s8(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[7]) } fn set_get_shift_registers(&mut self) -> Result<[bool; 8], E> { self.latch_pin.set_low()?; let mut inputs = [false; 8]; for (&output, input) in self.state_out[..8].iter().zip(inputs.iter_mut()) { self.clk_pin.set_low()?; if output { self.output_pin.set_high()?; } else { self.output_pin.set_low()?; } self.clk_pin.set_high()?; *input = self.input_pin.is_high()?; } for &output in self.state_out[8..].iter() { self.clk_pin.set_low()?; if output { self.output_pin.set_high()?; } else { self.output_pin.set_low()?; } self.clk_pin.set_high()?; } self.latch_pin.set_high()?; Ok(inputs) } } #[allow(missing_docs, non_camel_case_types)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum SegmentDisplay { Zero, One, Two, Thre, Four, Fife, Six, Six_Tail, Seven, Seven_Tail, Eight, Nine, A, b, C, c, d, E, F, G, H, h, I, J, L, n, O, o, P, q, r, S, t, U, u, y, Custom([bool; 7]), }
#![no_std] #![deny(unsafe_code)] #![deny(missing_docs)] #![deny(warnings)] use embedded_hal::digital::v2::{InputPin, OutputPin}; pub struct OpBox<I: InputPin, O: OutputPin> { clk_pin: O, latch_pin: O, output_pin: O, input_pin: I, state_out: [bool; 16], } impl<E, I: InputPin<Error = E>, O: OutputPin<Error = E>> OpBox<I, O> { pub fn new(clk_pin: O, latch_pin: O, output_pin: O, input_pin: I) -> Result<Self, E> { let mut driver = Self { clk_pin, latch_pin, output_pin, input_pin, state_out: [false; 16], }; driver.set_get_shift_registers()?; Ok(driver) } pub fn set_d1(&mut self, state: bool) -> Result<(), E> { self.state_out[0] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d2(&mut self, state: bool) -> Result<(), E> { self.state_out[1] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d3(&mut self, state: bool) -> Result<(), E> { self.state_out[2] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d4(&mut self, state: bool) -> Result<(), E> { self.state_out[3] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d5(&mut self, state: bool) -> Result<(), E> { self.state_out[4] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d6(&mut self, state: bool) -> Result<(), E> { self.state_out[5] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d7(&mut self, state: bool) -> Result<(), E> { self.state_out[6] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d8(&mut self, state: bool) -> Result<(), E> { self.state_out[7] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_d9(&mut self, state: bool) -> Result<(), E> { self.state_out[8] = state; self.set_get_shift_registers().map(|_| ()) } pub fn set_segment_display(&mut self, display: SegmentDisplay) -> Result<(), E> { use SegmentDisplay::*; let display_state = match display { One => [false, false, false, false, true, tru
pub fn get_s1(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[0]) } pub fn get_s2(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[1]) } pub fn get_s3(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[2]) } pub fn get_s4(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[3]) } pub fn get_s5(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[4]) } pub fn get_s6(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[5]) } pub fn get_s7(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[6]) } pub fn get_s8(&mut self) -> Result<bool, E> { self.set_get_shift_registers().map(|inputs| inputs[7]) } fn set_get_shift_registers(&mut self) -> Result<[bool; 8], E> { self.latch_pin.set_low()?; let mut inputs = [false; 8]; for (&output, input) in self.state_out[..8].iter().zip(inputs.iter_mut()) { self.clk_pin.set_low()?; if output { self.output_pin.set_high()?; } else { self.output_pin.set_low()?; } self.clk_pin.set_high()?; *input = self.input_pin.is_high()?; } for &output in self.state_out[8..].iter() { self.clk_pin.set_low()?; if output { self.output_pin.set_high()?; } else { self.output_pin.set_low()?; } self.clk_pin.set_high()?; } self.latch_pin.set_high()?; Ok(inputs) } } #[allow(missing_docs, non_camel_case_types)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum SegmentDisplay { Zero, One, Two, Thre, Four, Fife, Six, Six_Tail, Seven, Seven_Tail, Eight, Nine, A, b, C, c, d, E, F, G, H, h, I, J, L, n, O, o, P, q, r, S, t, U, u, y, Custom([bool; 7]), }
e, false], _ => [false; 7], }; self.state_out[8..].clone_from_slice(&display_state); self.set_get_shift_registers().map(|_| ()) }
function_block-function_prefixed
[ { "content": "### Printer Operating Box\n\n\n\nThis operating panel of an old printer consists of 10 LEDs (green and orange) a 7 segment led display and 8 buttions. \n\nThe LEDs and buttons are controlled with 8bit shift registers (74hc595 and 74hc169). The pinout if the following:\n\n\n\n- Clock\n\n- Latch\n\n...
Rust
strict_encoding/derive/src/derive.rs
zkao/rust-lnpbp
5bbbd2291d35408550eae62bcbd235308070cb02
use proc_macro2::TokenStream as TokenStream2; use syn::spanned::Spanned; use syn::{ Data, DataEnum, DataStruct, DeriveInput, Error, Fields, Ident, Index, Result, }; use crate::util::get_encoding_crate; pub(crate) fn encode_inner(input: DeriveInput) -> Result<TokenStream2> { match input.data { Data::Struct(ref data) => encode_inner_struct(&input, data), Data::Enum(ref data) => encode_inner_enum(&input, data), Data::Union(_) => Err(Error::new_spanned( &input, "Deriving StrictEncode is not supported in unions", )), } } pub(crate) fn decode_inner(input: DeriveInput) -> Result<TokenStream2> { match input.data { Data::Struct(ref data) => decode_inner_struct(&input, data), Data::Enum(ref data) => decode_inner_enum(&input, data), Data::Union(_) => Err(Error::new_spanned( &input, "Deriving StrictDecode is not supported in unions", )), } } fn encode_inner_struct( input: &DeriveInput, data: &DataStruct, ) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let ident_name = &input.ident; let import = get_encoding_crate(input); let recurse = match data.fields { Fields::Named(ref fields) => fields .named .iter() .map(|f| { let name = &f.ident; quote_spanned! { f.span() => len += self.#name.strict_encode(&mut e)?; } }) .collect(), Fields::Unnamed(ref fields) => fields .unnamed .iter() .enumerate() .map(|(i, f)| { let index = Index::from(i); quote_spanned! { f.span() => len += self.#index.strict_encode(&mut e)?; } }) .collect(), Fields::Unit => { vec![] } }; let inner = match recurse.len() { 0 => quote! { Ok(0) }, _ => quote! { let mut len = 0; #( #recurse )* Ok(len) }, }; Ok(quote! 
{ #[allow(unused_qualifications)] impl #impl_generics #import::StrictEncode for #ident_name #ty_generics #where_clause { #[inline] fn strict_encode<E: ::std::io::Write>(&self, mut e: E) -> Result<usize, #import::Error> { use #import::StrictEncode; #inner } } }) } fn decode_inner_struct( input: &DeriveInput, data: &DataStruct, ) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let ident_name = &input.ident; let import = get_encoding_crate(input); let inner = match data.fields { Fields::Named(ref fields) => { let recurse: Vec<TokenStream2> = fields .named .iter() .map(|f| { let name = &f.ident; quote_spanned! { f.span() => #name: #import::StrictDecode::strict_decode(&mut d)?, } }) .collect(); quote! { Self { #( #recurse )* } } } Fields::Unnamed(ref fields) => { let recurse: Vec<TokenStream2> = fields .unnamed .iter() .map(|f| { quote_spanned! { f.span() => #import::StrictDecode::strict_decode(&mut d)?, } }) .collect(); quote! { Self ( #( #recurse )* ) } } Fields::Unit => { quote! { Self() } } }; Ok(quote! { #[allow(unused_qualifications)] impl #impl_generics #import::StrictDecode for #ident_name #ty_generics #where_clause { #[inline] fn strict_decode<D: ::std::io::Read>(mut d: D) -> Result<Self, #import::Error> { use #import::StrictDecode; Ok(#inner) } } }) } fn encode_inner_enum( input: &DeriveInput, data: &DataEnum, ) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let ident_name = &input.ident; let import = get_encoding_crate(input); let mut inner: Vec<TokenStream2> = none!(); for (idx, variant) in data.variants.iter().enumerate() { let idx = idx as u8; let code = match variant.fields { Fields::Named(ref fields) => { let ident = &variant.ident; let f = fields .named .iter() .map(|f| { f.ident.as_ref().expect("named fields are always named") }) .collect::<Vec<_>>(); quote_spanned! 
{ fields.span() => Self::#ident { #( #f ),* } => { len += #idx.strict_encode(&mut e)?; #( len += #f.strict_encode(&mut e)?; )* } } } Fields::Unnamed(ref fields) => { let ident = &variant.ident; let f = fields .unnamed .iter() .enumerate() .map(|(i, _)| { Ident::new(&format!("_{}", i), variant.span()) }) .collect::<Vec<_>>(); quote_spanned! { fields.span() => Self::#ident ( #( #f ),* ) => { len += #idx.strict_encode(&mut e)?; #( len += #f.strict_encode(&mut e)?; )* } } } Fields::Unit => { let ident = &variant.ident; quote_spanned! { variant.span() => Self::#ident => { len += #idx.strict_encode(&mut e)?; } } } }; inner.push(code); } let inner = match inner.len() { 0 => quote! { Ok(0) }, _ => quote! { let mut len = 0; match self { #( #inner )* } Ok(len) }, }; Ok(quote! { #[allow(unused_qualifications)] impl #impl_generics #import::StrictEncode for #ident_name #ty_generics #where_clause { #[inline] fn strict_encode<E: ::std::io::Write>(&self, mut e: E) -> Result<usize, #import::Error> { use #import::StrictEncode; #inner } } }) } fn decode_inner_enum( input: &DeriveInput, data: &DataEnum, ) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let ident_name = &input.ident; let import = get_encoding_crate(input); let mut inner: Vec<TokenStream2> = none!(); for (idx, variant) in data.variants.iter().enumerate() { let idx = idx as u8; let code = match variant.fields { Fields::Named(ref fields) => { let ident = &variant.ident; let f = fields .named .iter() .map(|f| { f.ident.as_ref().expect("named fields are always named") }) .collect::<Vec<_>>(); quote_spanned! { fields.span() => #idx => { Self::#ident { #( #f: StrictDecode::strict_decode(&mut d)?, )* } } } } Fields::Unnamed(ref fields) => { let ident = &variant.ident; let f = fields .unnamed .iter() .enumerate() .map(|(i, _)| Index::from(i)) .collect::<Vec<_>>(); quote_spanned! 
{ fields.span() => #idx => { Self::#ident { #( #f: StrictDecode::strict_decode(&mut d)?, )* } } } } Fields::Unit => { let ident = &variant.ident; quote_spanned! { variant.span() => #idx => { Self::#ident } } } }; inner.push(code); } let inner = match inner.len() { 0 => quote! { Ok(0) }, _ => quote! { match u8::strict_decode(&mut d)? { #( #inner )* other => Err(#import::Error::EnumValueNotKnown(stringify!(#ident_name).to_owned(), other))? } }, }; Ok(quote! { #[allow(unused_qualifications)] impl #impl_generics #import::StrictDecode for #ident_name #ty_generics #where_clause { #[inline] fn strict_decode<D: ::std::io::Read>(mut d: D) -> Result<Self, #import::Error> { use #import::StrictDecode; Ok(#inner) } } }) }
use proc_macro2::TokenStream as TokenStream2; use syn::spanned::Spanned; use syn::{ Data, DataEnum, DataStruct, DeriveInput, Error, Fields, Ident, Index, Result, }; use crate::util::get_encoding_crate; pub(crate) fn encode_inner(input: DeriveInput) -> Result<TokenStream2> { match input.data { Data::Struct(ref data) => encode_inner_struct(&input, data), Data::Enum(ref data) => encode_inner_enum(&input, data), Data::Union(_) => Err(Error::new_spanned( &input, "Deriving StrictEncode is not supported in unions", )), } } pub(crate) fn decode_inner(input: DeriveInput) -> Result<TokenStream2> { match input.data { Data::Struct(ref data) => decode_inner_struct(&input, data), Data::Enum(ref data) => decode_inner_enum(&input, data), Data::Union(_) => Err(Error::new_spanned( &input, "Deriving StrictDecode is not supported in unions", )), } } fn encode_inner_struct( input: &DeriveInput, data: &DataStruct, ) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let ident_name = &input.ident; let import = get_encoding_crate(input); let recurse = match data.fields { Fields::Named(ref fields) => fields .named .iter() .map(|f| { let name = &f.ident; quote_spanned! { f.span() => len += self.#name.strict_encode(&mut e)?; } }) .collect(), Fields::Unnamed(ref fields) => fields .unnamed .iter() .enumerate() .map(|(i, f)| { let index = Index::from(i); quote_spanned! { f.span() => len += self.#index.strict_encode(&mut e)?; } }) .collect(), Fields::Unit => { vec![] } }; let inner = match recurse.len() { 0 => quote! { Ok(0) }, _ => quote! { let mut len = 0; #( #recurse )* Ok(len) }, }; Ok(quote! 
{ #[allow(unused_qualifications)] impl #impl_generics #import::StrictEncode for #ident_name #ty_generics #where_clause { #[inline] fn strict_encode<E: ::std::io::Write>(&self, mut e: E) -> Result<usize, #import::Error> { use #import::StrictEncode; #inner } } }) } fn decode_inner_struct( input: &DeriveInput, data: &DataStruct, ) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let ident_name = &input.ident; let import = get_encoding_crate(input); let inner = match data.fields { Fields::Named(ref fields) => { let recurse: Vec<TokenStream2> = fields .named .iter() .map(|f| { let name = &f.ident; quote_spanned! { f.span() => #name: #import::StrictDecode::strict_decode(&mut d)?, } }) .collect(); quote! { Self { #( #recurse )* } } } Fields::Unnamed(ref fields) => { let recurse: Vec<TokenStream2> = fields .unnamed .iter() .map(|f| { quote_spanned! { f.span() => #import::StrictDecode::strict_decode(&mut d)?, } }) .collect(); quote! { Self ( #( #recurse )* ) } } Fields::Unit => { quote! { Self() } } }; Ok(quote! { #[allow(unused_qualifications)] impl #impl_
fn encode_inner_enum( input: &DeriveInput, data: &DataEnum, ) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let ident_name = &input.ident; let import = get_encoding_crate(input); let mut inner: Vec<TokenStream2> = none!(); for (idx, variant) in data.variants.iter().enumerate() { let idx = idx as u8; let code = match variant.fields { Fields::Named(ref fields) => { let ident = &variant.ident; let f = fields .named .iter() .map(|f| { f.ident.as_ref().expect("named fields are always named") }) .collect::<Vec<_>>(); quote_spanned! { fields.span() => Self::#ident { #( #f ),* } => { len += #idx.strict_encode(&mut e)?; #( len += #f.strict_encode(&mut e)?; )* } } } Fields::Unnamed(ref fields) => { let ident = &variant.ident; let f = fields .unnamed .iter() .enumerate() .map(|(i, _)| { Ident::new(&format!("_{}", i), variant.span()) }) .collect::<Vec<_>>(); quote_spanned! { fields.span() => Self::#ident ( #( #f ),* ) => { len += #idx.strict_encode(&mut e)?; #( len += #f.strict_encode(&mut e)?; )* } } } Fields::Unit => { let ident = &variant.ident; quote_spanned! { variant.span() => Self::#ident => { len += #idx.strict_encode(&mut e)?; } } } }; inner.push(code); } let inner = match inner.len() { 0 => quote! { Ok(0) }, _ => quote! { let mut len = 0; match self { #( #inner )* } Ok(len) }, }; Ok(quote! 
{ #[allow(unused_qualifications)] impl #impl_generics #import::StrictEncode for #ident_name #ty_generics #where_clause { #[inline] fn strict_encode<E: ::std::io::Write>(&self, mut e: E) -> Result<usize, #import::Error> { use #import::StrictEncode; #inner } } }) } fn decode_inner_enum( input: &DeriveInput, data: &DataEnum, ) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let ident_name = &input.ident; let import = get_encoding_crate(input); let mut inner: Vec<TokenStream2> = none!(); for (idx, variant) in data.variants.iter().enumerate() { let idx = idx as u8; let code = match variant.fields { Fields::Named(ref fields) => { let ident = &variant.ident; let f = fields .named .iter() .map(|f| { f.ident.as_ref().expect("named fields are always named") }) .collect::<Vec<_>>(); quote_spanned! { fields.span() => #idx => { Self::#ident { #( #f: StrictDecode::strict_decode(&mut d)?, )* } } } } Fields::Unnamed(ref fields) => { let ident = &variant.ident; let f = fields .unnamed .iter() .enumerate() .map(|(i, _)| Index::from(i)) .collect::<Vec<_>>(); quote_spanned! { fields.span() => #idx => { Self::#ident { #( #f: StrictDecode::strict_decode(&mut d)?, )* } } } } Fields::Unit => { let ident = &variant.ident; quote_spanned! { variant.span() => #idx => { Self::#ident } } } }; inner.push(code); } let inner = match inner.len() { 0 => quote! { Ok(0) }, _ => quote! { match u8::strict_decode(&mut d)? { #( #inner )* other => Err(#import::Error::EnumValueNotKnown(stringify!(#ident_name).to_owned(), other))? } }, }; Ok(quote! { #[allow(unused_qualifications)] impl #impl_generics #import::StrictDecode for #ident_name #ty_generics #where_clause { #[inline] fn strict_decode<D: ::std::io::Read>(mut d: D) -> Result<Self, #import::Error> { use #import::StrictDecode; Ok(#inner) } } }) }
generics #import::StrictDecode for #ident_name #ty_generics #where_clause { #[inline] fn strict_decode<D: ::std::io::Read>(mut d: D) -> Result<Self, #import::Error> { use #import::StrictDecode; Ok(#inner) } } }) }
function_block-function_prefixed
[ { "content": "/// Convenience method for strict encoding of data structures implementing\n\n/// [StrictEncode] into a byte vector.\n\npub fn strict_serialize<T>(data: &T) -> Result<Vec<u8>, Error>\n\nwhere\n\n T: StrictEncode,\n\n{\n\n let mut encoder = io::Cursor::new(vec![]);\n\n data.strict_encode(&...
Rust
rustbasic/src/hello_world/display.rs
rustshby/rustloveu
96af1750a2e7670e605aa2bf69b963fb8e71f97c
use std::fmt; #[allow(dead_code)] struct DisplayPrintable(i32); impl fmt::Display for DisplayPrintable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } #[allow(dead_code)] #[derive(Debug)] struct MinMax(i64, i64); impl fmt::Display for MinMax { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "({},{})", self.0, self.1) } } #[allow(dead_code)] #[derive(Debug)] struct Point2D { x: f64, y: f64, } impl fmt::Display for Point2D { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "x: {}, y: {}", self.x, self.y) } } #[allow(dead_code)] #[derive(Debug)] struct List(Vec<i32>); impl fmt::Display for List { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let vec = &self.0; write!(f, "[")?; for (count, v) in vec.iter().enumerate() { if count != 0 { write!(f, ", ")?; } write!(f, "{}", v)?; } write!(f, "]") } } #[allow(dead_code)] #[derive(Debug)] struct City { name: &'static str, latitude: f32, longitude: f32, } impl fmt::Display for City { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let lat_c = if self.latitude >= 0.0 { 'N' } else { 'S' }; let lon_c = if self.longitude >= 0.0 { 'E' } else { 'W' }; write!( f, "{name}: {lat:.3}°{latc} {lon:.3}°{lonc}", name = self.name, lat = self.latitude.abs(), latc = lat_c, lon = self.longitude.abs(), lonc = lon_c ) } } #[allow(dead_code)] #[derive(Debug)] struct Color { red: u8, green: u8, blue: u8, } impl fmt::Display for Color { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "RGB ({red}, {green}, {blue}) {rgb}", red = self.red, green = self.green, blue = self.blue, rgb = format!("0x{:02X}{:02X}{:02X}", self.red, self.green, self.blue) ) } } #[allow(dead_code)] pub fn display_show() { let minmax = MinMax(0, 14); println!("Compare structures:"); println!("Display: {}", minmax); println!("Debug: {:?}", minmax); let point = Point2D { x: 3.3, y: 7.2 }; println!("Compare points:"); println!("Display: {}", point); println!("Debug: {:?}", 
point); println!("Debug Beauti: {:#?}", point); let v = List(vec![1, 2, 3]); println!("Compare List:"); println!("Display: {}", v); println!("Debug: {:?}", v); println!("Compare cities:"); for city in [ City { name: "Dublin", latitude: 53.347778, longitude: -6.259722 }, City { name: "Oslo", latitude: 59.95, longitude: 10.75 }, City { name: "Vancouver", latitude: 49.25, longitude: -123.1 }, ].iter() { println!("Display: {}", *city); println!("Debug: {:?}", *city); } println!("Compare colors:"); for color in [ Color { red: 128, green: 255, blue: 90 }, Color { red: 0, green: 3, blue: 254 }, Color { red: 0, green: 0, blue: 0 }, ].iter() { println!("Display: {}", *color); println!("Debug: {:?}", *color); } }
use std::fmt; #[allow(dead_code)] struct DisplayPrintable(i32); impl fmt::Display for DisplayPrintable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } #[allow(dead_code)] #[derive(Debug)] struct MinMax(i64, i64); impl fmt::Display for MinMax { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "({},{})", self.0, self.1) } } #[allow(dead_code)] #[derive(Debug)] struct Point2D { x: f64, y: f64, } impl fmt::Display for Point2D { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "x: {}, y: {}", self.x, self.y) } } #[allow(dead_code)] #[derive(Debug)] struct List(Vec<i32>); impl fmt::Display for List { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let vec = &self.0; write!(f, "[")?; for (count, v) in vec.iter().enumerate() { if count != 0 { write!(f, ", ")?; } write!(f, "{}", v)?; } write!(f, "]") } } #[allow(dead_code)] #[derive(Debug)] struct City { name: &'static str, latitude: f32, longitude: f32, } impl fmt::Display for City { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let lat_c = if self.latitude >= 0.0 { 'N' } else { 'S' }; let lon_c = if self.longitude >= 0.0 { 'E' } else { 'W' }; write!( f, "{name}: {lat:.3}°{latc} {lon:.3}°{lonc}", name = self.name, lat = self.latitude.abs(), latc = lat_c, lon = self.longitude.abs(), lonc = lon_c ) } } #[allow(dead_code)] #[derive(Debug)] struct Color { red: u8, green: u8, blue: u8, } impl fmt::Display for Color { fn fmt(&self, f: &mu
ed, self.green, self.blue) ) } } #[allow(dead_code)] pub fn display_show() { let minmax = MinMax(0, 14); println!("Compare structures:"); println!("Display: {}", minmax); println!("Debug: {:?}", minmax); let point = Point2D { x: 3.3, y: 7.2 }; println!("Compare points:"); println!("Display: {}", point); println!("Debug: {:?}", point); println!("Debug Beauti: {:#?}", point); let v = List(vec![1, 2, 3]); println!("Compare List:"); println!("Display: {}", v); println!("Debug: {:?}", v); println!("Compare cities:"); for city in [ City { name: "Dublin", latitude: 53.347778, longitude: -6.259722 }, City { name: "Oslo", latitude: 59.95, longitude: 10.75 }, City { name: "Vancouver", latitude: 49.25, longitude: -123.1 }, ].iter() { println!("Display: {}", *city); println!("Debug: {:?}", *city); } println!("Compare colors:"); for color in [ Color { red: 128, green: 255, blue: 90 }, Color { red: 0, green: 3, blue: 254 }, Color { red: 0, green: 0, blue: 0 }, ].iter() { println!("Display: {}", *color); println!("Debug: {:?}", *color); } }
t fmt::Formatter) -> fmt::Result { write!( f, "RGB ({red}, {green}, {blue}) {rgb}", red = self.red, green = self.green, blue = self.blue, rgb = format!("0x{:02X}{:02X}{:02X}", self.r
function_block-random_span
[]
Rust
src/resolver.rs
htynkn/wkavu
9eae66cd8b2ddcb7f719b03675912856b28740ce
use std::time::Duration; use anyhow::Result; use async_trait::async_trait; use headless_chrome::{Browser, Element}; use log::info; use magnet_url::Magnet; use rbatis::crud::CRUD; use regex::Regex; use scraper::{Html, Selector}; use serde::Deserialize; use serde::Serialize; use thiserror::Error; use crate::global; use crate::model::{Tv, TvSeed}; #[derive(Debug)] pub struct Data { pub(crate) name: String, pub(crate) url: String, pub(crate) ep: i64, } impl Data { fn new(name: &str, url: &str) -> Self { Data { name: name.to_string(), url: url.to_string(), ep: -1, } } } pub struct Resolver {} impl Resolver { pub fn new() -> Self { Resolver {} } pub async fn fetch_by_tv(&self, tv_id: i64) { let wrapper = global::RB.new_wrapper().eq("id", tv_id); let tv: Option<Tv> = global::RB.fetch_by_wrapper(wrapper).await.unwrap(); if tv.is_some() { let tv = tv.unwrap(); let resolver = DefaultResolver::new(); let data = resolver.fetch(&tv).await.unwrap(); let data = resolver.normalize(&tv, data).await.unwrap(); info!("find {:?} for tv:{:?}", data, tv); if data.len() > 0 { let wrapper = global::RB.new_wrapper().eq("tv_id", tv.id.unwrap()); let delete_count = global::RB .remove_by_wrapper::<TvSeed>(wrapper) .await .unwrap(); info!("delete seed for tv count:{}", delete_count); let tv_id = tv.id.unwrap(); for d in data { let seed = TvSeed { id: None, tv_id: Some(tv_id), ep: Some(d.ep), url: Some(d.url), name: Some(d.name), }; global::RB.save(&seed, &[]).await; } } } else { log::error!("found find movie with id:{}", tv_id); } } } fn extra_ep(name: &str) -> Result<i64> { let re = Regex::new(r"第(\d+)集").unwrap(); let option = re.captures(name); if option.is_some() { let captures = option.unwrap(); if captures.len() > 0 { let result: i64 = captures[1].parse().unwrap(); return Ok(result); } } Err(ResolveError::EpParseFailure(name.to_string()).into()) } #[async_trait] trait CommonResolver { fn new() -> Self; async fn fetch(&self, tv: &Tv) -> Result<Vec<Data>>; async fn normalize(&self, tv: &Tv, 
datas: Vec<Data>) -> Result<Vec<Data>>; } #[derive(Error, Debug)] pub enum ResolveError { #[error("Can't parse ep for name: {0}")] EpParseFailure(String), } #[derive(Debug, Serialize, Deserialize)] pub struct ResolverDefine { id: String, name: String, domains: Vec<String>, timeout: u64, search: ResolverSearchDefine, } #[derive(Debug, Serialize, Deserialize)] pub struct ResolverRowSelectorDefine { attr: String, } #[derive(Debug, Serialize, Deserialize)] pub struct ResolverRowsDefine { selector: String, title: ResolverRowSelectorDefine, url: ResolverRowSelectorDefine, } #[derive(Debug, Serialize, Deserialize)] pub struct ResolverSearchDefine { wait: String, rows: ResolverRowsDefine, } #[derive(RustEmbed)] #[folder = "define/"] struct Define; pub struct DefaultResolver { pub defines: Vec<ResolverDefine>, } #[async_trait] impl CommonResolver for DefaultResolver { fn new() -> Self { let mut defines = vec![]; for file in Define::iter() { let yaml = Define::get(file.as_ref()).unwrap(); let yaml_content = std::str::from_utf8(yaml.data.as_ref()); let define: ResolverDefine = serde_yaml::from_str(yaml_content.unwrap()).unwrap(); info!("load config for {}", define.id); defines.push(define); } DefaultResolver { defines } } async fn fetch(&self, tv: &Tv) -> Result<Vec<Data>> { let url = tv.url.as_ref().unwrap(); let selected_define = self.defines.iter().find(|d| { for domain in &d.domains { if url.starts_with(domain) { return true; } } return false; }); let selected_define = selected_define.unwrap(); info!("starting fetch..."); let mut data = vec![]; let browser = Browser::default().unwrap(); let tab = browser.wait_for_initial_tab().unwrap(); info!("browser tab is ready"); tab.navigate_to(&url).unwrap(); tab.wait_for_element_with_custom_timeout( &selected_define.search.wait, Duration::from_secs(selected_define.timeout), ) .unwrap(); info!("waiting for special button"); let root_div: Element = tab.wait_for_element("body").unwrap(); let html = root_div .call_js_fn("function() { 
return this.innerHTML;}", true) .unwrap() .value .unwrap(); let document = Html::parse_document(html.as_str().unwrap()); info!("get doc object"); let selector = Selector::parse(&selected_define.search.rows.selector).unwrap(); let list = document.select(&selector); for item in list { let title = item.value().attr(&selected_define.search.rows.title.attr); let url = item.value().attr(&selected_define.search.rows.url.attr); data.push(Data::new(title.unwrap(), url.unwrap())); } Ok(data) } async fn normalize(&self, tv: &Tv, datas: Vec<Data>) -> Result<Vec<Data>> { Ok(datas .into_iter() .map(|d| { let clean_up_name = str::replace( &str::replace(&d.name, "HD1080p", "[HDTV-1080p]"), ".mp4", "", ); let mut magneturl = Magnet::new(&d.url).unwrap(); magneturl.tr.clear(); magneturl.dn = None; let ep = extra_ep(&clean_up_name).expect("can't extra ep"); let clean_up_name = if ep > 0 { format!( "{} S01E{} - {} - [chinese] - {} - Domp4", tv.tvname.as_ref().unwrap(), ep, ep, &clean_up_name ) } else { clean_up_name }; Data { ep, name: clean_up_name, url: magneturl.to_string(), } }) .collect()) } } #[cfg(test)] mod tests { use std::fs; use super::*; #[test] fn test_parse_yml() { let paths = fs::read_dir("./define").unwrap(); for path in paths { let result = std::fs::read_to_string(path.unwrap().path()).unwrap(); let define: ResolverDefine = serde_yaml::from_str(&result).unwrap(); println!("Define:{:?}", define); } } #[test] fn test_load() { let _resolver = DefaultResolver::new(); } }
use std::time::Duration; use anyhow::Result; use async_trait::async_trait; use headless_chrome::{Browser, Element}; use log::info; use magnet_url::Magnet; use rbatis::crud::CRUD; use regex::Regex; use scraper::{Html, Selector}; use serde::Deserialize; use serde::Serialize; use thiserror::Error; use crate::global; use crate::model::{Tv, TvSeed}; #[derive(Debug)] pub struct Data { pub(crate) name: String, pub(crate) url: String, pub(crate) ep: i64, } impl Data { fn new(name: &str, url: &str) -> Self { Data { name: name.to_string(), url: url.to_string(), ep: -1, } } } pub struct Resolver {} impl Resolver { pub fn new() -> Self { Resolver {} } pub async fn fetch_by_tv(&self, tv_id: i64) { let wrapper = global::RB.new_wrapper().eq("id", tv_id); let tv: Option<Tv> = global::RB.fetch_by_wrapper(wrapper).await.unwrap(); if tv.is_some() { let tv = tv.unwrap(); let resolver = DefaultResolver::new(); let data = resolver.fetch(&tv).await.unwrap(); let data = resolver.normalize(&tv, data).await.unwrap(); info!("find {:?} for tv:{:?}", data, tv); if data.len() > 0 { let wrapper = global::RB.new_wrapper().eq("tv_id", tv.id.unwrap()); let delete_count = global::RB .remove_by_wrapper::<TvSeed>(wrapper) .await .unwrap(); info!("delete seed for tv count:{}", delete_count); let tv_id = tv.id.unwrap(); for d in data { let seed = TvSeed { id: None, tv_id: Some(tv_id), ep: Some(d.ep), url: Some(d.url), name: Some(d.name), }; global::RB.save(&seed, &[]).await; } } } else { log::error!("found find movie with id:{}", tv_id); } } } fn extra_ep(name: &str) -> Result<i64> { let re = Regex::new(r"第(\d+)集").unwrap(); let option = re.captures(name); if option.is_some() { let captures = option.unwrap(); if captures.len() > 0 { let result: i64 = captures[1].parse().unwrap(); return Ok(result); } } Err(ResolveError::EpParseFailure(name.to_string()).into()) } #[async_trait] trait CommonResolver { fn new() -> Self; async fn fetch(&self, tv: &Tv) -> Result<Vec<Data>>; async fn normalize(&self, tv: &Tv, 
datas: Vec<Data>) -> Result<Vec<Data>>; } #[derive(Error, Debug)] pub enum ResolveError { #[error("Can't parse ep for name: {0}")] EpParseFailure(String), } #[derive(Debug, Serialize, Deserialize)] pub struct ResolverDefine { id: String, name: String, domains: Vec<String>, timeout: u64, search: ResolverSearchDefine, } #[derive(Debug, Serialize, Deserialize)] pub struct ResolverRowSelectorDefine { attr: String, } #[derive(Debug, Serialize, Deserialize)] pub struct ResolverRowsDefine { selector: String, title: ResolverRowSelectorDefine, url: ResolverRowSelectorDefine, } #[derive(Debug, Serialize, Deserialize)] pub struct ResolverSearchDefine { wait: String, rows: ResolverRowsDefine, } #[derive(RustEmbed)] #[folder = "define/"] struct Define; pub struct DefaultResolver { pub defines: Vec<ResolverDefine>, } #[async_trait] impl CommonResolver for DefaultResolver { fn new() -> Self { let mut defines = vec![]; for file in Define::iter() { let yaml = Define::get(file.as_ref()).unwrap(); let yaml_content = std::str::from_utf8(yaml.data.as_ref()); let define: ResolverDefine = serde_yaml::from_str(yaml_content.unwrap()).unwrap(); info!("load config for {}", define.id); defines.push(define); } DefaultResolver { defines } } async fn fetch(&self, tv: &Tv) -> Result<Vec<Data>> { let url = tv.url.as_ref().unwrap(); let selected_define = self.defines.iter().find(|d| { for domain in &d.domains { if url.starts_with(domain) { return true; } } return false; }); let selected_define = selected_define.unwrap(); info!("starting fetch..."); let mut data = vec![]; let browser = Browser::default().unwrap(); let tab = browser.wait_for_initial_tab().unwrap(); info!("browser tab is ready"); tab.navigate_to(&url).unwrap(); tab.wait_for_element_with_custom_timeout( &selected_define.search.wait, Duration::from_secs(selected_define.timeout), ) .unwrap(); info!("waiting for special button"); let root_div: Element = tab.wait_for_element("body").unwrap(); let html = root_div .call_js_fn("function() { 
return this.innerHTML;}", true) .unwrap() .value .unwrap(); let document = Html::parse_document(html.as_str().unwrap()); info!("get doc object"); let selector = Selector::parse(&selected_define.search.rows.selector).unwrap(); let list = document.select(&selector); for item in list { let title = item.value().attr(&selected_define.search.rows.title.attr); let url = item.value().attr(&selected_define.search.rows.url.attr); data.push(Data::new(title.unwrap(), url.unwrap())); } Ok(data) } async fn normalize(&self, tv: &Tv, datas: Vec<Data>) -> Result<Vec<Data>> { Ok(datas .into_iter() .map(|d| { let clean_up_name = str::replace( &str::replace(&d.name, "HD1080p", "[HDTV-1080p]"), ".mp4", "", ); let mut magneturl = Magnet::new(&d.url).unwrap(); magneturl.tr.clear(); magneturl.dn = None; let ep = extra_ep(&clean_up_name).expect("can't extra ep"); let clean_up_name = if ep > 0 { format!( "{} S01E{} - {} - [chinese] - {} - Domp4", tv.tvname.as_ref().unwrap(), ep, ep, &clean_up_name ) } else { clean_up_name }; Data { ep, name: clean_up_name, url: magneturl.to_string(), } }) .collect()) } } #[cfg(test)] mod tests { use std::fs; use super::*; #[test] fn test_parse_yml() { let pat
#[test] fn test_load() { let _resolver = DefaultResolver::new(); } }
hs = fs::read_dir("./define").unwrap(); for path in paths { let result = std::fs::read_to_string(path.unwrap().path()).unwrap(); let define: ResolverDefine = serde_yaml::from_str(&result).unwrap(); println!("Define:{:?}", define); } }
function_block-function_prefixed
[ { "content": "fn pub_date() -> String {\n\n let time = Utc::now() - Duration::days(1);\n\n time.to_rfc2822()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_zero() {\n\n let page = page(0, 50);\n\n\n\n assert_eq!(page.page_no, 1);\n\n }\n\n\n...
Rust
src/lib.rs
JAD3N/java-props
c9f844ed0b3aa7cd3b0ff14fcb6646e2b568d63b
#[macro_use] extern crate lazy_static; mod parser; mod iterator; mod utils; use iterator::Iterator; use std::io::{self, BufReader, prelude::*}; use std::collections::HashMap; use std::fs::File; use std::char; #[derive(PartialEq, Debug)] pub enum PropertyType { Property, Key, Value, Whitespace, Comment, LineBreak, EscapedValue, Separator, Raw, } #[derive(Debug)] pub struct PropertyRange { start: usize, end: usize, } #[derive(Debug)] pub enum PropertyData { Range(PropertyRange), Text(String), } #[derive(Debug)] pub struct PropertyValue { data: PropertyData, children: Option<Vec<PropertyValue>>, type_: PropertyType, } #[derive(Debug)] pub struct Properties { contents: String, values: Vec<PropertyValue>, value_map: HashMap<String, usize>, data: HashMap<String, String>, } impl Properties { pub fn new() -> Properties { Properties { contents: String::new(), values: vec![], value_map: HashMap::new(), data: HashMap::new(), } } pub fn from_str(contents: &String) -> Properties { let contents = contents.clone(); let mut iter = Iterator::new(&contents); let mut value_map = HashMap::new(); let values = Self::build_property_values(&mut iter); let data = Self::build_properties(&values, &iter); for (i, value) in values.iter().enumerate() { if value.type_ == PropertyType::Property { let key = Self::build_property_component(match &value.children { Some(children) => &children[0], None => continue, }, &iter); value_map.insert(key, i); } } Properties { contents, values, value_map, data, } } pub fn from_file(file: &File) -> io::Result<Properties> { let mut reader = BufReader::new(file); let mut contents = String::new(); reader.read_to_string(&mut contents)?; Ok(Self::from_str(&contents)) } pub fn save(&self, file: &mut File) -> io::Result<String> { let contents = self.to_string(); file.write(contents.as_bytes())?; Ok(contents) } pub fn get(&self, key: &str) -> Option<&String> { self.data.get(key) } pub fn set(&mut self, key: &str, value: &str) { let escaped_key = utils::escape_key(key); 
let escaped_value = utils::escape_value(&value); let seperator = "="; self.data.insert(String::from(key), String::from(value)); let property_value = if self.value_map.contains_key(key) { let index = *self.value_map.get(key).unwrap(); &mut self.values[index] } else { let last_value = self.values.last(); if last_value.is_some() && !Self::is_newline_value(last_value) { self.values.push(PropertyValue { data: PropertyData::Text(String::from("\n")), children: None, type_: PropertyType::Raw, }); } self.values.push(PropertyValue { data: PropertyData::Text(String::new()), children: None, type_: PropertyType::Raw, }); self.value_map.insert(String::from(key), self.values.len() - 1); self.values.last_mut().unwrap() }; if property_value.type_ == PropertyType::Raw { if let PropertyData::Text(text) = &mut property_value.data { text.clear(); text.push_str(&escaped_key); text.push_str(seperator); text.push_str(&escaped_value); } } else if property_value.type_ == PropertyType::Property { if let Some(children) = &mut property_value.children { children[2].data = PropertyData::Text(escaped_value.clone()); children[2].children = None; children[2].type_ = PropertyType::Raw; } } else { panic!("Unknown property type: {:?}", property_value.type_); } } pub fn unset(&mut self, key: &str) { self.data.remove(key); if let Some(index) = self.value_map.get(key) { self.values.remove(*index); self.value_map.remove(key); } } pub fn parse_file(file: &File) -> io::Result<HashMap<String, String>> { let mut reader = BufReader::new(file); let mut contents = String::new(); reader.read_to_string(&mut contents)?; Ok(Self::parse(&contents)) } pub fn parse(contents: &String) -> HashMap<String, String> { let mut iter = Iterator::new(contents); let entries = Self::build_property_values(&mut iter); Self::build_properties(&entries, &iter) } fn build_property_values(iter: &mut Iterator) -> Vec<PropertyValue> { let mut values = Vec::new(); loop { let chr = match iter.peek() { Some(chr) => chr, None => break, }; if 
chr.is_whitespace() { values.push(parser::read_whitespace(iter)); } else if parser::is_comment_indicator(chr) { values.push(parser::read_comment(iter)); } else { values.push(parser::read_property(iter)); } } values } fn build_property_component(value: &PropertyValue, iter: &Iterator) -> String { let mut component = String::new(); if let PropertyData::Range(range) = &value.data { let mut start = range.start; if value.children.is_some() { for child in value.children.as_ref().unwrap() { let child_range = match &child.data { PropertyData::Range(range) => range, _ => continue, }; component.push_str(&iter.get_range(start, child_range.start)); if let PropertyType::EscapedValue = child.type_ { let chr = iter.get(child_range.start + 1).unwrap(); component.push(match chr { 't' => '\t', 'r' => '\r', 'n' => '\n', 'f' => '\x0c', 'u' => { let num = u32::from_str_radix(&iter.get_range( child_range.start + 2, child_range.start + 6, ), 16).unwrap_or(0); char::from_u32(num).unwrap() }, _ => chr, }); } start = child_range.end; } } component.push_str(&iter.get_range(start, range.end)); } component } fn build_properties(values: &Vec<PropertyValue>, iter: &Iterator) -> HashMap<String, String> { let mut data = HashMap::new(); for value in values { if let PropertyType::Property = value.type_ { let children = value.children.as_ref().unwrap(); let key = Self::build_property_component(&children[0], iter); let value = Self::build_property_component(&children[2], iter); data.insert(key, value); } } data } pub fn is_newline_value(value: Option<&PropertyValue>) -> bool { if value.is_some() { let value = value.unwrap(); if value.type_ == PropertyType::LineBreak { return true; } else if value.type_ == PropertyType::Raw { return match &value.data { PropertyData::Text(s) => s.trim().is_empty() && s.contains("\n"), _ => false, } } } false } } impl ToString for Properties { fn to_string(&self) -> String { let mut buf = String::new(); let mut values = vec![]; for value in self.values.iter().rev() { 
values.push(value); } while !values.is_empty() { let value = values.pop().unwrap(); match value.type_ { PropertyType::Raw => match &value.data { PropertyData::Text(text) => buf.push_str(text), _ => panic!("Invalid property data for raw property type!"), }, PropertyType::Property => { for child_value in value.children.as_ref().unwrap().iter().rev() { values.push(child_value); } }, _ => if let PropertyData::Range(range) = &value.data { buf.push_str(&self.contents[range.start..range.end]); }, } } buf } } #[cfg(test)] mod test { use super::Properties; use std::fs::File; use std::io; fn get_test_props() -> io::Result<Properties> { let file = File::open("test.properties")?; Properties::from_file(&file) } #[test] fn reads_file() { let props = get_test_props(); assert!(props.is_ok()); } #[test] fn simple_parse_check() { let props = get_test_props().unwrap(); assert_eq!( props.get("language"), Some(&String::from("English")), ); } #[test] fn complex_parse_check() { let props = get_test_props().unwrap(); assert_eq!( props.get("key with spaces"), Some(&String::from("This is the value that could be looked up with the key \"key with spaces\".")), ); } #[test] fn multiline_parse_check() { let props = get_test_props().unwrap(); assert_eq!( props.get("message"), Some(&String::from("Welcome to Wikipedia!")), ); } #[test] fn empty_output_check() { let props_str = String::new(); assert_eq!( Properties::from_str(&props_str).to_string(), props_str, ) } #[test] fn basic_output_check() { let props_str = String::from("simple\\ key = A fun value!\\nWith multiple lines!"); assert_eq!( Properties::from_str(&props_str).to_string(), props_str, ) } }
#[macro_use] extern crate lazy_static; mod parser; mod iterator; mod utils; use iterator::Iterator; use std::io::{self, BufReader, prelude::*}; use std::collections::HashMap; use std::fs::File; use std::char; #[derive(PartialEq, Debug)] pub enum PropertyType { Property, Key, Value, Whitespace, Comment, LineBreak, EscapedValue, Separator, Raw, } #[derive(Debug)] pub struct PropertyRange { start: usize, end: usize, } #[derive(Debug)] pub enum PropertyData { Range(PropertyRange), Text(String), } #[derive(Debug)] pub struct PropertyValue { data: PropertyData, children: Option<Vec<PropertyValue>>, type_: PropertyType, } #[derive(Debug)] pub struct Properties { contents: String, values: Vec<PropertyValue>, value_map: HashMap<String, usize>, data: HashMap<String, String>, } impl Properties { pub fn new() -> Properties { Properties { contents: String::new(), values: vec![], value_map: HashMap::new(), data: HashMap::new(), } } pub fn from_str(contents: &String) -> Properties { let contents = contents.clone(); let mut iter = Iterator::new(&contents); let mut value_map = HashMap::new(); let values = Self::build_property_values(&mut iter); let data = Self::build_properties(&values, &iter); for (i, value) in values.iter().enumerate() { if value.type_ == PropertyType::Property { let key = Self::build_property_component(match &value.children { Some(children) => &children[0], None => continue, }, &iter); value_map.insert(key, i); } } Properties { contents, values, value_map, data, } } pub fn from_file(file: &File) -> io::Result<Properties> { let mut reader = BufReader::new(file); let mut contents = String::new(); reader.read_to_string(&mut contents)?; Ok(Self::from_str(&contents)) } pub fn save(&self, file: &mut File) -> io::Result<String> { let contents = self.to_string(); file.write(contents.as_bytes())?; Ok(contents) } pub fn get(&self, key: &str) -> Option<&String> { self.data.get(key) } pub fn set(&mut self, key: &str, value: &str) { let escaped_key = utils::escape_key(key); 
let escaped_value = utils::escape_value(&value); let seperator = "="; self.data.insert(String::from(key), String::from(value)); let property_value = if self.value_map.contains_key(key) { let index = *self.value_map.get(key).unwrap(); &mut self.values[index] } else { let last_value = self.values.last(); if last_value.is_some() && !Self::is_newline_value(last_value) { self.values.push(PropertyValue { data: PropertyData::Text(String::from("\n")), children: None, type_: PropertyType::Raw, }); } self.values.push(PropertyValue { data: PropertyData::Text(String::new()), children: None, type_: PropertyType::Raw, }); self.value_map.insert(String::from(key), self.values.len() - 1); self.values.last_mut().unwrap() }; if property_value.type_ == PropertyType::Raw { if let PropertyData::Text(text) = &mut property_value.data { text.clear(); text.push_str(&escaped_key); text.push_str(seperator); text.push_str(&escaped_value); } } else if property_value.type_ == PropertyType::Property { if let Some(children) = &mut property_value.children { children[2].data = PropertyData::Text(escaped_value.clone()); children[2].children = None; children[2].type_ = PropertyType::Raw; } } else { panic!("Unknown property type: {:?}", property_value.type_); } } pub fn unset(&mut self, key: &str) { self.data.remove(key); if let Some(index) = self.value_map.get(key) { self.values.remove(*index); self.value_map.remove(key); } } pub fn parse_file(file: &File) -> io::Result<HashMap<String, String>> { let mut reader = BufReader::new(file); let mut contents = String::new(); reader.read_to_string(&mut contents)?; Ok(Self::parse(&contents)) } pub fn parse(contents: &String) -> HashMap<String, String> { let mut iter = Iterator::new(contents); let entries = Self::build_property_values(&mut iter); Self::build_properties(&entries, &iter) } fn build_property_values(iter: &mut Iterator) -> Vec<PropertyValue> { let mut values = Vec::new(); loop { let chr = match iter.peek() { Some(chr) => chr, None => break, }; if 
chr.is_whitespace() { values.push(parser::read_whitespace(iter)); } else if parser::is_comment_indicator(chr) { values.push(parser::read_comment(iter)); } else { values.push(parser::read_property(iter)); } } values } fn build_property_component(value: &PropertyValue, iter: &Iterator) -> String { let mut component = String::new(); if let PropertyData::Range(range) = &value.data { let mut start = range.start; if value.children.is_some() { for child in value.children.as_ref().unwrap() { let child_range = match &child.data { PropertyData::Range(range) => range, _ => continue, }; component.push_str(&iter.get_range(start, child_range.start)); if let PropertyType::EscapedValue = child.type_ { let chr = iter.get(child_range.start + 1).unwrap(); component.push(match chr { 't' => '\t', 'r' => '\r', 'n' => '\n', 'f' => '\x0c', 'u' => { let num = u32::from_str_radix(&iter.get_range( child_range.start + 2, child_range.start + 6, ), 16).unwrap_or(0); char::from_u32(num).unwrap() }, _ => chr, }); } start = child_range.end; } } component.push_str(&iter.get_range(start, range.end)); } component } fn build_properties(values: &Vec<PropertyValue>, iter: &Iterator) -> HashMap<String, String> { let mut data = HashMap::new(); for value in values { if let PropertyType::Property = value.type_ { let children = value.children.as_ref().unwrap(); let key = Self::build_property_component(&children[0], iter); let value = Self::build_property_component(&children[2], iter); data.insert(key, value); } } data } pub fn is_newline_value(value: Option<&PropertyValue>) -> bool { if value.is_some() { let value = value.unwrap(); if value.type_ == PropertyType::LineBreak { return true; } else if value.type_ == PropertyType::Raw { return match &value.data { PropertyData::Text(s) => s.trim().is_empty() && s.contains("\n"), _ => false, } } } false } } impl ToString for Properties { fn to_string(&self) -> String { let mut buf = String::new(); let mut values = vec![]; for value in self.values.iter().rev() { 
values.push(value); } while !values.is_empty() { let value = values.pop().unwrap(); match value.type_ { PropertyType::Raw => match &value.data { PropertyData::Text(text) => buf.push_str(text), _ => panic!("Invalid property data for raw property type!"), }, PropertyType::Property => { for child_value in value.children.as_ref().unwrap().iter().rev() { values.push(child_value); } }, _ => if let PropertyData::Range(range) = &value.data { buf.push_str(&self.contents[range.start..range.end]); }, } } buf } } #[cfg(test)] mod test { use super::Properties; use std::fs::File; use std::io; fn get_test_props() -> io::Result<Properties> { let file = File::open("test.properties")?; Properties::from_file(&file) } #[test] fn reads_file() { let props = get_test_props(); assert!(props.is_ok()); } #[test] fn simple_parse_check() { let props = get_test_props().unwrap(); assert_eq!( props.get("language"), Some(&String::from("English")), ); } #[test] fn complex_parse_check() { let props = get_test_props().unwrap(); assert_eq!( props.get("key with spaces"), Some(&String::from("This is the value that could be looked up with the key \"key with spaces\".")), ); } #[test] fn multiline_parse_check() { let props = get_test_props().unwrap(); assert_eq!( props.get("message"), Some(&String::from("Welcome to Wikipedia!")), ); } #[test]
#[test] fn basic_output_check() { let props_str = String::from("simple\\ key = A fun value!\\nWith multiple lines!"); assert_eq!( Properties::from_str(&props_str).to_string(), props_str, ) } }
fn empty_output_check() { let props_str = String::new(); assert_eq!( Properties::from_str(&props_str).to_string(), props_str, ) }
function_block-function_prefix_line
[ { "content": "pub fn starts_line_break(iter: &mut Iterator) -> bool {\n\n iter.peek().unwrap() == '\\\\' && is_eol(iter.peek_x(1).unwrap())\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 0, "score": 160791.78637780502 }, { "content": "pub fn read_whitespace(iter: &mut Iterator) -> Pro...