lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
examples/adc.rs
stm32-rs/stm32f072b-disco
8f9af0195f889c2b326a22f9144cef543d9fce86
#![no_main] #![no_std] #[allow(unused)] use panic_halt; use stm32f072b_disco as board; use board::hal::{adc, prelude::*, serial, stm32}; use cortex_m::{interrupt::Mutex, peripheral::syst::SystClkSource::Core, peripheral::Peripherals}; use cortex_m_rt::{entry, exception}; use core::{cell::RefCell, fmt::Write, ptr}; struct Shared { adc: adc::Adc, temp: adc::VTemp, reference: adc::VRef, tx: serial::Tx<stm32::USART2>, } static SHARED: Mutex<RefCell<Option<Shared>>> = Mutex::new(RefCell::new(None)); fn calculate_temperature(reading: u16) -> i16 { const VDD_CALIB: i32 = 330; const VDD_APPLI: i32 = 300; let cal30 = i32::from(unsafe { ptr::read(0x1FFF_F7B8 as *const u16) }); let cal110 = i32::from(unsafe { ptr::read(0x1FFF_F7C2 as *const u16) }); let mut temperature: i32 = ((i32::from(reading) * VDD_APPLI) / VDD_CALIB) - cal30; temperature *= 110 - 30; temperature /= cal110 - cal30; temperature += 30; temperature as i16 } fn calculate_vdda(reading: u16) -> u16 { let vrefint = u32::from(unsafe { ptr::read(0x1FFF_F7BA as *const u16) }); (3250 * vrefint / u32::from(reading)) as u16 } #[entry] fn main() -> ! 
{ if let (Some(mut p), Some(cp)) = (stm32::Peripherals::take(), Peripherals::take()) { cortex_m::interrupt::free(|cs| { let mut rcc = p.RCC.configure().freeze(&mut p.FLASH); let gpioa = p.GPIOA.split(&mut rcc); let mut syst = cp.SYST; syst.set_clock_source(Core); syst.set_reload(8_000_000 - 1); syst.enable_counter(); syst.enable_interrupt(); let tx = gpioa.pa2.into_alternate_af1(cs); let rx = gpioa.pa15.into_alternate_af1(cs); let (mut tx, _) = serial::Serial::usart2(p.USART2, (tx, rx), 115_200.bps(), &mut rcc).split(); let mut adc = adc::Adc::new(p.ADC, &mut rcc); let mut temp = adc::VTemp::new(); let mut reference = adc::VRef::new(); temp.enable(&mut adc); reference.enable(&mut adc); tx.write_str("\n\rThis ADC example will read various values using the ADC and print them out to the serial terminal\r\n").ok(); *SHARED.borrow(cs).borrow_mut() = Some(Shared { adc, temp, reference, tx, }); }); } loop { continue; } } #[exception] fn SysTick() -> ! { use core::ops::DerefMut; cortex_m::interrupt::free(|cs| { if let Some(ref mut shared) = SHARED.borrow(cs).borrow_mut().deref_mut() { let t: Result<u16, _> = shared.adc.read(&mut shared.temp); if let Ok(t) = t { writeln!(shared.tx, "Temperature {}\r", calculate_temperature(t)).ok(); } else { shared.tx.write_str("Error reading temperature").ok(); } let t: Result<u16, _> = shared.adc.read(&mut shared.reference); if let Ok(t) = t { writeln!(shared.tx, "Vdda {}mV\r", calculate_vdda(t)).ok(); } else { shared.tx.write_str("Error reading Vdda").ok(); } } }); }
#![no_main] #![no_std] #[allow(unused)] use panic_halt; use stm32f072b_disco as board; use board::hal::{adc, prelude::*, serial, stm32}; use cortex_m::{interrupt::Mutex, peripheral::syst::SystClkSource::Core, peripheral::Peripherals}; use cortex_m_rt::{entry, exception}; use core::{cell::RefCell, fmt::Write, ptr}; struct Shared { adc: adc::Adc, temp: adc::VTemp, reference: adc::VRef, tx: serial::Tx<stm32::USART2>, } static SHARED: Mutex<RefCell<Option<Shared>>> = Mutex::new(RefCell::new(None)); fn calculate_temperature(reading: u16) -> i16 { const VDD_CALIB: i32 = 330; const VDD_APPLI: i32 = 300; let cal30 = i32::from(unsafe { ptr::read(0x1FFF_F7B8 as *const u16) }); let cal110 = i32::from(unsafe { ptr::read(0x1FFF_F7C2 as *const u16) }); let mut temperature: i32 = ((i32::from(reading) * VDD_APPLI) / VDD_CALIB) - cal30; temperature *= 110 - 30; temperature /= cal110 - cal30; temperature += 30; temperature as i16 } fn calculate_vdda(reading: u16) -> u16 { let vrefint = u32::from(unsafe { ptr::read(0x1FFF_F7BA as *const u16) }); (3250 * vrefint / u32::from(reading)) as u16 } #[entry] fn main() -> ! { if let (Some(mut p), Some(cp)) = (stm32::Peripherals::take(), Peripherals::take()) { cortex_m::interrupt::free(|cs| { let mut rcc = p.RCC.configure().freeze(&mut p.FLASH); let gpioa = p.GPIOA.split(&mut rcc); let mut syst = cp.SYST; syst.set_clock_source(Core); syst.set_reload(8_000_000 - 1); syst.enable_counter(); syst.enable_interrupt(); let tx = gpioa.pa2.into_alternate_af1(cs); let rx = gpioa.pa15.into_alternate_af1(cs); let (mut tx, _) = serial::Serial::usart2(p.USART2, (tx, rx), 115_200.bps(), &mut rcc).split(); let mut adc = adc::Adc::new(p.ADC, &mut rcc); let mut temp = a
ut shared) = SHARED.borrow(cs).borrow_mut().deref_mut() { let t: Result<u16, _> = shared.adc.read(&mut shared.temp); if let Ok(t) = t { writeln!(shared.tx, "Temperature {}\r", calculate_temperature(t)).ok(); } else { shared.tx.write_str("Error reading temperature").ok(); } let t: Result<u16, _> = shared.adc.read(&mut shared.reference); if let Ok(t) = t { writeln!(shared.tx, "Vdda {}mV\r", calculate_vdda(t)).ok(); } else { shared.tx.write_str("Error reading Vdda").ok(); } } }); }
dc::VTemp::new(); let mut reference = adc::VRef::new(); temp.enable(&mut adc); reference.enable(&mut adc); tx.write_str("\n\rThis ADC example will read various values using the ADC and print them out to the serial terminal\r\n").ok(); *SHARED.borrow(cs).borrow_mut() = Some(Shared { adc, temp, reference, tx, }); }); } loop { continue; } } #[exception] fn SysTick() -> ! { use core::ops::DerefMut; cortex_m::interrupt::free(|cs| { if let Some(ref m
random
[ { "content": "#[entry]\n\nfn main() -> ! {\n\n if let Some(mut p) = stm32::Peripherals::take() {\n\n cortex_m::interrupt::free(|cs| {\n\n let mut rcc = p.RCC.configure().sysclk(48.mhz()).freeze(&mut p.FLASH);\n\n let gpioa = p.GPIOA.split(&mut rcc);\n\n\n\n // USART1 a...
Rust
src/lib/input.rs
CoBrooks/hephaestus-rs
56ffb62ecd00113de2501f28938fb7ca388d4714
use std::collections::{ HashSet, HashMap }; use winit::event::{ VirtualKeyCode, ElementState, DeviceEvent, ButtonId, MouseScrollDelta }; pub struct Input { keyboard: HashSet<u32>, keyboard_prev: HashSet<u32>, buttons: HashSet<u32>, buttons_prev: HashSet<u32>, axes: HashMap<String, f32>, mouse_pos: (f32, f32), mouse_delta: (f32, f32), scroll_wheel: f32, window_size: (u32, u32) } impl Input { pub fn new(window_size: (u32, u32)) -> Self { let mut axes = HashMap::new(); axes.insert("horizontal".into(), 0.0); axes.insert("vertical".into(), 0.0); Self { keyboard: HashSet::new(), keyboard_prev: HashSet::new(), buttons: HashSet::new(), buttons_prev: HashSet::new(), axes, mouse_pos: (window_size.0 as f32 / 2.0, window_size.1 as f32 / 2.0), mouse_delta: (0.0, 0.0), scroll_wheel: 0.0, window_size } } pub fn update(&mut self) { self.keyboard_prev = self.keyboard.clone(); self.buttons_prev = self.buttons.clone(); self.mouse_delta = (0.0, 0.0); self.scroll_wheel = 0.0; if self.get_key(VirtualKeyCode::W) || self.get_key(VirtualKeyCode::Up) { *self.axes.get_mut("vertical").unwrap() = 1.0; } else if self.get_key(VirtualKeyCode::S) || self.get_key(VirtualKeyCode::Down) { *self.axes.get_mut("vertical").unwrap() = -1.0; } else { *self.axes.get_mut("vertical").unwrap() = 0.0; } if self.get_key(VirtualKeyCode::D) || self.get_key(VirtualKeyCode::Right) { *self.axes.get_mut("horizontal").unwrap() = 1.0; } else if self.get_key(VirtualKeyCode::A) || self.get_key(VirtualKeyCode::Left) { *self.axes.get_mut("horizontal").unwrap() = -1.0; } else { *self.axes.get_mut("horizontal").unwrap() = 0.0; } } pub fn parse(&mut self, event: &DeviceEvent) { match event { DeviceEvent::Key(input) => { if input.state == ElementState::Pressed { if let Some(vkey) = input.virtual_keycode { self.keyboard.insert(vkey as u32); } else { self.keyboard.insert(input.scancode); } } else { if let Some(vkey) = input.virtual_keycode { self.keyboard.remove(&(vkey as u32)); } else { self.keyboard.remove(&input.scancode); } } 
}, DeviceEvent::MouseMotion { delta, .. } => { self.mouse_pos.0 += delta.0 as f32; self.mouse_pos.1 += delta.1 as f32; self.mouse_delta = (delta.0 as f32, delta.1 as f32); }, DeviceEvent::Button { button, state } => { if state == &ElementState::Pressed { self.buttons.insert(*button); } else { self.buttons.remove(button); } }, DeviceEvent::MouseWheel { delta } => { if let MouseScrollDelta::LineDelta(_, y) = delta { self.scroll_wheel = -y.signum(); } }, _ => { } } } pub fn get_key(&self, key: VirtualKeyCode) -> bool { self.keyboard.contains(&(key as u32)) } pub fn get_key_down(&self, key: VirtualKeyCode) -> bool { self.keyboard.contains(&(key as u32)) && !self.keyboard_prev.contains(&(key as u32)) } pub fn get_key_up(&self, key: VirtualKeyCode) -> bool { !self.keyboard.contains(&(key as u32)) && self.keyboard_prev.contains(&(key as u32)) } pub fn get_button(&self, button: ButtonId) -> bool { self.buttons.contains(&button) } pub fn get_button_down(&self, button: ButtonId) -> bool { self.buttons.contains(&button) && !self.buttons_prev.contains(&button) } pub fn get_button_up(&self, button: ButtonId) -> bool { !self.buttons.contains(&button) && self.buttons_prev.contains(&button) } pub fn mouse_pos(&self) -> (f32, f32) { self.mouse_pos } pub fn mouse_pos_rel(&self) -> (f32, f32) { let (m_x, m_y) = self.mouse_pos; let (w_x, w_y) = self.window_size; (m_x / w_x as f32, m_y / w_y as f32) } pub fn mouse_delta(&self) -> (f32, f32) { self.mouse_delta } pub fn scroll_wheel(&self) -> f32 { self.scroll_wheel } pub fn get_axis(&self, axis: &str) -> Option<f32> { self.axes.get(&axis.to_lowercase()).cloned() } }
use std::collections::{ HashSet, HashMap }; use winit::event::{ VirtualKeyCode, ElementState, DeviceEvent, ButtonId, MouseScrollDelta }; pub struct Input { keyboard: HashSet<u32>, keyboard_prev: HashSet<u32>, buttons: HashSet<u32>, buttons_prev: HashSet<u32>, axes: HashMap<String, f32>, mouse_pos: (f32, f32), mouse_delta: (f32, f32), scroll_wheel: f32, window_size: (u32, u32) } impl Input { pub fn new(window_size: (u32, u32)) -> Self { let mut axes = HashMap::ne
pub fn update(&mut self) { self.keyboard_prev = self.keyboard.clone(); self.buttons_prev = self.buttons.clone(); self.mouse_delta = (0.0, 0.0); self.scroll_wheel = 0.0; if self.get_key(VirtualKeyCode::W) || self.get_key(VirtualKeyCode::Up) { *self.axes.get_mut("vertical").unwrap() = 1.0; } else if self.get_key(VirtualKeyCode::S) || self.get_key(VirtualKeyCode::Down) { *self.axes.get_mut("vertical").unwrap() = -1.0; } else { *self.axes.get_mut("vertical").unwrap() = 0.0; } if self.get_key(VirtualKeyCode::D) || self.get_key(VirtualKeyCode::Right) { *self.axes.get_mut("horizontal").unwrap() = 1.0; } else if self.get_key(VirtualKeyCode::A) || self.get_key(VirtualKeyCode::Left) { *self.axes.get_mut("horizontal").unwrap() = -1.0; } else { *self.axes.get_mut("horizontal").unwrap() = 0.0; } } pub fn parse(&mut self, event: &DeviceEvent) { match event { DeviceEvent::Key(input) => { if input.state == ElementState::Pressed { if let Some(vkey) = input.virtual_keycode { self.keyboard.insert(vkey as u32); } else { self.keyboard.insert(input.scancode); } } else { if let Some(vkey) = input.virtual_keycode { self.keyboard.remove(&(vkey as u32)); } else { self.keyboard.remove(&input.scancode); } } }, DeviceEvent::MouseMotion { delta, .. 
} => { self.mouse_pos.0 += delta.0 as f32; self.mouse_pos.1 += delta.1 as f32; self.mouse_delta = (delta.0 as f32, delta.1 as f32); }, DeviceEvent::Button { button, state } => { if state == &ElementState::Pressed { self.buttons.insert(*button); } else { self.buttons.remove(button); } }, DeviceEvent::MouseWheel { delta } => { if let MouseScrollDelta::LineDelta(_, y) = delta { self.scroll_wheel = -y.signum(); } }, _ => { } } } pub fn get_key(&self, key: VirtualKeyCode) -> bool { self.keyboard.contains(&(key as u32)) } pub fn get_key_down(&self, key: VirtualKeyCode) -> bool { self.keyboard.contains(&(key as u32)) && !self.keyboard_prev.contains(&(key as u32)) } pub fn get_key_up(&self, key: VirtualKeyCode) -> bool { !self.keyboard.contains(&(key as u32)) && self.keyboard_prev.contains(&(key as u32)) } pub fn get_button(&self, button: ButtonId) -> bool { self.buttons.contains(&button) } pub fn get_button_down(&self, button: ButtonId) -> bool { self.buttons.contains(&button) && !self.buttons_prev.contains(&button) } pub fn get_button_up(&self, button: ButtonId) -> bool { !self.buttons.contains(&button) && self.buttons_prev.contains(&button) } pub fn mouse_pos(&self) -> (f32, f32) { self.mouse_pos } pub fn mouse_pos_rel(&self) -> (f32, f32) { let (m_x, m_y) = self.mouse_pos; let (w_x, w_y) = self.window_size; (m_x / w_x as f32, m_y / w_y as f32) } pub fn mouse_delta(&self) -> (f32, f32) { self.mouse_delta } pub fn scroll_wheel(&self) -> f32 { self.scroll_wheel } pub fn get_axis(&self, axis: &str) -> Option<f32> { self.axes.get(&axis.to_lowercase()).cloned() } }
w(); axes.insert("horizontal".into(), 0.0); axes.insert("vertical".into(), 0.0); Self { keyboard: HashSet::new(), keyboard_prev: HashSet::new(), buttons: HashSet::new(), buttons_prev: HashSet::new(), axes, mouse_pos: (window_size.0 as f32 / 2.0, window_size.1 as f32 / 2.0), mouse_delta: (0.0, 0.0), scroll_wheel: 0.0, window_size } }
function_block-function_prefixed
[ { "content": "#[proc_macro_derive(Component)]\n\npub fn component_derive(input: TokenStream) -> TokenStream {\n\n let ast: syn::DeriveInput = syn::parse(input).unwrap();\n\n\n\n let name = &ast.ident;\n\n let gen = quote! {\n\n impl Component for #name {\n\n fn get_id(&self) -> usize ...
Rust
src/shard_ctrler/server.rs
gloriallluo/MadRaft
8c2b480b431445f5182791bdbd69b9528e69a021
use std::collections::HashMap; use crate::{ shard_ctrler::{msg::*, N_SHARDS}, kvraft::{server::Server, state::State}, }; use serde::{Deserialize, Serialize}; pub type ShardCtrler = Server<ShardInfo>; #[derive(Debug, Serialize, Deserialize)] pub struct ShardInfo { configs: Vec<Config>, } impl Default for ShardInfo { fn default() -> Self { Self { configs: vec![Config::default()], } } } impl ShardInfo { fn new_config(&self) -> Config { self.configs .last() .map(|config| { let mut config = config.clone(); config.num += 1; config }) .unwrap() } } impl State for ShardInfo { type Command = Op; type Output = Option<Config>; fn apply(&mut self, cmd: Self::Command) -> Self::Output { match cmd { Op::Query { num } => { if num < self.configs.len() as ConfigId { Some(self.configs[num as usize].clone()) } else { self.configs.last().map(|v| v.clone()) } }, Op::Move { shard, gid } => { let mut new_config = self.new_config(); new_config.shards[shard] = gid; self.configs.push(new_config.clone()); Some(new_config) }, Op::Join { groups } => { let mut new_config = self.new_config(); let mut ng = Vec::new(); groups .into_iter() .for_each(|g| { ng.push(g.0); new_config.groups.insert(g.0, g.1); }); new_config.balance_join(ng); self.configs.push(new_config.clone()); Some(new_config) }, Op::Leave { gids } => { let mut new_config = self.new_config(); gids .iter() .for_each(|g| { new_config.groups.remove(g); }); new_config.balance_leave(gids); self.configs.push(new_config.clone()); Some(new_config) }, } } } impl Config { fn balance_join(&mut self, mut new_groups: Vec<Gid>) { let n_groups = self.groups.len(); let opt = N_SHARDS / n_groups; let r = N_SHARDS - n_groups * opt; let mut re_alloc_shards: Vec<usize> = Vec::new(); let mut count = HashMap::new(); for (shard, gid) in self.shards.iter().enumerate() { if *gid == 0 { re_alloc_shards.push(shard); continue; } let &cnt = count.get(gid).unwrap_or(&0usize); if cnt + 1 > opt + 1 { re_alloc_shards.push(shard); } else if cnt + 1 == opt + 1 { 
re_alloc_shards.insert(0, shard); } count.insert(gid, cnt + 1); } new_groups.sort(); new_groups .iter() .enumerate() .for_each(|(i, gid)| { let c = if i < r { opt + 1 } else { opt }; for _ in 0..c { self.shards[re_alloc_shards.pop().unwrap()] = *gid; } }); } fn balance_leave(&mut self, old_groups: Vec<Gid>) { let n_groups = self.groups.len(); if n_groups == 0 { self.shards.iter_mut().for_each(|g| *g = 0); return; } let opt = N_SHARDS / n_groups; let mut re_alloc_shards: Vec<usize> = Vec::new(); let mut count = HashMap::new(); for (shard, gid) in self.shards.iter().enumerate() { if old_groups.contains(gid) { re_alloc_shards.push(shard); continue; } let &cnt = count.get(gid).unwrap_or(&0usize); count.insert(gid, cnt + 1); } let mut re_alloc_groups: Vec<Gid> = Vec::new(); let mut all_groups: Vec<Gid> = self.groups.iter().map(|v| *v.0).collect(); all_groups.sort(); for gid in all_groups { let cnt = count.get(&gid).map_or(0, |v| *v); for _ in cnt..opt { re_alloc_groups.push(gid); } if cnt < opt + 1 { re_alloc_groups.insert(0, gid); } } re_alloc_shards .iter() .for_each(|&shard| { self.shards[shard] = re_alloc_groups.pop().unwrap(); }); } }
use std::collections::HashMap; use crate::{ shard_ctrler::{msg::*, N_SHARDS}, kvraft::{server::Server, state::State}, }; use serde::{Deserialize, Serialize}; pub type ShardCtrler = Server<ShardInfo>; #[derive(Debug, Serialize, Deserialize)] pub struct ShardInfo { configs: Vec<Config>, } impl Default for ShardInfo { fn default() -> Self { Self { configs: vec![Config::default()], } } } impl ShardInfo { fn new_config(&self) -> Config { self.configs .last() .map(|config| { let mut config = config.clone(); config.num += 1; config }) .unwrap() } } impl State for ShardInfo { type Command = Op; type Output = Option<Config>; fn apply(&mut self, cmd: Self::Command) -> Self::Output { match cmd { Op::Query { num } => { if num < self.configs.len() as ConfigId { Some(self.configs[num as usize].clone()) } else { self.configs.last().map(|v| v.clone()) } }, Op::Move { shard, gid } => { let mut new_config = self.new_config(); new_config.shards[shard] = gid; self.configs.push(new_config.clone()); Some(new_config) }, Op::Join { groups } => { let mut new_config = self.new_config(); let mut ng = Vec::new(); groups .into_iter() .for_each(|g| { ng.push(g.0); new_config.grou
.iter() .enumerate() .for_each(|(i, gid)| { let c = if i < r { opt + 1 } else { opt }; for _ in 0..c { self.shards[re_alloc_shards.pop().unwrap()] = *gid; } }); } fn balance_leave(&mut self, old_groups: Vec<Gid>) { let n_groups = self.groups.len(); if n_groups == 0 { self.shards.iter_mut().for_each(|g| *g = 0); return; } let opt = N_SHARDS / n_groups; let mut re_alloc_shards: Vec<usize> = Vec::new(); let mut count = HashMap::new(); for (shard, gid) in self.shards.iter().enumerate() { if old_groups.contains(gid) { re_alloc_shards.push(shard); continue; } let &cnt = count.get(gid).unwrap_or(&0usize); count.insert(gid, cnt + 1); } let mut re_alloc_groups: Vec<Gid> = Vec::new(); let mut all_groups: Vec<Gid> = self.groups.iter().map(|v| *v.0).collect(); all_groups.sort(); for gid in all_groups { let cnt = count.get(&gid).map_or(0, |v| *v); for _ in cnt..opt { re_alloc_groups.push(gid); } if cnt < opt + 1 { re_alloc_groups.insert(0, gid); } } re_alloc_shards .iter() .for_each(|&shard| { self.shards[shard] = re_alloc_groups.pop().unwrap(); }); } }
ps.insert(g.0, g.1); }); new_config.balance_join(ng); self.configs.push(new_config.clone()); Some(new_config) }, Op::Leave { gids } => { let mut new_config = self.new_config(); gids .iter() .for_each(|g| { new_config.groups.remove(g); }); new_config.balance_leave(gids); self.configs.push(new_config.clone()); Some(new_config) }, } } } impl Config { fn balance_join(&mut self, mut new_groups: Vec<Gid>) { let n_groups = self.groups.len(); let opt = N_SHARDS / n_groups; let r = N_SHARDS - n_groups * opt; let mut re_alloc_shards: Vec<usize> = Vec::new(); let mut count = HashMap::new(); for (shard, gid) in self.shards.iter().enumerate() { if *gid == 0 { re_alloc_shards.push(shard); continue; } let &cnt = count.get(gid).unwrap_or(&0usize); if cnt + 1 > opt + 1 { re_alloc_shards.push(shard); } else if cnt + 1 == opt + 1 { re_alloc_shards.insert(0, shard); } count.insert(gid, cnt + 1); } new_groups.sort(); new_groups
random
[ { "content": "pub trait State: net::Message + Default {\n\n type Command: net::Message + Clone;\n\n type Output: net::Message + Clone;\n\n fn apply(&mut self, cmd: Self::Command) -> Self::Output;\n\n}\n\n\n\n\n\n#[derive(Debug, Default, Serialize, Deserialize)]\n\npub struct Kv {\n\n data: HashMap<S...
Rust
core/src/ops/cnn/conv/depth_wise.rs
mithril-security/tract-sgx-xargo
ea0d7cc5a81f413250dbfca89d5d876e76746b89
use crate::internal::*; use crate::ops::cnn::patches::{Zone, ZoneScanner}; use crate::ops::cnn::Patch; use crate::ops::nn::DataShape; #[derive(Debug, Clone, new, Hash)] pub struct DepthWise { patch: Patch, input_shape: DataShape, output_shape: DataShape, kernel_chw: Arc<Tensor>, bias: Arc<Tensor>, } impl_dyn_hash!(DepthWise); impl Op for DepthWise { fn name(&self) -> Cow<str> { "DepthWiseConv".into() } fn info(&self) -> TractResult<Vec<String>> { Ok(vec![format!("{:?}", self.patch)]) } fn validation(&self) -> Validation { Validation::Rounding } op_core_lir!(); op_as_typed_op!(); } impl EvalOp for DepthWise { fn is_stateless(&self) -> bool { true } fn eval(&self, inputs: TVec<Arc<Tensor>>) -> TractResult<TVec<Arc<Tensor>>> { dispatch_floatlike!(Self::eval_t(inputs[0].datum_type())(self, inputs)) } } impl DepthWise { fn eval_t<T: Datum + Copy + num_traits::Zero + ndarray::LinalgScalar>( &self, mut inputs: TVec<Arc<Tensor>>, ) -> TractResult<TVec<Arc<Tensor>>> { let img = args_1!(inputs); let mut output = unsafe { Tensor::uninitialized::<T>(&*self.output_shape.shape)? 
}; let iptr = img.as_ptr::<T>()?; let optr = output.as_ptr_mut::<T>()?; let k_stride_i = self.kernel_chw.strides()[1]; let n = *self.input_shape.n().unwrap_or(&1); let n_stride_i = *self.input_shape.n_stride().unwrap_or(&0) as isize; let n_stride_o = *self.output_shape.n_stride().unwrap_or(&0) as isize; let c_stride_i = *self.input_shape.c_stride() as isize; let c_stride_o = *self.output_shape.c_stride() as isize; let bias = self.bias.as_ptr::<T>()?; let kptr = self.kernel_chw.as_ptr::<T>()?; unsafe { for n in 0..n as isize { let iptr = iptr.offset(n_stride_i * n); let optr = optr.offset(n_stride_o * n); for zone in &self.patch.zones { self.process_zone( zone, c_stride_i, c_stride_o, k_stride_i, iptr, kptr, bias, optr, ) } } } Ok(tvec!(output.into_arc_tensor())) } #[inline(never)] unsafe fn process_zone<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { if zone.values_offsets.len() == 4 { self.process_zone_4(zone, c_stride_i, c_stride_o, k_stride_i, iptr, kptr, bias, optr) } else { zone.visit_output(&self.patch, |visitor| { for c in 0..*self.input_shape.c() as isize { let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let kptr = kptr.offset(k_stride_i * c); Self::inner_loop::<T>(iptr, kptr, bias, optr, c, visitor) } }) } } #[inline(never)] unsafe fn process_zone_4<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let 
optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); let mut i = 0isize; while i + 4 < visitor.inner_loop_len as isize { let iptr_a = iptr.offset(visitor.inner_loop_input_full_stride * i); let iptr_b = iptr.offset(visitor.inner_loop_input_full_stride * (i + 1)); let iptr_c = iptr.offset(visitor.inner_loop_input_full_stride * (i + 2)); let iptr_d = iptr.offset(visitor.inner_loop_input_full_stride * (i + 3)); let optr_a = optr.offset(visitor.inner_loop_output_stride * i); let optr_b = optr.offset(visitor.inner_loop_output_stride * (i + 1)); let optr_c = optr.offset(visitor.inner_loop_output_stride * (i + 2)); let optr_d = optr.offset(visitor.inner_loop_output_stride * (i + 3)); let i0_a = *iptr_a.offset(ioffset0); let i0_b = *iptr_b.offset(ioffset0); let i0_c = *iptr_c.offset(ioffset0); let i0_d = *iptr_d.offset(ioffset0); let i1_a = *iptr_a.offset(ioffset1); let i1_b = *iptr_b.offset(ioffset1); let i1_c = *iptr_c.offset(ioffset1); let i1_d = *iptr_d.offset(ioffset1); let i2_a = *iptr_a.offset(ioffset2); let i2_b = *iptr_b.offset(ioffset2); let i2_c = *iptr_c.offset(ioffset2); let i2_d = *iptr_d.offset(ioffset2); let i3_a = *iptr_a.offset(ioffset3); let i3_b = *iptr_b.offset(ioffset3); let i3_c = *iptr_c.offset(ioffset3); let i3_d = *iptr_d.offset(ioffset3); let p0_a = i0_a * k0; let p1_a = i1_a * k1; let p2_a = i2_a * k2; let p3_a = i3_a * k3; let p0_b = i0_b * k0; let p1_b = i1_b * k1; let p2_b = i2_b * k2; let p3_b = i3_b * k3; let p0_c = i0_c * k0; let p1_c = i1_c * k1; let p2_c = i2_c * k2; let p3_c = i3_c * k3; let p0_d = i0_d * k0; let p1_d = i1_d * k1; let p2_d = i2_d * k2; let p3_d = i3_d * k3; 
*optr_a = bias + p0_a + p1_a + p2_a + p3_a; *optr_b = bias + p0_b + p1_b + p2_b + p3_b; *optr_c = bias + p0_c + p1_c + p2_c + p3_c; *optr_d = bias + p0_d + p1_d + p2_d + p3_d; i += 4; } while i < visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let p0 = i0 * k0; let p1 = i1 * k1; let p2 = i2 * k2; let p3 = i3 * k3; let sum = bias + p0 + p1 + p2 + p3; *optr = sum; i += 1; } visitor.next_non_inner_axis() } } } #[inline(never)] unsafe fn inner_loop<T: Datum + Copy + ndarray::LinalgScalar>( iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, c: isize, visitor: &ZoneScanner, ) { let mut sum = *bias.offset(c); let mut iter = visitor.valid_offsets_ker_in(); if iter.size_hint() == (4, Some(4)) { let (ix, v) = iter.next().unwrap(); let k0 = *kptr.offset(ix as isize); let i0 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k1 = *kptr.offset(ix as isize); let i1 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k2 = *kptr.offset(ix as isize); let i2 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k3 = *kptr.offset(ix as isize); let i3 = *iptr.offset(v as isize); sum = sum + k0 * i0 + k1 * i1 + k2 * i2 + k3 * i3; } else if iter.size_hint() == (3, Some(3)) { let (ix, v) = iter.next().unwrap(); let k0 = *kptr.offset(ix as isize); let i0 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k1 = *kptr.offset(ix as isize); let i1 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k2 = *kptr.offset(ix as isize); let i2 = *iptr.offset(v as isize); sum = sum + k0 * i0 + k1 * i1 + k2 * i2; } else { for (ix, v) in iter { let k = *kptr.offset(ix as isize); let i = *iptr.offset(v as isize); sum = sum + k * i; } } let optr = 
optr.offset(visitor.output_offset); *optr = sum; } } impl TypedOp for DepthWise { fn output_facts(&self, inputs: &[&TypedFact]) -> TractResult<TVec<TypedFact>> { anyhow::ensure!( self.input_shape.c() == self.output_shape.c(), "DepthWiseConv must have same input and output channels" ); anyhow::ensure!( *self.input_shape.c() == self.bias.len(), "DepthWiseConv data has {} channels, bias has {}", self.input_shape.c(), self.bias.len() ); Ok(tvec!(TypedFact::dt_shape(inputs[0].datum_type, &self.output_shape.shape))) } fn cost(&self, inputs: &[&TypedFact]) -> TractResult<TVec<(Cost, TDim)>> { let n_output_points = self.patch.output_shape.iter().cloned().product::<usize>(); Ok(tvec!(( Cost::FMA(inputs[0].datum_type), (self.input_shape.n().unwrap_or(&1) * n_output_points * self.kernel_chw.len()).to_dim() ))) } as_op!(); } /* partial alternative impl that may be relevant when simd gets better */ /* #[inline(never)] unsafe fn process_zone_4_f32( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const f32, kptr: *const f32, bias: *const f32, optr: *mut f32, ) { use std::simd::*; let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let k0 = f32x4::splat(k0); let k1 = f32x4::splat(k1); let k2 = f32x4::splat(k2); let k3 = f32x4::splat(k3); let bias = f32x4::splat(*bias.offset(c)); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = 
optr.offset(visitor.output_offset); let mut i = 0; while i + 4 < for i in 0..visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let i = f32x4::from_array([i0, i1, i2, i3]); let p = (i * k).reduce_sum(); let sum = bias + p; *optr = sum } visitor.next_non_inner_axis() } } } */ /* #[inline(never)] unsafe fn process_zone_4_f32( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const f32, kptr: *const f32, bias: *const f32, optr: *mut f32, ) { use std::simd::*; let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let k = f32x4::from_array([k0, k1, k2, k3]); let bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); for i in 0..visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let i = f32x4::from_array([i0, i1, i2, i3]); let p = (i * k).reduce_sum(); let sum = bias + p; *optr = sum } 
visitor.next_non_inner_axis() } } } */ /* #[inline(never)] unsafe fn process_zone_4<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); for i in 0..visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let p0 = i0 * k0; let p1 = i1 * k1; let p2 = i2 * k2; let p3 = i3 * k3; let sum = bias + p0 + p1 + p2 + p3; *optr = sum } visitor.next_non_inner_axis() } } } */
use crate::internal::*; use crate::ops::cnn::patches::{Zone, ZoneScanner}; use crate::ops::cnn::Patch; use crate::ops::nn::DataShape; #[derive(Debug, Clone, new, Hash)] pub struct DepthWise { patch: Patch, input_shape: DataShape, output_shape: DataShape, kernel_chw: Arc<Tensor>, bias: Arc<Tensor>, } impl_dyn_hash!(DepthWise); impl Op for DepthWise { fn name(&self) -> Cow<str> { "DepthWiseConv".into() } fn info(&self) -> TractResult<Vec<String>> { Ok(vec![format!("{:?}", self.patch)]) } fn validation(&self) -> Validation { Validation::Rounding } op_core_lir!(); op_as_typed_op!(); } impl EvalOp for DepthWise { fn is_stateless(&self) -> bool { true } fn eval(&self, inputs: TVec<Arc<Tensor>>) -> TractResult<TVec<Arc<Tensor>>> { dispatch_floatlike!(Self::eval_t(inputs[0].datum_type())(self, inputs)) } } impl DepthWise { fn eval_t<T: Datum + Copy + num_traits::Zero + ndarray::LinalgScalar>( &self, mut inputs: TVec<Arc<Tensor>>, ) -> TractResult<TVec<Arc<Tensor>>> { let img = args_1!(inputs); let mut output = unsafe { Tensor::uninitialized::<T>(&*self.output_shape.shape)? 
}; let iptr = img.as_ptr::<T>()?; let optr = output.as_ptr_mut::<T>()?; let k_stride_i = self.kernel_chw.strides()[1]; let n = *self.input_shape.n().unwrap_or(&1); let n_stride_i = *self.input_shape.n_stride().unwrap_or(&0) as isize; let n_stride_o = *self.output_shape.n_stride().unwrap_or(&0) as isize; let c_stride_i = *self.input_shape.c_stride() as isize; let c_stride_o = *self.output_shape.c_stride() as isize; let bias = self.bias.as_ptr::<T>()?; let kptr = self.kernel_chw.as_ptr::<T>()?; unsafe { for n in 0..n as isize { let iptr = iptr.offset(n_stride_i * n); let optr = optr.offset(n_stride_o * n); for zone in &self.patch.zones { self.process_zone( zone, c_stride_i, c_stride_o, k_stride_i, iptr, kptr, bias, optr, ) } } } Ok(tvec!(output.into_arc_tensor())) } #[inline(never)] unsafe fn process_zone<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { if zone.values_offsets.len() == 4 { self.
#[inline(never)] unsafe fn process_zone_4<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); let mut i = 0isize; while i + 4 < visitor.inner_loop_len as isize { let iptr_a = iptr.offset(visitor.inner_loop_input_full_stride * i); let iptr_b = iptr.offset(visitor.inner_loop_input_full_stride * (i + 1)); let iptr_c = iptr.offset(visitor.inner_loop_input_full_stride * (i + 2)); let iptr_d = iptr.offset(visitor.inner_loop_input_full_stride * (i + 3)); let optr_a = optr.offset(visitor.inner_loop_output_stride * i); let optr_b = optr.offset(visitor.inner_loop_output_stride * (i + 1)); let optr_c = optr.offset(visitor.inner_loop_output_stride * (i + 2)); let optr_d = optr.offset(visitor.inner_loop_output_stride * (i + 3)); let i0_a = *iptr_a.offset(ioffset0); let i0_b = *iptr_b.offset(ioffset0); let i0_c = *iptr_c.offset(ioffset0); let i0_d = *iptr_d.offset(ioffset0); let i1_a = *iptr_a.offset(ioffset1); let i1_b = *iptr_b.offset(ioffset1); let i1_c = *iptr_c.offset(ioffset1); let i1_d = *iptr_d.offset(ioffset1); let i2_a = *iptr_a.offset(ioffset2); let i2_b = 
*iptr_b.offset(ioffset2); let i2_c = *iptr_c.offset(ioffset2); let i2_d = *iptr_d.offset(ioffset2); let i3_a = *iptr_a.offset(ioffset3); let i3_b = *iptr_b.offset(ioffset3); let i3_c = *iptr_c.offset(ioffset3); let i3_d = *iptr_d.offset(ioffset3); let p0_a = i0_a * k0; let p1_a = i1_a * k1; let p2_a = i2_a * k2; let p3_a = i3_a * k3; let p0_b = i0_b * k0; let p1_b = i1_b * k1; let p2_b = i2_b * k2; let p3_b = i3_b * k3; let p0_c = i0_c * k0; let p1_c = i1_c * k1; let p2_c = i2_c * k2; let p3_c = i3_c * k3; let p0_d = i0_d * k0; let p1_d = i1_d * k1; let p2_d = i2_d * k2; let p3_d = i3_d * k3; *optr_a = bias + p0_a + p1_a + p2_a + p3_a; *optr_b = bias + p0_b + p1_b + p2_b + p3_b; *optr_c = bias + p0_c + p1_c + p2_c + p3_c; *optr_d = bias + p0_d + p1_d + p2_d + p3_d; i += 4; } while i < visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let p0 = i0 * k0; let p1 = i1 * k1; let p2 = i2 * k2; let p3 = i3 * k3; let sum = bias + p0 + p1 + p2 + p3; *optr = sum; i += 1; } visitor.next_non_inner_axis() } } } #[inline(never)] unsafe fn inner_loop<T: Datum + Copy + ndarray::LinalgScalar>( iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, c: isize, visitor: &ZoneScanner, ) { let mut sum = *bias.offset(c); let mut iter = visitor.valid_offsets_ker_in(); if iter.size_hint() == (4, Some(4)) { let (ix, v) = iter.next().unwrap(); let k0 = *kptr.offset(ix as isize); let i0 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k1 = *kptr.offset(ix as isize); let i1 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k2 = *kptr.offset(ix as isize); let i2 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k3 = *kptr.offset(ix as isize); let i3 = *iptr.offset(v as isize); sum = sum + k0 * i0 
+ k1 * i1 + k2 * i2 + k3 * i3; } else if iter.size_hint() == (3, Some(3)) { let (ix, v) = iter.next().unwrap(); let k0 = *kptr.offset(ix as isize); let i0 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k1 = *kptr.offset(ix as isize); let i1 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k2 = *kptr.offset(ix as isize); let i2 = *iptr.offset(v as isize); sum = sum + k0 * i0 + k1 * i1 + k2 * i2; } else { for (ix, v) in iter { let k = *kptr.offset(ix as isize); let i = *iptr.offset(v as isize); sum = sum + k * i; } } let optr = optr.offset(visitor.output_offset); *optr = sum; } } impl TypedOp for DepthWise { fn output_facts(&self, inputs: &[&TypedFact]) -> TractResult<TVec<TypedFact>> { anyhow::ensure!( self.input_shape.c() == self.output_shape.c(), "DepthWiseConv must have same input and output channels" ); anyhow::ensure!( *self.input_shape.c() == self.bias.len(), "DepthWiseConv data has {} channels, bias has {}", self.input_shape.c(), self.bias.len() ); Ok(tvec!(TypedFact::dt_shape(inputs[0].datum_type, &self.output_shape.shape))) } fn cost(&self, inputs: &[&TypedFact]) -> TractResult<TVec<(Cost, TDim)>> { let n_output_points = self.patch.output_shape.iter().cloned().product::<usize>(); Ok(tvec!(( Cost::FMA(inputs[0].datum_type), (self.input_shape.n().unwrap_or(&1) * n_output_points * self.kernel_chw.len()).to_dim() ))) } as_op!(); } /* partial alternative impl that may be relevant when simd gets better */ /* #[inline(never)] unsafe fn process_zone_4_f32( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const f32, kptr: *const f32, bias: *const f32, optr: *mut f32, ) { use std::simd::*; let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = 
kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let k0 = f32x4::splat(k0); let k1 = f32x4::splat(k1); let k2 = f32x4::splat(k2); let k3 = f32x4::splat(k3); let bias = f32x4::splat(*bias.offset(c)); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); let mut i = 0; while i + 4 < for i in 0..visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let i = f32x4::from_array([i0, i1, i2, i3]); let p = (i * k).reduce_sum(); let sum = bias + p; *optr = sum } visitor.next_non_inner_axis() } } } */ /* #[inline(never)] unsafe fn process_zone_4_f32( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const f32, kptr: *const f32, bias: *const f32, optr: *mut f32, ) { use std::simd::*; let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let k = f32x4::from_array([k0, k1, k2, k3]); let 
bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); for i in 0..visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let i = f32x4::from_array([i0, i1, i2, i3]); let p = (i * k).reduce_sum(); let sum = bias + p; *optr = sum } visitor.next_non_inner_axis() } } } */ /* #[inline(never)] unsafe fn process_zone_4<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); for i in 0..visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let p0 = i0 * k0; let p1 = i1 * k1; let p2 = i2 * k2; let p3 = i3 * k3; let sum 
= bias + p0 + p1 + p2 + p3; *optr = sum } visitor.next_non_inner_axis() } } } */
process_zone_4(zone, c_stride_i, c_stride_o, k_stride_i, iptr, kptr, bias, optr) } else { zone.visit_output(&self.patch, |visitor| { for c in 0..*self.input_shape.c() as isize { let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let kptr = kptr.offset(k_stride_i * c); Self::inner_loop::<T>(iptr, kptr, bias, optr, c, visitor) } }) } }
function_block-function_prefixed
[ { "content": "pub fn output_type(input: DatumType) -> DatumType {\n\n if input.is_float() {\n\n input\n\n } else {\n\n i32::datum_type()\n\n }\n\n}\n\n\n\npub(super) fn eval(\n\n a: &Tensor,\n\n b: &Tensor,\n\n a_trans: bool,\n\n b_trans: bool,\n\n c_trans: bool,\n\n) -> Tr...
Rust
cglue-gen/src/util.rs
ko1N/cglue
040074eed961d4783a4c9c8a1c084441c0dcb301
use proc_macro2::TokenStream; use proc_macro_crate::{crate_name, FoundCrate}; use quote::{format_ident, quote}; use syn::parse::{Parse, ParseStream}; use syn::punctuated::Punctuated; use syn::token::Colon2; use syn::token::Comma; use syn::*; pub fn crate_path() -> TokenStream { let (col, ident) = crate_path_ident(); quote!(#col #ident) } pub fn crate_path_ident() -> (Option<Colon2>, Ident) { match crate_path_fixed() { Some(FoundCrate::Itself) => (None, format_ident!("crate")), Some(FoundCrate::Name(name)) => (Some(Default::default()), format_ident!("{}", name)), None => (None, format_ident!("cglue")), } } pub fn crate_path_fixed() -> Option<FoundCrate> { let found_crate = crate_name("cglue").ok()?; let ret = match found_crate { FoundCrate::Itself => { let has_doc_env = std::env::vars().any(|(k, _)| { k == "UNSTABLE_RUSTDOC_TEST_LINE" || k == "UNSTABLE_RUSTDOC_TEST_PATH" }); if has_doc_env { FoundCrate::Name("cglue".to_string()) } else { FoundCrate::Itself } } x => x, }; Some(ret) } pub fn parse_maybe_braced<T: Parse>(input: ParseStream) -> Result<Vec<T>> { let mut ret = vec![]; if let Ok(braces) = syn::group::parse_braces(&input) { let content = braces.content; while !content.is_empty() { let val = content.parse()?; ret.push(val); if !content.is_empty() { content.parse::<Token![,]>()?; } } } else { ret.push(input.parse()?) 
} Ok(ret) } pub type GenericsOut = Option<Punctuated<GenericArgument, Comma>>; pub fn split_path_ident(in_path: &Path) -> Result<(Path, Ident, GenericsOut)> { let mut path = Path { leading_colon: in_path.leading_colon, segments: Default::default(), }; let mut ident = None; let mut generics = None; for part in in_path.segments.pairs() { match part { punctuated::Pair::Punctuated(p, _) => { path.segments.push_value(p.clone()); path.segments.push_punct(Default::default()); } punctuated::Pair::End(p) => { if let PathArguments::AngleBracketed(arg) = &p.arguments { generics = Some(arg.args.clone()); } ident = Some(p.ident.clone()); } } } let ident = ident.ok_or_else(|| Error::new(proc_macro2::Span::call_site(), "Ident not found!"))?; Ok((path, ident, generics)) } pub fn is_null_pointer_optimizable(ty: &Type, custom_types: &[&'static str]) -> bool { match ty { Type::Reference(_) => true, Type::BareFn(_) => true, Type::Path(path) => { let last = path.path.segments.last(); last.map(|l| { let s = &l.ident.to_string(); ["NonNull", "Box"].contains(&s.as_str()) || custom_types.contains(&s.as_str()) || (s.starts_with("NonZero") && [ "I8", "U8", "I16", "U16", "I32", "U32", "I64", "U64", "I128", "U128", ] .contains(&s.split_at("NonZero".len()).1)) }) == Some(true) } _ => false, } }
use proc_macro2::TokenStream; use proc_macro_crate::{crate_name, FoundCrate}; use quote::{format_ident, quote}; use syn::parse::{Parse, ParseStream}; use syn::punctuated::Punctuated; use syn::token::Colon2; use syn::token::Comma; use syn::*; pub fn crate_path() -> TokenStream { let (col, ident) = crate_path_ident(); quote!(#col #ident) } pub fn crate_path_ident() -> (Option<Colon2>, Ident) { match crate_path_fixed() { Some(FoundCrate::Itself) => (None, format_ident!("crate")), Some(FoundCrate::Name(name)) => (Some(Default::default()), format_ident!("{}", name)), None => (None, format_ident!("cglue")), } } pub fn crate_path_fixed() -> Option<FoundCrate> { let found_crate = crate_name("cglue").ok()?; let ret = match found_crate { FoundCrate::Itself => { let has_doc_env = std::env::vars().any(|(k, _)| { k == "UNSTABLE_RUSTDOC_TEST_LINE" || k == "UNSTABLE_RUSTDOC_TEST_PATH" });
} x => x, }; Some(ret) } pub fn parse_maybe_braced<T: Parse>(input: ParseStream) -> Result<Vec<T>> { let mut ret = vec![]; if let Ok(braces) = syn::group::parse_braces(&input) { let content = braces.content; while !content.is_empty() { let val = content.parse()?; ret.push(val); if !content.is_empty() { content.parse::<Token![,]>()?; } } } else { ret.push(input.parse()?) } Ok(ret) } pub type GenericsOut = Option<Punctuated<GenericArgument, Comma>>; pub fn split_path_ident(in_path: &Path) -> Result<(Path, Ident, GenericsOut)> { let mut path = Path { leading_colon: in_path.leading_colon, segments: Default::default(), }; let mut ident = None; let mut generics = None; for part in in_path.segments.pairs() { match part { punctuated::Pair::Punctuated(p, _) => { path.segments.push_value(p.clone()); path.segments.push_punct(Default::default()); } punctuated::Pair::End(p) => { if let PathArguments::AngleBracketed(arg) = &p.arguments { generics = Some(arg.args.clone()); } ident = Some(p.ident.clone()); } } } let ident = ident.ok_or_else(|| Error::new(proc_macro2::Span::call_site(), "Ident not found!"))?; Ok((path, ident, generics)) } pub fn is_null_pointer_optimizable(ty: &Type, custom_types: &[&'static str]) -> bool { match ty { Type::Reference(_) => true, Type::BareFn(_) => true, Type::Path(path) => { let last = path.path.segments.last(); last.map(|l| { let s = &l.ident.to_string(); ["NonNull", "Box"].contains(&s.as_str()) || custom_types.contains(&s.as_str()) || (s.starts_with("NonZero") && [ "I8", "U8", "I16", "U16", "I32", "U32", "I64", "U64", "I128", "U128", ] .contains(&s.split_at("NonZero".len()).1)) }) == Some(true) } _ => false, } }
if has_doc_env { FoundCrate::Name("cglue".to_string()) } else { FoundCrate::Itself }
if_condition
[ { "content": "pub fn gen_trait(mut tr: ItemTrait, ext_name: Option<&Ident>) -> TokenStream {\n\n // Path to trait group import.\n\n let crate_path = crate::util::crate_path();\n\n let trg_path: TokenStream = quote!(#crate_path::trait_group);\n\n\n\n // Need to preserve the same visibility as the tra...
Rust
hfo2/src/init.rs
Dongjoo-Kim/hafnium-verification
6071eff162148e4d25a0fedaea003addac242ace
/* * Copyright 2019 Sanguk Park * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use core::mem::MaybeUninit; use core::ptr; use crate::addr::*; use crate::arch::*; use crate::boot_flow::*; use crate::boot_params::*; use crate::cpu::*; use crate::hypervisor::*; use crate::load::*; use crate::manifest::*; use crate::memiter::*; use crate::mm::*; use crate::mpool::*; use crate::page::*; use crate::types::*; use crate::vm::*; extern "C" { fn plat_console_init(); fn arch_one_time_init(); fn dlog_enable_lock(); static callstacks: [[u8; STACK_SIZE]; MAX_CPUS]; static boot_cpu: Cpu; } static mut PTABLE_BUF: MaybeUninit<[RawPage; HEAP_PAGES]> = MaybeUninit::uninit(); static mut INITED: bool = false; static mut HYPERVISOR: MaybeUninit<Hypervisor> = MaybeUninit::uninit(); #[no_mangle] unsafe extern "C" fn one_time_init(c: *const Cpu) -> *const Cpu { if &boot_cpu as *const _ != c || INITED { return c; } plat_console_init(); dlog!("Initialising hafnium\n"); arch_one_time_init(); arch_cpu_module_init(); let ppool = MPool::new(); ppool.free_pages(Pages::from_raw( PTABLE_BUF.get_mut().as_mut_ptr(), HEAP_PAGES, )); let mm = MemoryManager::new(&ppool).expect("mm_init failed"); mm.cpu_init(); dlog_enable_lock(); mpool_enable_locks(); static mut MANIFEST: MaybeUninit<Manifest> = MaybeUninit::uninit(); let mut manifest = MANIFEST.get_mut(); let mut params: BootParams = MaybeUninit::uninit().assume_init(); boot_flow_init( &mut mm.hypervisor_ptable.lock(), &mut manifest, &mut params, &ppool, ) 
.expect("Could not parse data from FDT."); let cpum = CpuManager::new( &params.cpu_ids[..params.cpu_count], boot_cpu.id, &callstacks, ); ptr::write( HYPERVISOR.get_mut(), Hypervisor::new(ppool, mm, cpum, VmManager::new()), ); for i in 0..params.mem_ranges_count { dlog!( "Memory range: {:#x} - {:#x}\n", pa_addr(params.mem_ranges[i].begin), pa_addr(params.mem_ranges[i].end) - 1 ); } dlog!( "Ramdisk range: {:#x} - {:#x}\n", pa_addr(params.initrd_begin), pa_addr(params.initrd_end) - 1 ); let mut hypervisor_ptable = hypervisor().memory_manager.hypervisor_ptable.lock(); hypervisor_ptable .identity_map( params.initrd_begin, params.initrd_end, Mode::R, &hypervisor().mpool, ) .expect("unable to map initrd in"); let initrd = pa_addr(params.initrd_begin) as *mut _; let cpio = MemIter::from_raw( initrd, pa_difference(params.initrd_begin, params.initrd_end), ); let primary_initrd = load_primary( &mut HYPERVISOR.get_mut().vm_manager, &mut hypervisor_ptable, &cpio, params.kernel_arg, &hypervisor().mpool, ) .expect("unable to load primary VM"); let mut update: BootParamsUpdate = BootParamsUpdate::new( pa_from_va(va_from_ptr(primary_initrd.get_next() as usize as *const _)), pa_from_va(va_from_ptr(primary_initrd.get_limit() as usize as *const _)), ); load_secondary( &mut HYPERVISOR.get_mut().vm_manager, &mut hypervisor_ptable, &mut manifest, &cpio, &params, &mut update, &hypervisor().mpool, ) .expect("unable to load secondary VMs"); boot_params_patch_fdt(&mut hypervisor_ptable, &mut update, &hypervisor().mpool) .expect("plat_update_boot_params failed"); hypervisor_ptable.defrag(&hypervisor().mpool); mm_vm_enable_invalidation(); dlog!("Hafnium initialisation completed\n"); INITED = true; hypervisor().cpu_manager.get_boot_cpu() } pub fn hypervisor() -> &'static Hypervisor { unsafe { HYPERVISOR.get_ref() } } #[no_mangle] pub unsafe extern "C" fn cpu_main(c: *const Cpu) -> *const VCpu { if hypervisor().cpu_manager.index_of(c) != 0 { hypervisor().memory_manager.cpu_init(); } let primary 
= hypervisor().vm_manager.get_primary(); let vcpu = &primary.vcpus[hypervisor().cpu_manager.index_of(c)]; let vcpu_inner = vcpu.inner.get_mut_unchecked(); vcpu_inner.cpu = c; vcpu_inner.regs.reset(true, vcpu.vm(), (*c).id); vcpu }
/* * Copyright 2019 Sanguk Park * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use core::mem::MaybeUninit; use core::ptr; use crate::addr::*; use crate::arch::*; use crate::boot_flow::*; use crate::boot_params::*; use crate::cpu::*; use crate::hypervisor::*; use crate::load::*; use crate::manifest::*; use crate::memiter::*; use crate::mm::*; use crate::mpool::*; use crate::page::*; use crate::types::*; use crate::vm::*; extern "C" { fn plat_console_init(); fn arch_one_time_init(); fn dlog_enable_lock(); static callstacks: [[u8; STACK_SIZE]; MAX_CPUS]; static boot_cpu: Cpu; } static mut PTABLE_BUF: MaybeUninit<[RawPage; HEAP_PAGES]> = MaybeUninit::uninit(); static mut INITED: bool = false; static mut HYPERVISOR: MaybeUninit<Hypervisor> = MaybeUninit::uninit(); #[no_mangle] unsafe extern "C" fn one_time_init(c: *const Cpu) -> *const Cpu { if &boot_cpu as *const _ != c || INITED { return c; } plat_console_init(); dlog!("Initialising hafnium\n"); arch_one_time_init(); arch_cpu_module_init(); let ppool = MPool::new(); ppool.free_pages(Pages::from_raw( PTABLE_BUF.get_mut().as_mut_ptr(), HEAP_PAGES, )); let mm = MemoryManager::new(&ppool).expect("mm_init failed"); mm.cpu_init(); dlog_enable_lock(); mpool_enable_locks(); static mut MANIFEST: MaybeUninit<Manifest> = MaybeUninit::uninit(); let mut manifest = MANIFEST.get_mut(); let mut params: BootParams = MaybeUninit::uninit().assume_init(); boot_flow_init( &mut mm.hypervisor_ptable.lock(), &mut manifest, &mut params, &ppool, ) 
.expect("Could not parse data from FDT."); let cpum = CpuManager::new( &params.cpu_ids[..params.cpu_count], boot_cpu.id, &callstacks, ); ptr::write( HYPERVISOR.get_mut(), Hypervisor::new(ppool, mm, cpum, VmManager::new()), );
pub fn hypervisor() -> &'static Hypervisor { unsafe { HYPERVISOR.get_ref() } } #[no_mangle] pub unsafe extern "C" fn cpu_main(c: *const Cpu) -> *const VCpu { if hypervisor().cpu_manager.index_of(c) != 0 { hypervisor().memory_manager.cpu_init(); } let primary = hypervisor().vm_manager.get_primary(); let vcpu = &primary.vcpus[hypervisor().cpu_manager.index_of(c)]; let vcpu_inner = vcpu.inner.get_mut_unchecked(); vcpu_inner.cpu = c; vcpu_inner.regs.reset(true, vcpu.vm(), (*c).id); vcpu }
for i in 0..params.mem_ranges_count { dlog!( "Memory range: {:#x} - {:#x}\n", pa_addr(params.mem_ranges[i].begin), pa_addr(params.mem_ranges[i].end) - 1 ); } dlog!( "Ramdisk range: {:#x} - {:#x}\n", pa_addr(params.initrd_begin), pa_addr(params.initrd_end) - 1 ); let mut hypervisor_ptable = hypervisor().memory_manager.hypervisor_ptable.lock(); hypervisor_ptable .identity_map( params.initrd_begin, params.initrd_end, Mode::R, &hypervisor().mpool, ) .expect("unable to map initrd in"); let initrd = pa_addr(params.initrd_begin) as *mut _; let cpio = MemIter::from_raw( initrd, pa_difference(params.initrd_begin, params.initrd_end), ); let primary_initrd = load_primary( &mut HYPERVISOR.get_mut().vm_manager, &mut hypervisor_ptable, &cpio, params.kernel_arg, &hypervisor().mpool, ) .expect("unable to load primary VM"); let mut update: BootParamsUpdate = BootParamsUpdate::new( pa_from_va(va_from_ptr(primary_initrd.get_next() as usize as *const _)), pa_from_va(va_from_ptr(primary_initrd.get_limit() as usize as *const _)), ); load_secondary( &mut HYPERVISOR.get_mut().vm_manager, &mut hypervisor_ptable, &mut manifest, &cpio, &params, &mut update, &hypervisor().mpool, ) .expect("unable to load secondary VMs"); boot_params_patch_fdt(&mut hypervisor_ptable, &mut update, &hypervisor().mpool) .expect("plat_update_boot_params failed"); hypervisor_ptable.defrag(&hypervisor().mpool); mm_vm_enable_invalidation(); dlog!("Hafnium initialisation completed\n"); INITED = true; hypervisor().cpu_manager.get_boot_cpu() }
function_block-function_prefix_line
[ { "content": "/// Helper method for parsing 32/64-bit units from FDT data.\n\npub fn fdt_parse_number(data: &[u8]) -> Option<u64> {\n\n #[repr(C, align(8))]\n\n struct T {\n\n a: [u8; 8],\n\n }\n\n\n\n // FDT values should be aligned to 32-bit boundary.\n\n assert!(is_aligned(data.as_ptr()...
Rust
src/diff.rs
cysp/git2-rs
78759d028e815954bf750279472d890d14104c93
use std::kinds::marker; use std::str; use {raw, StatusEntry, Delta, Oid}; pub struct DiffDelta<'a> { raw: *mut raw::git_diff_delta, marker1: marker::ContravariantLifetime<'a>, marker2: marker::NoSend, marker3: marker::NoSync, } pub struct DiffFile<'a> { raw: *const raw::git_diff_file, marker1: marker::ContravariantLifetime<'a>, marker2: marker::NoSend, marker3: marker::NoSync, } impl<'a> DiffDelta<'a> { pub unsafe fn from_raw(_entry: &'a StatusEntry, raw: *mut raw::git_diff_delta) -> DiffDelta<'a> { DiffDelta { raw: raw, marker1: marker::ContravariantLifetime, marker2: marker::NoSend, marker3: marker::NoSync, } } pub fn nfiles(&self) -> u16 { unsafe { (*self.raw).nfiles } } pub fn status(&self) -> Delta { match unsafe { (*self.raw).status } { raw::GIT_DELTA_UNMODIFIED => Delta::Unmodified, raw::GIT_DELTA_ADDED => Delta::Added, raw::GIT_DELTA_DELETED => Delta::Deleted, raw::GIT_DELTA_MODIFIED => Delta::Modified, raw::GIT_DELTA_RENAMED => Delta::Renamed, raw::GIT_DELTA_COPIED => Delta::Copied, raw::GIT_DELTA_IGNORED => Delta::Ignored, raw::GIT_DELTA_UNTRACKED => Delta::Untracked, raw::GIT_DELTA_TYPECHANGE => Delta::Typechange, raw::GIT_DELTA_UNREADABLE => Delta::Unreadable, } } pub fn old_file(&self) -> DiffFile { unsafe { DiffFile::from_raw(self, &(*self.raw).old_file) } } pub fn new_file(&self) -> DiffFile { unsafe { DiffFile::from_raw(self, &(*self.raw).new_file) } } } impl<'a> DiffFile<'a> { pub unsafe fn from_raw(_entry: &'a DiffDelta, raw: *const raw::git_diff_file) -> DiffFile<'a> { DiffFile { raw: raw, marker1: marker::ContravariantLifetime, marker2: marker::NoSend, marker3: marker::NoSync, } } pub fn id(&self) -> Oid { unsafe { Oid::from_raw(&(*self.raw).id) } } pub fn path_bytes(&self) -> &[u8] { unsafe { ::opt_bytes(self, (*self.raw).path).unwrap() } } pub fn path(&self) -> Option<&str> { str::from_utf8(self.path_bytes()) } pub fn size(&self) -> u64 { unsafe { (*self.raw).size as u64 } } }
use std::kinds::marker; use std::str; use {raw, StatusEntry, Delta, Oid}; pub struct DiffDelta<'a> { raw: *mut raw::git_diff_delta, marker1: marker::ContravariantLifetime<'a>, marker2: marker::NoSend, marker3: marker::NoSync, } pub struct DiffFile<'a> { raw: *const raw::git_diff_file, marker1: marker::ContravariantLifetime<'a>, marker2: marker::NoSend, marker3: marker::NoSync, } impl<'a> DiffDelta<'a> { pub unsafe fn from_raw(_entry: &'a StatusEntry, raw: *mut raw::git_diff_delta) -> DiffDelta<'a> { DiffDelta { raw: raw, marker1: marker::ContravariantLifetime, marker2: marker::NoSend, marker3: marker::NoSync, } } pub fn nfiles(&self) -> u16 { unsafe { (*self.raw).nfiles } } pub fn status(&self) -> Delta {
} pub fn old_file(&self) -> DiffFile { unsafe { DiffFile::from_raw(self, &(*self.raw).old_file) } } pub fn new_file(&self) -> DiffFile { unsafe { DiffFile::from_raw(self, &(*self.raw).new_file) } } } impl<'a> DiffFile<'a> { pub unsafe fn from_raw(_entry: &'a DiffDelta, raw: *const raw::git_diff_file) -> DiffFile<'a> { DiffFile { raw: raw, marker1: marker::ContravariantLifetime, marker2: marker::NoSend, marker3: marker::NoSync, } } pub fn id(&self) -> Oid { unsafe { Oid::from_raw(&(*self.raw).id) } } pub fn path_bytes(&self) -> &[u8] { unsafe { ::opt_bytes(self, (*self.raw).path).unwrap() } } pub fn path(&self) -> Option<&str> { str::from_utf8(self.path_bytes()) } pub fn size(&self) -> u64 { unsafe { (*self.raw).size as u64 } } }
match unsafe { (*self.raw).status } { raw::GIT_DELTA_UNMODIFIED => Delta::Unmodified, raw::GIT_DELTA_ADDED => Delta::Added, raw::GIT_DELTA_DELETED => Delta::Deleted, raw::GIT_DELTA_MODIFIED => Delta::Modified, raw::GIT_DELTA_RENAMED => Delta::Renamed, raw::GIT_DELTA_COPIED => Delta::Copied, raw::GIT_DELTA_IGNORED => Delta::Ignored, raw::GIT_DELTA_UNTRACKED => Delta::Untracked, raw::GIT_DELTA_TYPECHANGE => Delta::Typechange, raw::GIT_DELTA_UNREADABLE => Delta::Unreadable, }
if_condition
[ { "content": "#[cfg(windows)]\n\npub fn openssl_init() {}\n\n\n\nextern {\n\n // threads\n\n pub fn git_libgit2_init() -> c_int;\n\n pub fn git_libgit2_shutdown();\n\n\n\n // repository\n\n pub fn git_repository_free(repo: *mut git_repository);\n\n pub fn git_repository_open(repo: *mut *mut gi...
Rust
src/lib.rs
bugagashenkj/rust-salsa20
07d50a6997af0fe455d0ee505d6f4cb7ed870e01
#![no_std] mod utils; use core::fmt; use crate::utils::{u8_to_u32, xor_from_slice}; fn quarterround(y0: u32, y1: u32, y2: u32, y3: u32) -> [u32; 4] { let y1 = y1 ^ y0.wrapping_add(y3).rotate_left(7); let y2 = y2 ^ y1.wrapping_add(y0).rotate_left(9); let y3 = y3 ^ y2.wrapping_add(y1).rotate_left(13); let y0 = y0 ^ y3.wrapping_add(y2).rotate_left(18); [y0, y1, y2, y3] } fn columnround(y: [u32; 16]) -> [u32; 16] { let [ [z0, z4, z8, z12], [z5, z9, z13, z1], [z10, z14, z2, z6], [z15, z3, z7, z11] ] = [ quarterround(y[0], y[4], y[8], y[12]), quarterround(y[5], y[9], y[13], y[1]), quarterround(y[10], y[14], y[2], y[6]), quarterround(y[15], y[3], y[7], y[11]), ]; [z0, z1, z2, z3, z4, z5, z6, z7, z8, z9, z10, z11, z12, z13, z14, z15] } fn rowround(y: [u32; 16]) -> [u32; 16] { let [ [z0, z1, z2, z3], [z5, z6, z7, z4], [z10, z11, z8, z9], [z15, z12, z13, z14] ] = [ quarterround(y[0], y[1], y[2], y[3]), quarterround(y[5], y[6], y[7], y[4]), quarterround(y[10], y[11], y[8], y[9]), quarterround(y[15], y[12], y[13], y[14]) ]; [z0, z1, z2, z3, z4, z5, z6, z7, z8, z9, z10, z11, z12, z13, z14, z15] } fn doubleround(y: [u32; 16]) -> [u32; 16] { rowround(columnround(y)) } #[derive(Clone, Copy)] struct Overflow { buffer: [u8; 64], offset: usize } impl Overflow { fn new(buffer: [u8; 64], offset: usize) -> Overflow { Overflow { buffer, offset } } fn modify<F>(&mut self, buffer: &mut [u8], modifier: F) where F: Fn(&mut [u8], &[u8]) { let offset = self.offset; self.offset += buffer.len(); modifier(buffer, &self.buffer[offset..self.offset]); } } impl fmt::Debug for Overflow { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter .debug_struct("Overflow") .field("buffer", &&self.buffer[..]) .field("offset", &self.offset) .finish() } } #[derive(Clone, Copy, Debug)] pub enum Key { Key16([u8; 16]), Key32([u8; 32]) } #[derive(Clone, Copy, Debug)] struct Generator { init_matrix: [u32; 16], cround_matrix: [u32; 16], dround_values: [u32; 4], counter: u64 } impl Generator { 
fn new(key: Key, nonce: [u8; 8], counter: u64) -> Generator { let mut init_matrix = [0; 16]; init_matrix[0] = 1634760805; init_matrix[15] = 1797285236; init_matrix[8] = counter as u32; init_matrix[9] = (counter >> 32) as u32; u8_to_u32(&nonce[..], &mut init_matrix[6..8]); match key { Key::Key16(key) => { u8_to_u32(&key[..], &mut init_matrix[1..5]); u8_to_u32(&key[..], &mut init_matrix[11..15]); init_matrix[5] = 824206446; init_matrix[10] = 2036477238; } Key::Key32(key) => { u8_to_u32(&key[..16], &mut init_matrix[1..5]); u8_to_u32(&key[16..], &mut init_matrix[11..15]); init_matrix[5] = 857760878; init_matrix[10] = 2036477234; } } let cround_matrix = columnround(init_matrix); let dround_values = quarterround( cround_matrix[5], cround_matrix[6], cround_matrix[7], cround_matrix[4] ); Generator { init_matrix, cround_matrix, dround_values, counter } } fn first_doubleround(&self) -> [u32; 16] { let [r5, r6, r7, r4] = self.dround_values; let [ [r0, r1, r2, r3], [r10, r11, r8, r9], [r15, r12, r13, r14] ] = [ quarterround( self.cround_matrix[0], self.cround_matrix[1], self.cround_matrix[2], self.cround_matrix[3] ), quarterround( self.cround_matrix[10], self.cround_matrix[11], self.cround_matrix[8], self.cround_matrix[9] ), quarterround( self.cround_matrix[15], self.cround_matrix[12], self.cround_matrix[13], self.cround_matrix[14] ) ]; [r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, r10, r11, r12, r13, r14, r15] } fn set_counter(&mut self, counter: u64) { self.counter = counter; self.init_matrix[8] = counter as u32; let [z0, z4, z8, z12] = quarterround( self.init_matrix[0], self.init_matrix[4], self.init_matrix[8], self.init_matrix[12] ); self.cround_matrix[0] = z0; self.cround_matrix[8] = z8; self.cround_matrix[12] = z12; if counter > 0xffffffff_u64 { self.init_matrix[9] = (counter >> 32) as u32; let [z5, z9, z13, z1] = quarterround( self.init_matrix[5], self.init_matrix[9], self.init_matrix[13], self.init_matrix[1] ); self.cround_matrix[1] = z1; self.cround_matrix[9] = z9; 
self.cround_matrix[13] = z13; self.dround_values = quarterround( z5, self.cround_matrix[6], self.cround_matrix[7], z4 ); } } fn next(&mut self) -> [u8; 64] { let mut buffer = [0; 64]; (0..9) .fold(self.first_doubleround(), |block, _| doubleround(block)) .iter() .zip(self.init_matrix.iter()) .enumerate() .for_each(|(index, (drounds_value, &init_value))| { let offset = index * 4; let sum = drounds_value.wrapping_add(init_value); buffer[offset..offset + 4].copy_from_slice(&sum.to_le_bytes()); }); self.set_counter(self.counter.wrapping_add(1)); buffer } } #[derive(Clone, Copy, Debug)] pub struct Salsa20 { generator: Generator, overflow: Overflow } impl Salsa20 { pub fn new(key: Key, nonce: [u8; 8], counter: u64) -> Salsa20 { let overflow = Overflow::new([0; 64], 64); let generator = Generator::new(key, nonce, counter); Salsa20 { generator, overflow } } fn modify<F>(&mut self, buffer: &mut [u8], modifier: &F) where F: Fn(&mut [u8], &[u8]) { let buffer_len = buffer.len(); let overflow_len = 64 - self.overflow.offset; if overflow_len != 0 { if buffer_len >= overflow_len { self.overflow.modify(&mut buffer[..overflow_len], modifier); } else { self.overflow.modify(&mut buffer[..], modifier); return; } } let last_block_offset = buffer_len - (buffer_len - overflow_len) % 64; for offset in (overflow_len..last_block_offset).step_by(64) { modifier(&mut buffer[offset..offset + 64], &self.generator.next()); } if last_block_offset != buffer_len { self.overflow = Overflow::new(self.generator.next(), 0); self.overflow.modify(&mut buffer[last_block_offset..], modifier); } } pub fn set_counter(&mut self, counter: u64) { if counter != self.generator.counter { self.generator.set_counter(counter); } self.overflow = Overflow::new([0; 64], 64); } pub fn generate(&mut self, buffer: &mut [u8]) { self.modify(buffer, &<[u8]>::copy_from_slice); } pub fn encrypt(&mut self, buffer: &mut [u8]) { self.modify(buffer, &xor_from_slice); } } #[cfg(test)] mod tests { use super::*; #[test] fn 
quarterround_test() { assert_eq!( quarterround(0x00000000, 0x00000000, 0x00000000, 0x00000000), [0x00000000, 0x00000000, 0x00000000, 0x00000000] ); assert_eq!( quarterround(0xe7e8c006, 0xc4f9417d, 0x6479b4b2, 0x68c67137), [0xe876d72b, 0x9361dfd5, 0xf1460244, 0x948541a3] ); } #[test] fn rowround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000 ], [ 0x08008145, 0x00000080, 0x00010200, 0x20500000, 0x20100001, 0x00048044, 0x00000080, 0x00010000, 0x00000001, 0x00002000, 0x80040000, 0x00000000, 0x00000001, 0x00000200, 0x00402000, 0x88000100 ]); test([ 0x08521bd6, 0x1fe88837, 0xbb2aa576, 0x3aa26365, 0xc54c6a5b, 0x2fc74c2f, 0x6dd39cc3, 0xda0a64f6, 0x90a2f23d, 0x067f95a6, 0x06b35f61, 0x41e4732e, 0xe859c100, 0xea4d84b7, 0x0f619bff, 0xbc6e965a ], [ 0xa890d39d, 0x65d71596, 0xe9487daa, 0xc8ca6a86, 0x949d2192, 0x764b7754, 0xe408d9b9, 0x7a41b4d1, 0x3402e183, 0x3c3af432, 0x50669f96, 0xd89ef0a8, 0x0040ede5, 0xb545fbce, 0xd257ed4f, 0x1818882d ]); fn test(input_data: [u32; 16], expected_data: [u32; 16]) { assert_eq!(rowround(input_data), expected_data); } } #[test] fn columnround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000 ], [ 0x10090288, 0x00000000, 0x00000000, 0x00000000, 0x00000101, 0x00000000, 0x00000000, 0x00000000, 0x00020401, 0x00000000, 0x00000000, 0x00000000, 0x40a04001, 0x00000000, 0x00000000, 0x00000000 ]); test([ 0x08521bd6, 0x1fe88837, 0xbb2aa576, 0x3aa26365, 0xc54c6a5b, 0x2fc74c2f, 0x6dd39cc3, 0xda0a64f6, 0x90a2f23d, 0x067f95a6, 0x06b35f61, 0x41e4732e, 0xe859c100, 0xea4d84b7, 0x0f619bff, 0xbc6e965a ], [ 0x8c9d190a, 0xce8e4c90, 0x1ef8e9d3, 0x1326a71a, 0x90a20123, 0xead3c4f3, 0x63a091a0, 0xf0708d69, 0x789b010c, 0xd195a681, 0xeb7d5504, 0xa774135c, 
0x481c2027, 0x53a8e4b5, 0x4c1f89c5, 0x3f78c9c8 ]); fn test(input_data: [u32; 16], expected_data: [u32; 16]) { assert_eq!(columnround(input_data), expected_data); } } #[test] fn doubleround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 ], [ 0x8186a22d, 0x0040a284, 0x82479210, 0x06929051, 0x08000090, 0x02402200, 0x00004000, 0x00800000, 0x00010200, 0x20400000, 0x08008104, 0x00000000, 0x20500000, 0xa0000040, 0x0008180a, 0x612a8020 ]); test([ 0xde501066, 0x6f9eb8f7, 0xe4fbbd9b, 0x454e3f57, 0xb75540d3, 0x43e93a4c, 0x3a6f2aa0, 0x726d6b36, 0x9243f484, 0x9145d1e8, 0x4fa9d247, 0xdc8dee11, 0x054bf545, 0x254dd653, 0xd9421b6d, 0x67b276c1 ], [ 0xccaaf672, 0x23d960f7, 0x9153e63a, 0xcd9a60d0, 0x50440492, 0xf07cad19, 0xae344aa0, 0xdf4cfdfc, 0xca531c29, 0x8e7943db, 0xac1680cd, 0xd503ca00, 0xa74b2ad6, 0xbc331c5c, 0x1dda24c7, 0xee928277 ]); fn test(input_data: [u32; 16], expected_data: [u32; 16]) { assert_eq!(doubleround(input_data), expected_data); } } #[test] fn create_init_matrix_test() { test(Key::Key16([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 ]), [ 101, 120, 112, 97, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 110, 100, 32, 49, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 54, 45, 98, 121, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 116, 101, 32, 107 ]); test(Key::Key32([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216 ]), [ 101, 120, 112, 97, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 110, 100, 32, 51, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 50, 45, 98, 121, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 116, 101, 32, 107 ]); fn test(key: Key, expected_data: [u8; 64]) { let nonce = 
[101, 102, 103, 104, 105, 106, 107, 108]; let counter = u64::from_le_bytes( [109, 110, 111, 112, 113, 114, 115, 116] ); let generator = Generator::new(key, nonce, counter); let mut expected_data_u32 = [0; 16]; u8_to_u32(&expected_data, &mut expected_data_u32); assert_eq!(generator.init_matrix, expected_data_u32); } } #[test] fn first_doubleround_test() { test(0x00000000, [0x00000000, 0x00000000]); test(0x00000001, [0x00000001, 0x00000000]); test(0x1234567f, [0x1234567f, 0x00000000]); test(0xffffffff, [0xffffffff, 0x00000000]); test(0x100000000, [0x00000000, 0x00000001]); test(0x012345678abcdef, [0x78abcdef, 0x123456]); fn test(counter: u64, counter_as_u32: [u32; 2]) { let key = Key::Key16([0; 16]); let mut generator = Generator::new(key, [0; 8], 0); generator.set_counter(counter); assert_eq!(generator.init_matrix[8..10], counter_as_u32); assert_eq!( generator.first_doubleround(), doubleround(generator.init_matrix) ); }; } #[test] fn generate_test() { test(Key::Key16([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 ]), [ 39, 173, 46, 248, 30, 200, 82, 17, 48, 67, 254, 239, 37, 18, 13, 247, 241, 200, 61, 144, 10, 55, 50, 185, 6, 47, 246, 253, 143, 86, 187, 225, 134, 85, 110, 246, 161, 163, 43, 235, 231, 94, 171, 51, 145, 214, 112, 29, 14, 232, 5, 16, 151, 140, 183, 141, 171, 9, 122, 181, 104, 182, 177, 193 ]); test(Key::Key32([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216 ]), [ 69, 37, 68, 39, 41, 15, 107, 193, 255, 139, 122, 6, 170, 233, 217, 98, 89, 144, 182, 106, 21, 51, 200, 65, 239, 49, 222, 34, 215, 114, 40, 126, 104, 197, 7, 225, 197, 153, 31, 2, 102, 78, 76, 176, 84, 245, 246, 184, 177, 160, 133, 130, 6, 72, 149, 119, 192, 195, 132, 236, 234, 103, 246, 74 ]); fn test(key: Key, expected_data: [u8; 64]) { let nonce = [101, 102, 103, 104, 105, 106, 107, 108]; let counter = u64::from_le_bytes( [109, 110, 111, 112, 113, 114, 115, 116] ); let mut generator = 
Generator::new(key, nonce, counter); let buffer = generator.next(); assert_eq!(buffer.to_vec(), expected_data.to_vec()); } } }
#![no_std] mod utils; use core::fmt; use crate::utils::{u8_to_u32, xor_from_slice}; fn quarterround(y0: u32, y1: u32, y2: u32, y3: u32) -> [u32; 4] { let y1 = y1 ^ y0.wrapping_add(y3).rotate_left(7); let y2 = y2 ^ y1.wrapping_add(y0).rotate_left(9); let y3 = y3 ^ y2.wrapping_add(y1).rotate_left(13); let y0 = y0 ^ y3.wrapping_add(y2).rotate_left(18); [y0, y1, y2, y3] } fn columnround(y: [u32; 16]) -> [u32; 16] { let [ [z0, z4, z8, z12], [z5, z9, z13, z1], [z10, z14, z2, z6], [z15, z3, z7, z11] ] = [ quarterround(y[0], y[4], y[8], y[12]), quarterround(y[5], y[9], y[13], y[1]), quarterround(y[10], y[14], y[2], y[6]), quarterround(y[15], y[3], y[7], y[11]), ]; [z0, z1, z2, z3, z4, z5, z6, z7, z8, z9, z10, z11, z12, z13, z14, z15] } fn rowround(y: [u32; 16]) -> [u32; 16] { let [ [z0, z1, z2, z3], [z5, z6, z7, z4], [z10, z11, z8, z9], [z15, z12, z13, z14] ] = [ quarterround(y[0], y[1], y[2], y[3]), quarterround(y[5], y[6], y[7], y[4]), quarterround(y[10], y[11], y[8], y[9]), quarterround(y[15], y[12], y[13], y[14]) ]; [z0, z1, z2, z3, z4, z5, z6, z7, z8, z9, z10, z11, z12, z13, z14, z15] } fn doubleround(y: [u32; 16]) -> [u32; 16] { rowround(columnround(y)) } #[derive(Clone, Copy)] struct Overflow { buffer: [u8; 64], offset: usize } impl Overflow { fn new(buffer: [u8; 64], offset: usize) -> Overflow { Overflow { buffer, offset } } fn modify<F>(&mut self, buffer: &mut [u8], modifier: F) where F: Fn(&mut [u8], &[u8]) { let offset = self.offset; self.offset += buffer.len(); modifier(buffer, &self.buffer[offset..self.offset]); } } impl fmt::Debug for Overflow { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter .debug_struct("Overflow") .field("buffer", &&self.buffer[..]) .field("offset", &self.offset) .finish() } } #[derive(Clone, Copy, Debug)] pub enum Key { Key16([u8; 16]), Key32([u8; 32]) } #[derive(Clone, Copy, Debug)] struct Generator { init_matrix: [u32; 16], cround_matrix: [u32; 16], dround_values: [u32; 4], counter: u64 } impl Generator { 
fn new(key: Key, nonce: [u8; 8], counter: u64) -> Generator { let mut init_matrix = [0; 16]; init_matrix[0] = 1634760805; init_matrix[15] = 1797285236; init_matrix[8] = counter as u32; init_matrix[9] = (counter >> 32) as u32; u8_to_u32(&nonce[..], &mut init_matrix[6..8]); match key { Key::Key16(key) => { u8_to_u32(&key[..], &mut init_matrix[1..5]); u8_to_u32(&key[..], &mut init_matrix[11..15]); init_matrix[5] = 824206446; init_matrix[10] = 2036477238; } Key::Key32(key) => { u8_to_u32(&key[..16], &mut init_matrix[1..5]); u8_to_u32(&key[16..], &mut init_matrix[11..15]); init_matrix[5] = 857760878; init_matrix[10] = 2036477234; } } let cround_matrix = columnround(init_matrix); let dround_values = quarterround( cround_matrix[5], cround_matrix[6], cround_matrix[7], cround_matrix[4] ); Generator { init_matrix, cround_matrix, dround_values, counter } } fn first_doubleround(&self) -> [u32; 16] { let [r5, r6, r7, r4] = self.dround_values; let [ [r0, r1, r2, r3], [r10, r11, r8, r9], [r15, r12, r13, r14] ] = [ quarterround( self.cround_matrix[0], self.cround_matrix[1], self.cround_matrix[2], self.cround_matrix[3] ), quarterround( self.cround_matrix[10], self.cround_matrix[11], self.cround_matrix[8], self.cround_matrix[9] ), quarterround( self.cround_matrix[15], self.cround_matrix[12], self.cround_matrix[13], self.cround_matrix[14] ) ]; [r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, r10, r11, r12, r13, r14, r15] } fn set_counter(&mut self, counter: u64) { self.counter = counter; self.init_matrix[8] = counter as u32; let [z0, z4, z8, z12] = quarterround( self.init_matrix[0], self.init_matrix[4], self.init_matrix[8], self.init_matrix[12] ); self.cround_matrix[0] = z0; self.cround_matrix[8] = z8; self.cround_matrix[12] = z12; if counter > 0xffffffff_u64 { self.init_matrix[9] = (counter >> 32) as u32; let [z5, z9, z13, z1] = quarterround( self.init_matrix[5], self.init_matrix[9], self.init_matrix[13], self.init_matrix[1] ); self.cround_matrix[1] = z1; self.cround_matrix[9] = z9; 
self.cround_matrix[13] = z13; self.dround_values = quarterround( z5, self.cround_matrix[6], self.cround_matrix[7], z4 ); } } fn next(&mut self) -> [u8; 64] { let mut buffer = [0; 64]; (0..9) .fold(self.first_doubleround(), |block, _| doubleround(block)) .iter() .zip(self.init_matrix.iter()) .enumerate() .for_each(|(index, (drounds_value, &init_value))| { let offset = index * 4; let sum = drounds_value.wrapping_add(init_value); buffer[offset..offset + 4].copy_from_slice(&sum.to_le_bytes()); }); self.set_counter(self.counter.wrapping_add(1)); buffer } } #[derive(Clone, Copy, Debug)] pub struct Salsa20 { generator: Generator, overflow: Overflow } impl Salsa20 { pub fn new(key: Key, nonce: [u8; 8], counter: u64) -> Salsa20 { let overflow = Overflow::new([0; 64], 64); let generator = Generator::new(key, nonce, counter); Salsa20 { generator, overflow } } fn modify<F>(&mut self, buffer: &mut [u8], modifier: &F) where F: Fn(&mut [u8], &[u8]) { let buffer_len = buffer.len(); let overflow_len = 64 - self.overflow.offset; if overflow_len != 0 { if buffer_len >= overflow_len { self.overflow.modify(&mut buffer[..overflow_len], modifier); } else { self.overflow.modify(&mut buffer[..], modifier); return; } } let last_block_offset = buffer_len - (buffer_len - overflow_len) % 64; for offset in (overflow_len..last_block_offset).step_by(64) { modifier(&mut buffer[offset..offset + 64], &self.generator.next()); } if last_block_offset != buffer_len { self.overflow = Overflow::new(self.generator.next(), 0); self.overflow.modify(&mut buffer[last_block_offset..], modifier); } } pub fn set_counter(&mut self, counter: u64) { if counter != self.generator.counter { self.generator.set_counter(counter); } self.overflow = Overflow::new([0; 64], 64); } pub fn generate(&mut self, buffer: &mut [u8]) { self.modify(buffer, &<[u8]>::copy_from_slice); } pub fn encrypt(&mut self, buffer: &mut [u8]) { self.modify(buffer, &xor_from_slice); } } #[cfg(test)] mod tests { use super::*; #[test] fn 
quarterround_test() { assert_eq!( quarterround(0x00000000, 0x00000000, 0x00000000, 0x00000000), [0x00000000, 0x00000000, 0x00000000, 0x00000000] ); assert_eq!( quarterround(0xe7e8c006, 0xc4f9417d, 0x6479b4b2, 0x68c67137), [0xe876d72b, 0x9361dfd5, 0xf1460244, 0x948541a3] ); } #[test] fn rowround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000 ], [ 0x08008145, 0x00000080, 0x00010200, 0x20500000, 0x20100001, 0x00048044, 0x00000080, 0x00010000, 0x00000001, 0x00002000, 0x80040000, 0x00000000, 0x00000001, 0x00000200, 0x00402000, 0x88000100 ]); test([ 0x08521bd6, 0x1fe88837, 0xbb2aa576, 0x3aa26365, 0xc54c6a5b, 0x2fc74c2f, 0x6dd39cc3, 0xda0a64f6, 0x90a2f23d, 0x067f95a6, 0x06b35f61, 0x41e4732e, 0xe859c100, 0xea4d84b7, 0x0f619bff, 0xbc6e965a ], [ 0xa890d39d, 0x65d71596, 0xe9487daa, 0xc8ca6a86, 0x949d2192, 0x764b7754, 0xe408d9b9, 0x7a41b4d1, 0x3402e183, 0x3c3af432, 0x50669f96, 0xd89ef0a8, 0x0040ede5, 0xb545fbce, 0xd257ed4f, 0x1818882d ]); fn test(input_data: [u32; 16], expected_data: [u32; 16]) { assert_eq!(rowround(input_data), expected_data); } } #[test] fn columnround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000 ], [ 0x10090288, 0x00000000, 0x00000000, 0x00000000, 0x00000101, 0x00000000, 0x00000000, 0x00000000, 0x00020401, 0x00000000, 0x00000000, 0x00000000, 0x40a04001, 0x00000000, 0x0000000
#[test] fn doubleround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 ], [ 0x8186a22d, 0x0040a284, 0x82479210, 0x06929051, 0x08000090, 0x02402200, 0x00004000, 0x00800000, 0x00010200, 0x20400000, 0x08008104, 0x00000000, 0x20500000, 0xa0000040, 0x0008180a, 0x612a8020 ]); test([ 0xde501066, 0x6f9eb8f7, 0xe4fbbd9b, 0x454e3f57, 0xb75540d3, 0x43e93a4c, 0x3a6f2aa0, 0x726d6b36, 0x9243f484, 0x9145d1e8, 0x4fa9d247, 0xdc8dee11, 0x054bf545, 0x254dd653, 0xd9421b6d, 0x67b276c1 ], [ 0xccaaf672, 0x23d960f7, 0x9153e63a, 0xcd9a60d0, 0x50440492, 0xf07cad19, 0xae344aa0, 0xdf4cfdfc, 0xca531c29, 0x8e7943db, 0xac1680cd, 0xd503ca00, 0xa74b2ad6, 0xbc331c5c, 0x1dda24c7, 0xee928277 ]); fn test(input_data: [u32; 16], expected_data: [u32; 16]) { assert_eq!(doubleround(input_data), expected_data); } } #[test] fn create_init_matrix_test() { test(Key::Key16([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 ]), [ 101, 120, 112, 97, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 110, 100, 32, 49, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 54, 45, 98, 121, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 116, 101, 32, 107 ]); test(Key::Key32([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216 ]), [ 101, 120, 112, 97, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 110, 100, 32, 51, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 50, 45, 98, 121, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 116, 101, 32, 107 ]); fn test(key: Key, expected_data: [u8; 64]) { let nonce = [101, 102, 103, 104, 105, 106, 107, 108]; let counter = u64::from_le_bytes( [109, 110, 111, 112, 113, 114, 115, 116] ); let generator = Generator::new(key, nonce, 
counter); let mut expected_data_u32 = [0; 16]; u8_to_u32(&expected_data, &mut expected_data_u32); assert_eq!(generator.init_matrix, expected_data_u32); } } #[test] fn first_doubleround_test() { test(0x00000000, [0x00000000, 0x00000000]); test(0x00000001, [0x00000001, 0x00000000]); test(0x1234567f, [0x1234567f, 0x00000000]); test(0xffffffff, [0xffffffff, 0x00000000]); test(0x100000000, [0x00000000, 0x00000001]); test(0x012345678abcdef, [0x78abcdef, 0x123456]); fn test(counter: u64, counter_as_u32: [u32; 2]) { let key = Key::Key16([0; 16]); let mut generator = Generator::new(key, [0; 8], 0); generator.set_counter(counter); assert_eq!(generator.init_matrix[8..10], counter_as_u32); assert_eq!( generator.first_doubleround(), doubleround(generator.init_matrix) ); }; } #[test] fn generate_test() { test(Key::Key16([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 ]), [ 39, 173, 46, 248, 30, 200, 82, 17, 48, 67, 254, 239, 37, 18, 13, 247, 241, 200, 61, 144, 10, 55, 50, 185, 6, 47, 246, 253, 143, 86, 187, 225, 134, 85, 110, 246, 161, 163, 43, 235, 231, 94, 171, 51, 145, 214, 112, 29, 14, 232, 5, 16, 151, 140, 183, 141, 171, 9, 122, 181, 104, 182, 177, 193 ]); test(Key::Key32([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216 ]), [ 69, 37, 68, 39, 41, 15, 107, 193, 255, 139, 122, 6, 170, 233, 217, 98, 89, 144, 182, 106, 21, 51, 200, 65, 239, 49, 222, 34, 215, 114, 40, 126, 104, 197, 7, 225, 197, 153, 31, 2, 102, 78, 76, 176, 84, 245, 246, 184, 177, 160, 133, 130, 6, 72, 149, 119, 192, 195, 132, 236, 234, 103, 246, 74 ]); fn test(key: Key, expected_data: [u8; 64]) { let nonce = [101, 102, 103, 104, 105, 106, 107, 108]; let counter = u64::from_le_bytes( [109, 110, 111, 112, 113, 114, 115, 116] ); let mut generator = Generator::new(key, nonce, counter); let buffer = generator.next(); assert_eq!(buffer.to_vec(), expected_data.to_vec()); } } }
0, 0x00000000 ]); test([ 0x08521bd6, 0x1fe88837, 0xbb2aa576, 0x3aa26365, 0xc54c6a5b, 0x2fc74c2f, 0x6dd39cc3, 0xda0a64f6, 0x90a2f23d, 0x067f95a6, 0x06b35f61, 0x41e4732e, 0xe859c100, 0xea4d84b7, 0x0f619bff, 0xbc6e965a ], [ 0x8c9d190a, 0xce8e4c90, 0x1ef8e9d3, 0x1326a71a, 0x90a20123, 0xead3c4f3, 0x63a091a0, 0xf0708d69, 0x789b010c, 0xd195a681, 0xeb7d5504, 0xa774135c, 0x481c2027, 0x53a8e4b5, 0x4c1f89c5, 0x3f78c9c8 ]); fn test(input_data: [u32; 16], expected_data: [u32; 16]) { assert_eq!(columnround(input_data), expected_data); } }
function_block-function_prefixed
[ { "content": "#[test]\n\nfn generate_test() {\n\n test(\n\n 0x00000fff,\n\n vec![\n\n 134, 70, 88, 94, 194, 104, 117, 219, 33, 198, 116, 85, 32, 52, 54,\n\n 214, 98, 231, 58, 191, 69, 243, 75, 142, 233, 245, 119, 223, 113,\n\n 31, 50, 172, 218, 9, 93, 192, 217, ...
Rust
querier/src/namespace/mod.rs
r4ntix/influxdb_iox
5ff874925101e2afbafa6853385260a2ba044394
use crate::{ cache::CatalogCache, chunk::ChunkAdapter, ingester::IngesterConnection, query_log::QueryLog, table::QuerierTable, }; use data_types::{NamespaceId, NamespaceSchema}; use iox_query::exec::Executor; use parquet_file::storage::ParquetStorage; use schema::Schema; use std::{collections::HashMap, sync::Arc}; mod query_access; #[cfg(test)] mod test_util; #[derive(Debug)] pub struct QuerierNamespace { id: NamespaceId, name: Arc<str>, tables: Arc<HashMap<Arc<str>, Arc<QuerierTable>>>, exec: Arc<Executor>, catalog_cache: Arc<CatalogCache>, query_log: Arc<QueryLog>, } impl QuerierNamespace { pub fn new( chunk_adapter: Arc<ChunkAdapter>, schema: Arc<NamespaceSchema>, name: Arc<str>, exec: Arc<Executor>, ingester_connection: Arc<dyn IngesterConnection>, query_log: Arc<QueryLog>, ) -> Self { let tables: HashMap<_, _> = schema .tables .iter() .map(|(table_name, table_schema)| { let table_name = Arc::from(table_name.clone()); let id = table_schema.id; let schema = Schema::try_from(table_schema.clone()).expect("cannot build schema"); let table = Arc::new(QuerierTable::new( Arc::clone(&name), id, Arc::clone(&table_name), Arc::new(schema), Arc::clone(&ingester_connection), Arc::clone(&chunk_adapter), )); (table_name, table) }) .collect(); let id = schema.id; Self { id, name, tables: Arc::new(tables), exec, catalog_cache: Arc::clone(chunk_adapter.catalog_cache()), query_log, } } #[allow(clippy::too_many_arguments)] pub fn new_testing( catalog_cache: Arc<CatalogCache>, store: ParquetStorage, metric_registry: Arc<metric::Registry>, name: Arc<str>, schema: Arc<NamespaceSchema>, exec: Arc<Executor>, ingester_connection: Arc<dyn IngesterConnection>, ) -> Self { let time_provider = catalog_cache.time_provider(); let chunk_adapter = Arc::new(ChunkAdapter::new( catalog_cache, store, metric_registry, Arc::clone(&time_provider), )); let query_log = Arc::new(QueryLog::new(10, time_provider)); Self::new( chunk_adapter, schema, name, exec, ingester_connection, query_log, ) } pub fn 
name(&self) -> Arc<str> { Arc::clone(&self.name) } #[must_use] pub fn catalog_cache(&self) -> &Arc<CatalogCache> { &self.catalog_cache } } #[cfg(test)] mod tests { use super::*; use crate::namespace::test_util::querier_namespace; use data_types::ColumnType; use iox_tests::util::TestCatalog; use schema::{builder::SchemaBuilder, InfluxColumnType, InfluxFieldType}; #[tokio::test] async fn test_sync_tables() { let catalog = TestCatalog::new(); let ns = catalog.create_namespace("ns").await; let qns = querier_namespace(&ns).await; assert_eq!(tables(&qns), Vec::<String>::new()); ns.create_table("table1").await; ns.create_table("table2").await; let qns = querier_namespace(&ns).await; assert_eq!( tables(&qns), vec![String::from("table1"), String::from("table2")] ); ns.create_table("table3").await; let qns = querier_namespace(&ns).await; assert_eq!( tables(&qns), vec![ String::from("table1"), String::from("table2"), String::from("table3") ] ); } #[tokio::test] async fn test_sync_schemas() { let catalog = TestCatalog::new(); let ns = catalog.create_namespace("ns").await; let table = ns.create_table("table").await; let qns = querier_namespace(&ns).await; let expected_schema = SchemaBuilder::new().build().unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); table.create_column("col1", ColumnType::I64).await; table.create_column("col2", ColumnType::Bool).await; table.create_column("col3", ColumnType::Tag).await; let qns = querier_namespace(&ns).await; let expected_schema = SchemaBuilder::new() .influx_column("col1", InfluxColumnType::Field(InfluxFieldType::Integer)) .influx_column("col2", InfluxColumnType::Field(InfluxFieldType::Boolean)) .influx_column("col3", InfluxColumnType::Tag) .build() .unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); table.create_column("col4", ColumnType::Tag).await; table.create_column("col5", ColumnType::Time).await; let qns = 
querier_namespace(&ns).await; let expected_schema = SchemaBuilder::new() .influx_column("col1", InfluxColumnType::Field(InfluxFieldType::Integer)) .influx_column("col2", InfluxColumnType::Field(InfluxFieldType::Boolean)) .influx_column("col3", InfluxColumnType::Tag) .influx_column("col4", InfluxColumnType::Tag) .influx_column("col5", InfluxColumnType::Timestamp) .build() .unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); } fn sorted<T>(mut v: Vec<T>) -> Vec<T> where T: Ord, { v.sort(); v } fn tables(querier_namespace: &QuerierNamespace) -> Vec<String> { sorted( querier_namespace .tables .keys() .map(|s| s.to_string()) .collect(), ) } fn schema(querier_namespace: &QuerierNamespace, table: &str) -> Arc<Schema> { Arc::clone(querier_namespace.tables.get(table).unwrap().schema()) } }
use crate::{ cache::CatalogCache, chunk::ChunkAdapter, ingester::IngesterConnection, query_log::QueryLog, table::QuerierTable, }; use data_types::{Nam
await; let expected_schema = SchemaBuilder::new().build().unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); table.create_column("col1", ColumnType::I64).await; table.create_column("col2", ColumnType::Bool).await; table.create_column("col3", ColumnType::Tag).await; let qns = querier_namespace(&ns).await; let expected_schema = SchemaBuilder::new() .influx_column("col1", InfluxColumnType::Field(InfluxFieldType::Integer)) .influx_column("col2", InfluxColumnType::Field(InfluxFieldType::Boolean)) .influx_column("col3", InfluxColumnType::Tag) .build() .unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); table.create_column("col4", ColumnType::Tag).await; table.create_column("col5", ColumnType::Time).await; let qns = querier_namespace(&ns).await; let expected_schema = SchemaBuilder::new() .influx_column("col1", InfluxColumnType::Field(InfluxFieldType::Integer)) .influx_column("col2", InfluxColumnType::Field(InfluxFieldType::Boolean)) .influx_column("col3", InfluxColumnType::Tag) .influx_column("col4", InfluxColumnType::Tag) .influx_column("col5", InfluxColumnType::Timestamp) .build() .unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); } fn sorted<T>(mut v: Vec<T>) -> Vec<T> where T: Ord, { v.sort(); v } fn tables(querier_namespace: &QuerierNamespace) -> Vec<String> { sorted( querier_namespace .tables .keys() .map(|s| s.to_string()) .collect(), ) } fn schema(querier_namespace: &QuerierNamespace, table: &str) -> Arc<Schema> { Arc::clone(querier_namespace.tables.get(table).unwrap().schema()) } }
espaceId, NamespaceSchema}; use iox_query::exec::Executor; use parquet_file::storage::ParquetStorage; use schema::Schema; use std::{collections::HashMap, sync::Arc}; mod query_access; #[cfg(test)] mod test_util; #[derive(Debug)] pub struct QuerierNamespace { id: NamespaceId, name: Arc<str>, tables: Arc<HashMap<Arc<str>, Arc<QuerierTable>>>, exec: Arc<Executor>, catalog_cache: Arc<CatalogCache>, query_log: Arc<QueryLog>, } impl QuerierNamespace { pub fn new( chunk_adapter: Arc<ChunkAdapter>, schema: Arc<NamespaceSchema>, name: Arc<str>, exec: Arc<Executor>, ingester_connection: Arc<dyn IngesterConnection>, query_log: Arc<QueryLog>, ) -> Self { let tables: HashMap<_, _> = schema .tables .iter() .map(|(table_name, table_schema)| { let table_name = Arc::from(table_name.clone()); let id = table_schema.id; let schema = Schema::try_from(table_schema.clone()).expect("cannot build schema"); let table = Arc::new(QuerierTable::new( Arc::clone(&name), id, Arc::clone(&table_name), Arc::new(schema), Arc::clone(&ingester_connection), Arc::clone(&chunk_adapter), )); (table_name, table) }) .collect(); let id = schema.id; Self { id, name, tables: Arc::new(tables), exec, catalog_cache: Arc::clone(chunk_adapter.catalog_cache()), query_log, } } #[allow(clippy::too_many_arguments)] pub fn new_testing( catalog_cache: Arc<CatalogCache>, store: ParquetStorage, metric_registry: Arc<metric::Registry>, name: Arc<str>, schema: Arc<NamespaceSchema>, exec: Arc<Executor>, ingester_connection: Arc<dyn IngesterConnection>, ) -> Self { let time_provider = catalog_cache.time_provider(); let chunk_adapter = Arc::new(ChunkAdapter::new( catalog_cache, store, metric_registry, Arc::clone(&time_provider), )); let query_log = Arc::new(QueryLog::new(10, time_provider)); Self::new( chunk_adapter, schema, name, exec, ingester_connection, query_log, ) } pub fn name(&self) -> Arc<str> { Arc::clone(&self.name) } #[must_use] pub fn catalog_cache(&self) -> &Arc<CatalogCache> { &self.catalog_cache } } #[cfg(test)] 
mod tests { use super::*; use crate::namespace::test_util::querier_namespace; use data_types::ColumnType; use iox_tests::util::TestCatalog; use schema::{builder::SchemaBuilder, InfluxColumnType, InfluxFieldType}; #[tokio::test] async fn test_sync_tables() { let catalog = TestCatalog::new(); let ns = catalog.create_namespace("ns").await; let qns = querier_namespace(&ns).await; assert_eq!(tables(&qns), Vec::<String>::new()); ns.create_table("table1").await; ns.create_table("table2").await; let qns = querier_namespace(&ns).await; assert_eq!( tables(&qns), vec![String::from("table1"), String::from("table2")] ); ns.create_table("table3").await; let qns = querier_namespace(&ns).await; assert_eq!( tables(&qns), vec![ String::from("table1"), String::from("table2"), String::from("table3") ] ); } #[tokio::test] async fn test_sync_schemas() { let catalog = TestCatalog::new(); let ns = catalog.create_namespace("ns").await; let table = ns.create_table("table").await; let qns = querier_namespace(&ns).
random
[ { "content": "//! Code for defining values and tag sets with tags that are dependent on other tags.\n\n\n\nuse crate::now_ns;\n\nuse crate::specification::{DataSpec, ValuesSpec};\n\nuse crate::substitution::new_handlebars_registry;\n\nuse crate::tag_pair::StaticTagPair;\n\nuse handlebars::Handlebars;\n\nuse ite...
Rust
ruzzt_engine/src/tests/world_tester.rs
yokljo/ruzzt
e76b28d5f78e8f51ba78e557c82f5f172523a1c5
pub use crate::engine::RuzztEngine; pub use crate::event::Event; pub use crate::board_simulator::*; pub use zzt_file_format::*; pub use zzt_file_format::dosstring::*; use std::collections::HashMap; #[derive(Clone)] pub struct TestWorld { pub engine: RuzztEngine, pub event: Event, } impl TestWorld { pub fn new() -> TestWorld { let mut cursor = std::io::Cursor::new(include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/tests/data/DEFAULT.ZZT")).to_vec()); let mut world = World::parse(&mut cursor).unwrap(); world.boards[1].status_elements.clear(); world.boards[1].tiles[29 + 11*BOARD_WIDTH] = BoardTile::new(ElementType::Empty, 0); let mut engine = RuzztEngine::new(); engine.load_world(world, None); engine.set_in_title_screen(false); engine.is_paused = false; TestWorld { engine, event: Event::None, } } pub fn new_with_player(x: i16, y: i16) -> TestWorld { let mut test_world = TestWorld::new(); test_world.add_player(x, y); test_world } pub fn add_player(&mut self, x: i16, y: i16) { let mut tile_set = TileSet::new(); tile_set.add('&', BoardTile::new(ElementType::Player, 0x1f), Some(StatusElement { cycle: 1, .. StatusElement::default() })); let player_template = TileTemplate::from_text(&tile_set, "&"); self.insert_template(&player_template, x, y); } pub fn insert_tile_and_status(&mut self, tile_and_status: &TileAndStatus, x: i16, y: i16) { self.engine.board_simulator.set_tile(x, y, tile_and_status.tile); if let Some(ref status) = tile_and_status.status { let mut new_status = status.clone(); new_status.location_x = x as u8; new_status.location_y = y as u8; self.engine.board_simulator.status_elements.push(new_status); } } pub fn insert_template(&mut self, template: &TileTemplate, left_x: i16, top_y: i16) { let mut it = template.tiles.iter(); for y in 0 .. template.height as i16 { for x in 0 .. 
template.width as i16 { if let Some(tile_and_status) = it.next().as_mut().unwrap() { self.engine.board_simulator.set_tile(left_x + x, top_y + y, tile_and_status.tile); if let Some(ref status) = tile_and_status.status { let mut new_status = status.clone(); new_status.location_x = (left_x + x) as u8; new_status.location_y = (top_y + y) as u8; self.engine.board_simulator.status_elements.push(new_status); } } } } } pub fn simulate(&mut self, step_count: usize) { for _ in 0 .. step_count { self.engine.step(self.event, 0.); self.event = Event::None; } } pub fn current_board_equals(&self, expected_world: TestWorld) -> bool { let mut result = true; let selfsim = &self.engine.board_simulator; let othersim = &expected_world.engine.board_simulator; if selfsim.world_header != othersim.world_header { println!("World headers differ"); println!("Actual: {:?}", selfsim.world_header); println!("Expected: {:?}", othersim.world_header); result = false; } if selfsim.board_meta_data != othersim.board_meta_data { println!("Board meta data differs"); println!("Actual: {:?}", selfsim.board_meta_data); println!("Expected: {:?}", othersim.board_meta_data); result = false; } if selfsim.status_elements != othersim.status_elements { println!("Status elements differ"); println!("Actual: {:?}", selfsim.status_elements); println!("Expected: {:?}", othersim.status_elements); result = false; } result = result && self.current_board_tiles_equals(expected_world); result } pub fn current_board_tiles_equals(&self, expected_world: TestWorld) -> bool { let selfsim = &self.engine.board_simulator; let othersim = &expected_world.engine.board_simulator; if selfsim.tiles != othersim.tiles { let mut min_diff_x = BOARD_WIDTH as i16; let mut min_diff_y = BOARD_HEIGHT as i16; let mut max_diff_x = 0; let mut max_diff_y = 0; for x in 0 .. BOARD_WIDTH as i16 { for y in 0 .. 
BOARD_HEIGHT as i16 { let selftile = selfsim.get_tile(x, y).unwrap(); let othertile = othersim.get_tile(x, y).unwrap(); if selftile != othertile { max_diff_x = max_diff_x.max(x); max_diff_y = max_diff_y.max(y); min_diff_x = min_diff_x.min(x); min_diff_y = min_diff_y.min(y); } } } println!("Board differ from ({}, {}) to ({}, {}). Top lines are self, bottom lines are expected", min_diff_x, min_diff_y, max_diff_x, max_diff_y); for y in min_diff_y ..= max_diff_y { let mut self_line = "".to_string(); let mut other_line = "".to_string(); for x in min_diff_x ..= max_diff_x { let selftile = selfsim.get_tile(x, y).unwrap(); let othertile = othersim.get_tile(x, y).unwrap(); if selftile != othertile { self_line += &format!("{:02x},{:02x} ", selftile.element_id, selftile.colour); other_line += &format!("{:02x},{:02x} ", othertile.element_id, othertile.colour); } else { self_line += "==,== "; other_line += "==,== "; } } println!("{}", self_line); println!("{}", other_line); println!(""); } false } else { true } } pub fn status_at(&mut self, x: i16, y: i16) -> &mut StatusElement { self.engine.board_simulator.get_first_status_for_pos_mut(x, y).unwrap().1 } pub fn world_header(&self) -> &WorldHeader { &self.engine.board_simulator.world_header } } #[derive(Debug, Clone)] pub struct TileAndStatus { pub tile: BoardTile, pub status: Option<StatusElement>, } pub struct TileSet { tile_map: HashMap<char, TileAndStatus>, } impl TileSet { pub fn new() -> TileSet { TileSet { tile_map: HashMap::new(), } } pub fn add(&mut self, c: char, tile: BoardTile, status: Option<StatusElement>) { self.tile_map.insert(c, TileAndStatus { tile, status }); } pub fn add_object(&mut self, c: char, code: &str) { self.add(c, BoardTile::new(ElementType::Object, 0xff), Some(StatusElement { cycle: 1, code_source: CodeSource::Owned(DosString::from_str(code)), .. 
StatusElement::default() })); } pub fn get(&self, c: char) -> &TileAndStatus { self.tile_map.get(&c).ok_or_else(|| format!("TileSet::get: Tile not found for: {:?}", c)).unwrap() } } #[derive(Debug, Clone)] pub struct TileTemplate { width: usize, height: usize, tiles: Vec<Option<TileAndStatus>>, } impl TileTemplate { pub fn from_text(tile_set: &TileSet, text: &str) -> TileTemplate { let mut height = 0; let mut width = 0; let mut tiles = vec![]; for line in text.lines() { let trimmed = line.trim().to_string(); if !trimmed.is_empty() { let mut current_width = 0; for c in trimmed.chars() { if c == '.' { tiles.push(None); } else { tiles.push(Some(tile_set.get(c).clone())); } current_width += 1; } if width == 0 { width = current_width; } else if width != current_width { panic!("TileTemplate::from_text: Lines are inconsistent lengths"); } height += 1; } } TileTemplate { width, height, tiles, } } }
pub use crate::engine::RuzztEngine; pub use crate::event::Event; pub use crate::board_simulator::*; pub use zzt_file_format::*; pub use zzt_file_format::dosstring::*; use std::collections::HashMap; #[derive(Clone)] pub struct TestWorld { pub engine: RuzztEngine, pub event: Event, } impl TestWorld { pub fn new() -> TestWorld { let mut cursor = std::io::Cursor::new(include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/tests/data/DEFAULT.ZZT")).to_vec()); let mut w
al: {:?}", selfsim.status_elements); println!("Expected: {:?}", othersim.status_elements); result = false; } result = result && self.current_board_tiles_equals(expected_world); result } pub fn current_board_tiles_equals(&self, expected_world: TestWorld) -> bool { let selfsim = &self.engine.board_simulator; let othersim = &expected_world.engine.board_simulator; if selfsim.tiles != othersim.tiles { let mut min_diff_x = BOARD_WIDTH as i16; let mut min_diff_y = BOARD_HEIGHT as i16; let mut max_diff_x = 0; let mut max_diff_y = 0; for x in 0 .. BOARD_WIDTH as i16 { for y in 0 .. BOARD_HEIGHT as i16 { let selftile = selfsim.get_tile(x, y).unwrap(); let othertile = othersim.get_tile(x, y).unwrap(); if selftile != othertile { max_diff_x = max_diff_x.max(x); max_diff_y = max_diff_y.max(y); min_diff_x = min_diff_x.min(x); min_diff_y = min_diff_y.min(y); } } } println!("Board differ from ({}, {}) to ({}, {}). Top lines are self, bottom lines are expected", min_diff_x, min_diff_y, max_diff_x, max_diff_y); for y in min_diff_y ..= max_diff_y { let mut self_line = "".to_string(); let mut other_line = "".to_string(); for x in min_diff_x ..= max_diff_x { let selftile = selfsim.get_tile(x, y).unwrap(); let othertile = othersim.get_tile(x, y).unwrap(); if selftile != othertile { self_line += &format!("{:02x},{:02x} ", selftile.element_id, selftile.colour); other_line += &format!("{:02x},{:02x} ", othertile.element_id, othertile.colour); } else { self_line += "==,== "; other_line += "==,== "; } } println!("{}", self_line); println!("{}", other_line); println!(""); } false } else { true } } pub fn status_at(&mut self, x: i16, y: i16) -> &mut StatusElement { self.engine.board_simulator.get_first_status_for_pos_mut(x, y).unwrap().1 } pub fn world_header(&self) -> &WorldHeader { &self.engine.board_simulator.world_header } } #[derive(Debug, Clone)] pub struct TileAndStatus { pub tile: BoardTile, pub status: Option<StatusElement>, } pub struct TileSet { tile_map: HashMap<char, 
TileAndStatus>, } impl TileSet { pub fn new() -> TileSet { TileSet { tile_map: HashMap::new(), } } pub fn add(&mut self, c: char, tile: BoardTile, status: Option<StatusElement>) { self.tile_map.insert(c, TileAndStatus { tile, status }); } pub fn add_object(&mut self, c: char, code: &str) { self.add(c, BoardTile::new(ElementType::Object, 0xff), Some(StatusElement { cycle: 1, code_source: CodeSource::Owned(DosString::from_str(code)), .. StatusElement::default() })); } pub fn get(&self, c: char) -> &TileAndStatus { self.tile_map.get(&c).ok_or_else(|| format!("TileSet::get: Tile not found for: {:?}", c)).unwrap() } } #[derive(Debug, Clone)] pub struct TileTemplate { width: usize, height: usize, tiles: Vec<Option<TileAndStatus>>, } impl TileTemplate { pub fn from_text(tile_set: &TileSet, text: &str) -> TileTemplate { let mut height = 0; let mut width = 0; let mut tiles = vec![]; for line in text.lines() { let trimmed = line.trim().to_string(); if !trimmed.is_empty() { let mut current_width = 0; for c in trimmed.chars() { if c == '.' { tiles.push(None); } else { tiles.push(Some(tile_set.get(c).clone())); } current_width += 1; } if width == 0 { width = current_width; } else if width != current_width { panic!("TileTemplate::from_text: Lines are inconsistent lengths"); } height += 1; } } TileTemplate { width, height, tiles, } } }
orld = World::parse(&mut cursor).unwrap(); world.boards[1].status_elements.clear(); world.boards[1].tiles[29 + 11*BOARD_WIDTH] = BoardTile::new(ElementType::Empty, 0); let mut engine = RuzztEngine::new(); engine.load_world(world, None); engine.set_in_title_screen(false); engine.is_paused = false; TestWorld { engine, event: Event::None, } } pub fn new_with_player(x: i16, y: i16) -> TestWorld { let mut test_world = TestWorld::new(); test_world.add_player(x, y); test_world } pub fn add_player(&mut self, x: i16, y: i16) { let mut tile_set = TileSet::new(); tile_set.add('&', BoardTile::new(ElementType::Player, 0x1f), Some(StatusElement { cycle: 1, .. StatusElement::default() })); let player_template = TileTemplate::from_text(&tile_set, "&"); self.insert_template(&player_template, x, y); } pub fn insert_tile_and_status(&mut self, tile_and_status: &TileAndStatus, x: i16, y: i16) { self.engine.board_simulator.set_tile(x, y, tile_and_status.tile); if let Some(ref status) = tile_and_status.status { let mut new_status = status.clone(); new_status.location_x = x as u8; new_status.location_y = y as u8; self.engine.board_simulator.status_elements.push(new_status); } } pub fn insert_template(&mut self, template: &TileTemplate, left_x: i16, top_y: i16) { let mut it = template.tiles.iter(); for y in 0 .. template.height as i16 { for x in 0 .. template.width as i16 { if let Some(tile_and_status) = it.next().as_mut().unwrap() { self.engine.board_simulator.set_tile(left_x + x, top_y + y, tile_and_status.tile); if let Some(ref status) = tile_and_status.status { let mut new_status = status.clone(); new_status.location_x = (left_x + x) as u8; new_status.location_y = (top_y + y) as u8; self.engine.board_simulator.status_elements.push(new_status); } } } } } pub fn simulate(&mut self, step_count: usize) { for _ in 0 .. 
step_count { self.engine.step(self.event, 0.); self.event = Event::None; } } pub fn current_board_equals(&self, expected_world: TestWorld) -> bool { let mut result = true; let selfsim = &self.engine.board_simulator; let othersim = &expected_world.engine.board_simulator; if selfsim.world_header != othersim.world_header { println!("World headers differ"); println!("Actual: {:?}", selfsim.world_header); println!("Expected: {:?}", othersim.world_header); result = false; } if selfsim.board_meta_data != othersim.board_meta_data { println!("Board meta data differs"); println!("Actual: {:?}", selfsim.board_meta_data); println!("Expected: {:?}", othersim.board_meta_data); result = false; } if selfsim.status_elements != othersim.status_elements { println!("Status elements differ"); println!("Actu
random
[ { "content": "pub fn load_zzt_behaviours(sim: &mut BoardSimulator) {\n\n\tsim.set_behaviour(ElementType::Player, Box::new(items::PlayerBehaviour));\n\n\tsim.set_behaviour(ElementType::Ammo, Box::new(items::AmmoBehaviour));\n\n\tsim.set_behaviour(ElementType::Torch, Box::new(items::TorchBehaviour));\n\n\tsim.set...
Rust
src/config/file.rs
lachesis/roughenough
a5e29a47646cc57bdd8e3603818cc9bd46f81bfc
use std::fs::File; use std::io::Read; use std::time::Duration; use data_encoding::{Encoding, HEXLOWER_PERMISSIVE}; use yaml_rust::YamlLoader; use crate::config::{DEFAULT_BATCH_SIZE, DEFAULT_STATUS_INTERVAL}; use crate::config::ServerConfig; use crate::Error; use crate::key::KmsProtection; const HEX: Encoding = HEXLOWER_PERMISSIVE; pub struct FileConfig { port: u16, interface: String, seed: Vec<u8>, batch_size: u8, status_interval: Duration, kms_protection: KmsProtection, health_check_port: Option<u16>, client_stats: bool, fault_percentage: u8, } impl FileConfig { pub fn new(config_file: &str) -> Result<Self, Error> { let mut infile = File::open(config_file) .unwrap_or_else(|_| panic!("failed to open config file '{}'", config_file)); let mut contents = String::new(); infile .read_to_string(&mut contents) .unwrap_or_else(|_| panic!("could not read config file '{}'", config_file)); let cfg = YamlLoader::load_from_str(&contents) .unwrap_or_else(|_| panic!("could not parse config file '{}'", config_file)); if cfg.len() != 1 { return Err(Error::InvalidConfiguration(format!( "Empty or malformed config file '{}'", config_file ))); } let mut config = FileConfig { port: 0, interface: "".to_string(), seed: Vec::new(), batch_size: DEFAULT_BATCH_SIZE, status_interval: DEFAULT_STATUS_INTERVAL, kms_protection: KmsProtection::Plaintext, health_check_port: None, client_stats: false, fault_percentage: 0, }; for (key, value) in cfg[0].as_hash().unwrap() { match key.as_str().unwrap() { "port" => config.port = value.as_i64().unwrap() as u16, "interface" => config.interface = value.as_str().unwrap().to_string(), "batch_size" => config.batch_size = value.as_i64().unwrap() as u8, "seed" => { let val = value.as_str().unwrap().to_string(); config.seed = HEX .decode(val.as_bytes()) .expect("seed value invalid; 'seed' must be a valid hex value"); } "status_interval" => { let val = value.as_i64().expect("status_interval value invalid"); config.status_interval = Duration::from_secs(val as u64) 
} "kms_protection" => { let val = value.as_str().unwrap().parse().unwrap_or_else(|_| { panic!("invalid kms_protection value: {:?}", value) }); config.kms_protection = val } "health_check_port" => { let val = value.as_i64().unwrap() as u16; config.health_check_port = Some(val); } "client_stats" => { let val = value.as_str().unwrap().to_ascii_lowercase(); config.client_stats = val == "yes" || val == "on"; } "fault_percentage" => { let val = value.as_i64().unwrap() as u8; config.fault_percentage = val; } unknown => { return Err(Error::InvalidConfiguration(format!( "unknown config key: {}", unknown ))); } } } Ok(config) } } impl ServerConfig for FileConfig { fn interface(&self) -> &str { self.interface.as_ref() } fn port(&self) -> u16 { self.port } fn seed(&self) -> Vec<u8> { self.seed.clone() } fn batch_size(&self) -> u8 { self.batch_size } fn status_interval(&self) -> Duration { self.status_interval } fn kms_protection(&self) -> &KmsProtection { &self.kms_protection } fn health_check_port(&self) -> Option<u16> { self.health_check_port } fn client_stats_enabled(&self) -> bool { self.client_stats } fn fault_percentage(&self) -> u8 { self.fault_percentage } }
use std::fs::File; use std::io::Read; use std::time::Duration; use data_encoding::{Encoding, HEXLOWER_PERMISSIVE}; use yaml_rust::YamlLoader; use crate::config::{DEFAULT_BATCH_SIZE, DEFAULT_STATUS_INTERVAL}; use crate::config::ServerConfig; use crate::Error; use crate::key::KmsProtection; const HEX: Encoding = HEXLOWER_PERMISSIVE; pub struct FileConfig { port: u16, interface: String, seed: Vec<u8>, batch_size: u8, status_interval: Duration, kms_protection: KmsProtection, health_check_port: Option<u16>, client_stats: bool, fault_percentage: u8, } impl FileConfig { pub fn new(config_file: &str) -> Result<Self, Error> { let mut infile = File::open(config_file) .unwrap_or_else(|_| panic!("failed to open config file '{}'", config_file)); let mut contents = String::new(); infile .read_to_string(&mut contents) .unwrap_or_else(|_| panic!("could not read config file '{}'", config_file)); let cfg = YamlLoader::load_from_str(&contents) .unwrap_or_else(|_| panic!("could not parse config file '{}'", config_file)); if cfg.len() != 1 { return Err(Error::InvalidConfiguration(format!( "Empty or malformed config file '{}'", config_file ))); } let mut config = FileConfig { port: 0, interface: "".to_string(), seed: Vec::new(), batch_size: DEFAULT_BATCH_SIZE, status_interval: DEFAULT_STATUS_INTERVAL, kms_protection: KmsProtection::Plaintext, health_check_port: None, client_stats: false, fault_percentage: 0, }; for (key, value) in cfg[0].as_hash().unwrap() { match key.as_str().unwrap() { "port" => config.port = value.as_i64().unwrap() as u16, "interface" => config.interface = value.as_str().unwrap().to_string(), "batch_size" => config.batch_size = value.as_i64().unwrap() as u8, "seed" => { let val = value.as_str().unwrap().to_string(); config.seed = HEX .decode(val.as_bytes()) .expect("seed value invalid; 'seed' must be a valid hex value"); } "status_interval" => { let val = value.as_i64().expect("status_interval value invalid"); config.status_interval = Duration::from_secs(val as u6
} impl ServerConfig for FileConfig { fn interface(&self) -> &str { self.interface.as_ref() } fn port(&self) -> u16 { self.port } fn seed(&self) -> Vec<u8> { self.seed.clone() } fn batch_size(&self) -> u8 { self.batch_size } fn status_interval(&self) -> Duration { self.status_interval } fn kms_protection(&self) -> &KmsProtection { &self.kms_protection } fn health_check_port(&self) -> Option<u16> { self.health_check_port } fn client_stats_enabled(&self) -> bool { self.client_stats } fn fault_percentage(&self) -> u8 { self.fault_percentage } }
4) } "kms_protection" => { let val = value.as_str().unwrap().parse().unwrap_or_else(|_| { panic!("invalid kms_protection value: {:?}", value) }); config.kms_protection = val } "health_check_port" => { let val = value.as_i64().unwrap() as u16; config.health_check_port = Some(val); } "client_stats" => { let val = value.as_str().unwrap().to_ascii_lowercase(); config.client_stats = val == "yes" || val == "on"; } "fault_percentage" => { let val = value.as_i64().unwrap() as u8; config.fault_percentage = val; } unknown => { return Err(Error::InvalidConfiguration(format!( "unknown config key: {}", unknown ))); } } } Ok(config) }
function_block-function_prefixed
[ { "content": "#[allow(clippy::useless_let_if_seq)]\n\npub fn is_valid_config(cfg: &dyn ServerConfig) -> bool {\n\n let mut is_valid = true;\n\n\n\n if cfg.port() == 0 {\n\n error!(\"server port not set: {}\", cfg.port());\n\n is_valid = false;\n\n }\n\n\n\n if cfg.interface().is_empty(...
Rust
source-code/parser.rs
adrianbielsa1/nabe
3355ecb7b5ba8e21a1db256144935af46f506eca
use crate::token::Token; use crate::statement::*; struct Parser<'a> { tokens: &'a Vec<Token>, tokens_position: usize, } impl<'a> Parser<'a> { pub fn new(tokens: &'a Vec<Token>) -> Parser { return Parser { tokens: tokens, tokens_position: 0, }; } pub fn parse(&mut self) -> Vec<Statement> { let parsers = [ Parser::parse_type, Parser::parse_variable, Parser::parse_constant, Parser::parse_subroutine, Parser::parse_function, Parser::parse_enum, Parser::parse_attribute, Parser::parse_option, ]; let mut statements = vec!(); 'parse_next_statement: while self.tokens_position < self.tokens.len() { for parser in &parsers { let position_before_parsing = self.tokens_position; if let Some(statement) = parser(self) { statements.push(statement); continue 'parse_next_statement; } else { self.tokens_position = position_before_parsing; } } break; } return statements; } fn compare(first: &Token, second: &Token) -> bool { let left_discriminant = std::mem::discriminant(first); let right_discriminant = std::mem::discriminant(second); return left_discriminant == right_discriminant; } fn consume(&mut self, expected_token: Token) -> Option<Token> { if self.tokens_position >= self.tokens.len() { return None; } if Self::compare(&self.tokens[self.tokens_position], &expected_token) { self.tokens_position += 1; return Some(self.tokens[self.tokens_position - 1].clone()); } else { return None; } } fn parse_type(&mut self) -> Option<Statement> { let _ = self.consume(Token::Type)?; let name = self.consume(Token::Identifier(vec!()))?; let mut attributes = vec!(); while let Some(attribute) = self.parse_type_attribute() { attributes.push(attribute); } let _ = self.consume(Token::End)?; let _ = self.consume(Token::Type)?; return Some(Statement::Type(TypeStatement { name: name, attributes: attributes, })); } fn parse_type_attribute(&mut self) -> Option<Statement> { let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::As)?; let kind = self.consume(Token::Identifier(vec!()))?; 
return Some(Statement::TypeAttribute(TypeAttributeStatement { name: name, kind: kind, })); } fn parse_enum(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t)); let _ = self.consume(Token::Enum)?; let name = self.consume(Token::Identifier(vec!()))?; let mut attributes = vec!(); while let Some(attribute) = self.parse_enum_attribute() { attributes.push(attribute); } let _ = self.consume(Token::End)?; let _ = self.consume(Token::Enum)?; return Some(Statement::Enum(EnumStatement { scope: scope, name: name, attributes: attributes, })); } fn parse_enum_attribute(&mut self) -> Option<Statement> { let name = self.consume(Token::Identifier(vec!()))?; let value = match self.consume(Token::Assignment) { Some(_) => Some(self.consume(Token::Number(vec!()))?), None => None, }; return Some(Statement::EnumAttribute(EnumAttributeStatement { name: name, value: value, })); } fn parse_variable(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private, Token::Static, Token::Dim]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::As)?; let kind = self.consume(Token::Identifier(vec!()))?; return Some(Statement::Variable(VariableStatement { scope: scope.clone(), name: name, kind: kind, })); } fn parse_constant(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private, Token::Static, Token::Dim]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let _ = self.consume(Token::Const)?; let name = self.consume(Token::Identifier(vec!()))?; let kind = match self.consume(Token::As) { Some(_) => Some(self.consume(Token::Identifier(vec!()))?), None => None, }; let length = match self.consume(Token::Times) { Some(_) => Some(self.consume(Token::Number(vec!()))?), None => 
None, }; let _ = self.consume(Token::Assignment)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let value = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?; return Some(Statement::Constant(ConstantStatement { scope: scope.clone(), name: name, kind: kind, length: length, value: value, })); } fn parse_subroutine(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private, Token::Static]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let _ = self.consume(Token::Sub)?; let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::LeftParentheses)?; let mut arguments = vec!(); while let Some(argument) = self.parse_callable_argument() { arguments.push(argument); } let _ = self.consume(Token::RightParentheses)?; let body = self.parse_callable_body(); let _ = self.consume(Token::End)?; let _ = self.consume(Token::Sub)?; return Some(Statement::Subroutine(SubroutineStatement { scope: scope, name: name, arguments: arguments, body: body, })); } fn parse_function(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private, Token::Static]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let _ = self.consume(Token::Function)?; let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::LeftParentheses)?; let mut arguments = vec!(); while let Some(argument) = self.parse_callable_argument() { arguments.push(argument); } let _ = self.consume(Token::RightParentheses)?; let kind = match self.consume(Token::As) { Some(_) => self.consume(Token::Identifier(vec!())), None => None, }; let body = self.parse_callable_body(); let _ = self.consume(Token::End)?; let _ = self.consume(Token::Function)?; return Some(Statement::Function(FunctionStatement { scope: scope, name: name, arguments: arguments, kind: kind, body: body, })); } fn 
parse_callable_argument(&mut self) -> Option<Statement> { let possible_modifiers = [Token::ByVal, Token::ByRef,]; let modifier = std::array::IntoIter::new(possible_modifiers).find_map(|t| self.consume(t)); let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::As)?; let kind = self.consume(Token::Identifier(vec!()))?; return Some(Statement::Argument(ArgumentStatement { modifier: modifier, name: name, kind: kind, })); } fn parse_callable_body(&mut self) -> Vec<Statement> { let parsers = [ Parser::parse_variable, Parser::parse_constant, Parser::parse_assignment, Parser::parse_exit, Parser::parse_return, Parser::parse_attribute, ]; let mut statements = vec!(); 'parse_next_statement: while self.tokens_position < self.tokens.len() { for parser in &parsers { let position_before_parsing = self.tokens_position; if let Some(statement) = parser(self) { statements.push(statement); continue 'parse_next_statement; } else { self.tokens_position = position_before_parsing; } } break; } return statements; } fn parse_assignment(&mut self) -> Option<Statement> { let left = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::Assignment)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let right = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?; return Some(Statement::Assignment(AssignmentStatement { left: left, right: Box::new(right), })); } fn parse_exit(&mut self) -> Option<Statement> { let _ = self.consume(Token::Exit)?; let possible_blocks = [ Token::Sub, Token::Function, ]; let block = std::array::IntoIter::new(possible_blocks).find_map(|t| self.consume(t))?; return Some(Statement::Exit(ExitStatement { block: block, })); } fn parse_return(&mut self) -> Option<Statement> { let _ = self.consume(Token::Return)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let value = 
std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t)); return Some(Statement::Return(ReturnStatement { value: value, })); } fn parse_attribute(&mut self) -> Option<Statement> { let _ = self.consume(Token::Attribute)?; let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::Assignment)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let value = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?; return Some(Statement::Attribute(AttributeStatement { name: name, value: value, })); } fn parse_option(&mut self) -> Option<Statement> { let _ = self.consume(Token::Option)?; let possible_configurations = [ Token::Explicit, Token::Base, Token::Compare, Token::Private, ]; let configuration = std::array::IntoIter::new(possible_configurations).find_map(|t| self.consume(t))?; let value = match configuration { Token::Explicit => None, Token::Base => Some(self.consume(Token::Number(vec!()))?), Token::Compare => Some(self.consume(Token::Identifier(vec!()))?), Token::Private => Some(self.consume(Token::Module)?), _ => unreachable!(), }; return Some(Statement::Option(OptionStatement { configuration: configuration, value: value, })); } } pub fn parse(tokens: &Vec<Token>) -> Vec<Statement> { let mut parser = Parser::new(tokens); return parser.parse(); }
use crate::token::Token; use crate::statement::*; struct Parser<'a> { tokens: &'a Vec<Token>, tokens_position: usize, } impl<'a> Parser<'a> { pub fn new(tokens: &'a Vec<Token>) -> Parser { return Parser { tokens: tokens, tokens_position: 0, }; } pub fn parse(&mut self) -> Vec<Statement> { let parsers = [ Parser::parse_type, Parser::parse_variable, Parser::parse_constant, Parser::parse_subroutine, Parser::parse_function, Parser::parse_enum, Parser::parse_attribute, Parser::parse_option, ]; let mut statements = vec!(); 'parse_next_statement: while self.tokens_position < self.tokens.len() { for parser in &parsers { let position_before_parsing = self.tokens_position; if let Some(statement) = parser(self) { statements.push(statement); continue 'parse_next_statement; } else { self.tokens_position = position_before_parsing; } } break; } return statements; } fn compare(first: &Token, second: &Token) -> bool { let left_discriminant = std::mem::discriminant(first); let right_discriminant = std::mem::discriminant(second); return left_discriminant == right_discriminant; } fn consume(&mut self, expected_token: Token) -> Option<Token> { if self.tokens_position >= self.tokens.len() { return None; } if Self::compare(&self.tokens[self.tokens_position], &expected_token) { self.tokens_position += 1; return Some(self.tokens[self.tokens_position - 1].clone()); } else { return None; } } fn parse_type(&mut self) -> Option<Statement> { let _ = self.consume(Token::Type)?; let name = self.consume(Token::Identifier(vec!()))?; let mut attributes = vec!(); while let Some(attribute) = self.parse_type_attribute() { attributes.push(attribute); } let _ = self.consume(Token::End)?; let _ = self.consume(Token::Type)?; return Some(Statement::Type(TypeStatement { name: name, attributes: attributes, })); } fn parse_type_attribute(&mut self) -> Option<Statement> { let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::As)?; let kind = self.consume(Token::Identifier(vec!()))?; 
return Some(Statement::TypeAttribute(TypeAttributeStatement { name: name, kind: kind, })); } fn parse_enum(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t)); let _ = self.consume(Token::Enum)?; let name = self.consume(Token::Identifier(vec!()))?; let mut attributes = vec!(); while let Some(attribute) = self.parse_enum_attribute() { attributes.push(attribut
fn parse_enum_attribute(&mut self) -> Option<Statement> { let name = self.consume(Token::Identifier(vec!()))?; let value = match self.consume(Token::Assignment) { Some(_) => Some(self.consume(Token::Number(vec!()))?), None => None, }; return Some(Statement::EnumAttribute(EnumAttributeStatement { name: name, value: value, })); } fn parse_variable(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private, Token::Static, Token::Dim]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::As)?; let kind = self.consume(Token::Identifier(vec!()))?; return Some(Statement::Variable(VariableStatement { scope: scope.clone(), name: name, kind: kind, })); } fn parse_constant(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private, Token::Static, Token::Dim]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let _ = self.consume(Token::Const)?; let name = self.consume(Token::Identifier(vec!()))?; let kind = match self.consume(Token::As) { Some(_) => Some(self.consume(Token::Identifier(vec!()))?), None => None, }; let length = match self.consume(Token::Times) { Some(_) => Some(self.consume(Token::Number(vec!()))?), None => None, }; let _ = self.consume(Token::Assignment)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let value = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?; return Some(Statement::Constant(ConstantStatement { scope: scope.clone(), name: name, kind: kind, length: length, value: value, })); } fn parse_subroutine(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private, Token::Static]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let _ = self.consume(Token::Sub)?; let name = 
self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::LeftParentheses)?; let mut arguments = vec!(); while let Some(argument) = self.parse_callable_argument() { arguments.push(argument); } let _ = self.consume(Token::RightParentheses)?; let body = self.parse_callable_body(); let _ = self.consume(Token::End)?; let _ = self.consume(Token::Sub)?; return Some(Statement::Subroutine(SubroutineStatement { scope: scope, name: name, arguments: arguments, body: body, })); } fn parse_function(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private, Token::Static]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let _ = self.consume(Token::Function)?; let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::LeftParentheses)?; let mut arguments = vec!(); while let Some(argument) = self.parse_callable_argument() { arguments.push(argument); } let _ = self.consume(Token::RightParentheses)?; let kind = match self.consume(Token::As) { Some(_) => self.consume(Token::Identifier(vec!())), None => None, }; let body = self.parse_callable_body(); let _ = self.consume(Token::End)?; let _ = self.consume(Token::Function)?; return Some(Statement::Function(FunctionStatement { scope: scope, name: name, arguments: arguments, kind: kind, body: body, })); } fn parse_callable_argument(&mut self) -> Option<Statement> { let possible_modifiers = [Token::ByVal, Token::ByRef,]; let modifier = std::array::IntoIter::new(possible_modifiers).find_map(|t| self.consume(t)); let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::As)?; let kind = self.consume(Token::Identifier(vec!()))?; return Some(Statement::Argument(ArgumentStatement { modifier: modifier, name: name, kind: kind, })); } fn parse_callable_body(&mut self) -> Vec<Statement> { let parsers = [ Parser::parse_variable, Parser::parse_constant, Parser::parse_assignment, Parser::parse_exit, Parser::parse_return, 
Parser::parse_attribute, ]; let mut statements = vec!(); 'parse_next_statement: while self.tokens_position < self.tokens.len() { for parser in &parsers { let position_before_parsing = self.tokens_position; if let Some(statement) = parser(self) { statements.push(statement); continue 'parse_next_statement; } else { self.tokens_position = position_before_parsing; } } break; } return statements; } fn parse_assignment(&mut self) -> Option<Statement> { let left = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::Assignment)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let right = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?; return Some(Statement::Assignment(AssignmentStatement { left: left, right: Box::new(right), })); } fn parse_exit(&mut self) -> Option<Statement> { let _ = self.consume(Token::Exit)?; let possible_blocks = [ Token::Sub, Token::Function, ]; let block = std::array::IntoIter::new(possible_blocks).find_map(|t| self.consume(t))?; return Some(Statement::Exit(ExitStatement { block: block, })); } fn parse_return(&mut self) -> Option<Statement> { let _ = self.consume(Token::Return)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let value = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t)); return Some(Statement::Return(ReturnStatement { value: value, })); } fn parse_attribute(&mut self) -> Option<Statement> { let _ = self.consume(Token::Attribute)?; let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::Assignment)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let value = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?; return Some(Statement::Attribute(AttributeStatement { name: name, value: value, })); } fn parse_option(&mut self) -> Option<Statement> { let _ = 
self.consume(Token::Option)?; let possible_configurations = [ Token::Explicit, Token::Base, Token::Compare, Token::Private, ]; let configuration = std::array::IntoIter::new(possible_configurations).find_map(|t| self.consume(t))?; let value = match configuration { Token::Explicit => None, Token::Base => Some(self.consume(Token::Number(vec!()))?), Token::Compare => Some(self.consume(Token::Identifier(vec!()))?), Token::Private => Some(self.consume(Token::Module)?), _ => unreachable!(), }; return Some(Statement::Option(OptionStatement { configuration: configuration, value: value, })); } } pub fn parse(tokens: &Vec<Token>) -> Vec<Statement> { let mut parser = Parser::new(tokens); return parser.parse(); }
e); } let _ = self.consume(Token::End)?; let _ = self.consume(Token::Enum)?; return Some(Statement::Enum(EnumStatement { scope: scope, name: name, attributes: attributes, })); }
function_block-function_prefixed
[ { "content": "// TODO: Handle multiline comments (through the underscore character).\n\n// Or is it a parser matter?\n\nfn lex_comment(characters: &Vec<u8>, position: &mut usize, _tokens: &mut Vec<Token>) -> bool {\n\n let mut character = characters[*position] as char;\n\n\n\n if character != '\\'' { retu...
Rust
evolvim-lib/src/lib/neat/mod.rs
splintah/evolvim
13e95441e617f5931441995a3fd041447c40a77e
mod genome; mod input; mod output; mod phenotype; pub use genome::Genome; pub use phenotype::NeuralNet; #[derive(Debug)] pub struct NeatBrain { genome: Genome, net: NeuralNet, } impl From<Genome> for NeatBrain { fn from(genome: Genome) -> Self { let net = (&genome).into(); NeatBrain { genome, net } } } impl crate::brain::NeuralNet for NeatBrain { fn load_input(&mut self, env: &crate::brain::Environment) { self.net.load_input(env); } fn run(&mut self) { self.net.run_calculations(); } fn use_output(&self, env: &mut crate::brain::EnvironmentMut<Self>, time_step: f64) { self.net.use_output(env, time_step); } } impl crate::brain::Intentions for NeatBrain { fn wants_birth(&self) -> f64 { unimplemented!() } fn wants_help_birth(&self) -> f64 { unimplemented!() } } impl crate::brain::GenerateRandom for NeatBrain { fn new_random() -> Self { Genome::new_fully_linked().into() } } impl crate::brain::RecombinationTwoParents for NeatBrain { fn recombination_two_parents(parent_a: &Self, parent_b: &Self) -> Self { let genome = Genome::new_from_2(&parent_a.genome, &parent_b.genome); genome.into() } } impl crate::brain::RecombinationInfinite for NeatBrain { fn recombination_infinite_parents(parents: &Vec<crate::softbody::HLSoftBody<Self>>) -> Self { use crate::brain::RecombinationTwoParents; if parents.len() == 1 { let parent = parents[0].borrow(); let mut genome = parent.brain.genome.clone(); genome.mutate(); genome.into() } else { NeatBrain::recombination_two_parents( &parents[0].borrow().brain, &parents[1].borrow().brain, ) } } } impl crate::brain::ProvideInformation for NeatBrain { fn get_keys(&self) -> Vec<String> { vec!["nodes".to_string(), "connections".to_string()] } fn get_raw_values(&self) -> Vec<String> { vec![ format!("{}", self.genome.get_node_genome().len()), format!("{}", self.genome.get_connection_genome().len()), ] } } impl serde::Serialize for NeatBrain { fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { use 
serde::ser::SerializeStruct; let mut state = serializer.serialize_struct("NeatBrain", 1)?; state.serialize_field("genome", &self.genome)?; state.end() } } impl<'de> serde::Deserialize<'de> for NeatBrain { fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<NeatBrain, D::Error> { use serde::de::*; struct BrainVisitor; impl<'de> Visitor<'de> for BrainVisitor { type Value = NeatBrain; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("struct NeatBrain") } fn visit_seq<V: SeqAccess<'de>>(self, mut seq: V) -> Result<NeatBrain, V::Error> { let genome: Genome = seq .next_element()? .ok_or_else(|| Error::invalid_length(0, &self))?; Ok(genome.into()) } } const FIELDS: &[&str] = &["genome"]; deserializer.deserialize_struct::<BrainVisitor>("NeatBrain", FIELDS, BrainVisitor) } }
mod genome; mod input; mod output; mod phenotype; pub use genome::Genome; pub use phenotype::NeuralNet; #[derive(Debug)] pub struct NeatBrain { genome: Genome, net: NeuralNet, } impl From<Genome> for NeatBrain { fn from(genome: Genome) -> Self { let net = (&genome).into(); NeatBrain { genome, net } } } impl crate::brain::NeuralNet for NeatBrain { fn load_input(&mut self, env: &crate::brain::Environment) { self.net.load_input(env); } fn run(&mut self) { self.net.run_calculations(); } fn use_output(&self, env: &mut crate::brain::EnvironmentMut<Self>, time_step: f64) { self.net.use_output(env, time_step); } } impl crate::brain::Intentions for NeatBrain { fn wants_birth(&self) -> f64 { unimplemented!() } fn wants_help_birth(&self) -> f64 { unimplemented!() } } impl crate::brain::GenerateRandom for NeatBrain { fn new_random() -> Self { Genome::new_fully_linked().into() } } impl crate::brain::RecombinationTwoParents for NeatBrain { fn recombination_two_parents(parent_a: &Self, parent_b: &Self) -> Self { let genome = Genome::new_from_2(&parent_a.genome, &parent_b.genome); genome.into() } } impl crate::brain::RecombinationInfinite for NeatBrain { fn recombination_infinite_parents(parents: &Vec<crate::softbody::HLSoftBody<Self>>) -> Self { use crate::brain::RecombinationTwoParents; if parents.len() == 1 { let parent = parents[0].borrow(); let mut genome = parent.brain.genome.clone(); genome.mutate(); genome.into() } else { NeatBrain::recombination_two_parents( &parents[0].borrow().brain, &parents[1].borrow().brain, ) } } } impl crate::brain::ProvideInformation for NeatBrain { fn get_keys(&self) -> Vec<String> { vec!["nodes".to_string(), "connections".to_string()] } fn get_raw_values(&self) -> Vec<String> { vec![ format!("{}", self.genome.get_node_genome().len()), format!("{}", self.genome.get_connection_genome().len()), ] } } impl serde::Serialize for NeatBrain {
} impl<'de> serde::Deserialize<'de> for NeatBrain { fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<NeatBrain, D::Error> { use serde::de::*; struct BrainVisitor; impl<'de> Visitor<'de> for BrainVisitor { type Value = NeatBrain; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("struct NeatBrain") } fn visit_seq<V: SeqAccess<'de>>(self, mut seq: V) -> Result<NeatBrain, V::Error> { let genome: Genome = seq .next_element()? .ok_or_else(|| Error::invalid_length(0, &self))?; Ok(genome.into()) } } const FIELDS: &[&str] = &["genome"]; deserializer.deserialize_struct::<BrainVisitor>("NeatBrain", FIELDS, BrainVisitor) } }
fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { use serde::ser::SerializeStruct; let mut state = serializer.serialize_struct("NeatBrain", 1)?; state.serialize_field("genome", &self.genome)?; state.end() }
function_block-full_function
[ { "content": "#[derive(Debug)]\n\nstruct Output {\n\n node_index: usize,\n\n value: f64,\n\n output_type: OutputType,\n\n}\n\n\n\nimpl Output {\n\n fn use_output(&self, env: &mut crate::brain::EnvironmentMut<super::NeatBrain>, time_step: f64) {\n\n self.output_type.use_output(self.value, env,...
Rust
research/gaia/pegasus/pegasus/src/communication/channel.rs
tianliplus/GraphScope
0226e00c106d5959d6fdc1637fe1646b16b26136
use crate::api::function::{MultiRouteFunction, RouteFunction}; use crate::channel_id::{ChannelId, SubChannelId}; use crate::communication::decorator::{count::CountedPush, exchange::ExchangePush, DataPush}; use crate::data::{Data, DataSet}; use crate::data_plane::{GeneralPull, GeneralPush, Push}; use crate::dataflow::DataflowBuilder; use crate::errors::BuildJobError; use crate::graph::Edge; enum ChannelKind<T: Data> { Pipeline, Shuffle(Box<dyn RouteFunction<T>>), Broadcast(Option<Box<dyn MultiRouteFunction<T>>>), Aggregate(u64), } pub struct Channel<T: Data> { kind: ChannelKind<T>, allow_cancel: bool, } #[derive(Copy, Clone, Debug)] pub(crate) struct ChannelMeta { pub id: SubChannelId, pub is_local: bool, pub push_peers: usize, pub forbid_cancel: bool, pub is_aggregate: bool, } impl Into<Edge> for ChannelMeta { fn into(self) -> Edge { Edge { id: self.id.index() as usize, source: Default::default(), target: Default::default(), scope_depth: 0, src_peers: self.push_peers, dst_peers: if self.is_aggregate { 1 } else { self.push_peers }, is_local: self.is_local, } } } pub(crate) struct MaterializedChannel<T: Data> { pub meta: ChannelMeta, push: DataPush<T>, pull: GeneralPull<DataSet<T>>, } impl<T: Data> MaterializedChannel<T> { pub fn take(self) -> (DataPush<T>, GeneralPull<DataSet<T>>) { (self.push, self.pull) } } impl<T: Data> Channel<T> { fn new(kind: ChannelKind<T>, allow_cancel: bool) -> Self { Channel { kind, allow_cancel } } pub fn forbid_cancel(&mut self) { self.allow_cancel = false; } pub(crate) fn materialize( self, dfb: &DataflowBuilder, ) -> Result<MaterializedChannel<T>, BuildJobError> { let index = dfb.next_channel_index(); let ch_id = (ChannelId { job_seq: dfb.config.job_id as u64, index }, dfb.worker_id.index).into(); match self.kind { ChannelKind::Pipeline => { let (tx, rx) = crate::data_plane::pipeline::<DataSet<T>>(ch_id); let meta = ChannelMeta { id: ch_id, is_local: true, push_peers: 1, forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let 
push = CountedPush::new( ch_id, dfb.worker_id, dfb.worker_id, tx.into(), &dfb.event_bus, ); Ok(MaterializedChannel { meta, push: DataPush::Count(push), pull: rx.into() }) } ChannelKind::Shuffle(r) => { let (raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let pushes = decorate_to_count(ch_id, raw, &dfb); let push = ExchangePush::exchange_to_one(dfb.config.batch_size as usize, ch_id, pushes, r); Ok(MaterializedChannel { meta, push: DataPush::Exchange(push), pull: pull.into() }) } ChannelKind::Broadcast(r) => { let (raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let pushes = decorate_to_count(ch_id, raw, &dfb); let push = if let Some(r) = r { ExchangePush::exchange_to_some(dfb.config.batch_size as usize, ch_id, pushes, r) } else { ExchangePush::broadcast(dfb.config.batch_size as usize, ch_id, pushes) }; Ok(MaterializedChannel { meta, push: DataPush::Exchange(push), pull: pull.into() }) } ChannelKind::Aggregate(id) => { let (mut raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: true, }; let push = raw.swap_remove(id as usize); let mut target = dfb.worker_id; target.index = id as u32; let push = CountedPush::new(ch_id, dfb.worker_id, target, push, &dfb.event_bus); for mut unused in raw { unused.close().ok(); } Ok(MaterializedChannel { meta, push: DataPush::Count(push), pull: pull.into() }) } } } } #[inline] fn decorate_to_count<T: Data>( ch_id: SubChannelId, raw: Vec<GeneralPush<DataSet<T>>>, dfb: &DataflowBuilder, ) -> Vec<CountedPush<T>> { let mut counts = Vec::with_capacity(raw.len()); let 
source = dfb.worker_id; for (idx, p) in raw.into_iter().enumerate() { let mut target = source; target.index = idx as u32; let push = CountedPush::new(ch_id, source, target, p, &dfb.event_bus); counts.push(push); } counts } pub struct Pipeline; impl<T: Data> From<Pipeline> for Channel<T> { fn from(_: Pipeline) -> Self { Channel::new(ChannelKind::Pipeline, true) } } impl<T: Data, R: RouteFunction<T>> From<Box<R>> for Channel<T> { fn from(route: Box<R>) -> Self { let kind = ChannelKind::Shuffle(route as Box<dyn RouteFunction<T>>); Channel::new(kind, true) } } impl<T: Data> From<Box<dyn RouteFunction<T>>> for Channel<T> { fn from(route: Box<dyn RouteFunction<T>>) -> Self { let kind = ChannelKind::Shuffle(route); Channel::new(kind, true) } } pub struct Broadcast; impl<T: Data> From<Broadcast> for Channel<T> { fn from(_: Broadcast) -> Self { Channel::new(ChannelKind::Broadcast(None), true) } } impl<T: Data> From<Box<dyn MultiRouteFunction<T>>> for Channel<T> { fn from(route: Box<dyn MultiRouteFunction<T>>) -> Self { let kind = ChannelKind::Broadcast(Some(route)); Channel::new(kind, true) } } pub struct Aggregate(pub u64); impl<T: Data> From<Aggregate> for Channel<T> { fn from(a: Aggregate) -> Self { let kind = ChannelKind::Aggregate(a.0); Channel::new(kind, true) } }
use crate::api::function::{MultiRouteFunction, RouteFunction}; use crate::channel_id::{ChannelId, SubChannelId}; use crate::communication::decorator::{count::CountedPush, exchange::ExchangePush, DataPush}; use crate::data::{Data, DataSet}; use crate::data_plane::{GeneralPull, GeneralPush, Push}; use crate::dataflow::DataflowBuilder; use crate::errors::BuildJobError; use crate::graph::Edge; enum ChannelKind<T: Data> { Pipeline, Shuffle(Box<dyn RouteFunction<T>>), Broadcast(Option<Box<dyn MultiRouteFunction<T>>>), Aggregate(u64), } pub struct Channel<T: Data> { kind: ChannelKind<T>, allow_cancel: bool, } #[derive(Copy, Clone, Debug)] pub(crate) struct ChannelMeta { pub id: SubChannelId, pub is_local: bool, pub push_peers: usize, pub forbid_cancel: bool, pub is_aggregate: bool, } impl Into<Edge> for ChannelMeta { fn into(self) -> Edge { Edge { id: self.id.index() as usize, source: Default::default(), target: Default::default(), scope_depth: 0, src_peers: self.push_peers, dst_peers: if self.is_aggregate { 1 } else { self.push_peers }, is_local: self.is_local, } } } pub(crate) struct MaterializedChannel<T: Data> { pub meta: ChannelMeta, push: DataPush<T>, pull: GeneralPull<DataSet<T>>, } impl<T: Data> MaterializedChannel<T> { pub fn take(self) -> (DataPush<T>, GeneralPull<DataSet<T>>) { (self.push, self.pull) } } impl<T: Data> Channel<T> { fn new(kind: ChannelKind<T>, allow_cancel: bool) -> Self { Channel { kind, allow_cancel } } pub fn forbid_cancel(&mut self) { self.allow_cancel = false; } pub(crate) fn materialize( self, dfb: &DataflowBuilder, ) -> Result<MaterializedChannel<T>, BuildJobError> { let index = dfb.next_channel_index(); let ch_id = (ChannelId { job_seq: dfb.config.job_id as u64, index }, dfb.worker_id.index).into(); match self.kind { ChannelKind::Pipeline => { let (tx, rx) = crate::data_plane::pipeline::<DataSet<T>>(ch_id); let meta = ChannelMeta { id: ch_id, is_local: true, push_peers: 1, forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let 
push = CountedPush::new( ch_id, dfb.worker_id, dfb.worker_id, tx.into(), &dfb.event_bus, ); Ok(MaterializedChannel { meta, push: DataPush::Count(push), pull: rx.into() }) } ChannelKind::Shuffle(r) => { let (raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let pushes = decorate_to_count(ch_id, raw, &dfb); let push = ExchangePush::exchange_to_one(dfb.config.batch_size as usize, ch_id, pushes, r); Ok(MaterializedChannel { meta, push: DataPush::Exchange(push), pull: pull.into() }) } ChannelKind::Broadcast(r) => { let (raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let pushes = decorate_to_count(ch_id, raw, &dfb); let push =
; Ok(MaterializedChannel { meta, push: DataPush::Exchange(push), pull: pull.into() }) } ChannelKind::Aggregate(id) => { let (mut raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: true, }; let push = raw.swap_remove(id as usize); let mut target = dfb.worker_id; target.index = id as u32; let push = CountedPush::new(ch_id, dfb.worker_id, target, push, &dfb.event_bus); for mut unused in raw { unused.close().ok(); } Ok(MaterializedChannel { meta, push: DataPush::Count(push), pull: pull.into() }) } } } } #[inline] fn decorate_to_count<T: Data>( ch_id: SubChannelId, raw: Vec<GeneralPush<DataSet<T>>>, dfb: &DataflowBuilder, ) -> Vec<CountedPush<T>> { let mut counts = Vec::with_capacity(raw.len()); let source = dfb.worker_id; for (idx, p) in raw.into_iter().enumerate() { let mut target = source; target.index = idx as u32; let push = CountedPush::new(ch_id, source, target, p, &dfb.event_bus); counts.push(push); } counts } pub struct Pipeline; impl<T: Data> From<Pipeline> for Channel<T> { fn from(_: Pipeline) -> Self { Channel::new(ChannelKind::Pipeline, true) } } impl<T: Data, R: RouteFunction<T>> From<Box<R>> for Channel<T> { fn from(route: Box<R>) -> Self { let kind = ChannelKind::Shuffle(route as Box<dyn RouteFunction<T>>); Channel::new(kind, true) } } impl<T: Data> From<Box<dyn RouteFunction<T>>> for Channel<T> { fn from(route: Box<dyn RouteFunction<T>>) -> Self { let kind = ChannelKind::Shuffle(route); Channel::new(kind, true) } } pub struct Broadcast; impl<T: Data> From<Broadcast> for Channel<T> { fn from(_: Broadcast) -> Self { Channel::new(ChannelKind::Broadcast(None), true) } } impl<T: Data> From<Box<dyn MultiRouteFunction<T>>> for Channel<T> { fn from(route: Box<dyn MultiRouteFunction<T>>) -> Self { let kind = ChannelKind::Broadcast(Some(route)); Channel::new(kind, true) } } pub struct Aggregate(pub u64); impl<T: 
Data> From<Aggregate> for Channel<T> { fn from(a: Aggregate) -> Self { let kind = ChannelKind::Aggregate(a.0); Channel::new(kind, true) } }
if let Some(r) = r { ExchangePush::exchange_to_some(dfb.config.batch_size as usize, ch_id, pushes, r) } else { ExchangePush::broadcast(dfb.config.batch_size as usize, ch_id, pushes) }
if_condition
[ { "content": "pub fn pipeline<T>(id: SubChannelId) -> (ThreadPush<T>, ThreadPull<T>) {\n\n let queue = Box::new(VecDeque::new());\n\n let ptr =\n\n NonNull::new(Box::into_raw(queue)).expect(\"inter thread communication_old init failure;\");\n\n let exhaust = Arc::new(CachePadded::new(AtomicBool:...
Rust
neqo-transport/src/connection/tests/resumption.rs
hawkinsw/neqo
197c69b613cae40283c801fc2eb338f5482f3808
use super::{ connect, connect_with_rtt, default_client, default_server, exchange_ticket, get_tokens, send_something, AT_LEAST_PTO, }; use crate::addr_valid::{AddressValidation, ValidateAddress}; use std::cell::RefCell; use std::rc::Rc; use std::time::Duration; use test_fixture::{self, assertions, now}; #[test] fn resume() { let mut client = default_client(); let mut server = default_server(); connect(&mut client, &mut server); let token = exchange_ticket(&mut client, &mut server, now()); let mut client = default_client(); client .enable_resumption(now(), token) .expect("should set token"); let mut server = default_server(); connect(&mut client, &mut server); assert!(client.tls_info().unwrap().resumed()); assert!(server.tls_info().unwrap().resumed()); } #[test] fn remember_smoothed_rtt() { const RTT1: Duration = Duration::from_millis(130); const RTT2: Duration = Duration::from_millis(70); let mut client = default_client(); let mut server = default_server(); let now = connect_with_rtt(&mut client, &mut server, now(), RTT1); assert_eq!(client.loss_recovery.rtt(), RTT1); let token = exchange_ticket(&mut client, &mut server, now); let mut client = default_client(); let mut server = default_server(); client.enable_resumption(now, token).unwrap(); assert_eq!( client.loss_recovery.rtt(), RTT1, "client should remember previous RTT" ); connect_with_rtt(&mut client, &mut server, now, RTT2); assert_eq!( client.loss_recovery.rtt(), RTT2, "previous RTT should be completely erased" ); } #[test] fn address_validation_token_resume() { const RTT: Duration = Duration::from_millis(10); let mut client = default_client(); let mut server = default_server(); let validation = AddressValidation::new(now(), ValidateAddress::Always).unwrap(); let validation = Rc::new(RefCell::new(validation)); server.set_validation(Rc::clone(&validation)); let mut now = connect_with_rtt(&mut client, &mut server, now(), RTT); let token = exchange_ticket(&mut client, &mut server, now); let mut client = 
default_client(); client.enable_resumption(now, token).unwrap(); let mut server = default_server(); let dgram = client.process(None, now).dgram(); assertions::assert_initial(dgram.as_ref().unwrap(), true); now += AT_LEAST_PTO; connect_with_rtt(&mut client, &mut server, now, RTT); assert!(client.crypto.tls.info().unwrap().resumed()); assert!(server.crypto.tls.info().unwrap().resumed()); } fn can_resume(token: impl AsRef<[u8]>, initial_has_token: bool) { let mut client = default_client(); client.enable_resumption(now(), token).unwrap(); let initial = client.process_output(now()).dgram(); assertions::assert_initial(initial.as_ref().unwrap(), initial_has_token); } #[test] fn two_tickets_on_timer() { let mut client = default_client(); let mut server = default_server(); connect(&mut client, &mut server); server.send_ticket(now(), &[]).expect("send ticket1"); server.send_ticket(now(), &[]).expect("send ticket2"); let pkt = send_something(&mut server, now()); assert!(client.process(Some(pkt), now()).dgram().is_some()); assert_eq!(get_tokens(&mut client).len(), 0); let mut now = now() + 3 * client.get_pto(); let _ = client.process(None, now); let mut recv_tokens = get_tokens(&mut client); assert_eq!(recv_tokens.len(), 1); let token1 = recv_tokens.pop().unwrap(); now += 3 * client.get_pto(); let _ = client.process(None, now); let mut recv_tokens = get_tokens(&mut client); assert_eq!(recv_tokens.len(), 1); let token2 = recv_tokens.pop().unwrap(); now += 3 * client.get_pto(); let _ = client.process(None, now); assert_eq!(get_tokens(&mut client).len(), 0); assert_ne!(token1.as_ref(), token2.as_ref()); can_resume(&token1, false); can_resume(&token2, false); } #[test] fn two_tickets_with_new_token() { let mut client = default_client(); let mut server = default_server(); let validation = AddressValidation::new(now(), ValidateAddress::Always).unwrap(); let validation = Rc::new(RefCell::new(validation)); server.set_validation(Rc::clone(&validation)); connect(&mut client, &mut 
server); server.send_ticket(now(), &[]).expect("send ticket1"); server.send_ticket(now(), &[]).expect("send ticket2"); let pkt = send_something(&mut server, now()); client.process_input(pkt, now()); let mut all_tokens = get_tokens(&mut client); assert_eq!(all_tokens.len(), 2); let token1 = all_tokens.pop().unwrap(); let token2 = all_tokens.pop().unwrap(); assert_ne!(token1.as_ref(), token2.as_ref()); can_resume(&token1, true); can_resume(&token2, true); } #[test] fn take_token() { let mut client = default_client(); let mut server = default_server(); connect(&mut client, &mut server); server.send_ticket(now(), &[]).unwrap(); let dgram = server.process(None, now()).dgram(); client.process_input(dgram.unwrap(), now()); let tokens = get_tokens(&mut client); assert_eq!(tokens.len(), 0); let token = client.take_resumption_token(now()).unwrap(); can_resume(&token, false); }
use super::{ connect, connect_with_rtt, default_client, default_server, exchange_ticket, get_tokens, send_something, AT_LEAST_PTO, }; use crate::addr_valid::{AddressValidation, ValidateAddress}; use std::cell::RefCell; use std::rc::Rc; use std::time::Duration; use test_fixture::{self, assertions, now}; #[test] fn resume() { let mut client = default_client(); let mut server = default_server(); connect(&mut client, &mut server); let token = exchange_ticket(&mut client, &mut server, now()); let mut client = default_client(); client .enable_resumption(now(), token) .expect("should set token"); let mut server = default_server(); connect(&mut client, &mut server); assert!(client.tls_info().unwrap().resumed()); assert!(server.tls_info().unwrap().resumed()); } #[test] fn remember_smoothed_rtt() { const RTT1: Duration = Duration::from_millis(130); const RTT2: Duration = Duration::from_millis(70); let mut client = default_client(); let mut server = default_server(); let now = connect_with_rtt(&mut client, &mut server, now(), RTT1); assert_eq!(client.loss_recovery.rtt(), RTT1); let token = exchange_ticket(&mut client, &mut server, now); let mut client = default_client(); let mut server = default_server(); client.enable_resumption(now, token).unwrap(); assert_eq!( client.loss_recovery.rtt(), RTT1, "client should remember previous RTT" ); connect_with_rtt(&mut client, &mut server, now, RTT2); assert_eq!( client.loss_recovery.rtt(), RTT2, "previous RTT should be completely erased" ); } #[test] fn address_validation_token_resume() { const RTT: Duration = Duration::from_millis(10); let mut client = default_client(); let mut server = default_server(); let validation = AddressValidation::new(now(), ValidateAddress::Always).unwrap(); let validation = Rc::new(RefCell::new(validation)); server.set_validation(Rc::clone(&validation)); let mut now = connect_with_rtt(&mut client, &mut server, now(), RTT); let token = exchange_ticket(&mut client, &mut server, now); let mut client = 
default_client(); client.enable_resumption(now, token).unwrap(); let mut server = default_server(); let dgram = client.process(None, now).dgram(); assertions::assert_initial(dgram.as_ref().unwrap(), true); now += AT_LEAST_PTO; connect_with_rtt(&mut client, &mut server, now, RTT); assert!(client.crypto.tls.info().unwrap().resumed()); assert!(server.crypto.tls.info().unwrap().resumed()); } fn can_resume(token: impl AsRef<[u8]>, initial_has_token: bool) { let mut client = default_client(); client.enable_resumption(now(), token).unwrap(); let initial = client.process_output(now()).dgram(); assertions::assert_initial(initial.as_ref().unwrap(), initial_has_token); } #[test] fn two_tickets_on_timer() { let mut client = default_client(); let mut server = default_server(); connect(&mut client, &mut server); server.send_ticket(now(), &[]).expect("send ticket1"); server.send_ticket(now(), &[]).expect("send ticket2"); let pkt = send_something(&mut server, now()); assert!(client.process(Some(pkt), now()).dgram().is_some()); assert_eq!(get_tokens(&mut client).len(), 0); let mut now = now() + 3 * client.get_pto(); let _ = client.process(None, now); let mut recv_tokens = get_tokens(&mut client); assert_eq!(recv_tokens.len(), 1); let token1 = recv_tokens.pop().unwrap(); now += 3 * client.get_pto(); let _ = client.process(None, now); let mut recv_tokens = get_tokens(&mut client); assert_eq!(recv_tokens.len(), 1); let token2 = recv_tokens.pop().unwrap(); now += 3 * client.get_pto(); let _ = client.process(None, now); assert_eq!(get_tokens(&mut client).len(), 0); assert_ne!(token1.as_ref(), token2.as_ref()); can_resume(&token1, false); can_resume(&token2, false); } #[test] fn two_tickets_with_new_token() { let mut client = default_client(); let mut server = default_server(); let validation = AddressValidation::new(now(), ValidateAddress::Always).unwrap(); let validation = Rc::new(RefCell::new(validation)); server.set_validation(Rc::clone(&validation)); connect(&mut client, &mut 
server); server.send_ticket(now(), &[]).expect("send ticket1"); server.send_ticket(now(), &[]).expect("send ticket2"); let pkt = send_something(&mut server, now()); client.process_input(pkt, now()); let mut all_tokens = get_tokens(&mut client); assert_eq!(all_tokens.len(), 2); let token1 = all_tokens.pop().unwrap(); let token2 = all_tokens.pop().unwrap(); assert_ne!(token1.as_ref(), token2.as_ref()); can_resume(&token1, true); can_resume(&token2, true); } #[test]
fn take_token() { let mut client = default_client(); let mut server = default_server(); connect(&mut client, &mut server); server.send_ticket(now(), &[]).unwrap(); let dgram = server.process(None, now()).dgram(); client.process_input(dgram.unwrap(), now()); let tokens = get_tokens(&mut client); assert_eq!(tokens.len(), 0); let token = client.take_resumption_token(now()).unwrap(); can_resume(&token, false); }
function_block-full_function
[ { "content": "/// Connect with an RTT and then force both peers to be idle.\n\nfn connect_rtt_idle(client: &mut Connection, server: &mut Connection, rtt: Duration) -> Instant {\n\n let now = connect_with_rtt(client, server, now(), rtt);\n\n let now = force_idle(client, server, rtt, now);\n\n // Drain e...
Rust
demo-kitferret/src/st7735.rs
GuiAmPm/kit-ferret-rs
51e042dd591095ce51d18d8b4c31c93c47ac0efd
pub mod instruction; use crate::spi_controller::SpiController; use ferret_rs::system::ScreenTrait; use crate::st7735::instruction::Instruction; use embedded_hal::blocking::delay::DelayMs; use embedded_hal::digital::v2::OutputPin; pub struct ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { spi: SPI, dc: DC, rst: RST, rgb: bool, inverted: bool, pub width: u16, pub height: u16, buffer: Option<&'a mut [u8]>, interlace: bool, interlace_even: bool } impl<'a, SPI, DC, RST> ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { pub fn new( spi: SPI, dc: DC, rst: RST, rgb: bool, inverted: bool, width: u16, height: u16, ) -> Self { let display = ST7735 { spi, dc, rst, rgb, inverted, width, height, buffer: None, interlace: false, interlace_even: false }; display } pub fn set_buffer(&mut self, buffer: Option<&'a mut [u8]>) { self.buffer = buffer; } pub fn set_interlace(&mut self, value: bool) { if !value { self.set_address_window(0, 0, self.width - 1, self.height - 1) } self.interlace = value; } fn update_entire_screen(&mut self) -> Result<(), ()> { if self.buffer != None { self.write_command(Instruction::RAMWR, &[])?; self.start_data()?; let buffer = self.buffer.as_ref().unwrap(); self.spi.write(&buffer); Ok(()) } else { todo!() } } fn update_screen_interlace(&mut self) -> Result<(), ()> { if self.buffer != None { let width = self.width; let height = self.height; let even = self.interlace_even; let start = if even { 0 } else { 1 }; for y in (start..height).step_by(2) { self.set_address_window(0, y, 160, y + 1); self.write_command(Instruction::RAMWR, &[])?; self.start_data()?; let buffer = self.buffer.as_ref().unwrap(); let start_y = y as usize * 160 * 2; let end_y = (y as usize + 1) * 160 * 2; self.spi.write(&buffer[(start_y..end_y)]); } self.interlace_even = !self.interlace_even; Ok(()) } else { todo!() } } pub fn init<DELAY>(&mut self, delay: &mut DELAY) where DELAY: DelayMs<u8>, { log::info!("Initialising 
screen"); self.hard_reset(delay); log::info!("SWRESET"); self.write_command(Instruction::SWRESET, &[]); delay.delay_ms(200); log::info!("SLPOUT"); self.write_command(Instruction::SLPOUT, &[]); delay.delay_ms(200); log::info!("FRMCTR1"); self.write_command(Instruction::FRMCTR1, &[0x01, 0x2C, 0x2D]); log::info!("FRMCTR2"); self.write_command(Instruction::FRMCTR2, &[0x01, 0x2C, 0x2D]); log::info!("FRMCTR3"); self.write_command(Instruction::FRMCTR3, &[0x01, 0x2C, 0x2D, 0x01, 0x2C, 0x2D]); log::info!("INVCTR"); self.write_command(Instruction::INVCTR, &[0x07]); log::info!("PWCTR1"); self.write_command(Instruction::PWCTR1, &[0xA2, 0x02, 0x84]); log::info!("PWCTR2"); self.write_command(Instruction::PWCTR2, &[0xC5]); log::info!("PWCTR3"); self.write_command(Instruction::PWCTR3, &[0x0A, 0x00]); log::info!("PWCTR4"); self.write_command(Instruction::PWCTR4, &[0x8A, 0x2A]); log::info!("PWCTR5"); self.write_command(Instruction::PWCTR5, &[0x8A, 0xEE]); log::info!("VMCTR1"); self.write_command(Instruction::VMCTR1, &[0x0E]); if self.inverted { log::info!("INVON"); self.write_command(Instruction::INVON, &[]); } else { log::info!("INVOFF"); self.write_command(Instruction::INVOFF, &[]); } if self.rgb { log::info!("MADCTL"); self.write_command(Instruction::MADCTL, &[0x00]); } else { log::info!("MADCTL"); self.write_command(Instruction::MADCTL, &[0x08]); } log::info!("COLMOD"); self.write_command(Instruction::COLMOD, &[0x05]); log::info!("COLMOD"); self.write_command(Instruction::DISPON, &[]); log::info!("MADCTL"); self.write_command(Instruction::MADCTL, &[0x60]); self.set_address_window(0, 0, self.width - 1, self.height - 1); delay.delay_ms(200); } pub fn hard_reset<DELAY>(&mut self, delay: &mut DELAY) -> Result<(), ()> where DELAY: DelayMs<u8>, { self.rst.set_high().map_err(|_| ())?; delay.delay_ms(10); self.rst.set_low().map_err(|_| ())?; delay.delay_ms(10); self.rst.set_high().map_err(|_| ()) } pub fn set_address_window(&mut self, sx: u16, sy: u16, ex: u16, ey: u16) { 
self.write_command(Instruction::CASET, &[]); self.start_data(); self.write_word(sx); self.write_word(ex); self.write_command(Instruction::RASET, &[]); self.start_data(); self.write_word(sy); self.write_word(ey); } fn write_word(&mut self, value: u16) { self.write_data(&value.to_be_bytes()); } fn write_command(&mut self, command: Instruction, params: &[u8]) -> Result<(), ()> { self.dc.set_low().map_err(|_| ())?; self.spi.write(&[command as u8]); if !params.is_empty() { self.start_data()?; self.write_data(params); } Ok(()) } fn start_data(&mut self) -> Result<(), ()> { self.dc.set_high().map_err(|_| ()) } fn write_data(&mut self, data: &[u8]) { self.spi.write(data); } fn set_pixel_internal(&mut self, x: u16, y: u16, r: u8, g: u8, b: u8) { let width = self.width; if x > self.width as u16 || y > self.height as u16 { return; } if let Some(buffer) = &mut self.buffer { let index = (y * width as u16 + x) as usize; let r = ((((r as u16) * 31 / 255) & 0b0011_1111) as u16) << 11; let g = ((((g as u16) * 63 / 255) & 0b0111_1111) as u16) << 5; let b = ((((b as u16) * 31 / 255) & 0b0011_1111) as u16) << 0; let color = r+g+b; let bytes = color.to_be_bytes(); buffer[index * 2 + 0] = bytes[0]; buffer[index * 2 + 1] = bytes[1]; } else { } } fn clear_internal(&mut self, red: u8, green: u8, blue: u8) { if let Some(buffer) = &mut self.buffer { for x in (0..buffer.len()).step_by(2) { let r = ((((red as u16) * 31 / 255) & 0b0011_1111) as u16) << 11; let g = ((((green as u16) * 63 / 255) & 0b0111_1111) as u16) << 5; let b = ((((blue as u16) * 31 / 255) & 0b0011_1111) as u16) << 0; let color = r+g+b; let bytes = color.to_be_bytes(); buffer[x + 0] = bytes[0]; buffer[x + 1] = bytes[1]; } } } } impl<'a, SPI, DC, RST> ScreenTrait for ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { fn get_width(&self) -> u16 { self.width } fn get_height(&self) -> u16 { self.height } fn set_pixel(&mut self, x: u16, y: u16, r: u8, g: u8, b: u8) { self.set_pixel_internal(x, y, 
r, g, b); } fn clear(&mut self, r: u8, g: u8, b: u8) { self.clear_internal(r, g, b); } fn update_screen(&mut self) -> core::result::Result<(), ()> { if self.interlace { self.update_screen_interlace() } else { self.update_entire_screen() } } }
pub mod instruction; use crate::spi_controller::SpiController; use ferret_rs::system::ScreenTrait; use crate::st7735::instruction::Instruction; use embedded_hal::blocking::delay::DelayMs; use embedded_hal::digital::v2::OutputPin; pub struct ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { spi: SPI, dc: DC, rst: RST, rgb: bool, inverted: bool, pub width: u16, pub height: u16, buffer: Option<&'a mut [u8]>, interlace: bool, interlace_even: bool } impl<'a, SPI, DC, RST> ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { pub fn new( spi: SPI, dc: DC, rst: RST, rgb: bool, inverted: bool, width: u16, height: u16, ) -> Self { let display = ST7735 { spi, dc, rst, rgb, inverted, width, height, buffer: None, interlace: false, interlace_even: false }; display } pub fn set_buffer(&mut self, buffer: Option<&'a mut [u8]>) { self.buffer = buffer; } pub fn set_interlace(&mut self, value: bool) { if !value { self.set_address_window(0, 0, self.width - 1, self.height - 1) } self.interlace = value; }
fn update_screen_interlace(&mut self) -> Result<(), ()> { if self.buffer != None { let width = self.width; let height = self.height; let even = self.interlace_even; let start = if even { 0 } else { 1 }; for y in (start..height).step_by(2) { self.set_address_window(0, y, 160, y + 1); self.write_command(Instruction::RAMWR, &[])?; self.start_data()?; let buffer = self.buffer.as_ref().unwrap(); let start_y = y as usize * 160 * 2; let end_y = (y as usize + 1) * 160 * 2; self.spi.write(&buffer[(start_y..end_y)]); } self.interlace_even = !self.interlace_even; Ok(()) } else { todo!() } } pub fn init<DELAY>(&mut self, delay: &mut DELAY) where DELAY: DelayMs<u8>, { log::info!("Initialising screen"); self.hard_reset(delay); log::info!("SWRESET"); self.write_command(Instruction::SWRESET, &[]); delay.delay_ms(200); log::info!("SLPOUT"); self.write_command(Instruction::SLPOUT, &[]); delay.delay_ms(200); log::info!("FRMCTR1"); self.write_command(Instruction::FRMCTR1, &[0x01, 0x2C, 0x2D]); log::info!("FRMCTR2"); self.write_command(Instruction::FRMCTR2, &[0x01, 0x2C, 0x2D]); log::info!("FRMCTR3"); self.write_command(Instruction::FRMCTR3, &[0x01, 0x2C, 0x2D, 0x01, 0x2C, 0x2D]); log::info!("INVCTR"); self.write_command(Instruction::INVCTR, &[0x07]); log::info!("PWCTR1"); self.write_command(Instruction::PWCTR1, &[0xA2, 0x02, 0x84]); log::info!("PWCTR2"); self.write_command(Instruction::PWCTR2, &[0xC5]); log::info!("PWCTR3"); self.write_command(Instruction::PWCTR3, &[0x0A, 0x00]); log::info!("PWCTR4"); self.write_command(Instruction::PWCTR4, &[0x8A, 0x2A]); log::info!("PWCTR5"); self.write_command(Instruction::PWCTR5, &[0x8A, 0xEE]); log::info!("VMCTR1"); self.write_command(Instruction::VMCTR1, &[0x0E]); if self.inverted { log::info!("INVON"); self.write_command(Instruction::INVON, &[]); } else { log::info!("INVOFF"); self.write_command(Instruction::INVOFF, &[]); } if self.rgb { log::info!("MADCTL"); self.write_command(Instruction::MADCTL, &[0x00]); } else { log::info!("MADCTL"); 
self.write_command(Instruction::MADCTL, &[0x08]); } log::info!("COLMOD"); self.write_command(Instruction::COLMOD, &[0x05]); log::info!("COLMOD"); self.write_command(Instruction::DISPON, &[]); log::info!("MADCTL"); self.write_command(Instruction::MADCTL, &[0x60]); self.set_address_window(0, 0, self.width - 1, self.height - 1); delay.delay_ms(200); } pub fn hard_reset<DELAY>(&mut self, delay: &mut DELAY) -> Result<(), ()> where DELAY: DelayMs<u8>, { self.rst.set_high().map_err(|_| ())?; delay.delay_ms(10); self.rst.set_low().map_err(|_| ())?; delay.delay_ms(10); self.rst.set_high().map_err(|_| ()) } pub fn set_address_window(&mut self, sx: u16, sy: u16, ex: u16, ey: u16) { self.write_command(Instruction::CASET, &[]); self.start_data(); self.write_word(sx); self.write_word(ex); self.write_command(Instruction::RASET, &[]); self.start_data(); self.write_word(sy); self.write_word(ey); } fn write_word(&mut self, value: u16) { self.write_data(&value.to_be_bytes()); } fn write_command(&mut self, command: Instruction, params: &[u8]) -> Result<(), ()> { self.dc.set_low().map_err(|_| ())?; self.spi.write(&[command as u8]); if !params.is_empty() { self.start_data()?; self.write_data(params); } Ok(()) } fn start_data(&mut self) -> Result<(), ()> { self.dc.set_high().map_err(|_| ()) } fn write_data(&mut self, data: &[u8]) { self.spi.write(data); } fn set_pixel_internal(&mut self, x: u16, y: u16, r: u8, g: u8, b: u8) { let width = self.width; if x > self.width as u16 || y > self.height as u16 { return; } if let Some(buffer) = &mut self.buffer { let index = (y * width as u16 + x) as usize; let r = ((((r as u16) * 31 / 255) & 0b0011_1111) as u16) << 11; let g = ((((g as u16) * 63 / 255) & 0b0111_1111) as u16) << 5; let b = ((((b as u16) * 31 / 255) & 0b0011_1111) as u16) << 0; let color = r+g+b; let bytes = color.to_be_bytes(); buffer[index * 2 + 0] = bytes[0]; buffer[index * 2 + 1] = bytes[1]; } else { } } fn clear_internal(&mut self, red: u8, green: u8, blue: u8) { if let 
Some(buffer) = &mut self.buffer { for x in (0..buffer.len()).step_by(2) { let r = ((((red as u16) * 31 / 255) & 0b0011_1111) as u16) << 11; let g = ((((green as u16) * 63 / 255) & 0b0111_1111) as u16) << 5; let b = ((((blue as u16) * 31 / 255) & 0b0011_1111) as u16) << 0; let color = r+g+b; let bytes = color.to_be_bytes(); buffer[x + 0] = bytes[0]; buffer[x + 1] = bytes[1]; } } } } impl<'a, SPI, DC, RST> ScreenTrait for ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { fn get_width(&self) -> u16 { self.width } fn get_height(&self) -> u16 { self.height } fn set_pixel(&mut self, x: u16, y: u16, r: u8, g: u8, b: u8) { self.set_pixel_internal(x, y, r, g, b); } fn clear(&mut self, r: u8, g: u8, b: u8) { self.clear_internal(r, g, b); } fn update_screen(&mut self) -> core::result::Result<(), ()> { if self.interlace { self.update_screen_interlace() } else { self.update_entire_screen() } } }
fn update_entire_screen(&mut self) -> Result<(), ()> { if self.buffer != None { self.write_command(Instruction::RAMWR, &[])?; self.start_data()?; let buffer = self.buffer.as_ref().unwrap(); self.spi.write(&buffer); Ok(()) } else { todo!() } }
function_block-full_function
[ { "content": "// ported from: https://www.geeksforgeeks.org/implement-itoa/\n\npub fn integer_to_string<T>(mut num: T, buffer: &mut [char], base: u8)\n\nwhere T: IntegerType {\n\n let mut index = 0;\n\n let mut is_negative = false;\n\n\n\n if num.is_zero() {\n\n buffer[index] = '0';\n\n b...
Rust
macros/src/command/slash.rs
noituri/poise
45f27a86990a9443077c0cb457bd7600af625b3b
use syn::spanned::Spanned as _; use super::{extract_option_type, extract_vec_type, Invocation}; fn generate_options(inv: &Invocation) -> proc_macro2::TokenStream { let check = match &inv.more.check { Some(check) => { quote::quote! { Some(|ctx| Box::pin(#check(ctx.into()))) } } None => quote::quote! { None }, }; let on_error = match &inv.more.on_error { Some(on_error) => quote::quote! { Some(|err, ctx| Box::pin(#on_error(err, ::poise::CommandErrorContext::Application(ctx)))) }, None => quote::quote! { None }, }; let ephemeral = inv.more.ephemeral; quote::quote! { ::poise::ApplicationCommandOptions { check: #check, on_error: #on_error, ephemeral: #ephemeral, } } } pub fn generate_slash_command_spec( inv: &Invocation, ) -> Result<proc_macro2::TokenStream, darling::Error> { let command_name = &inv.command_name; let description = inv.description.as_ref().ok_or_else(|| { syn::Error::new( inv.function.sig.span(), "slash commands must have a description (doc comment)", ) })?; let mut parameter_structs = Vec::new(); for param in inv.parameters { let description = param.more.description.as_ref().ok_or_else(|| { syn::Error::new( param.span, "slash command parameters must have a description", ) })?; let (mut required, type_) = match extract_option_type(&param.type_).or_else(|| extract_vec_type(&param.type_)) { Some(t) => (false, t), None => (true, &param.type_), }; if param.more.flag { required = false; } let param_name = &param.name; let autocomplete_callback = match &param.more.autocomplete { Some(autocomplete_fn) => { quote::quote! 
{ Some(| ctx: poise::ApplicationContext<'_, _, _>, interaction: &poise::serenity_prelude::AutocompleteInteraction, options: &[poise::serenity_prelude::ApplicationCommandInteractionDataOption], | Box::pin(async move { use ::poise::futures::{Stream, StreamExt}; let choice = match options .iter() .find(|option| option.focused && option.name == stringify!(#param_name)) { Some(x) => x, None => return Ok(()), }; let json_value = choice.value .as_ref() .ok_or(::poise::SlashArgError::CommandStructureMismatch("expected argument value"))?; let partial_input = (&&&&&std::marker::PhantomData::<#type_>).extract_partial(json_value)?; let choices_stream = ::poise::into_stream!( #autocomplete_fn(ctx.into(), partial_input).await ); let choices_json = choices_stream .take(25) .map(|value| poise::AutocompleteChoice::from(value)) .map(|choice| serde_json::json!({ "name": choice.name, "value": (&&&&&std::marker::PhantomData::<#type_>).into_json(choice.value), })) .collect() .await; let choices_json = poise::serde_json::Value::Array(choices_json); if let Err(e) = interaction .create_autocomplete_response( &ctx.discord.http, |b| b.set_choices(choices_json), ) .await { println!("Warning: couldn't send autocomplete response: {}", e); } Ok(()) })) } } None => quote::quote! { None }, }; let is_autocomplete = param.more.autocomplete.is_some(); parameter_structs.push(( quote::quote! { ::poise::SlashCommandParameter { builder: |o| (&&&&&std::marker::PhantomData::<#type_>).create(o) .required(#required) .name(stringify!(#param_name)) .description(#description) .set_autocomplete(#is_autocomplete), autocomplete_callback: #autocomplete_callback, } }, required, )); } parameter_structs.sort_by_key(|(_, required)| !required); let parameter_structs = parameter_structs .into_iter() .map(|(builder, _)| builder) .collect::<Vec<_>>(); let param_names = inv.parameters.iter().map(|p| &p.name).collect::<Vec<_>>(); let param_types = inv .parameters .iter() .map(|p| match p.more.flag { true => syn::parse_quote! 
{ FLAG }, false => p.type_.clone(), }) .collect::<Vec<_>>(); let options = generate_options(inv); Ok(quote::quote! { ::poise::SlashCommand { name: #command_name, description: #description, parameters: { use ::poise::{SlashArgumentHack, AutocompletableHack}; vec![ #( #parameter_structs, )* ] }, action: |ctx, args| Box::pin(async move { #[allow(clippy::needless_question_mark)] let ( #( #param_names, )* ) = ::poise::parse_slash_args!( ctx.discord, ctx.interaction.guild_id(), ctx.interaction.channel_id(), args => #( (#param_names: #param_types), )* ).await?; inner(ctx.into(), #( #param_names, )*).await }), id: std::sync::Arc::clone(&command_id), options: #options, } }) } pub fn generate_context_menu_command_spec( inv: &Invocation, name: &str, ) -> Result<proc_macro2::TokenStream, darling::Error> { if inv.parameters.len() != 1 { return Err(syn::Error::new( inv.function.sig.inputs.span(), "Context menu commands require exactly one parameter", ) .into()); } let param_type = &inv.parameters[0].type_; let options = generate_options(inv); Ok(quote::quote! { ::poise::ContextMenuCommand { name: #name, action: <#param_type as ::poise::ContextMenuParameter<_, _>>::to_action(|ctx, value| { Box::pin(async move { inner(ctx.into(), value).await }) }), id: std::sync::Arc::clone(&command_id), options: #options, } }) }
use syn::spanned::Spanned as _; use super::{extract_option_type, extract_vec_type, Invocation}; fn generate_options(inv: &Invocation) -> proc_macro2::TokenStream { let check = match &inv.more.check { Some(check) => { quote::quote! { Some(|ctx| Box::pin(#check(ctx.into()))) } } None => quote::quote! { None }, }; let on_error = match &inv.more.on_error { Some(on_error) => quote::quote! { Some(|err, ctx| Box::pin(#on_error(err, ::poise::CommandErrorContext::Application(ctx)))) }, None => quote::quote! { None }, }; let ephemeral = inv.more.ephemeral; quote::quote! { ::poise::ApplicationCommandOptions { check: #check, on_error: #on_error, ephemeral: #ephemeral, } } } pub fn generate_slash_command_spec( inv: &Invocation, ) -> Result<proc_macro2::TokenStream, darling::Error> { let command_name = &inv.command_name; let description = inv.description.as_ref().ok_or_else(|| { syn::Error::new( inv.function.sig.span(), "slash commands must have a description (doc comment)", ) })?; let mut parameter_structs = Vec::new(); for param in inv.parameters { let description = param.more.description.as_ref().ok_or_else(|| { syn::Error::new( param.span, "slash command parameters must have a description", ) })?; let (mut required, type_) = match extract_option_type(&param.type_).or_else(|| extract_vec_type(&param.type_)) { Some(t) => (false, t), None => (true, &param.type_), }; if param.more.flag { required = false; } let param_name = &param.name; let autocomplete_callback = match &param.more.autocomplete { Some(autocomplete_fn) => { quote::quote! 
{ Some(| ctx: poise::ApplicationContext<'_, _, _>, interaction: &poise::serenity_prelude::AutocompleteInteraction, options: &[poise::serenity_prelude::ApplicationCommandInteractionDataOption], | Box::pin(async move { use ::poise::futures::{Stream, StreamExt}; let choice = match options .iter() .find(|option| option.focused && option.name == stringify!(#param_name)) { Some(x) => x, None => return Ok(()), }; let json_value = choice.value .as_ref() .ok_or(::poise::SlashArgError::CommandStructureMismatch("expected argument value"))?; let partial_input = (&&&&&std::marker::PhantomData::<#type_>).extract_partial(json_value)?; let choices_stream = ::poise::into_stream!( #autocomplete_fn(ctx.into(), partial_input).await ); let choices_json = choices_stream .take(25) .map(|value| poise::AutocompleteChoice::from(value)) .map(|choice| serde_json::json!({ "name": choice.name, "value": (&&&&&std::marker::PhantomData::<#type_>).into_json(choice.value), })) .collect() .await; let choices_json = poise::serde_json::Value::Array(choices_json); if let Err(e) = interaction .create_autocomplete_response( &ctx.discord.http, |b| b.set_choices(choices_json), ) .await { println!("Warning: couldn't send autocomplete response: {}", e); } Ok(()) })) } } None => quote::quote! { None }, }; let is_autocomplete = param.more.autocomplete.is_some(); parameter_structs.push(( quote::quote! { ::poise::SlashCommandParameter { builder: |o| (&&&&&std::marker::PhantomData::<#type_>).create(o) .required(#required) .name(stringify!(#param_name)) .description(#description) .set_autocomplete(#is_autocomplete), autocomplete_callback: #autocomplete_callback, } }, required, )); } parameter_structs.sort_by_key(|(_, required)| !required); let parameter_structs = parameter_structs .into_iter() .map(|(builder, _)| builder) .collect::<Vec<_>>(); let param_names = inv.parameters.iter().map(|p| &p.name).collect::<Vec<_>>(); let param_types = inv .parameters .iter() .map(|p| match p.more.flag { true => syn::parse_quote! 
{ FLAG }, false => p.type_.clone(), }) .collect::<Vec<_>>(); let options = generate_options(inv); Ok(quote::quote! { ::poise::SlashCommand { name: #command_name, description: #description, parameters: { use ::poise::{SlashArgumentHack, AutocompletableHack}; vec![ #( #parameter_structs, )* ] }, action: |ctx, args| Box::pin(async move { #[allow(clippy::needless_question_mark)] let ( #( #param_names, )* ) = ::poise::parse_slash_args!( ctx.discord, ctx.interaction.guild_id(), ctx.interaction.channel_id(), args => #( (#param_names: #param_types), )* ).await?; inner(ctx.into(), #( #param_names, )*).await }), id: std::sync::Arc::clone(&command_id), options: #options, } }) } pub fn generate_context_menu_command_spec( inv: &Invocation, name: &str, ) -> Result<proc_macro2::TokenStream, darling::Error> { if inv.parameters.len() != 1 { return Err(syn::Error::new( inv.function.sig.inputs.span(), "Context menu commands require exactly one parameter", ) .into()); } let param_type = &inv.parameters[0].type_; let options = generate_options(inv);
}
Ok(quote::quote! { ::poise::ContextMenuCommand { name: #name, action: <#param_type as ::poise::ContextMenuParameter<_, _>>::to_action(|ctx, value| { Box::pin(async move { inner(ctx.into(), value).await }) }), id: std::sync::Arc::clone(&command_id), options: #options, } })
call_expression
[ { "content": "/// Implemented for all types that can be used in a context menu command\n\npub trait ContextMenuParameter<U, E> {\n\n /// Convert an action function pointer that takes Self as an argument into the appropriate\n\n /// [`crate::ContextMenuCommandAction`] variant.\n\n fn to_action(\n\n ...
Rust
src/tokenizer.rs
quail-lang/quail
696b6f11b65776843320468fdad0acc9dfad1312
use std::fmt;

/// A lexical token of the Quail language; every variant carries the source
/// location it was produced with.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Token {
    Ident(Loc, String),
    /// A hole `?`, optionally named (`?foo`) and optionally carrying
    /// brace-delimited contents (`?foo{ ... }`).
    Hole(Loc, Option<String>, Option<String>),
    Lambda(Loc),
    Let(Loc),
    Def(Loc),
    Equals(Loc),
    In(Loc),
    Arrow(Loc),
    FatArrow(Loc),
    LeftParen(Loc),
    RightParen(Loc),
    LeftCurly(Loc),
    RightCurly(Loc),
    Match(Loc),
    With(Loc),
    Import(Loc),
    Colon(Loc),
    Dollar(Loc),
    As(Loc),
    Str(Loc, String),
    Nat(Loc, usize),
}

/// Cursor-based tokenizer over a pre-collected character buffer.
pub struct Tokenizer {
    input: Vec<char>,
    cur: usize,
    loc: Loc,
}

/// A source position: optional file path plus zero-based line and column.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Loc {
    pub path: Option<String>,
    pub line: usize,
    pub col: usize,
}

/// Tokenization failures are reported as plain messages.
type TokenizeErr = String;

impl Token {
    /// The token's kind as an uppercase static name (used in error messages).
    pub fn name(&self) -> &'static str {
        use Token::*;
        match self {
            Ident(_loc, _x) => "IDENT",
            Hole(_loc, _x, _contents) => "HOLE",
            Lambda(_loc) => "LAMBDA",
            Let(_loc) => "LET",
            Def(_loc) => "DEF",
            Equals(_loc) => "EQUALS",
            In(_loc) => "IN",
            Arrow(_loc) => "ARROW",
            FatArrow(_loc) => "FATARROW",
            LeftParen(_loc) => "LEFTPAREN",
            RightParen(_loc) => "RIGHTPAREN",
            LeftCurly(_loc) => "LEFTCURLY",
            RightCurly(_loc) => "RIGHTCURLY",
            Match(_loc) => "MATCH",
            With(_loc) => "WITH",
            Import(_loc) => "IMPORT",
            Colon(_loc) => "COLON",
            Dollar(_loc) => "DOLLAR",
            As(_loc) => "AS",
            Str(_loc, _val) => "STR",
            Nat(_loc, _val) => "NAT",
        }
    }

    /// Human-readable rendering; payload-carrying tokens include their payload.
    pub fn show(&self) -> String {
        use Token::*;
        match self {
            Ident(_loc, x) => format!("IDENT({})", x),
            Hole(_loc, x, _contents) => format!("HOLE({:?}, ...)", x),
            Str(_loc, val) => format!("STR({})", val),
            Nat(_loc, val) => format!("NAT({})", val),
            // Every payload-free token renders exactly as its `name()`.
            other => other.name().to_string(),
        }
    }

    /// The location the token was produced with.
    pub fn loc(&self) -> &Loc {
        use Token::*;
        match self {
            Ident(loc, _) | Str(loc, _) | Nat(loc, _) => loc,
            Hole(loc, _, _) => loc,
            Lambda(loc) | Let(loc) | Def(loc) | Equals(loc) | In(loc) | Arrow(loc)
            | FatArrow(loc) | LeftParen(loc) | RightParen(loc) | LeftCurly(loc)
            | RightCurly(loc) | Match(loc) | With(loc) | Import(loc) | Colon(loc)
            | Dollar(loc) | As(loc) => loc,
        }
    }
}

impl Tokenizer {
    pub fn new(source: Option<String>, input: &str) -> Self {
        Tokenizer {
            input: input.chars().collect(),
            cur: 0,
            loc: Loc::new(source),
        }
    }

    /// Consume the entire input, returning all tokens in order.
    pub fn tokenize(&mut self) -> Result<Vec<Token>, TokenizeErr> {
        let mut tokens = Vec::new();
        while let Some(token) = self.token()? {
            tokens.push(token);
        }
        Ok(tokens)
    }

    /// Tokenize and group the tokens by their source line; lines without
    /// tokens yield empty groups.
    fn tokenize_lines(&mut self) -> Result<Vec<Vec<Token>>, TokenizeErr> {
        let toks = self.tokenize()?;
        let mut lines: Vec<Vec<Token>> = Vec::new();
        let mut cur_line: Vec<Token> = Vec::new();
        let mut line_no = 0;
        for tok in toks {
            // Flush (possibly empty) line groups until we reach this token's line.
            while tok.loc().line > line_no {
                line_no += 1;
                lines.push(cur_line);
                cur_line = Vec::new();
            }
            cur_line.push(tok);
        }
        lines.push(cur_line);
        Ok(lines)
    }

    /// Try to recognize a two-character punctuation token (`->`, `=>`).
    /// NOTE(review): the location is cloned *after* consuming, so it points
    /// just past the token; kept as-is to preserve existing behavior.
    fn double_character_token(&mut self) -> Option<Token> {
        let head_char = self.peek()?;
        let next_char = self.peek_ahead(1)?;
        let chars = format!("{}{}", head_char, next_char);

        macro_rules! double_char_token {
            ($characters:literal, $tok:ident) => {
                if chars == $characters {
                    self.consume();
                    self.consume();
                    return Some(Token::$tok(self.loc.clone()));
                }
            };
        }

        double_char_token!("->", Arrow);
        double_char_token!("=>", FatArrow);
        None
    }

    /// Try to recognize a one-character punctuation token.
    fn single_character_token(&mut self) -> Option<Token> {
        let head_char = self.peek()?;

        macro_rules! single_char_token {
            ($character:literal, $tok:ident) => {
                if head_char == $character {
                    self.consume();
                    return Some(Token::$tok(self.loc.clone()));
                }
            };
        }

        single_char_token!('(', LeftParen);
        single_char_token!(')', RightParen);
        single_char_token!('{', LeftCurly);
        single_char_token!('}', RightCurly);
        single_char_token!(':', Colon);
        single_char_token!('$', Dollar);
        single_char_token!('=', Equals);
        None
    }

    /// Produce the next token, or `Ok(None)` at end of input.
    fn token(&mut self) -> Result<Option<Token>, TokenizeErr> {
        // Skip whitespace and `#`-to-end-of-line comments.
        while let Some(head_char) = self.peek() {
            if head_char.is_ascii_whitespace() {
                self.consume();
            } else if head_char == '#' {
                self.consume_comment();
            } else {
                break;
            }
        }

        match self.peek() {
            Some(head_char) => {
                // Two-character tokens must win over their one-character
                // prefixes (`=>` before `=`).
                if let Some(tok) = self.double_character_token() {
                    Ok(Some(tok))
                } else if let Some(tok) = self.single_character_token() {
                    Ok(Some(tok))
                } else if head_char.is_ascii_alphabetic() {
                    Ok(Some(self.tokenize_identifier()?))
                } else if head_char == '?' {
                    Ok(Some(self.tokenize_hole()?))
                } else if head_char == '"' {
                    Ok(Some(self.tokenize_str()?))
                } else if head_char.is_ascii_digit() {
                    Ok(Some(self.tokenize_nat()?))
                } else {
                    Err(format!("Unexpected character while parsing: {}", head_char))
                }
            }
            None => Ok(None),
        }
    }

    /// Tokenize a hole: `?`, then an optional name, then optional
    /// `{ ... }` contents with balanced nesting of curly braces.
    fn tokenize_hole(&mut self) -> Result<Token, TokenizeErr> {
        let loc = self.loc.clone();
        assert_eq!(self.consume(), Some('?'));

        let name = match self.peek() {
            None => return Ok(Token::Hole(loc, None, None)),
            Some(chr) if chr.is_ascii_alphabetic() => {
                if let Token::Ident(_, token_name) = self.tokenize_identifier()? {
                    Some(token_name)
                } else {
                    // A keyword directly after `?` was never accepted before
                    // either; preserve the original panic behavior.
                    unreachable!();
                }
            }
            Some(_) => None,
        };

        if let Some('{') = self.peek() {
            let mut level = 1;
            let mut contents = String::new();
            self.consume();
            while let Some(peek_char) = self.consume() {
                if peek_char == '{' {
                    level += 1;
                } else if peek_char == '}' {
                    level -= 1;
                }
                if level == 0 {
                    break;
                } else {
                    contents.push(peek_char);
                }
            }
            if level != 0 {
                Err("Mismatch curly braces.".to_string())
            } else {
                Ok(Token::Hole(loc, name, Some(contents)))
            }
        } else {
            Ok(Token::Hole(loc, name, None))
        }
    }

    /// Tokenize a double-quoted string literal (no escape sequences).
    fn tokenize_str(&mut self) -> Result<Token, TokenizeErr> {
        let loc = self.loc.clone();
        assert_eq!(self.consume(), Some('"'));
        let mut buffer = String::new();
        loop {
            match self.consume() {
                None => return Err("Expected \" but found end of file. Good luck!".to_string()),
                Some('"') => break,
                Some(chr) => buffer.push(chr),
            }
        }
        Ok(Token::Str(loc, buffer))
    }

    /// Tokenize a natural-number literal.
    fn tokenize_nat(&mut self) -> Result<Token, TokenizeErr> {
        let loc = self.loc.clone();
        let mut buffer = String::new();
        match self.peek() {
            None => return Err("Expected digit but found end of file. Good luck!".to_owned()),
            Some(ch) => {
                if !ch.is_ascii_digit() {
                    return Err(format!("Expected digit but found {}.", ch));
                }
                while let Some(ch) = self.peek() {
                    if ch.is_ascii_digit() {
                        self.consume();
                        buffer.push(ch);
                    } else {
                        break;
                    }
                }
            }
        }
        // Previously `.unwrap()`: a literal larger than usize::MAX panicked
        // the tokenizer. Report it as a tokenization error instead.
        let n = buffer
            .parse::<usize>()
            .map_err(|_| format!("Natural literal {} is too large.", buffer))?;
        Ok(Token::Nat(loc, n))
    }

    /// Tokenize an identifier or keyword: letters/underscores followed by
    /// optional trailing primes (e.g. `x''`).
    fn tokenize_identifier(&mut self) -> Result<Token, TokenizeErr> {
        let loc = self.loc.clone();

        let mut first_char = '\0';
        match self.peek() {
            Some(chr) => {
                self.consume();
                first_char = chr;
            }
            // Callers only invoke this when an alphabetic character is
            // pending, so end-of-input here is a bug; '\0' is not
            // alphabetic, so this assertion fails loudly in that case.
            None => assert!(first_char.is_ascii_alphabetic()),
        }

        let mut token_string = String::new();
        token_string.push(first_char);

        // Identifier body: letters and underscores.
        while let Some(peek_char) = self.peek() {
            if peek_char.is_ascii_alphabetic() || peek_char == '_' {
                self.consume();
                token_string.push(peek_char);
            } else {
                break;
            }
        }

        // Trailing primes.
        while let Some(peek_char) = self.peek() {
            if peek_char == '\'' {
                self.consume();
                token_string.push(peek_char);
            } else {
                break;
            }
        }

        // Keywords take priority over plain identifiers. (The per-call
        // keyword HashMap was dropped; the keyword tokens used the same
        // entry location, so this is behavior-identical.)
        Ok(match token_string.as_str() {
            "fun" => Token::Lambda(loc),
            "let" => Token::Let(loc),
            "def" => Token::Def(loc),
            "in" => Token::In(loc),
            "match" => Token::Match(loc),
            "with" => Token::With(loc),
            "import" => Token::Import(loc),
            "as" => Token::As(loc),
            _ => Token::Ident(loc, token_string),
        })
    }

    fn peek(&self) -> Option<char> {
        self.peek_ahead(0)
    }

    fn peek_ahead(&self, k: usize) -> Option<char> {
        self.input.get(self.cur + k).copied()
    }

    /// The next `len` characters (or fewer at end of input), for debugging.
    #[allow(dead_code)]
    fn preview(&self, len: usize) -> String {
        let mut s = String::new();
        for i in 0..len {
            if let Some(ch) = self.peek_ahead(i) {
                s.push(ch);
            } else {
                break;
            }
        }
        s
    }

    /// Advance one character, keeping `self.loc` in sync with newlines.
    fn consume(&mut self) -> Option<char> {
        let peek_char = self.peek()?;
        if peek_char == '\n' {
            self.loc.next_line();
        } else {
            self.loc.next_col();
        }
        self.cur += 1;
        Some(peek_char)
    }

    /// Skip a `#` comment through its terminating newline (or to EOF).
    fn consume_comment(&mut self) {
        while let Some(consume_char) = self.consume() {
            if consume_char == '\n' {
                break;
            }
        }
    }
}

impl Loc {
    fn new(source: Option<String>) -> Self {
        Loc {
            path: source,
            line: 0,
            col: 0,
        }
    }

    fn next_line(&mut self) {
        self.line += 1;
        self.col = 0;
    }

    fn next_col(&mut self) {
        self.col += 1;
    }
}

impl fmt::Display for Loc {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Locations are 0-based internally but displayed 1-based.
        write!(f, "Line {} col {}", self.line + 1, self.col + 1)?;
        if let Some(path) = &self.path {
            write!(f, " at {}", path)?;
        }
        Ok(())
    }
}

/// Tokenize `input`, labelling locations with `source` as the path.
pub fn tokenize(source: Option<String>, input: &str) -> Result<Vec<Token>, TokenizeErr> {
    let mut tokenizer = Tokenizer::new(source, input);
    tokenizer.tokenize()
}

/// Tokenize `input` and group the tokens by source line.
pub fn tokenize_lines(source: Option<String>, input: &str) -> Result<Vec<Vec<Token>>, TokenizeErr> {
    let mut tokenizer = Tokenizer::new(source, input);
    tokenizer.tokenize_lines()
}
use std::fmt; use std::collections::HashMap; #[derive(Debug, PartialEq, Eq, Clone)] pub enum Token { Ident(Loc, String), Hole(Loc, Option<String>, Option<String>), Lambda(Loc), Let(Loc), Def(Loc), Equals(Loc), In(Loc), Arrow(Loc), FatArrow(Loc), LeftParen(Loc), RightParen(Loc), LeftCurly(Loc), RightCurly(Loc), Match(Loc), With(Loc), Import(Loc), Colon(Loc), Dollar(Loc), As(Loc), Str(Loc, String), Nat(Loc, usize), } pub struct Tokenizer { input: Vec<char>, cur: usize, loc: Loc, } #[derive(Debug, PartialEq, Eq, Clone)] pub struct Loc { pub path: Option<String>, pub line: usize, pub col: usize, } type TokenizeErr = String; impl Token { pub fn name(&self) -> &'static str { use Token::*; match self { Ident(_loc, _x) => "IDENT", Hole(_loc, _x, _contents) => "HOLE", Lambda(_loc) => "LAMBDA", Let(_loc) => "LET", Def(_loc) => "DEF", Equals(_loc) => "EQUALS", In(_loc) => "IN", Arrow(_loc) => "ARROW", FatArrow(_loc) => "FATARROW", LeftParen(_loc) => "LEFTPAREN", RightParen(_loc) => "RIGHTPAREN", LeftCurly(_loc) => "LEFTCURLY", RightCurly(_loc) => "RIGHTCURLY", Match(_loc) => "MATCH", With(_loc) => "WITH", Import(_loc) => "IMPORT", Colon(_loc) => "COLON", Dollar(_loc) => "DOLLAR", As(_loc) => "AS", Str(_loc, _val) => "STR", Nat(_loc, _val) => "NAT", } } pub fn show(&self) -> String { use Token::*; match self { Ident(_loc, x) => format!("IDENT({})", x), Hole(_loc, x, _contents) => format!("HOLE({:?}, ...)", x), Lambda(_loc) => format!("LAMBDA"), Let(_loc) => format!("LET"), Def(_loc) => format!("DEF"), Equals(_loc) => format!("EQUALS"), In(_loc) => format!("IN"), Arrow(_loc) => format!("ARROW"), FatArrow(_loc) => format!("FATARROW"), LeftParen(_loc) => format!("LEFTPAREN"), RightParen(_loc) => format!("RIGHTPAREN"), LeftCurly(_loc) => format!("LEFTCURLY"), RightCurly(_loc) => format!("RIGHTCURLY"), Match(_loc) => format!("MATCH"), With(_loc) => format!("WITH"), Import(_loc) => format!("IMPORT"), Colon(_loc) => format!("COLON"), Dollar(_loc) => format!("DOLLAR"), As(_loc) => 
format!("AS"), Str(_loc, val) => format!("STR({})", val), Nat(_loc, val) => format!("NAT({})", val), } } pub fn loc(&self) -> &Loc { use Token::*; match self { Ident(loc, _x) => loc, Hole(loc, _x, _contents) => loc, Lambda(loc) => loc, Let(loc) => loc, Def(loc) => loc, Equals(loc) => loc, In(loc) =>
ng(), Token::With(self.loc.clone())), ("import".to_string(), Token::Import(self.loc.clone())), ("as".to_string(), Token::As(self.loc.clone())), ].iter().cloned().collect(); let loc = self.loc.clone(); let mut first_char = '\0'; match self.peek() { Some(chr) => { self.consume(); first_char = chr; }, None => assert!(first_char.is_ascii_alphabetic()), } let mut token_string = String::new(); token_string.push(first_char); while let Some(peek_char) = self.peek() { if peek_char.is_ascii_alphabetic() || peek_char == '_' { self.consume(); token_string.push(peek_char); } else { break; } } while let Some(peek_char) = self.peek() { if peek_char == '\'' { self.consume(); token_string.push(peek_char); } else { break; } } match keywords.get(&token_string) { Some(token) => Ok(token.clone()), None => Ok(Token::Ident(loc, token_string)) } } fn peek(&self) -> Option<char> { self.peek_ahead(0) } fn peek_ahead(&self, k: usize) -> Option<char> { match self.input.get(self.cur + k) { Some(c) => Some(*c), None => None, } } #[allow(dead_code)] fn preview(&self, len: usize) -> String { let mut s = String::new(); for i in 0..len { if let Some(ch) = self.peek_ahead(i) { s.push(ch); } else { break; } } s } fn consume(&mut self) -> Option<char> { match self.peek() { Some(peek_char) => { if peek_char == '\n' { self.loc.next_line(); } else { self.loc.next_col(); } self.cur += 1; Some(peek_char) }, None => None, } } fn consume_comment(&mut self) { while let Some(consume_char) = self.consume() { if consume_char == '\n' { break } } } } impl Loc { fn new(source: Option<String>) -> Self { Loc { path: source, line: 0, col: 0, } } fn next_line(&mut self) { self.line += 1; self.col = 0; } fn next_col(&mut self) { self.col += 1; } } impl fmt::Display for Loc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Line {} col {}", self.line + 1, self.col + 1)?; if let Some(path) = &self.path { write!(f, " at {}", path)?; } Ok(()) } } pub fn tokenize(source: Option<String>, input: &str) -> 
Result<Vec<Token>, TokenizeErr> { let mut tokenizer = Tokenizer::new(source, input); tokenizer.tokenize() } pub fn tokenize_lines(source: Option<String>, input: &str) -> Result<Vec<Vec<Token>>, TokenizeErr> { let mut tokenizer = Tokenizer::new(source, input); tokenizer.tokenize_lines() }
loc, Arrow(loc) => loc, FatArrow(loc) => loc, LeftParen(loc) => loc, RightParen(loc) => loc, LeftCurly(loc) => loc, RightCurly(loc) => loc, Match(loc) => loc, With(loc) => loc, Import(loc) => loc, Colon(loc) => loc, Dollar(loc) => loc, As(loc) => loc, Str(loc, _val) => loc, Nat(loc, _val) => loc, } } } impl Tokenizer { pub fn new(source: Option<String>, input: &str) -> Self { Tokenizer { input: input.chars().collect(), cur: 0, loc: Loc::new(source), } } pub fn tokenize(&mut self) -> Result<Vec<Token>, TokenizeErr> { let mut tokens = Vec::new(); while let Some(token) = self.token()? { tokens.push(token); } Ok(tokens) } fn tokenize_lines(&mut self) -> Result<Vec<Vec<Token>>, TokenizeErr> { let toks: Vec<Token> = self.tokenize()?; let mut lines: Vec<Vec<Token>> = Vec::new(); let mut cur_line: Vec<Token> = Vec::new(); let mut line_no = 0; for tok in toks { while tok.loc().line > line_no { line_no += 1; lines.push(cur_line); cur_line = Vec::new(); } cur_line.push(tok); } lines.push(cur_line); Ok(lines) } fn double_character_token(&mut self) -> Option<Token> { let head_char = self.peek()?; let next_char = self.peek_ahead(1)?; let chars = format!("{}{}", head_char, next_char); macro_rules! double_char_token { ($characters:literal, $tok:ident) => { if chars == $characters { self.consume(); self.consume(); return Some(Token::$tok(self.loc.clone())); } } } double_char_token!("->", Arrow); double_char_token!("=>", FatArrow); return None; } fn single_character_token(&mut self) -> Option<Token> { let head_char = self.peek()?; macro_rules! 
single_char_token { ($character:literal, $tok:ident) => { if head_char == $character { self.consume(); return Some(Token::$tok(self.loc.clone())); } } } single_char_token!('(', LeftParen); single_char_token!(')', RightParen); single_char_token!('{', LeftCurly); single_char_token!('}', RightCurly); single_char_token!(':', Colon); single_char_token!('$', Dollar); single_char_token!('=', Equals); return None; } fn token(&mut self) -> Result<Option<Token>, TokenizeErr> { while let Some(head_char) = self.peek() { if head_char.is_ascii_whitespace() { self.consume(); } else if head_char == '#' { self.consume_comment(); } else { break; } } match self.peek() { Some(head_char) => { if let Some(tok) = self.double_character_token() { Ok(Some(tok)) } else if let Some(tok) = self.single_character_token() { Ok(Some(tok)) } else if head_char.is_ascii_alphabetic() { let token = self.tokenize_identifier()?; Ok(Some(token)) } else if head_char == '?' { Ok(Some(self.tokenize_hole()?)) } else if head_char == '"' { Ok(Some(self.tokenize_str()?)) } else if head_char.is_ascii_digit() { Ok(Some(self.tokenize_nat()?)) } else { Err(format!("Unexpected character while parsing: {}", head_char)) } }, None => Ok(None), } } fn tokenize_hole(&mut self) -> Result<Token, TokenizeErr> { let loc = self.loc.clone(); assert_eq!(self.consume(), Some('?')); let peek_char : char; let name: Option<String>; match self.peek() { None => return Ok(Token::Hole(loc, None, None)), Some(chr) => peek_char = chr, } if peek_char.is_ascii_alphabetic() { if let Token::Ident(_, token_name) = self.tokenize_identifier()? 
{ name = Some(token_name); } else { unreachable!(); } } else { name = None; } if let Some('{') = self.peek() { let mut level = 1; let mut contents = String::new(); self.consume(); while let Some(peek_char) = self.consume() { if peek_char == '{' { level += 1; } else if peek_char == '}' { level -= 1; } if level == 0 { break; } else { contents.push(peek_char); } } if level != 0 { Err("Mismatch curly braces.".to_string()) } else { Ok(Token::Hole(loc, name, Some(contents))) } } else { Ok(Token::Hole(loc, name, None)) } } fn tokenize_str(&mut self) -> Result<Token, TokenizeErr> { #![allow(irrefutable_let_patterns)] let loc = self.loc.clone(); assert_eq!(self.consume(), Some('"')); let mut buffer = String::new(); while let consume_char = self.consume() { match consume_char { None => return Err("Expected \" but found end of file. Good luck!".to_string()), Some(chr) => { if chr == '"' { break; } else { buffer.push(chr); } }, } } Ok(Token::Str(loc, buffer)) } fn tokenize_nat(&mut self) -> Result<Token, TokenizeErr> { let loc = self.loc.clone(); let mut buffer = String::new(); match self.peek() { None => return Err("Expected digit but found end of file. Good luck!".to_owned()), Some(ch) => { if !ch.is_ascii_digit() { return Err(format!("Expected digit but found {}.", ch)); } while let Some(ch) = self.peek() { if ch.is_ascii_digit() { self.consume(); buffer.push(ch); } else { break; } } } } let n = buffer.parse::<usize>().unwrap(); Ok(Token::Nat(loc, n)) } fn tokenize_identifier(&mut self) -> Result<Token, TokenizeErr> { let keywords: HashMap<String, Token> = vec![ ("fun".to_string(), Token::Lambda(self.loc.clone())), ("let".to_string(), Token::Let(self.loc.clone())), ("def".to_string(), Token::Def(self.loc.clone())), ("in".to_string(), Token::In(self.loc.clone())), ("match".to_string(), Token::Match(self.loc.clone())), ("with".to_stri
random
[ { "content": "pub fn parse_import(source: Option<String>, input: &str) -> Result<Import, ParseErr> {\n\n let mut toker = Tokenizer::new(source, input);\n\n let tokens = toker.tokenize()?;\n\n\n\n let mut parser = Parser::new(tokens);\n\n parser.parse_import()\n\n}\n\n\n", "file_path": "src/parse...
Rust
src/structs.rs
th0rex/pe_load
edf8caf2a2ae97f48c711398894d5e92a4b1a09b
// Raw views of the on-disk PE/COFF structures (headers, sections, imports,
// TLS, base relocations), plus iterators for walking them in a mapped image.
// The layouts mirror winnt.h / the Microsoft PE format specification, hence
// the #[repr(...)] attributes and raw-pointer arithmetic below.

use std::marker::PhantomData;
use std::mem::size_of;
use std::os::raw::{c_char, c_void};
use std::slice;

use super::rva::{Pointer, RVA};

/// `IMAGE_FILE_HEADER.Machine` values.
// NOTE(review): the PE spec defines IMAGE_FILE_MACHINE_AMD64 as 0x8664;
// the 0x8864 here looks like a typo — confirm before matching on X64.
#[repr(u16)]
pub enum Machine {
    X64 = 0x8864,
    I386 = 0x14c,
}

/// Magic value of the optional header (PE32+ = 523/0x20b, PE32 = 267/0x10b).
#[repr(u16)]
pub enum OptionalHeaderSignature {
    X64 = 523,
    X86 = 267,
    ROM = 263,
}

/// Section characteristic flags (`IMAGE_SCN_*`).
#[repr(u32)]
#[derive(Clone, Copy)]
pub enum Characteristics {
    NoPad = 0x8,
    CntCode = 0x20,
    CntInitializedData = 0x40,
    CntUninitializedData = 0x80,
    Gprel = 0x8000,
    NumRelocationsOverflow = 0x1000000,
    MemExecute = 0x20000000,
    MemRead = 0x40000000,
    MemWrite = 0x80000000,
}

/// File characteristic flags (`IMAGE_FILE_*`).
#[repr(u16)]
pub enum FileCharacteristics {
    RelocsStripped = 0x1,
    ExecutableImage = 0x2,
    LineNumsStripped = 0x4,
    LocalSymsStripped = 0x8,
    LargeAddressAware = 0x20,
    X86Machine = 0x100,
    DebugStripped = 0x200,
    RemovableRunFromSwap = 0x400,
    NetRunFromSwap = 0x800,
    System = 0x1000,
    Dll = 0x2000,
    SingleProcessorOnly = 0x4000,
}

/// `IMAGE_SUBSYSTEM_*` values.
#[repr(u16)]
pub enum Subsystem {
    Unknown = 0,
    Native = 1,
    WindowsGUI = 2,
    WindowsCUI = 3,
    OS2Cui = 5,
    PosixCui = 7,
    WindowsCEGui = 9,
    EFIApplication = 10,
    EFIBootServiceDriver = 11,
    EFIRuntimeDriver = 12,
    EFIRom = 13,
    XBox = 14,
    WindowsBootApplication = 16,
}

/// Indices into the optional header's data-directory array.
#[repr(u16)]
pub enum DirectoryEntry {
    Export = 0,
    Import = 1,
    Resource = 2,
    Exception = 3,
    Security = 4,
    Basereloc = 5,
    Debug = 6,
    Architecture = 7,
    Globalptr = 8,
    Tls = 9,
    LoadConfig = 10,
    BoundImport = 11,
    Iat = 12,
    DelayImport = 13,
    ComDescriptor = 14,
}

/// The 4-bit relocation type stored in the high nibble of a relocation entry.
#[derive(Debug, PartialEq)]
pub struct RelocationType(u16);

pub const RelocateAbsolute: RelocationType = RelocationType(0);
pub const RelocateHighLow: RelocationType = RelocationType(3);
pub const RelocateDir64: RelocationType = RelocationType(10);

/// `IMAGE_DLLCHARACTERISTICS_*` flags.
#[repr(u16)]
pub enum DllCharacteristics {
    DynamicBase = 0x40,
    ForceIntegrity = 0x80,
    NXCompat = 0x100,
    NoIsolation = 0x200,
    NoSEH = 0x400,
    NoBind = 0x800,
    WDMDriver = 0x2000,
    TerminalServerAware = 0x8000,
}

/// The `Misc` union of `IMAGE_SECTION_HEADER`.
pub union MiscUnion {
    physical_address: u32,
    virtual_size: u32,
}

/// `IMAGE_SECTION_HEADER`.
#[repr(C)]
pub struct ImageSectionHeader {
    pub name: [c_char; 8],
    pub misc: MiscUnion,
    pub(crate) virtual_address: RVA<u32, Pointer<*mut u8>>,
    pub size_of_raw_data: u32,
    pub p_raw_data: u32,
    pub p_reloc: u32,
    pub p_line_nums: u32,
    pub num_relocations: u16,
    pub num_line_nums: u16,
    pub characteristics: Characteristics,
}

/// Header of one base-relocation block; the block's 16-bit entries follow
/// immediately after this struct in memory.
#[repr(C)]
pub struct ImageBaseRelocation {
    pub(crate) virtual_address: RVA<u32, Pointer<*mut u64>>,
    pub size_of_block: u32,
}

impl ImageBaseRelocation {
    /// Iterate over this block and all following relocation blocks.
    pub fn base_relocations<'a>(&'a self) -> BaseRelocationIterator<'a> {
        BaseRelocationIterator::new(self)
    }

    /// Iterate over the (type, offset) entries of this single block.
    pub fn relocations(&self) -> RelocationIterator {
        RelocationIterator::new(self)
    }

    /// The block following this one in memory, or `None` when the next
    /// block header is all zeroes (treated here as a terminator).
    pub fn next_relocation(&self) -> Option<&ImageBaseRelocation> {
        // The u16 entries start directly after this header.
        let relocations_start = unsafe { (self as *const _).offset(1) as *const u16 };
        // Number of u16 entries in this block.
        let count = (self.size_of_block as usize - size_of::<ImageBaseRelocation>()) / 2;
        // SAFETY: relies on the mapped image actually containing another
        // block header (or zero padding) past this block's entries.
        let next_base_relocation =
            unsafe { &*(relocations_start.offset(count as _) as *const ImageBaseRelocation) };
        if next_base_relocation.virtual_address.value == 0
            && next_base_relocation.size_of_block == 0
        {
            None
        } else {
            Some(next_base_relocation)
        }
    }
}

/// The 12-bit page offset stored in the low bits of a relocation entry.
pub type RelocationOffset = u16;

/// Walks the chain of base-relocation blocks.
pub struct BaseRelocationIterator<'a> {
    current: Option<&'a ImageBaseRelocation>,
}

impl<'a> BaseRelocationIterator<'a> {
    fn new(current: &'a ImageBaseRelocation) -> Self {
        Self {
            current: Some(current),
        }
    }
}

impl<'a> Iterator for BaseRelocationIterator<'a> {
    type Item = &'a ImageBaseRelocation;

    fn next(&mut self) -> Option<Self::Item> {
        if let Some(c) = self.current {
            self.current = c.next_relocation();
            Some(c)
        } else {
            None
        }
    }
}

/// Walks the 16-bit entries of a single relocation block.
pub struct RelocationIterator {
    relocation: *const u16,
    current: usize,
    count: usize,
}

impl RelocationIterator {
    fn new(base_relocation: &ImageBaseRelocation) -> Self {
        Self {
            // Entries start directly after the block header.
            relocation: unsafe { (base_relocation as *const _).offset(1) as *const _ },
            current: 0,
            count: (base_relocation.size_of_block as usize - size_of::<ImageBaseRelocation>()) / 2,
        }
    }
}

impl Iterator for RelocationIterator {
    type Item = (RelocationType, RelocationOffset);

    fn next(&mut self) -> Option<Self::Item> {
        if self.current >= self.count {
            None
        } else {
            let value = unsafe { &*self.relocation.offset(self.current as _) };
            self.current += 1;
            // High nibble = relocation type, low 12 bits = offset in page.
            Some((
                RelocationType((value >> 12) & 0b00001111u16),
                value & 0x0FFFu16,
            ))
        }
    }
}

/// Walks the thunk table of one import descriptor until the null thunk.
pub(crate) struct ThunkIterator<'a> {
    current: Pointer<*mut ThunkData>,
    _p: PhantomData<&'a u32>,
}

impl<'a> ThunkIterator<'a> {
    fn new(c: &'a ImportDescriptor, base: u64) -> Self {
        Self {
            current: c.first_thunk.resolve(base),
            _p: PhantomData,
        }
    }
}

impl<'a> Iterator for ThunkIterator<'a> {
    type Item = &'a mut ThunkData;

    fn next(&mut self) -> Option<Self::Item> {
        // A thunk whose value is zero terminates the table.
        if unsafe { self.current.address_of_data.value } != 0 {
            let current = self.current.p;
            self.current = Pointer {
                p: unsafe { current.offset(1) },
            };
            Some(unsafe { &mut *current })
        } else {
            None
        }
    }
}

/// `IMAGE_IMPORT_DESCRIPTOR`.
#[repr(C)]
pub struct ImportDescriptor {
    pub imports_by_name: u32,
    pub time_stamp: u32,
    pub forwarder_chain: u32,
    pub(crate) name: RVA<u32, Pointer<*const c_char>>,
    pub(crate) first_thunk: RVA<u32, Pointer<*mut ThunkData>>,
}

impl ImportDescriptor {
    /// Thunks of this descriptor; `base` is the image base for RVA resolution.
    pub(crate) fn thunk_iterator<'a>(&'a self, base: u64) -> ThunkIterator<'a> {
        ThunkIterator::new(self, base)
    }

    /// Descriptors from this one through the null terminator entry.
    pub(crate) fn import_iterator<'a>(&'a self) -> ImportIterator<'a> {
        ImportIterator::new(self)
    }
}

/// Walks an array of import descriptors until the entry whose name RVA is 0.
pub(crate) struct ImportIterator<'a> {
    p: *const ImportDescriptor,
    _p: PhantomData<&'a ImportDescriptor>,
}

impl<'a> ImportIterator<'a> {
    fn new(i: &'a ImportDescriptor) -> Self {
        Self {
            p: i as *const _,
            _p: PhantomData,
        }
    }
}

impl<'a> Iterator for ImportIterator<'a> {
    type Item = &'a ImportDescriptor;

    fn next(&mut self) -> Option<Self::Item> {
        unsafe {
            if (*self.p).name.value == 0 {
                None
            } else {
                let item = &(*self.p);
                self.p = self.p.offset(1);
                Some(item)
            }
        }
    }
}

/// True when the thunk imports by ordinal (bit 63, IMAGE_ORDINAL_FLAG64, set).
pub fn image_snap_by_ordinal(ordinal: u64) -> bool {
    (ordinal & 0x8000000000000000) != 0
}

/// The 16-bit ordinal number of a by-ordinal thunk value.
pub fn image_ordinal(ordinal: u64) -> u64 {
    ordinal & 0xffff
}

/// `IMAGE_THUNK_DATA64` — which field is meaningful depends on context
/// (before binding: name RVA or ordinal; after binding: function address).
#[repr(C)]
pub union ThunkData {
    pub forwarder_string: u64,
    pub function: u64,
    pub ordinal: u64,
    pub(crate) address_of_data: RVA<u64, Pointer<*const ImageImportByName>>,
}

/// `IMAGE_TLS_DIRECTORY64`.
// NOTE(review): the spec's directory also has SizeOfZeroFill and
// Characteristics fields after these four — verify no code reads past
// address_of_callbacks assuming the full layout.
#[repr(C)]
pub struct TlsDirectory {
    pub address_of_raw_data: u64,
    pub end_address_of_raw_data: u64,
    pub address_of_index: u64,
    pub address_of_callbacks: u64,
}

/// Signature of a PE TLS callback.
pub type TlsCallback = Option<extern "stdcall" fn(*mut c_void, u32, *mut c_void)>;

/// `IMAGE_IMPORT_BY_NAME`; `name` is the first byte of a trailing C string.
#[repr(C)]
pub struct ImageImportByName {
    pub hint: u16,
    pub name: c_char,
}

/// One data-directory entry, typed by what its RVA points at.
#[repr(C)]
pub struct DataEntry<T> {
    pub(crate) virtual_address: RVA<u32, Pointer<*mut T>>,
    pub size: u32,
}

/// `IMAGE_OPTIONAL_HEADER64`; the data-directory array is not declared as a
/// field but follows immediately after this struct (see `data_entries_start`).
#[repr(C)]
pub struct OptionalHeader {
    pub signature: OptionalHeaderSignature,
    pub _major_linker_version: c_char,
    pub _minor_linker_version: c_char,
    pub size_of_code: u32,
    pub size_of_initialized_data: u32,
    pub size_of_uninitialized_data: u32,
    pub address_of_entry_point: u32,
    pub base_of_code: u32,
    pub image_base: u64,
    pub section_alignment: u32,
    pub file_alignment: u32,
    pub major_os_version: u16,
    pub minor_os_version: u16,
    pub major_image_version: u16,
    pub minor_image_version: u16,
    pub major_subsystem_version: u16,
    pub minor_subsystem_version: u16,
    pub win32_version: u32,
    pub size_of_image: u32,
    pub size_of_headers: u32,
    pub checksum: u32,
    pub subsystem: Subsystem,
    pub dll_characteristics: DllCharacteristics,
    pub size_of_stack_reserve: u64,
    pub size_of_stack_commit: u64,
    pub size_of_heap_reserve: u64,
    pub size_of_heap_commit: u64,
    pub __loader_flags: u32,
    pub num_of_rva_and_sizes: u32,
}

impl OptionalHeader {
    /// Pointer to the first data-directory entry, located directly after
    /// this header in memory.
    fn data_entries_start(&self) -> *const u8 {
        unsafe { (self as *const _ as *const u8).offset(size_of::<OptionalHeader>() as _) }
    }

    /// The `e`-th data-directory entry, viewed as pointing at `T`.
    // NOTE(review): `e` is not checked against num_of_rva_and_sizes.
    fn data_entry<T>(&self, e: DirectoryEntry) -> &DataEntry<T> {
        unsafe {
            let ptr = (self.data_entries_start() as *const DataEntry<T>).offset(e as _);
            &*ptr
        }
    }

    /// First import descriptor of the image, or `None` when the import
    /// directory is absent or empty. `base` is the loaded image base the
    /// RVA is resolved against.
    pub(crate) fn get_import_descriptor(
        &self,
        base: u64,
    ) -> Option<Pointer<*const ImportDescriptor>> {
        let entry = self.data_entry::<ImportDescriptor>(DirectoryEntry::Import);
        if entry.virtual_address.value == 0 || entry.size == 0 {
            None
        } else {
            Some(entry.virtual_address.resolve(base).into())
        }
    }

    /// First base-relocation block, or `None` when there are no relocations.
    pub(crate) fn get_relocation_entries(
        &self,
        base: u64,
    ) -> Option<Pointer<*mut ImageBaseRelocation>> {
        let entry = self.data_entry::<ImageBaseRelocation>(DirectoryEntry::Basereloc);
        if entry.virtual_address.value == 0 {
            None
        } else {
            let reloc = entry.virtual_address.resolve(base);
            // A block no bigger than its header contains no entries.
            if reloc.size_of_block as usize <= size_of::<ImageBaseRelocation>() {
                None
            } else {
                Some(reloc)
            }
        }
    }

    /// The TLS directory, or `None` when the image has none.
    pub(crate) fn get_tls_entries(&self, base: u64) -> Option<Pointer<*const TlsDirectory>> {
        let entry = self.data_entry::<TlsDirectory>(DirectoryEntry::Tls);
        if entry.virtual_address.value == 0 {
            None
        } else {
            Some(entry.virtual_address.resolve(base).into())
        }
    }

    /*pub fn get_data_entries(&self) -> &[DataEntry] { unsafe { let self_ptr = self as *const _ as *const c_char; slice::from_raw_parts( self_ptr.offset(size_of::<OptionalHeader>() as _) as *const _, self.num_of_rva_and_sizes as _, ) } }*/
}

/// `IMAGE_FILE_HEADER`.
#[repr(C)]
pub struct FileHeader {
    pub machine: Machine,
    pub num_sections: u16,
    pub time_date: u32,
    pub p_symbol_table: u32,
    pub num_symbols: u32,
    pub size_optional_header: u16,
    pub characteristics: u16,
}

impl FileHeader {
    /// The section-header table, which follows the optional header.
    pub fn get_sections(&self) -> &[ImageSectionHeader] {
        unsafe {
            let self_ptr = self as *const _ as *const c_char;
            slice::from_raw_parts(
                self_ptr.offset((size_of::<FileHeader>() + self.size_optional_header as usize) as _)
                    as *const _,
                self.num_sections as _,
            )
        }
    }
}

/// `IMAGE_NT_HEADERS64`: "PE\0\0" signature plus the two headers.
#[repr(C)]
pub struct PeHeader {
    pub signature: [c_char; 4],
    pub file_header: FileHeader,
    pub optional_header: OptionalHeader,
}

/// `IMAGE_DOS_HEADER`, reduced to the fields this loader needs; the 58
/// skipped bytes sit between e_magic and e_lfanew.
#[repr(C)]
pub struct DosHeader {
    pub signature: [c_char; 2],
    pub not_needed: [c_char; 58],
    pub offset_to_pe_header: u32,
}

impl DosHeader {
    /// The NT headers, found `e_lfanew` bytes past the start of this header.
    pub fn get_pe_header(&self) -> &PeHeader {
        unsafe {
            let self_ptr = self as *const _ as *const c_char;
            &*(self_ptr.offset(self.offset_to_pe_header as _) as *const _)
        }
    }
}
use std::marker::PhantomData; use std::mem::size_of; use std::os::raw::{c_char, c_void}; use std::slice; use super::rva::{Pointer, RVA}; #[repr(u16)] pub enum Machine { X64 = 0x8864, I386 = 0x14c, } #[repr(u16)] pub enum OptionalHeaderSignature { X64 = 523, X86 = 267, ROM = 263, } #[repr(u32)] #[derive(Clone, Copy)] pub enum Characteristics { NoPad = 0x8, CntCode = 0x20, CntInitializedData = 0x40, CntUninitializedData = 0x80, Gprel = 0x8000, NumRelocationsOverflow = 0x1000000, MemExecute = 0x20000000, MemRead = 0x40000000, MemWrite = 0x80000000, } #[repr(u16)] pub enum FileCharacteristics { RelocsStripped = 0x1, ExecutableImage = 0x2, LineNumsStripped = 0x4, LocalSymsStripped = 0x8, LargeAddressAware = 0x20, X86Machine = 0x100, DebugStripped = 0x200, RemovableRunFromSwap = 0x400, NetRunFromSwap = 0x800, System = 0x1000, Dll = 0x2000, SingleProcessorOnly = 0x4000, } #[repr(u16)] pub enum Subsystem { Unknown = 0, Native = 1, WindowsGUI = 2, WindowsCUI = 3, OS2Cui = 5, PosixCui = 7, WindowsCEGui = 9, EFIApplication = 10, EFIBootServiceDriver = 11, EFIRuntimeDriver = 12, EFIRom = 13, XBox = 14, WindowsBootApplication = 16, } #[repr(u16)] pub enum DirectoryEntry { Export = 0, Import = 1, Resource = 2, Exception = 3, Security = 4, Basereloc = 5, Debug = 6, Architecture = 7, Globalptr = 8, Tls = 9, LoadConfig = 10, BoundImport = 11, Iat = 12, DelayImport = 13, ComDescriptor = 14, } #[derive(Debug, PartialEq)] pub struct RelocationType(u16); pub const RelocateAbsolute: RelocationType = RelocationType(0); pub const RelocateHighLow: RelocationType = RelocationType(3); pub const RelocateDir64: RelocationType = RelocationType(10); #[repr(u16)] pub enum DllCharacteristics { DynamicBase = 0x40, ForceIntegrity = 0x80, NXCompat = 0x100, NoIsolation = 0x200, NoSEH = 0x400, NoBind = 0x800, WDMDriver = 0x2000, TerminalServerAware = 0x8000, } pub union MiscUnion { physical_address: u32, virtual_size: u32, } #[repr(C)] pub struct ImageSectionHeader { pub name: [c_char; 8], pub misc: 
MiscUnion, pub(crate) virtual_address: RVA<u32, Pointer<*mut u8>>, pub size_of_raw_data: u32, pub p_raw_data: u32, pub p_reloc: u32, pub p_line_nums: u32, pub num_relocations: u16, pub num_line_nums: u16, pub characteristics: Characteristics, } #[repr(C)] pub struct ImageBaseRelocation { pub(crate) virtual_address: RVA<u32, Pointer<*mut u64>>, pub size_of_block: u32, } impl ImageBaseRelocation { pub fn base_relocations<'a>(&'a self) -> BaseRelocationIterator<'a> { BaseRelocationIterator::new(self) } pub fn relocations(&self) -> RelocationIterator { RelocationIterator::new(self) } pub fn next_relocation(&self) -> Option<&ImageBaseRelocation> { let relocations_start = unsafe { (self as *const _).offset(1) as *const u16 }; let count = (self.size_of_block as usize - size_of::<ImageBaseRelocation>()) / 2; let next_base_relocation = unsafe { &*(relocations_start.offset(count as _) as *const ImageBaseRelocation) }; if next_base_relocation.virtual_address.value == 0 && next_base_relocation.size_of_block == 0 { None } else { Some(next_base_relocation) } } } pub type RelocationOffset = u16; pub struct BaseRelocationIterator<'a> { current: Option<&'a ImageBaseRelocation>, } impl<'a> BaseRelocationIterator<'a> { fn new(current: &'a ImageBaseRelocation) -> Self { Self { current: Some(current), } } } impl<'a> Iterator for BaseRelocationIterator<'a> { type Item = &'a ImageBaseRelocation; fn next(&mut self) -> Option<Self::Item> { if let Some(c) = self.current { self.current = c.next_relocation(); Some(c) } else { None } } } pub struct RelocationIterator { relocation: *const u16, current: usize, count: usize, } impl RelocationIterator { fn new(base_relocation: &ImageBaseRelocation) -> Self { Self { relocation: unsafe { (base_relocation as *const _).offset(1) as *const _ }, current: 0, count: (base_relocation.size_of_block as usize - size_of::<ImageBaseRelocation>()) / 2, } } } impl Iterator for RelocationIterator { type Item = (RelocationType, RelocationOffset); fn next(&mut self) 
-> Option<Self::Item> { if self.current >= self.count { None } else { let value = unsafe { &*self.relocation.offset(self.current as _) }; self.current += 1; Some(( RelocationType((value >> 12) & 0b00001111u16), value & 0x0FFFu16, )) } } } pub(crate) struct ThunkIterator<'a> { current: Pointer<*mut ThunkData>, _p: PhantomData<&'a u32>, } impl<'a> ThunkIterator<'a> { fn new(c: &'a ImportDescriptor, base: u64) -> Self { Self { current: c.first_thunk.resolve(base), _p: PhantomData, } } } impl<'a> Iterator for ThunkIterator<'a> { type Item = &'a mut ThunkData; fn next(&mut self) -> Option<Self::Item> { if unsafe { self.current.address_of_data.value } != 0 { let current = self.current.p; self.current = Pointer { p: unsafe { current.offset(1) }, }; Some(unsafe { &mut *current }) } else { None } } } #[repr(C)] pub struct ImportDescriptor { pub imports_by_name: u32, pub time_stamp: u32, pub forwarder_chain: u32, pub(crate) name: RVA<u32, Pointer<*const c_char>>, pub(crate) first_thunk: RVA<u32, Pointer<*mut ThunkData>>, } impl ImportDescriptor { pub(crate) fn thunk_iterator<'a>(&'a self, base: u64) -> ThunkIterator<'a> { ThunkIterator::new(self, base) } pub(crate) fn import_iterator<'a>(&'a self) -> ImportIterator<'a> { ImportIterator::new(self) } } pub(crate) struct ImportIterator<'a> { p: *const ImportDescriptor, _p: PhantomData<&'a ImportDescriptor>, } impl<'a> ImportIterator<'a> { fn new(i: &'a ImportDescriptor) -> Self { Self { p: i as *const _, _p: PhantomData, } } } impl<'a> Iterator for ImportIterator<'a> { type Item = &'a ImportDescriptor; fn next(&mut self) -> Option<Self::Item> { unsafe { if (*self.p).name.value == 0 { None } else { let item = &(*self.p); self.p = self.p.offset(1); Some(item) } } } } pub fn image_snap_by_ordinal(ordinal: u64) -> bool { (ordinal & 0x8000000000000000) != 0 } pub fn image_ordinal(ordinal: u64) -> u64 { ordinal & 0xffff } #[repr(C)] pub union ThunkData { pub forwarder_string: u64, pub function: u64, pub ordinal: u64, pub(crate) 
address_of_data: RVA<u64, Pointer<*const ImageImportByName>>, } #[repr(C)] pub struct TlsDirectory { pub address_of_raw_data: u64, pub end_address_of_raw_data: u64, pub address_of_index: u64, pub address_of_callbacks: u64, } pub type TlsCallback = Option<extern "stdcall" fn(*mut c_void, u32, *mut c_void)>; #[repr(C)] pub struct ImageImportByName { pub hint: u16, pub name: c_char, } #[repr(C)] pub struct DataEntry<T> { pub(crate) virtual_address: RVA<u32, Pointer<*mut T>>, pub size: u32, } #[repr(C)] pub struct OptionalHeader { pub signature: OptionalHeaderSignature, pub _major_linker_version: c_char, pub _minor_linker_version: c_char, pub size_of_code: u32, pub size_of_initialized_data: u32, pub size_of_uninitialized_data: u32, pub address_of_entry_point: u32, pub base_of_code: u32, pub image_base: u64, pub sect
[c_char; 2], pub not_needed: [c_char; 58], pub offset_to_pe_header: u32, } impl DosHeader { pub fn get_pe_header(&self) -> &PeHeader { unsafe { let self_ptr = self as *const _ as *const c_char; &*(self_ptr.offset(self.offset_to_pe_header as _) as *const _) } } }
ion_alignment: u32, pub file_alignment: u32, pub major_os_version: u16, pub minor_os_version: u16, pub major_image_version: u16, pub minor_image_version: u16, pub major_subsystem_version: u16, pub minor_subsystem_version: u16, pub win32_version: u32, pub size_of_image: u32, pub size_of_headers: u32, pub checksum: u32, pub subsystem: Subsystem, pub dll_characteristics: DllCharacteristics, pub size_of_stack_reserve: u64, pub size_of_stack_commit: u64, pub size_of_heap_reserve: u64, pub size_of_heap_commit: u64, pub __loader_flags: u32, pub num_of_rva_and_sizes: u32, } impl OptionalHeader { fn data_entries_start(&self) -> *const u8 { unsafe { (self as *const _ as *const u8).offset(size_of::<OptionalHeader>() as _) } } fn data_entry<T>(&self, e: DirectoryEntry) -> &DataEntry<T> { unsafe { let ptr = (self.data_entries_start() as *const DataEntry<T>).offset(e as _); &*ptr } } pub(crate) fn get_import_descriptor( &self, base: u64, ) -> Option<Pointer<*const ImportDescriptor>> { let entry = self.data_entry::<ImportDescriptor>(DirectoryEntry::Import); if entry.virtual_address.value == 0 || entry.size == 0 { None } else { Some(entry.virtual_address.resolve(base).into()) } } pub(crate) fn get_relocation_entries( &self, base: u64, ) -> Option<Pointer<*mut ImageBaseRelocation>> { let entry = self.data_entry::<ImageBaseRelocation>(DirectoryEntry::Basereloc); if entry.virtual_address.value == 0 { None } else { let reloc = entry.virtual_address.resolve(base); if reloc.size_of_block as usize <= size_of::<ImageBaseRelocation>() { None } else { Some(reloc) } } } pub(crate) fn get_tls_entries(&self, base: u64) -> Option<Pointer<*const TlsDirectory>> { let entry = self.data_entry::<TlsDirectory>(DirectoryEntry::Tls); if entry.virtual_address.value == 0 { None } else { Some(entry.virtual_address.resolve(base).into()) } } /*pub fn get_data_entries(&self) -> &[DataEntry] { unsafe { let self_ptr = self as *const _ as *const c_char; slice::from_raw_parts( 
self_ptr.offset(size_of::<OptionalHeader>() as _) as *const _, self.num_of_rva_and_sizes as _, ) } }*/ } #[repr(C)] pub struct FileHeader { pub machine: Machine, pub num_sections: u16, pub time_date: u32, pub p_symbol_table: u32, pub num_symbols: u32, pub size_optional_header: u16, pub characteristics: u16, } impl FileHeader { pub fn get_sections(&self) -> &[ImageSectionHeader] { unsafe { let self_ptr = self as *const _ as *const c_char; slice::from_raw_parts( self_ptr.offset((size_of::<FileHeader>() + self.size_optional_header as usize) as _) as *const _, self.num_sections as _, ) } } } #[repr(C)] pub struct PeHeader { pub signature: [c_char; 4], pub file_header: FileHeader, pub optional_header: OptionalHeader, } #[repr(C)] pub struct DosHeader { pub signature:
random
[ { "content": "pub fn wrapped_dll_main(ep: extern \"C\" fn()) -> impl FnOnce() -> () {\n\n let x: extern \"stdcall\" fn(HINSTANCE, u32, *mut c_void) = unsafe { mem::transmute(ep) };\n\n let y = move || {\n\n // TODO: Use the mapped address as the HMODULE parameter (i.e. loader.image_base)\n\n ...
Rust
src/invoker/src/controller.rs
MikailBag/jjs
bf00423f70f8a6ed508bbcb8b38840225e43cc73
mod notify; mod task_loading; mod toolchains; use crate::{ scheduler::Scheduler, worker::{JudgeOutcome, Request, Response}, }; use anyhow::Context; use notify::Notifier; use std::{ path::{Path, PathBuf}, sync::Arc, }; use tracing::{debug, info, instrument}; use uuid::Uuid; #[derive(Debug)] struct LoweredJudgeRequestExtensions { notifier: Notifier, invocation_dir: PathBuf, } pub enum InvocationFinishReason { Fault, CompileError, TestingDone, } pub struct JudgeRequestAndCallbacks { pub request: invoker_api::JudgeRequest, pub callbacks: Arc<dyn JudgeResponseCallbacks>, } impl std::fmt::Debug for JudgeRequestAndCallbacks { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JudgeRequestAndCallbacks") .field("request", &self.request) .field("handler", &"..") .finish() } } #[async_trait::async_trait] pub trait JudgeResponseCallbacks: Send + Sync { async fn set_finished( &self, invocation_id: Uuid, reason: InvocationFinishReason, ) -> anyhow::Result<()>; async fn add_outcome_header( &self, invocation_id: Uuid, header: invoker_api::JudgeOutcomeHeader, ) -> anyhow::Result<()>; async fn deliver_live_status_update( &self, invocation_id: Uuid, lsu: invoker_api::LiveStatusUpdate, ) -> anyhow::Result<()>; } #[derive(Clone)] pub struct Controller { scheduler: Arc<Scheduler>, problem_loader: Arc<problem_loader::Loader>, toolchains_dir: Arc<Path>, _config: Arc<crate::config::InvokerConfig>, _temp_dir: Arc<tempfile::TempDir>, toolchain_loader: Arc<toolchains::ToolchainLoader>, } fn get_num_cpus() -> usize { static CACHE: std::sync::atomic::AtomicUsize = std::sync::atomic::AtomicUsize::new(0); let old = CACHE.load(std::sync::atomic::Ordering::Relaxed); if old != 0 { return old; } let corr = num_cpus::get(); assert_ne!(corr, 0); CACHE.store(corr, std::sync::atomic::Ordering::Relaxed); corr } impl Controller { pub async fn new( cfg_data: util::cfg::CfgData, config: Arc<crate::config::InvokerConfig>, ) -> anyhow::Result<Controller> { let worker_count = 
match config.workers { Some(cnt) => cnt, None => get_num_cpus(), }; info!("Using {} workers", worker_count); let mut scheduler = Scheduler::new(&config).context("failed to initialize Scheduler")?; for _ in 0..worker_count { scheduler .add_worker() .await .context("failed to start a worker")?; } let scheduler = Arc::new(scheduler); let temp_dir = tempfile::TempDir::new().context("can not find temporary dir")?; let problem_loader = problem_loader::Loader::from_config(&config.problems, temp_dir.path().join("problems")) .await .context("can not create ProblemLoader")?; let toolchain_loader = Arc::new( toolchains::ToolchainLoader::new() .await .context("toolchain loader initialization error")?, ); Ok(Controller { scheduler, problem_loader: Arc::new(problem_loader), toolchains_dir: cfg_data.data_dir.join("opt").into(), _config: config, _temp_dir: Arc::new(temp_dir), toolchain_loader, }) } #[instrument(skip(self, chan))] pub fn exec_on(self, chan: async_mpmc::Receiver<JudgeRequestAndCallbacks>) { chan.process_all(move |req| { let this = self.clone(); async move { let request_id = req.request.request_id; if let Err(err) = this.process_request(req).await { tracing::warn!(request_id = %request_id, err = %format_args!("{:#}", err), "Failed to process a judge request"); } } }); } #[instrument(skip(self, req), fields(request_id=%req.request.request_id))] async fn process_request(&self, req: JudgeRequestAndCallbacks) -> anyhow::Result<()> { let (low_req, mut exts) = self .lower_judge_request(&req) .await .context("request preprocessing failed")?; debug!(lowered_judge_request = ?low_req, "created a lowered judge request"); let worker = self.scheduler.find_free_worker().await; let mut responses = worker .send(Request::Judge(low_req)) .await .context("failed to submit lowered judge request")?; loop { let message = responses .next() .await .context("failed to receive next worker message")?; match message { Response::JudgeDone(judge_outcome) => { debug!("Publising: JudgeOutcome 
{:?}", &judge_outcome); let reason = match judge_outcome { JudgeOutcome::Fault => InvocationFinishReason::Fault, JudgeOutcome::TestingDone => InvocationFinishReason::TestingDone, JudgeOutcome::CompileError(_) => InvocationFinishReason::CompileError, }; req.callbacks .set_finished(req.request.request_id, reason) .await .context("failed to set run outcome in DB")?; break; } Response::LiveScore(score) => { exts.notifier.set_score(score).await; } Response::LiveTest(test) => { exts.notifier.set_test(test).await; } Response::OutcomeHeader(header) => { req.callbacks .add_outcome_header(req.request.request_id, header) .await?; } } } Ok(()) } }
mod notify; mod task_loading; mod toolchains; use crate::{ scheduler::Scheduler, worker::{JudgeOutcome, Request, Response}, }; use anyhow::Context; use notify::Notifier; use std::{ path::{Path, PathBuf}, sync::Arc, }; use tracing::{debug, info, instrument}; use uuid::Uuid; #[derive(Debug)] struct LoweredJudgeRequestExtensions { notifier: Notifier, invocation_dir: PathBuf, } pub enum InvocationFinishReason { Fault, CompileError, TestingDone, } pub struct JudgeRequestAndCallbacks { pub request: invoker_api::JudgeRequest, pub callbacks: Arc<dyn JudgeResponseCallbacks>, } impl std::fmt::Debug for JudgeRequestAndCallbacks {
} #[async_trait::async_trait] pub trait JudgeResponseCallbacks: Send + Sync { async fn set_finished( &self, invocation_id: Uuid, reason: InvocationFinishReason, ) -> anyhow::Result<()>; async fn add_outcome_header( &self, invocation_id: Uuid, header: invoker_api::JudgeOutcomeHeader, ) -> anyhow::Result<()>; async fn deliver_live_status_update( &self, invocation_id: Uuid, lsu: invoker_api::LiveStatusUpdate, ) -> anyhow::Result<()>; } #[derive(Clone)] pub struct Controller { scheduler: Arc<Scheduler>, problem_loader: Arc<problem_loader::Loader>, toolchains_dir: Arc<Path>, _config: Arc<crate::config::InvokerConfig>, _temp_dir: Arc<tempfile::TempDir>, toolchain_loader: Arc<toolchains::ToolchainLoader>, } fn get_num_cpus() -> usize { static CACHE: std::sync::atomic::AtomicUsize = std::sync::atomic::AtomicUsize::new(0); let old = CACHE.load(std::sync::atomic::Ordering::Relaxed); if old != 0 { return old; } let corr = num_cpus::get(); assert_ne!(corr, 0); CACHE.store(corr, std::sync::atomic::Ordering::Relaxed); corr } impl Controller { pub async fn new( cfg_data: util::cfg::CfgData, config: Arc<crate::config::InvokerConfig>, ) -> anyhow::Result<Controller> { let worker_count = match config.workers { Some(cnt) => cnt, None => get_num_cpus(), }; info!("Using {} workers", worker_count); let mut scheduler = Scheduler::new(&config).context("failed to initialize Scheduler")?; for _ in 0..worker_count { scheduler .add_worker() .await .context("failed to start a worker")?; } let scheduler = Arc::new(scheduler); let temp_dir = tempfile::TempDir::new().context("can not find temporary dir")?; let problem_loader = problem_loader::Loader::from_config(&config.problems, temp_dir.path().join("problems")) .await .context("can not create ProblemLoader")?; let toolchain_loader = Arc::new( toolchains::ToolchainLoader::new() .await .context("toolchain loader initialization error")?, ); Ok(Controller { scheduler, problem_loader: Arc::new(problem_loader), toolchains_dir: 
cfg_data.data_dir.join("opt").into(), _config: config, _temp_dir: Arc::new(temp_dir), toolchain_loader, }) } #[instrument(skip(self, chan))] pub fn exec_on(self, chan: async_mpmc::Receiver<JudgeRequestAndCallbacks>) { chan.process_all(move |req| { let this = self.clone(); async move { let request_id = req.request.request_id; if let Err(err) = this.process_request(req).await { tracing::warn!(request_id = %request_id, err = %format_args!("{:#}", err), "Failed to process a judge request"); } } }); } #[instrument(skip(self, req), fields(request_id=%req.request.request_id))] async fn process_request(&self, req: JudgeRequestAndCallbacks) -> anyhow::Result<()> { let (low_req, mut exts) = self .lower_judge_request(&req) .await .context("request preprocessing failed")?; debug!(lowered_judge_request = ?low_req, "created a lowered judge request"); let worker = self.scheduler.find_free_worker().await; let mut responses = worker .send(Request::Judge(low_req)) .await .context("failed to submit lowered judge request")?; loop { let message = responses .next() .await .context("failed to receive next worker message")?; match message { Response::JudgeDone(judge_outcome) => { debug!("Publising: JudgeOutcome {:?}", &judge_outcome); let reason = match judge_outcome { JudgeOutcome::Fault => InvocationFinishReason::Fault, JudgeOutcome::TestingDone => InvocationFinishReason::TestingDone, JudgeOutcome::CompileError(_) => InvocationFinishReason::CompileError, }; req.callbacks .set_finished(req.request.request_id, reason) .await .context("failed to set run outcome in DB")?; break; } Response::LiveScore(score) => { exts.notifier.set_score(score).await; } Response::LiveTest(test) => { exts.notifier.set_test(test).await; } Response::OutcomeHeader(header) => { req.callbacks .add_outcome_header(req.request.request_id, header) .await?; } } } Ok(()) } }
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JudgeRequestAndCallbacks") .field("request", &self.request) .field("handler", &"..") .finish() }
function_block-function_prefix_line
[ { "content": " /// HTTP Request.\n\n pub trait Request {\n\n type Form: Form;\n\n\n\n /// Sets the header with the given key and value.\n\n fn header(self, name: &'static str, value: &str) -> Self;\n\n\n\n /// Sets body using the given vector of bytes.\n\n ///\n\n ...
Rust
src/lib.rs
Thog/blz-nx-rs
3b7a3fc587f064ab5c86da0aaff8e7decea26be1
#![no_std] use byteorder::ByteOrder; use byteorder::LittleEndian; #[derive(Debug)] pub enum Error { Unknown, InvalidBlz, DecompressionBufferTooSmall, CompressionBufferTooSmall, } const BLZ_SHIFT: u8 = 1; const BLZ_MASK: u8 = 0x80; const BLZ_THRESHOLD: usize = 2; const BLZ_MAX_OFFSET: usize = 0x1002; const BLZ_MAX_CODED: usize = (1 << 4) + BLZ_THRESHOLD; pub type BlzResult<T> = core::result::Result<T, Error>; #[inline] pub fn get_worst_compression_buffer_size(raw_len: usize) -> usize { raw_len + ((raw_len + 7) / 8) + 15 } fn get_size_for_decompression(data: &[u8]) -> BlzResult<(u32, u32, u32)> { if data.len() < 4 { return Err(Error::InvalidBlz); } let inc_len = LittleEndian::read_u32(&data[data.len() - 4..]); if inc_len == 0 { let raw_len = data.len() as u32 - 4; Ok((raw_len, 0, raw_len)) } else { if data.len() < 8 { return Err(Error::InvalidBlz); } let header_len = LittleEndian::read_u32(&data[data.len() - 8..]); if data.len() <= header_len as usize { return Err(Error::InvalidBlz); } let enc_len = LittleEndian::read_u32(&data[data.len() - 12..]); let dec_len = data.len() as u32 - enc_len; let pak_len = enc_len - header_len; let raw_len = dec_len + enc_len + inc_len; Ok((dec_len, pak_len, raw_len)) } } pub fn get_decompression_buffer_size(data: &[u8]) -> BlzResult<usize> { Ok(get_size_for_decompression(data)?.2 as usize) } fn invert_slice(data: &mut [u8]) { let mut top_position = 0; let mut bottom_position = data.len() - 1; while top_position < bottom_position { let tmp = data[top_position]; data[top_position] = data[bottom_position]; data[bottom_position] = tmp; bottom_position -= 1; top_position += 1; } } fn compression_search(data: &[u8], current_position: usize) -> (usize, usize) { let mut len = BLZ_THRESHOLD as usize; let mut pos = 0; let max = if current_position >= BLZ_MAX_OFFSET { BLZ_MAX_OFFSET } else { current_position }; for tmp_pos in 3..=max { let mut tmp_len = 0; while tmp_len < BLZ_MAX_CODED { if tmp_len == data[current_position..].len() || tmp_len >= 
tmp_pos { break; } if data[current_position + tmp_len] != data[current_position + tmp_len - tmp_pos] { break; } tmp_len += 1; } if tmp_len > len { pos = tmp_pos; len = tmp_len; if len == BLZ_MAX_CODED { break; } } } (len, pos) } pub fn compress_raw( decompressed_buffer: &mut [u8], compression_buffer: &mut [u8], ) -> BlzResult<usize> { if compression_buffer.len() < get_worst_compression_buffer_size(decompressed_buffer.len()) { return Err(Error::CompressionBufferTooSmall); } invert_slice(decompressed_buffer); let mut compressed_size_tmp = 0; let mut decompressed_size_tmp = decompressed_buffer.len(); let mut mask = 0; let mut decompressed_pos = 0; let mut compressed_pos = 0; let mut flag_pos = 0; while decompressed_pos < decompressed_buffer.len() { mask >>= BLZ_SHIFT; if mask == 0 { flag_pos = compressed_pos; compression_buffer[flag_pos] = 0; compressed_pos += 1; mask = BLZ_MASK; } let (mut len_best, pos_best) = compression_search(&decompressed_buffer, decompressed_pos); if len_best > BLZ_THRESHOLD { if decompressed_pos + len_best < decompressed_buffer.len() { decompressed_pos += len_best; let (mut len_next, _) = compression_search(&decompressed_buffer, decompressed_pos); decompressed_pos -= len_best - 1; let (mut len_post, _) = compression_search(&decompressed_buffer, decompressed_pos); decompressed_pos -= 1; if len_next <= BLZ_THRESHOLD { len_next = 1; } if len_post <= BLZ_THRESHOLD { len_post = 1; } if len_best + len_next <= 1 + len_post { len_best = 1; } } } compression_buffer[flag_pos] <<= 1; if len_best > BLZ_THRESHOLD { decompressed_pos += len_best; compression_buffer[flag_pos] |= 1; compression_buffer[compressed_pos] = (((len_best - (BLZ_THRESHOLD + 1)) << 4) | ((pos_best - 3) >> 8)) as u8; compression_buffer[compressed_pos + 1] = ((pos_best - 3) & 0xFF) as u8; compressed_pos += 2; } else { compression_buffer[compressed_pos] = decompressed_buffer[decompressed_pos]; compressed_pos += 1; decompressed_pos += 1; } if compressed_pos + decompressed_buffer.len() - 
decompressed_pos < compressed_size_tmp + decompressed_size_tmp { compressed_size_tmp = compressed_pos; decompressed_size_tmp = decompressed_buffer.len() - decompressed_pos; } } while mask != 0 && mask != 1 { mask >>= BLZ_SHIFT; compression_buffer[flag_pos] <<= 1; } let compressed_size = compressed_pos; invert_slice(decompressed_buffer); invert_slice(&mut compression_buffer[0..compressed_size]); let result_size; if compressed_size_tmp == 0 || (decompressed_buffer.len() + 4 < ((compressed_size_tmp + decompressed_size_tmp + 3) & 0xFFFFFFFC) + 8) { &(compression_buffer[0..decompressed_buffer.len()]).copy_from_slice(&decompressed_buffer); compressed_pos = decompressed_buffer.len(); while (compressed_pos & 3) != 0 { compression_buffer[compressed_pos] = 0; compressed_pos += 1; } LittleEndian::write_u32(&mut compression_buffer[compressed_pos..], 0); compressed_pos += 4; result_size = compressed_pos; } else { let mut i = 0; while i < compressed_size_tmp { compression_buffer[decompressed_size_tmp + i] = compression_buffer[i + compressed_pos - compressed_size_tmp]; i += 1; } (&mut compression_buffer[0..decompressed_size_tmp]) .copy_from_slice(&decompressed_buffer[0..decompressed_size_tmp]); compressed_pos = decompressed_size_tmp + compressed_size_tmp; let compressed_len = compressed_size_tmp; let mut header_size = 12; let inc_len = decompressed_buffer.len() - compressed_len - decompressed_size_tmp; while (compressed_pos & 3) != 0 { compression_buffer[compressed_pos] = 0xFF; compressed_pos += 1; header_size += 1; } LittleEndian::write_u32( &mut compression_buffer[compressed_pos..], (compressed_len + header_size) as u32, ); LittleEndian::write_u32( &mut compression_buffer[compressed_pos + 4..], header_size as u32, ); LittleEndian::write_u32( &mut compression_buffer[compressed_pos + 8..], (inc_len - header_size) as u32, ); compressed_pos += 12; result_size = compressed_pos; } Ok(result_size) } pub fn decompress_raw( compressed_data: &mut [u8], decompression_buffer: &mut [u8], ) 
-> BlzResult<usize> { let (dec_len, pak_len, raw_len) = get_size_for_decompression(compressed_data)?; if (decompression_buffer.len() as u32) < raw_len { return Err(Error::DecompressionBufferTooSmall); } let mut pak_buffer = &mut compressed_data[0..(dec_len + pak_len) as usize]; let mut raw_buffer = &mut decompression_buffer[0..raw_len as usize]; (&mut raw_buffer[0..dec_len as usize]).copy_from_slice(&pak_buffer[0..dec_len as usize]); pak_buffer = &mut pak_buffer[dec_len as usize..]; raw_buffer = &mut raw_buffer[dec_len as usize..]; invert_slice(pak_buffer); let mut mask = 0; let mut decompression_buffer_position: usize = 0; let mut pak_position: usize = 0; let pak_position_end: usize = pak_len as usize; let mut flags = 0u8; while decompression_buffer_position < raw_buffer.len() { mask >>= BLZ_SHIFT; if mask == 0 { if pak_position == pak_position_end { break; } flags = pak_buffer[pak_position]; pak_position += 1; mask = BLZ_MASK; } if (flags & mask) == 0 { if pak_position == pak_position_end { break; } raw_buffer[decompression_buffer_position] = pak_buffer[pak_position]; decompression_buffer_position += 1; pak_position += 1; } else { if pak_position + 1 >= pak_position_end { break; } let mut pos: u32 = (u32::from(pak_buffer[pak_position]) << 8) | u32::from(pak_buffer[pak_position + 1]); pak_position += 2; let mut len: u32 = (pos >> 12) + BLZ_THRESHOLD as u32 + 1; if decompression_buffer_position + len as usize > raw_buffer.len() { len = (raw_buffer.len() - decompression_buffer_position) as u32; } pos = (pos & 0xFFF) + 3; while len != 0 { raw_buffer[decompression_buffer_position] = raw_buffer[decompression_buffer_position - pos as usize]; decompression_buffer_position += 1; len -= 1; } } } invert_slice(raw_buffer); debug_assert!( decompression_buffer_position == raw_buffer.len(), "Unexpected end of decompression" ); Ok(decompression_buffer_position + dec_len as usize) }
#![no_std] use byteorder::ByteOrder; use byteorder::LittleEndian; #[derive(Debug)] pub enum Error { Unknown, InvalidBlz, DecompressionBufferTooSmall, CompressionBufferTooSmall, } const BLZ_SHIFT: u8 = 1; const BLZ_MASK: u8 = 0x80; const BLZ_THRESHOLD: usize = 2; const BLZ_MAX_OFFSET: usize = 0x1002; const BLZ_MAX_CODED: usize = (1 << 4) + BLZ_THRESHOLD; pub type BlzResult<T> = core::result::Result<T, Error>; #[inline] pub fn get_worst_compression_buffer_size(raw_len: usize) -> usize { raw_len + ((raw_len + 7) / 8) + 15 } fn get_size_for_decompression(data: &[u8]) -> BlzResult<(u32, u32, u32)> { if data.len() < 4 { return Err(Error::InvalidBlz); } let inc_len = LittleEndian::read_u32(&data[data.len() - 4..]); if inc_len == 0 { let raw_len = data.len() as u32 - 4; Ok((raw_len, 0, raw_len)) } else { if data.len() < 8 { return Err(Error::InvalidBlz); } let header_len = LittleEndian::read_u32(&data[data.len() - 8..]); if data.len() <= header_len as usize { return Err(Error::InvalidBlz); } let enc_len = LittleEndian::read_u32(&data[data.len() - 12..]); let dec_len = data.len() as u32 - enc_len; let pak_len = enc_len - header_len; let raw_len = dec_len + enc_len + inc_len; Ok((dec_len, pak_len, raw_len)) } } pub fn get_decompression_buffer_size(data: &[u8]) -> BlzResult<usize> { Ok(get_size_for_decompression(data)?.2 as usize) } fn invert_slice(data: &mut [u8]) { let mut top_position = 0; l
(compressed_len + header_size) as u32, ); LittleEndian::write_u32( &mut compression_buffer[compressed_pos + 4..], header_size as u32, ); LittleEndian::write_u32( &mut compression_buffer[compressed_pos + 8..], (inc_len - header_size) as u32, ); compressed_pos += 12; result_size = compressed_pos; } Ok(result_size) } pub fn decompress_raw( compressed_data: &mut [u8], decompression_buffer: &mut [u8], ) -> BlzResult<usize> { let (dec_len, pak_len, raw_len) = get_size_for_decompression(compressed_data)?; if (decompression_buffer.len() as u32) < raw_len { return Err(Error::DecompressionBufferTooSmall); } let mut pak_buffer = &mut compressed_data[0..(dec_len + pak_len) as usize]; let mut raw_buffer = &mut decompression_buffer[0..raw_len as usize]; (&mut raw_buffer[0..dec_len as usize]).copy_from_slice(&pak_buffer[0..dec_len as usize]); pak_buffer = &mut pak_buffer[dec_len as usize..]; raw_buffer = &mut raw_buffer[dec_len as usize..]; invert_slice(pak_buffer); let mut mask = 0; let mut decompression_buffer_position: usize = 0; let mut pak_position: usize = 0; let pak_position_end: usize = pak_len as usize; let mut flags = 0u8; while decompression_buffer_position < raw_buffer.len() { mask >>= BLZ_SHIFT; if mask == 0 { if pak_position == pak_position_end { break; } flags = pak_buffer[pak_position]; pak_position += 1; mask = BLZ_MASK; } if (flags & mask) == 0 { if pak_position == pak_position_end { break; } raw_buffer[decompression_buffer_position] = pak_buffer[pak_position]; decompression_buffer_position += 1; pak_position += 1; } else { if pak_position + 1 >= pak_position_end { break; } let mut pos: u32 = (u32::from(pak_buffer[pak_position]) << 8) | u32::from(pak_buffer[pak_position + 1]); pak_position += 2; let mut len: u32 = (pos >> 12) + BLZ_THRESHOLD as u32 + 1; if decompression_buffer_position + len as usize > raw_buffer.len() { len = (raw_buffer.len() - decompression_buffer_position) as u32; } pos = (pos & 0xFFF) + 3; while len != 0 { 
raw_buffer[decompression_buffer_position] = raw_buffer[decompression_buffer_position - pos as usize]; decompression_buffer_position += 1; len -= 1; } } } invert_slice(raw_buffer); debug_assert!( decompression_buffer_position == raw_buffer.len(), "Unexpected end of decompression" ); Ok(decompression_buffer_position + dec_len as usize) }
et mut bottom_position = data.len() - 1; while top_position < bottom_position { let tmp = data[top_position]; data[top_position] = data[bottom_position]; data[bottom_position] = tmp; bottom_position -= 1; top_position += 1; } } fn compression_search(data: &[u8], current_position: usize) -> (usize, usize) { let mut len = BLZ_THRESHOLD as usize; let mut pos = 0; let max = if current_position >= BLZ_MAX_OFFSET { BLZ_MAX_OFFSET } else { current_position }; for tmp_pos in 3..=max { let mut tmp_len = 0; while tmp_len < BLZ_MAX_CODED { if tmp_len == data[current_position..].len() || tmp_len >= tmp_pos { break; } if data[current_position + tmp_len] != data[current_position + tmp_len - tmp_pos] { break; } tmp_len += 1; } if tmp_len > len { pos = tmp_pos; len = tmp_len; if len == BLZ_MAX_CODED { break; } } } (len, pos) } pub fn compress_raw( decompressed_buffer: &mut [u8], compression_buffer: &mut [u8], ) -> BlzResult<usize> { if compression_buffer.len() < get_worst_compression_buffer_size(decompressed_buffer.len()) { return Err(Error::CompressionBufferTooSmall); } invert_slice(decompressed_buffer); let mut compressed_size_tmp = 0; let mut decompressed_size_tmp = decompressed_buffer.len(); let mut mask = 0; let mut decompressed_pos = 0; let mut compressed_pos = 0; let mut flag_pos = 0; while decompressed_pos < decompressed_buffer.len() { mask >>= BLZ_SHIFT; if mask == 0 { flag_pos = compressed_pos; compression_buffer[flag_pos] = 0; compressed_pos += 1; mask = BLZ_MASK; } let (mut len_best, pos_best) = compression_search(&decompressed_buffer, decompressed_pos); if len_best > BLZ_THRESHOLD { if decompressed_pos + len_best < decompressed_buffer.len() { decompressed_pos += len_best; let (mut len_next, _) = compression_search(&decompressed_buffer, decompressed_pos); decompressed_pos -= len_best - 1; let (mut len_post, _) = compression_search(&decompressed_buffer, decompressed_pos); decompressed_pos -= 1; if len_next <= BLZ_THRESHOLD { len_next = 1; } if len_post <= BLZ_THRESHOLD 
{ len_post = 1; } if len_best + len_next <= 1 + len_post { len_best = 1; } } } compression_buffer[flag_pos] <<= 1; if len_best > BLZ_THRESHOLD { decompressed_pos += len_best; compression_buffer[flag_pos] |= 1; compression_buffer[compressed_pos] = (((len_best - (BLZ_THRESHOLD + 1)) << 4) | ((pos_best - 3) >> 8)) as u8; compression_buffer[compressed_pos + 1] = ((pos_best - 3) & 0xFF) as u8; compressed_pos += 2; } else { compression_buffer[compressed_pos] = decompressed_buffer[decompressed_pos]; compressed_pos += 1; decompressed_pos += 1; } if compressed_pos + decompressed_buffer.len() - decompressed_pos < compressed_size_tmp + decompressed_size_tmp { compressed_size_tmp = compressed_pos; decompressed_size_tmp = decompressed_buffer.len() - decompressed_pos; } } while mask != 0 && mask != 1 { mask >>= BLZ_SHIFT; compression_buffer[flag_pos] <<= 1; } let compressed_size = compressed_pos; invert_slice(decompressed_buffer); invert_slice(&mut compression_buffer[0..compressed_size]); let result_size; if compressed_size_tmp == 0 || (decompressed_buffer.len() + 4 < ((compressed_size_tmp + decompressed_size_tmp + 3) & 0xFFFFFFFC) + 8) { &(compression_buffer[0..decompressed_buffer.len()]).copy_from_slice(&decompressed_buffer); compressed_pos = decompressed_buffer.len(); while (compressed_pos & 3) != 0 { compression_buffer[compressed_pos] = 0; compressed_pos += 1; } LittleEndian::write_u32(&mut compression_buffer[compressed_pos..], 0); compressed_pos += 4; result_size = compressed_pos; } else { let mut i = 0; while i < compressed_size_tmp { compression_buffer[decompressed_size_tmp + i] = compression_buffer[i + compressed_pos - compressed_size_tmp]; i += 1; } (&mut compression_buffer[0..decompressed_size_tmp]) .copy_from_slice(&decompressed_buffer[0..decompressed_size_tmp]); compressed_pos = decompressed_size_tmp + compressed_size_tmp; let compressed_len = compressed_size_tmp; let mut header_size = 12; let inc_len = decompressed_buffer.len() - compressed_len - 
decompressed_size_tmp; while (compressed_pos & 3) != 0 { compression_buffer[compressed_pos] = 0xFF; compressed_pos += 1; header_size += 1; } LittleEndian::write_u32( &mut compression_buffer[compressed_pos..],
random
[ { "content": "# blz-nx-rs\n\n\n\n[![Travis Build](https://img.shields.io/travis/com/Thog/blz-nx-rs.svg?logo=travis)](https://travis-ci.com/Thog/blz-nx-rs) [![Dependabot Status](https://api.dependabot.com/badges/status?host=github&repo=Thog/blz-nx-rs)](https://dependabot.com)\n\n\n\nAn implementation of the Bott...
Rust
tests/substrate_tests/function_types.rs
reeftotem/solang
56047c48da5f836a23661c092b35a55713346871
use crate::build_solidity; use parity_scale_codec::Encode; use parity_scale_codec_derive::{Decode, Encode}; #[test] fn simple_test() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft { function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = mul; } else { func = add; } return func(a, b); } }"##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 1000u32.encode()); } #[test] fn internal_function_type_in_contract_storage() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(u32, u32); let mut runtime = build_solidity( r##" contract ft { function(int32,int32) internal returns (int32) func; function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function set_op(bool action) public { if (action) { func = mul; } else { func = add; } } function test(int32 a, int32 b) public returns (int32) { return func(a, b); } }"##, ); runtime.function("set_op", false.encode()); runtime.function("test", Args(100, 10).encode()); assert_eq!(runtime.vm.output, 110u32.encode()); } #[test] #[should_panic] fn internal_function_not_init_called() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(u32, u32); let mut runtime = build_solidity( r##" contract ft { function(int32,int32) internal returns (int32) func; function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function set_op(bool action) public { if (action) { func = mul; } else { func = add; } } function test(int32 a, int32 b) public returns (int32) { return func(a, b); } }"##, ); 
runtime.function("test", Args(100, 10).encode()); } #[test] fn base_contract_function() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft is Arith { function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = Arith.mul; } else { func = Arith.add; } return func(a, b); } } contract Arith { function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } } "##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 1000u32.encode()); } #[test] fn virtual_contract_function() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft is Arith { function mul(int32 a, int32 b) internal override returns (int32) { return a * b * 10; } function add(int32 a, int32 b) internal override returns (int32) { return a + b + 10; } } contract Arith { function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = mul; } else { func = add; } return func(a, b); } function mul(int32 a, int32 b) internal virtual returns (int32) { return a * b; } function add(int32 a, int32 b) internal virtual returns (int32) { return a + b; } } "##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 10000u32.encode()); } #[test] fn ext() { let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (bool) func = this.foo; assert(address(this) == func.address); assert(func.selector == hex"42761137"); } function foo(int32) public returns (bool) { return false; } }"##, ); runtime.function("test", Vec::new()); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) 
external returns (uint64) func = this.foo; assert(func(102) == 0xabbaabba); } function foo(int32) public returns (uint64) { return 0xabbaabba; } }"##, ); runtime.function("test", Vec::new()); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) internal { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test", Vec::new()); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) internal { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test", Vec::new()); println!("return external function type from public function"); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; this.bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) public { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test", Vec::new()); println!("external function type in storage"); let mut runtime = build_solidity( r##" contract ft { function(int32) external returns (uint64) func; function test1() public { func = this.foo; } function test2() public { this.bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) public { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test1", Vec::new()); runtime.function("test2", Vec::new()); }
use crate::build_solidity; use parity_scale_codec::Encode; use parity_scale_codec_derive::{Decode, Encode}; #[test] fn simple_test() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft { function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = mul; } else { func = add; } return func(a, b); } }"##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 1000u32.encode()); } #[test] fn internal_function_type_in_contract_storage() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(u32, u32); let mut runtime = build_solidity( r##" contract ft { function(int32,int32) internal returns (int32) func; function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function set_op(bool action) public { if (action) { func = mul; } else { func = add; } } function test(int32 a, int32 b) public returns (int32) { return func(a, b); } }"##, ); runtime.function("set_op", false.encode()); runtime.function("test", Args(100, 10).encode()); assert_eq!(runtime.vm.output, 110u32.encode()); } #[test] #[should_panic] fn internal_function_not_init_called() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(u32, u32); let mut runtime = build_solidity( r##" contract ft { function(int32,int32) internal returns (int32) func; function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function set_op(bool action) public { if (action) { func = mul; } else { func = add; } } function test(int32 a, int32 b) public returns (int32) { return func(a, b); } }"##, ); 
runtime.function("test", Args(100, 10).encode()); } #[test] fn base_contract_function() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft is Arith { function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = Arith.mul; } else { func = Arith.add; } return func(a, b); } } contract Arith { function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } } "##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 1000u32.encode()); } #[test] fn virtual_contract_function() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft is Arith { function mul(int32 a, int32 b) internal override returns (int32) { return a * b * 10; } function add(int32 a, int32 b) internal override returns (int32) { return a + b + 10; } } contract Arith { function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = mul; } else { func = add; } return func(a, b); } function mul(int32 a, int32 b) internal virtual returns (int32) { return a * b; } function add(int32 a, int32 b) internal virtual returns (int32) { return a + b; } } "##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 10000u32.encode()); } #[test] fn ext() { let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (bool) func = this.foo; assert(address(this) == func.address); assert(func.selector == hex"42761137"); } function foo(int32) public returns (bool) { return false; } }"##, ); runtime.function("test", Vec::new()); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) 
external returns (uint64) func = this.foo; assert(func(102) == 0xabbaabba); } function foo(int32) public returns (uint64) { return 0xabbaabba; } }"##, ); runtime.function("test", Vec::new());
runtime.function("test", Vec::new()); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) internal { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test", Vec::new()); println!("return external function type from public function"); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; this.bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) public { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test", Vec::new()); println!("external function type in storage"); let mut runtime = build_solidity( r##" contract ft { function(int32) external returns (uint64) func; function test1() public { func = this.foo; } function test2() public { this.bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) public { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test1", Vec::new()); runtime.function("test2", Vec::new()); }
let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) internal { assert(f(102) == 0xabbaabba); } }"##, );
assignment_statement
[]
Rust
cli/src/workflow/render/producer/typescript/render_static.rs
DmitryAstafyev/clibri
9cf501e0274d5cc1aae13fcf8cb50ed7820f8503
use super::{helpers, workflow::event::Event}; use std::include_str; use std::{ fs, path::{Path, PathBuf}, }; #[allow(non_upper_case_globals)] mod paths { pub mod events { pub const connected: &str = "connected.ts"; pub const disconnected: &str = "disconnected.ts"; pub const error: &str = "error.ts"; pub const ready: &str = "ready.ts"; pub const shutdown: &str = "shutdown.ts"; pub const dest: &str = "events"; } pub mod consumer { pub const module: &str = "index.ts"; pub const filter: &str = "filter.ts"; pub const dest: &str = "implementation/consumer"; } pub mod emitters { pub const connected: &str = "connected.ts"; pub const disconnected: &str = "disconnected.ts"; pub const error: &str = "error.ts"; pub const ready: &str = "ready.ts"; pub const shutdown: &str = "shutdown.ts"; pub const dest: &str = "implementation/events"; } pub mod scope { pub const module: &str = "index.ts"; pub const dest: &str = "implementation/scope"; } pub mod index { pub const module: &str = "index.ts"; pub const dest: &str = ""; } pub mod context { pub const module: &str = "context.ts"; pub const dest: &str = ""; } } pub struct Render {} impl Default for Render { fn default() -> Self { Self::new() } } impl Render { pub fn new() -> Self { Self {} } pub fn render(&self, base: &Path, events: &[Event]) -> Result<(), String> { if !events.iter().any(|event| match event.get_reference() { Ok(reference) => reference == "connected", Err(_) => false, }) { if !self .get_dest_file(base, paths::events::dest, paths::events::connected)? 
.exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::connected)?, include_str!("./static/events/connected.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::connected)?, include_str!("./static/implementation/events/connected.ts").to_owned(), true, )?; } if !events.iter().any(|event| match event.get_reference() { Ok(reference) => reference == "disconnected", Err(_) => false, }) { if !self .get_dest_file(base, paths::events::dest, paths::events::disconnected)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::disconnected)?, include_str!("./static/events/disconnected.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::disconnected)?, include_str!("./static/implementation/events/disconnected.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::error)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::error)?, include_str!("./static/events/error.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::ready)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::ready)?, include_str!("./static/events/ready.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::shutdown)? 
.exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::shutdown)?, include_str!("./static/events/shutdown.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::consumer::dest, paths::consumer::module)?, include_str!("./static/implementation/consumer/index.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::consumer::dest, paths::consumer::filter)?, include_str!("./static/implementation/consumer/filter.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::scope::dest, paths::scope::module)?, include_str!("./static/implementation/scope/index.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::error)?, include_str!("./static/implementation/events/error.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::ready)?, include_str!("./static/implementation/events/ready.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::shutdown)?, include_str!("./static/implementation/events/shutdown.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::index::dest, paths::index::module)?, include_str!("./static/index.ts").to_owned(), true, )?; let context_dest = self.get_dest_file(base, paths::context::dest, paths::context::module)?; if !context_dest.exists() { helpers::fs::write( self.get_dest_file(base, paths::context::dest, paths::context::module)?, include_str!("./static/context.ts").to_owned(), true, )?; } Ok(()) } fn get_dest_file(&self, base: &Path, path: &str, file_name: &str) -> Result<PathBuf, String> { let dest = base.join(path); if !dest.exists() { if let Err(e) = fs::create_dir(&dest) { return Err(format!( "Fail to create dest folder {}. Error: {}", dest.to_string_lossy(), e )); } } Ok(dest.join(file_name)) } }
use super::{helpers, workflow::event::Event}; use std::include_str; use std::{ fs, path::{Path, PathBuf}, }; #[allow(non_upper_case_globals)] mod paths
helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::ready)?, include_str!("./static/events/ready.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::shutdown)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::shutdown)?, include_str!("./static/events/shutdown.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::consumer::dest, paths::consumer::module)?, include_str!("./static/implementation/consumer/index.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::consumer::dest, paths::consumer::filter)?, include_str!("./static/implementation/consumer/filter.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::scope::dest, paths::scope::module)?, include_str!("./static/implementation/scope/index.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::error)?, include_str!("./static/implementation/events/error.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::ready)?, include_str!("./static/implementation/events/ready.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::shutdown)?, include_str!("./static/implementation/events/shutdown.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::index::dest, paths::index::module)?, include_str!("./static/index.ts").to_owned(), true, )?; let context_dest = self.get_dest_file(base, paths::context::dest, paths::context::module)?; if !context_dest.exists() { helpers::fs::write( self.get_dest_file(base, paths::context::dest, paths::context::module)?, include_str!("./static/context.ts").to_owned(), true, )?; } Ok(()) } fn get_dest_file(&self, base: &Path, path: &str, file_name: &str) -> Result<PathBuf, String> { let dest = base.join(path); if 
!dest.exists() { if let Err(e) = fs::create_dir(&dest) { return Err(format!( "Fail to create dest folder {}. Error: {}", dest.to_string_lossy(), e )); } } Ok(dest.join(file_name)) } }
{ pub mod events { pub const connected: &str = "connected.ts"; pub const disconnected: &str = "disconnected.ts"; pub const error: &str = "error.ts"; pub const ready: &str = "ready.ts"; pub const shutdown: &str = "shutdown.ts"; pub const dest: &str = "events"; } pub mod consumer { pub const module: &str = "index.ts"; pub const filter: &str = "filter.ts"; pub const dest: &str = "implementation/consumer"; } pub mod emitters { pub const connected: &str = "connected.ts"; pub const disconnected: &str = "disconnected.ts"; pub const error: &str = "error.ts"; pub const ready: &str = "ready.ts"; pub const shutdown: &str = "shutdown.ts"; pub const dest: &str = "implementation/events"; } pub mod scope { pub const module: &str = "index.ts"; pub const dest: &str = "implementation/scope"; } pub mod index { pub const module: &str = "index.ts"; pub const dest: &str = ""; } pub mod context { pub const module: &str = "context.ts"; pub const dest: &str = ""; } } pub struct Render {} impl Default for Render { fn default() -> Self { Self::new() } } impl Render { pub fn new() -> Self { Self {} } pub fn render(&self, base: &Path, events: &[Event]) -> Result<(), String> { if !events.iter().any(|event| match event.get_reference() { Ok(reference) => reference == "connected", Err(_) => false, }) { if !self .get_dest_file(base, paths::events::dest, paths::events::connected)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::connected)?, include_str!("./static/events/connected.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::connected)?, include_str!("./static/implementation/events/connected.ts").to_owned(), true, )?; } if !events.iter().any(|event| match event.get_reference() { Ok(reference) => reference == "disconnected", Err(_) => false, }) { if !self .get_dest_file(base, paths::events::dest, paths::events::disconnected)? 
.exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::disconnected)?, include_str!("./static/events/disconnected.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::disconnected)?, include_str!("./static/implementation/events/disconnected.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::error)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::error)?, include_str!("./static/events/error.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::ready)? .exists() {
random
[ { "content": "pub fn write(filename: PathBuf, content: String, overwrite: bool) -> Result<(), String> {\n\n if filename.exists() && overwrite {\n\n if let Err(e) = remove_file(filename.clone()) {\n\n return Err(e.to_string());\n\n }\n\n } else if filename.exists() && !overwrite {\...
Rust
lib/oxrdf/src/interning.rs
etiennept/oxigraph
cbccdfba867204ce4b20b6fc16e37a30719f90eb
use crate::*; use lasso::{Key, Rodeo, Spur}; use std::collections::HashMap; #[derive(Debug, Default)] pub struct Interner { strings: Rodeo, #[cfg(feature = "rdf-star")] triples: HashMap<InternedTriple, Triple>, } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub struct InternedNamedNode { id: Spur, } impl InternedNamedNode { pub fn encoded_into(named_node: NamedNodeRef<'_>, interner: &mut Interner) -> Self { Self { id: interner.strings.get_or_intern(named_node.as_str()), } } pub fn encoded_from(named_node: NamedNodeRef<'_>, interner: &Interner) -> Option<Self> { Some(Self { id: interner.strings.get(named_node.as_str())?, }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> NamedNodeRef<'a> { NamedNodeRef::new_unchecked(interner.strings.resolve(&self.id)) } pub fn first() -> Self { Self { id: fist_spur() } } pub fn next(self) -> Self { Self { id: next_spur(self.id), } } pub fn impossible() -> Self { Self { id: impossible_spur(), } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub struct InternedBlankNode { id: Spur, } impl InternedBlankNode { pub fn encoded_into(blank_node: BlankNodeRef<'_>, interner: &mut Interner) -> Self { Self { id: interner.strings.get_or_intern(blank_node.as_str()), } } pub fn encoded_from(blank_node: BlankNodeRef<'_>, interner: &Interner) -> Option<Self> { Some(Self { id: interner.strings.get(blank_node.as_str())?, }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> BlankNodeRef<'a> { BlankNodeRef::new_unchecked(interner.strings.resolve(&self.id)) } pub fn next(self) -> Self { Self { id: next_spur(self.id), } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub enum InternedLiteral { String { value_id: Spur, }, LanguageTaggedString { value_id: Spur, language_id: Spur, }, TypedLiteral { value_id: Spur, datatype: InternedNamedNode, }, } impl InternedLiteral { pub fn encoded_into(literal: LiteralRef<'_>, interner: &mut Interner) -> Self { let value_id = 
interner.strings.get_or_intern(literal.value()); if literal.is_plain() { if let Some(language) = literal.language() { Self::LanguageTaggedString { value_id, language_id: interner.strings.get_or_intern(language), } } else { Self::String { value_id } } } else { Self::TypedLiteral { value_id, datatype: InternedNamedNode::encoded_into(literal.datatype(), interner), } } } pub fn encoded_from(literal: LiteralRef<'_>, interner: &Interner) -> Option<Self> { let value_id = interner.strings.get(literal.value())?; Some(if literal.is_plain() { if let Some(language) = literal.language() { Self::LanguageTaggedString { value_id, language_id: interner.strings.get(language)?, } } else { Self::String { value_id } } } else { Self::TypedLiteral { value_id, datatype: InternedNamedNode::encoded_from(literal.datatype(), interner)?, } }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> LiteralRef<'a> { match self { InternedLiteral::String { value_id } => { LiteralRef::new_simple_literal(interner.strings.resolve(value_id)) } InternedLiteral::LanguageTaggedString { value_id, language_id, } => LiteralRef::new_language_tagged_literal_unchecked( interner.strings.resolve(value_id), interner.strings.resolve(language_id), ), InternedLiteral::TypedLiteral { value_id, datatype } => LiteralRef::new_typed_literal( interner.strings.resolve(value_id), datatype.decode_from(interner), ), } } pub fn next(&self) -> Self { match self { Self::String { value_id } => Self::String { value_id: next_spur(*value_id), }, Self::LanguageTaggedString { value_id, language_id, } => Self::LanguageTaggedString { value_id: *value_id, language_id: next_spur(*language_id), }, Self::TypedLiteral { value_id, datatype } => Self::TypedLiteral { value_id: *value_id, datatype: datatype.next(), }, } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedSubject { NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), #[cfg(feature = "rdf-star")] Triple(Box<InternedTriple>), } impl 
InternedSubject { pub fn encoded_into(node: SubjectRef<'_>, interner: &mut Interner) -> Self { match node { SubjectRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_into(node, interner)) } SubjectRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_into(node, interner)) } #[cfg(feature = "rdf-star")] SubjectRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_into( triple.as_ref(), interner, ))), } } pub fn encoded_from(node: SubjectRef<'_>, interner: &Interner) -> Option<Self> { Some(match node { SubjectRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_from(node, interner)?) } SubjectRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_from(node, interner)?) } #[cfg(feature = "rdf-star")] SubjectRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_from( triple.as_ref(), interner, )?)), }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> SubjectRef<'a> { match self { Self::NamedNode(node) => SubjectRef::NamedNode(node.decode_from(interner)), Self::BlankNode(node) => SubjectRef::BlankNode(node.decode_from(interner)), #[cfg(feature = "rdf-star")] Self::Triple(triple) => SubjectRef::Triple(&interner.triples[triple.as_ref()]), } } pub fn first() -> Self { Self::NamedNode(InternedNamedNode::first()) } pub fn next(&self) -> Self { match self { Self::NamedNode(node) => Self::NamedNode(node.next()), Self::BlankNode(node) => Self::BlankNode(node.next()), #[cfg(feature = "rdf-star")] Self::Triple(triple) => Self::Triple(Box::new(triple.next())), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedGraphName { DefaultGraph, NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), } impl InternedGraphName { pub fn encoded_into(node: GraphNameRef<'_>, interner: &mut Interner) -> Self { match node { GraphNameRef::DefaultGraph => Self::DefaultGraph, 
GraphNameRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_into(node, interner)) } GraphNameRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_into(node, interner)) } } } pub fn encoded_from(node: GraphNameRef<'_>, interner: &Interner) -> Option<Self> { Some(match node { GraphNameRef::DefaultGraph => Self::DefaultGraph, GraphNameRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_from(node, interner)?) } GraphNameRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_from(node, interner)?) } }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> GraphNameRef<'a> { match self { Self::DefaultGraph => GraphNameRef::DefaultGraph, Self::NamedNode(node) => GraphNameRef::NamedNode(node.decode_from(interner)), Self::BlankNode(node) => GraphNameRef::BlankNode(node.decode_from(interner)), } } pub fn first() -> Self { Self::DefaultGraph } pub fn next(&self) -> Self { match self { Self::DefaultGraph => Self::NamedNode(InternedNamedNode::first()), Self::NamedNode(node) => Self::NamedNode(node.next()), Self::BlankNode(node) => Self::BlankNode(node.next()), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedTerm { NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), Literal(InternedLiteral), #[cfg(feature = "rdf-star")] Triple(Box<InternedTriple>), } impl InternedTerm { pub fn encoded_into(term: TermRef<'_>, interner: &mut Interner) -> Self { match term { TermRef::NamedNode(term) => { Self::NamedNode(InternedNamedNode::encoded_into(term, interner)) } TermRef::BlankNode(term) => { Self::BlankNode(InternedBlankNode::encoded_into(term, interner)) } TermRef::Literal(term) => Self::Literal(InternedLiteral::encoded_into(term, interner)), #[cfg(feature = "rdf-star")] TermRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_into( triple.as_ref(), interner, ))), } } pub fn 
encoded_from(term: TermRef<'_>, interner: &Interner) -> Option<Self> { Some(match term { TermRef::NamedNode(term) => { Self::NamedNode(InternedNamedNode::encoded_from(term, interner)?) } TermRef::BlankNode(term) => { Self::BlankNode(InternedBlankNode::encoded_from(term, interner)?) } TermRef::Literal(term) => Self::Literal(InternedLiteral::encoded_from(term, interner)?), #[cfg(feature = "rdf-star")] TermRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_from( triple.as_ref(), interner, )?)), }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> TermRef<'a> { match self { Self::NamedNode(term) => TermRef::NamedNode(term.decode_from(interner)), Self::BlankNode(term) => TermRef::BlankNode(term.decode_from(interner)), Self::Literal(term) => TermRef::Literal(term.decode_from(interner)), #[cfg(feature = "rdf-star")] Self::Triple(triple) => TermRef::Triple(&interner.triples[triple.as_ref()]), } } pub fn first() -> Self { Self::NamedNode(InternedNamedNode::first()) } pub fn next(&self) -> Self { match self { Self::NamedNode(node) => Self::NamedNode(node.next()), Self::BlankNode(node) => Self::BlankNode(node.next()), Self::Literal(node) => Self::Literal(node.next()), #[cfg(feature = "rdf-star")] Self::Triple(triple) => Self::Triple(Box::new(triple.next())), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub struct InternedTriple { pub subject: InternedSubject, pub predicate: InternedNamedNode, pub object: InternedTerm, } #[cfg(feature = "rdf-star")] impl InternedTriple { pub fn encoded_into(triple: TripleRef<'_>, interner: &mut Interner) -> Self { let interned_triple = Self { subject: InternedSubject::encoded_into(triple.subject, interner), predicate: InternedNamedNode::encoded_into(triple.predicate, interner), object: InternedTerm::encoded_into(triple.object, interner), }; interner .triples .insert(interned_triple.clone(), triple.into_owned()); 
interned_triple } pub fn encoded_from(triple: TripleRef<'_>, interner: &Interner) -> Option<Self> { let interned_triple = Self { subject: InternedSubject::encoded_from(triple.subject, interner)?, predicate: InternedNamedNode::encoded_from(triple.predicate, interner)?, object: InternedTerm::encoded_from(triple.object, interner)?, }; if interner.triples.contains_key(&interned_triple) { Some(interned_triple) } else { None } } pub fn next(&self) -> Self { Self { subject: self.subject.clone(), predicate: self.predicate, object: self.object.next(), } } } fn fist_spur() -> Spur { Spur::try_from_usize(0).unwrap() } fn next_spur(value: Spur) -> Spur { Spur::try_from_usize(value.into_usize() + 1).unwrap() } fn impossible_spur() -> Spur { Spur::try_from_usize((u32::MAX - 10).try_into().unwrap()).unwrap() }
use crate::*; use lasso::{Key, Rodeo, Spur}; use std::collections::HashMap; #[derive(Debug, Default)] pub struct Interner { strings: Rodeo, #[cfg(feature = "rdf-star")] triples: HashMap<InternedTriple, Triple>, } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub struct InternedNamedNode { id: Spur, } impl InternedNamedNode { pub fn encoded_into(named_node: NamedNodeRef<'_>, interner: &mut Interner) -> Self { Self { id: interner.strings.get_or_intern(named_node.as_str()), } } pub fn encoded_from(named_node: NamedNodeRef<'_>, interner: &Interner) -> Option<Self> { Some(Self { id: interner.strings.get(named_node.as_str())?, }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> NamedNodeRef<'a> { NamedNodeRef::new_unchecked(interner.strings.resolve(&self.id)) } pub fn first() -> Self { Self { id: fist_spur() } } pub fn next(self) -> Self { Self { id: next_spur(self.id), } } pub fn impossible() -> Self { Self { id: impossible_spur(), } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub struct InternedBlankNode { id: Spur, } impl InternedBlankNode { pub fn encoded_into(blank_node: BlankNodeRef<'_>, interner: &mut Interner) -> Self { Self { id: interner.strings.get_or_intern(blank_node.as_str()), } } pub fn encoded_from(blank_node: BlankNodeRef<'_>, interner: &Interner) -> Option<Self> { Some(Self { id: interner.strings.get(blank_node.as_str())?, }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> BlankNodeRef<'a> { BlankNodeRef::new_unchecked(interner.strings.resolve(&self.id)) } pub fn next(self) -> Self { Self { id: next_spur(self.id), } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub enum InternedLiteral { String { value_id: Spur, }, LanguageTaggedString { value_id: Spur, language_id: Spur, }, TypedLiteral { value_id: Spur, datatype: InternedNamedNode, }, } impl InternedLiteral { pub fn encoded_into(literal: LiteralRef<'_>, interner: &mut Interner) -> Self { let value_id = 
interner.strings.get_or_intern(literal.value()); if literal.is_plain() { if let Some(language) = literal.language() { Self::LanguageTaggedString { value_id, language_id: interner.strings.get_or_intern(language), } } else { Self::String { value_id } } } else { Self::TypedLiteral { value_id, datatype: InternedNamedNode::encoded_into(literal.datatype(), interner), } } } pub fn encoded_from(literal: LiteralRef<'_>, interner: &Interner) -> Option<Self> { let value_id = interner.strings.get(literal.value())?; Some(if literal.is_plain() { if let Some(language) = literal.language() { Self::LanguageTaggedString { value_id, language_id: interner.strings.get(language)?, } } else { Self::String { value_id } } } else { Self::TypedLiteral { value_id, datatype: InternedNamedNode::encoded_from(literal.datatype(), interner)?, } }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> LiteralRef<'a> { match self { InternedLiteral::String { value_id } => { LiteralRef::new_simple_literal(interner.strings.resolve(value_id)) } InternedLiteral::LanguageTaggedString { value_id, language_id, } => LiteralRef::new_language_tagged_literal_unchecked( interner.strings.resolve(value_id), interner.strings.resolve(language_id), ), InternedLiteral::TypedLiteral { value_id, datatype } => LiteralRef::new_typed_literal( interner.strings.resolve(value_id), datatype.decode_from(interner), ), } } pub fn next(&self) -> Self { match self { Self::String { value_id } => Self::String { value_id: next_spur(*value_id), }, Self::LanguageTaggedString { value_id, language_id, } => Self::LanguageTaggedString { value_id: *value_id, language_id: next_spur(*language_id), }, Self::TypedLiteral { value_id, datatype } => Self::TypedLiteral { value_id: *value_id, datatype: datatype.next(), }, } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedSubject { NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), #[cfg(feature = "rdf-star")] Triple(Box<InternedTriple>), } impl 
InternedSubject { pub fn encoded_into(node: SubjectRef<'_>, interner: &mut Interner) -> Self { match node { SubjectRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_into(node, interner)) } SubjectRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_into(node, interner)) } #[cfg(feature = "rdf-star")] SubjectRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_into( triple.as_ref(), interner, ))), } } pub fn encoded_from(node: SubjectRef<'_>, interner: &Interner) -> Option<Self> { Some(match node { SubjectRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_from(node, interner)?) } SubjectRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_from(node, interner)?) } #[cfg(feature = "rdf-star")] SubjectRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_from( triple.as_ref(), interner, )?)), }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> SubjectRef<'a> { match self { Self::NamedNode(node) => SubjectRef::NamedNode(node.decode_from(interner)), Self::BlankNode(node) => SubjectRef::BlankNode(node.decode_from(interner)), #[cfg(feature = "rdf-star")] Self::Triple(triple) => SubjectRef::Triple(&interner.triples[triple.as_ref()]), } } pub fn first() -> Self { Self::NamedNode(InternedNamedNode::first()) } pub fn next(&self) -> Self { match self { Self::NamedNode(node) => Self::NamedNode(node.next()), Self::BlankNode(node) => Self::BlankNode(node.next()), #[cfg(feature = "rdf-star")] Self::Triple(triple) => Self::Triple(Box::new(triple.next())), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedGraphName { DefaultGraph, NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), } impl InternedGraphName { pub fn encoded_into(node: GraphNameRef<'_>, interner: &mut Interner) -> Self { match node { GraphNameRef::DefaultGraph => Self::DefaultGraph, 
GraphNameRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_into(node, interner)) } GraphNameRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_into(node, interner)) } } } pub fn encoded_from(node: GraphNameRef<'_>, interner: &Interner) -> Option<Self> { Some(match node { GraphNameRef::DefaultGraph => Self::DefaultGraph, GraphNameRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_from(node, interner)?) } GraphNameRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_from(node, interner)?) } }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> GraphNameRef<'a> { match self { Self::DefaultGraph => GraphNameRef::DefaultGraph, Self::NamedNode(node) => GraphNameRef::NamedNode(node.decode_from(interner)), Self::BlankNode(node) => GraphNameRef::BlankNode(node.decode_from(interner)), } } pub fn first() -> Self { Self::DefaultGraph } pub fn next(&self) -> Self { match self { Self::DefaultGraph => Self::NamedNode(InternedNamedNode::first()), Self::NamedNode(node) => Self::NamedNode(node.next()
interner, ))), } } pub fn encoded_from(term: TermRef<'_>, interner: &Interner) -> Option<Self> { Some(match term { TermRef::NamedNode(term) => { Self::NamedNode(InternedNamedNode::encoded_from(term, interner)?) } TermRef::BlankNode(term) => { Self::BlankNode(InternedBlankNode::encoded_from(term, interner)?) } TermRef::Literal(term) => Self::Literal(InternedLiteral::encoded_from(term, interner)?), #[cfg(feature = "rdf-star")] TermRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_from( triple.as_ref(), interner, )?)), }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> TermRef<'a> { match self { Self::NamedNode(term) => TermRef::NamedNode(term.decode_from(interner)), Self::BlankNode(term) => TermRef::BlankNode(term.decode_from(interner)), Self::Literal(term) => TermRef::Literal(term.decode_from(interner)), #[cfg(feature = "rdf-star")] Self::Triple(triple) => TermRef::Triple(&interner.triples[triple.as_ref()]), } } pub fn first() -> Self { Self::NamedNode(InternedNamedNode::first()) } pub fn next(&self) -> Self { match self { Self::NamedNode(node) => Self::NamedNode(node.next()), Self::BlankNode(node) => Self::BlankNode(node.next()), Self::Literal(node) => Self::Literal(node.next()), #[cfg(feature = "rdf-star")] Self::Triple(triple) => Self::Triple(Box::new(triple.next())), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub struct InternedTriple { pub subject: InternedSubject, pub predicate: InternedNamedNode, pub object: InternedTerm, } #[cfg(feature = "rdf-star")] impl InternedTriple { pub fn encoded_into(triple: TripleRef<'_>, interner: &mut Interner) -> Self { let interned_triple = Self { subject: InternedSubject::encoded_into(triple.subject, interner), predicate: InternedNamedNode::encoded_into(triple.predicate, interner), object: InternedTerm::encoded_into(triple.object, interner), }; interner .triples 
.insert(interned_triple.clone(), triple.into_owned()); interned_triple } pub fn encoded_from(triple: TripleRef<'_>, interner: &Interner) -> Option<Self> { let interned_triple = Self { subject: InternedSubject::encoded_from(triple.subject, interner)?, predicate: InternedNamedNode::encoded_from(triple.predicate, interner)?, object: InternedTerm::encoded_from(triple.object, interner)?, }; if interner.triples.contains_key(&interned_triple) { Some(interned_triple) } else { None } } pub fn next(&self) -> Self { Self { subject: self.subject.clone(), predicate: self.predicate, object: self.object.next(), } } } fn fist_spur() -> Spur { Spur::try_from_usize(0).unwrap() } fn next_spur(value: Spur) -> Spur { Spur::try_from_usize(value.into_usize() + 1).unwrap() } fn impossible_spur() -> Spur { Spur::try_from_usize((u32::MAX - 10).try_into().unwrap()).unwrap() }
), Self::BlankNode(node) => Self::BlankNode(node.next()), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedTerm { NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), Literal(InternedLiteral), #[cfg(feature = "rdf-star")] Triple(Box<InternedTriple>), } impl InternedTerm { pub fn encoded_into(term: TermRef<'_>, interner: &mut Interner) -> Self { match term { TermRef::NamedNode(term) => { Self::NamedNode(InternedNamedNode::encoded_into(term, interner)) } TermRef::BlankNode(term) => { Self::BlankNode(InternedBlankNode::encoded_into(term, interner)) } TermRef::Literal(term) => Self::Literal(InternedLiteral::encoded_into(term, interner)), #[cfg(feature = "rdf-star")] TermRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_into( triple.as_ref(),
random
[ { "content": "fn named_node_repr(node: NamedNodeRef<'_>, buffer: &mut String) {\n\n buffer.push_str(\"<NamedNode value=\");\n\n buffer.push_str(node.as_str());\n\n buffer.push('>');\n\n}\n\n\n", "file_path": "python/src/model.rs", "rank": 1, "score": 289960.8145970217 }, { "content"...
Rust
src/media/audio/lib/test/hermetic_audio_environment/rust/src/lib.rs
mehulagg/fuchsia
3f56175ee594da6b287d5fb19f2f0eccea2897f0
pub mod virtual_audio; pub mod prelude { pub use fidl; pub use fidl_fuchsia_virtualaudio::*; pub use fuchsia_async as fasync; pub type Result<T> = std::result::Result<T, failure::Error>; pub use crate::Environment; pub use fidl_fuchsia_media::*; pub use fuchsia_component::client; pub use fuchsia_zircon as zx; pub use futures::{self, future, FutureExt, SinkExt, StreamExt, TryStreamExt}; pub use test_util::assert_matches; pub use zx::AsHandleRef; } use fidl::endpoints::{create_endpoints, DiscoverableService}; use fidl_fuchsia_io::DirectoryMarker; use fidl_fuchsia_sys::LauncherProxy; use fuchsia_component::client::App; use fuchsia_component::server::*; use lazy_static::lazy_static; use maplit::hashmap; use prelude::*; use std::collections::HashMap; type ServiceName = &'static str; type ComponentUrl = &'static str; struct ComponentLaunchInfo { services: Vec<ServiceName>, arguments: Option<Vec<String>>, } lazy_static! { static ref SERVICES: HashMap<ComponentUrl, ComponentLaunchInfo> = hashmap! 
{ "fuchsia-pkg://fuchsia.com/audio_core#meta/audio_core_nodevfs.cmx" => ComponentLaunchInfo { services: vec![ AudioCoreMarker::SERVICE_NAME, UsageReporterMarker::SERVICE_NAME, AudioDeviceEnumeratorMarker::SERVICE_NAME ], arguments: Some(vec![ "--disable-device-settings-writeback".to_string() ]) }, "fuchsia-pkg://fuchsia.com/virtual_audio_service#meta/virtual_audio_service_nodevfs.cmx" => ComponentLaunchInfo { services: vec![ InputMarker::SERVICE_NAME, OutputMarker::SERVICE_NAME, ], arguments: None }, }; } #[derive(Debug)] struct ConnectRequest { service: ServiceName, component_url: ComponentUrl, channel: zx::Channel, } fn register_services<'a>(fs: &mut ServiceFs<ServiceObj<'a, ConnectRequest>>) { for (component_url, info) in SERVICES.iter() { for service in info.services.iter().copied() { fs.add_service_at(service, move |channel| { Some(ConnectRequest { service, component_url, channel }) }); } } } fn launch_components(launcher: &LauncherProxy) -> Result<HashMap<ComponentUrl, App>> { const TEST_DEV_MGR_URL: &str = "fuchsia-pkg://fuchsia.com/audio_test_devmgr#meta/audio_test_devmgr.cmx"; const TEST_DEV_MGR_NAME: &str = "fuchsia.media.AudioTestDevmgr"; let test_dev_mgr = client::AppBuilder::new(TEST_DEV_MGR_URL).spawn(launcher)?; let mut launched = SERVICES .iter() .map(|(url, launch_info)| { use zx::HandleBased; let test_dev_mgr_handle = { let (dev_enum, directory_request) = create_endpoints::<DirectoryMarker>()?; test_dev_mgr .pass_to_named_service(TEST_DEV_MGR_NAME, directory_request.into_channel())?; dev_enum.into_channel().into_handle() }; let builder = client::AppBuilder::new(*url) .add_handle_to_namespace("/dev".to_string(), test_dev_mgr_handle) .stdout(client::Stdio::Inherit) .stderr(client::Stdio::Inherit); let builder = if let Some(arguments) = launch_info.arguments.as_ref() { builder.args(arguments) } else { builder }; let app = builder.spawn(launcher)?; Ok((*url, app)) }) .collect::<Result<HashMap<ComponentUrl, App>>>()?; launched.insert(TEST_DEV_MGR_URL, 
test_dev_mgr); Ok(launched) } pub struct Environment { env: NestedEnvironment, } impl Environment { pub fn new() -> Result<Self> { use fidl_fuchsia_logger::LogSinkMarker; let mut fs = ServiceFs::new(); register_services(&mut fs); fs.add_proxy_service::<LogSinkMarker, ConnectRequest>(); let env = fs.create_salted_nested_environment("environment")?; let launched_components = launch_components(env.launcher())?; fasync::spawn(fs.for_each_concurrent(None, move |request| { match launched_components.get(request.component_url) { Some(component) => { component.pass_to_named_service(request.service, request.channel).expect( &format!( "Component {} does not serve {}", request.component_url, request.service ), ); } None => panic!("Unknown component: {:?}", request.component_url), } future::ready(()) })); Ok(Self { env }) } pub fn connect_to_service<S: DiscoverableService>(&self) -> Result<S::Proxy> { self.env.connect_to_service::<S>() } }
pub mod virtual_audio; pub mod prelude { pub use fidl; pub use fidl_fuchsia_virtualaudio::*; pub use fuchsia_async as fasync; pub type Result<T> = std::result::Result<T, failure::Error>; pub use crate::Environment; pub use fidl_fuchsia_media::*; pub use fuchsia_component::client; pub use fuchsia_zircon as zx; pub use futures::{self, future, FutureExt, SinkExt, StreamExt, TryStreamExt}; pub use test_util::assert_matches; pub use zx::AsHandleRef; } use fidl::endpoints::{create_endpoints, DiscoverableService}; use fidl_fuchsia_io::DirectoryMarker; use fidl_fuchsia_sys::LauncherProxy; use fuchsia_component::client::App; use fuchsia_component::server::*; use lazy_static::lazy_static; use maplit::hashmap; use prelude::*; use std::collections::HashMap; type ServiceName = &'static str; type ComponentUrl = &'static str; struct ComponentLaunchInfo { services: Vec<ServiceName>, arguments: Option<Vec<String>>, } lazy_static! { static ref SERVICES: HashMap<ComponentUrl, ComponentLaunchInfo> = hashmap! 
{ "fuchsia-pkg://fuchsia.com/audio_core#meta/audio_core_nodevfs.cmx" => ComponentLaunchInfo { services: vec![ AudioCoreMarker::SERVICE_NAME, UsageReporterMarker::SERVICE_NAME, AudioDeviceEnumeratorMarker::SERVICE_NAME ], arguments: Some(vec![ "--disable-device-settings-writeback".to_string() ]) }, "fuchsia-pkg://fuchsia.com/virtual_audio_service#meta/virtual_audio_service_nodevfs.cmx" => ComponentLaunchInfo { services: vec![ InputMarker::SERVICE_NAME, OutputMarker::SERVICE_NAME, ], arguments: None }, }; } #[derive(Debug)] struct ConnectRequest { service: ServiceName, component_url: ComponentUrl, channel: zx::Channel, } fn register_services<'a>(fs: &mut ServiceFs<ServiceObj<'a, ConnectRequest>>) { for (component_url, info) in SERVICES.iter() { for service in info.services.iter().copied() { fs.add_service_at(service, move |channel| { Some(ConnectRequest { service, component_url, channel }) }); } } } fn launch_components(launcher: &LauncherProxy) -> Result<HashMap<ComponentUrl, App>> { const TEST_DEV_MGR_URL: &str = "fuchsia-pkg://fuchsia.com/audio_test_devmgr#meta/audio_test_devmgr.cmx"; const TEST_DEV_MGR_NAME: &str = "fuchsia.media.AudioTestDevmgr"; let test_dev_mgr = client::AppBuilder::new(TEST_DEV_MGR_URL).spawn(launcher)?; let mut launched = SERVICES .iter() .map(|(url, launch_info)| { use zx::HandleBased; let test_dev_mgr_handle = { let (dev_enum, directory_request) = create_endpoints::<DirectoryMarker>()?; test_dev_mgr .pass_to_named_service(TEST_DEV_MGR_NAME, directory_request.into_channel())?; dev_enum.into_channel().into_handle() }; let builder = client::AppBuilder::new(*url) .add_handle_to_namespace("/dev".to_string(), test_dev_mgr_handle) .stdout(client::Stdio::Inherit) .stderr(client::Stdio::Inherit); let builder = if let Some(arguments) = launch_info.arguments.as_ref() { builder.args(arguments) } else { builder }; let app = builder.spawn(launcher)?; Ok((*url, app)) }) .collect::<Result<HashMap<ComponentUrl, App>>>()?; launched.insert(TEST_DEV_MGR_URL, 
test_dev_mgr); Ok(launched) } pub struct Environment { env: NestedEnvironment, } impl Environment {
pub fn connect_to_service<S: DiscoverableService>(&self) -> Result<S::Proxy> { self.env.connect_to_service::<S>() } }
pub fn new() -> Result<Self> { use fidl_fuchsia_logger::LogSinkMarker; let mut fs = ServiceFs::new(); register_services(&mut fs); fs.add_proxy_service::<LogSinkMarker, ConnectRequest>(); let env = fs.create_salted_nested_environment("environment")?; let launched_components = launch_components(env.launcher())?; fasync::spawn(fs.for_each_concurrent(None, move |request| { match launched_components.get(request.component_url) { Some(component) => { component.pass_to_named_service(request.service, request.channel).expect( &format!( "Component {} does not serve {}", request.component_url, request.service ), ); } None => panic!("Unknown component: {:?}", request.component_url), } future::ready(()) })); Ok(Self { env }) }
function_block-full_function
[]
Rust
hsp3-analyzer-mini/ham-core/src/analysis/preproc.rs
honobonosun/hsp3-ginger
d2788085d71c8d8fdf31e445a8e262c08e18fba8
use super::{a_scope::*, a_symbol::*}; use crate::{parse::*, source::DocId}; use std::{collections::HashMap, mem::replace}; #[derive(Default)] struct Ctx { doc: DocId, symbols: Vec<ASymbolData>, scope: ALocalScope, modules: HashMap<AModule, AModuleData>, module_len: usize, deffunc_len: usize, } impl Ctx { fn deffunc_scope(&self) -> AScope { AScope::Local(self.scope.clone()) } fn module_scope(&self) -> AScope { AScope::Local(ALocalScope { module_opt: self.scope.module_opt.clone(), deffunc_opt: None, }) } fn privacy_scope_or_local(&self, privacy_opt: &Option<(PPrivacy, PToken)>) -> AScope { match privacy_opt { Some((PPrivacy::Global, _)) => AScope::Global, _ => self.module_scope(), } } fn privacy_scope_or_global(&self, privacy_opt: &Option<(PPrivacy, PToken)>) -> AScope { match privacy_opt { Some((PPrivacy::Local, _)) => self.module_scope(), _ => AScope::Global, } } fn add_symbol(&mut self, kind: ASymbolKind, leader: &PToken, name: &PToken, scope: AScope) { add_symbol(kind, leader, name, scope, &mut self.symbols); } } fn add_symbol( kind: ASymbolKind, leader: &PToken, name: &PToken, scope: AScope, symbols: &mut Vec<ASymbolData>, ) { symbols.push(ASymbolData { kind, name: name.body.text.clone(), def_sites: vec![name.body.loc.clone()], use_sites: vec![], leader: leader.clone(), scope, }); } fn on_stmt(stmt: &PStmt, ctx: &mut Ctx) { match stmt { PStmt::Label(PLabel { star, name_opt }) => { if let Some(name) = name_opt { ctx.add_symbol(ASymbolKind::Label, star, name, ctx.module_scope()); } } PStmt::Assign(_) | PStmt::Command(_) | PStmt::Invoke(_) => {} PStmt::Const(PConstStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::Const, hash, name, scope); } } PStmt::Define(PDefineStmt { hash, privacy_opt, ctype_opt, name_opt, .. 
}) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); let ctype = ctype_opt.is_some(); ctx.add_symbol(ASymbolKind::Macro { ctype }, hash, name, scope); } } PStmt::Enum(PEnumStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::Enum, hash, name, scope); } } PStmt::DefFunc(PDefFuncStmt { hash, keyword: _, kind, privacy_opt, name_opt, onexit_opt, params, stmts, behind: _, .. }) => { ctx.deffunc_len += 1; let deffunc = ADefFunc::new(ctx.deffunc_len); let kind = match *kind { PDefFuncKind::DefFunc => ASymbolKind::DefFunc, PDefFuncKind::DefCFunc => ASymbolKind::DefCFunc, PDefFuncKind::ModInit | PDefFuncKind::ModTerm | PDefFuncKind::ModFunc => { ASymbolKind::ModFunc } PDefFuncKind::ModCFunc => ASymbolKind::ModCFunc, }; if let Some(name) = name_opt { if onexit_opt.is_none() { let scope = ctx.privacy_scope_or_global(privacy_opt); ctx.add_symbol(kind, hash, name, scope); } } let parent_deffunc = replace(&mut ctx.scope.deffunc_opt, Some(deffunc)); for param in params { if let Some(name) = &param.name_opt { let param_ty = param.param_ty_opt.as_ref().map(|&(t, _)| t); ctx.add_symbol( ASymbolKind::Param(param_ty), hash, name, ctx.deffunc_scope(), ); } } for stmt in stmts { on_stmt(stmt, ctx); } ctx.scope.deffunc_opt = parent_deffunc; } PStmt::UseLib(_) => {} PStmt::LibFunc(PLibFuncStmt { hash, privacy_opt, name_opt, onexit_opt, .. }) => { if let Some(name) = name_opt { if onexit_opt.is_none() { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::LibFunc, hash, name, scope); } } } PStmt::UseCom(PUseComStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::ComInterface, hash, name, scope); } } PStmt::ComFunc(PComFuncStmt { hash, privacy_opt, name_opt, .. 
}) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_global(privacy_opt); ctx.add_symbol(ASymbolKind::ComFunc, hash, name, scope); } } PStmt::RegCmd(_) => {} PStmt::Cmd(PCmdStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::PluginCmd, hash, name, scope); } } PStmt::Module(PModuleStmt { hash, keyword, name_opt, fields, stmts, behind, .. }) => { let module = AModule::new(ctx.doc, &mut ctx.module_len, name_opt); ctx.modules.insert( module.clone(), AModuleData { keyword_loc: keyword.body.loc.clone(), content_loc: hash.body.loc.unite(&behind), }, ); let parent_scope = replace( &mut ctx.scope, ALocalScope { module_opt: Some(module), deffunc_opt: None, }, ); if let Some(name) = name_opt { ctx.add_symbol(ASymbolKind::Module, hash, name, AScope::Global); } for field in fields.iter().filter_map(|param| param.name_opt.as_ref()) { ctx.add_symbol(ASymbolKind::Field, field, field, ctx.module_scope()); } for stmt in stmts { on_stmt(stmt, ctx); } ctx.scope = parent_scope; } PStmt::Global(_) => {} PStmt::Include(_) => {} PStmt::UnknownPreProc(_) => {} } } pub(crate) struct PreprocAnalysisResult { pub(crate) symbols: Vec<ASymbolData>, pub(crate) modules: HashMap<AModule, AModuleData>, } pub(crate) fn analyze_preproc(doc: DocId, root: &PRoot) -> PreprocAnalysisResult { let mut ctx = Ctx::default(); ctx.doc = doc; for stmt in &root.stmts { on_stmt(stmt, &mut ctx); } let Ctx { symbols, modules, .. } = ctx; PreprocAnalysisResult { symbols, modules } }
use super::{a_scope::*, a_symbol::*}; use crate::{parse::*, source::DocId}; use std::{collections::HashMap, mem::replace}; #[derive(Default)] struct Ctx { doc: DocId, symbols: Vec<ASymbolData>, scope: ALocalScope, modules: HashMap<AModule, AModuleData>, module_len: usize, deffunc_len: usize, } impl Ctx { fn deffunc_scope(&self) -> AScope { AScope::Local(self.scope.clone()) } fn module_scope(&self) -> AScope { AScope::Local(ALocalScope { module_opt: self.scope.module_opt.clone(), deffunc_opt: None, }) } fn privacy_scope_or_local(&self, privacy_opt: &Option<(PPrivacy, PToken)>) -> AScope { match privacy_opt { Some((PPrivacy::Global, _)) => AScope::Global, _ => self.module_scope(), } } fn privacy_scope_or_global(&self, privacy_opt: &Option<(PPrivacy, PToken)>) -> AScope { match privacy_opt { Some((PPrivacy::Local, _)) => self.module_scope(), _ => AScope::Global, } } fn add_symbol(&mut self, kind: ASymbolKind, leader: &PToken, name: &PToken, scope: AScope) { add_symbol(kind, leader, name, scope, &mut self.symbols); } } fn add_symbol( kind: ASymbolKind, leader: &PToken, name: &PToken, scope: AScope, symbols: &mut Vec<ASymbolData>, ) { symbols.push(ASymbolData { kind, name: name.body.text.clone(), def_sites: vec![name.body.loc.clone()], use_sites: vec![], leader: leader.clone(), scope, }); } fn on_stmt(stmt: &PStmt, ctx: &mut Ctx) { match stmt { PStmt::Label(PLabel { star, name_opt }) => { if let Some(name) = name_opt { ctx.add_symbol(ASymbolKind::Label, star, name, ctx.module_scope()); } } PStmt::Assign(_) | PStmt::Command(_) | PStmt::Invoke(_) => {} PStmt::Const(PConstStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::Const, hash, name, scope); } } PStmt::Define(PDefineStmt { hash, privacy_opt, ctype_opt, name_opt, .. 
}) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); let ctype = ctype_opt.is_some(); ctx.add_symbol(ASymbolKind::Macro { ctype }, hash, name, scope); } } PStmt::Enum(PEnumStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::Enum, hash, name, scope); } } PStmt::DefFunc(PDefFuncStmt { hash, keyword: _, kind, privacy_opt, name_opt, onexit_opt, params, stmts, behind: _, .. }) => { ctx.deffunc_len += 1; let deffunc = ADefFunc::new(ctx.deffunc_len); let kind = match *kind { PDefFuncKind::DefFunc => ASymbolKind::DefFunc, PDefFuncKind::DefCFunc => ASymbolKind::DefCFunc, PDefFuncKind::ModInit | PDefFuncKind::ModTerm | PDefFuncKind::ModFunc => { ASymbolKind::ModFunc } PDefFuncKind::ModCFunc => ASymbolKind::ModCFunc, }; if let Some(name) = name_opt { if onexit_opt.is_none() { let scope = ctx.privacy_scope_or_global(privacy_opt); ctx.add_symbol(kind, hash, name, scope); } } let parent_deffunc = replace(&mut ctx.scope.deffunc_opt, Some(deffunc)); for param in params { if let Some(name) = &param.name_opt { let param_ty = param.param_ty_opt.as_ref().map(|&(t, _)| t); ctx.add_symbol( ASymbolKind::Param(param_ty), hash, name, ctx.deffunc_scope(), ); } } for stmt in stmts { on_stmt(stmt, ctx); } ctx.scope.deffunc_opt = parent_deffunc; } PStmt::UseLib(_) => {} PStmt::LibFunc(PLibFuncStmt { hash, privacy_opt, name_opt, onexit_opt, .. }) => { if let Some(name) = name_opt { if onexit_opt.is_none() { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::LibFunc, hash, name, scope); } } } PStmt::UseCom(PUseComStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::ComInterface, hash, name, scope); } } PStmt::ComFunc(PComFuncStmt { hash, privacy_opt, name_opt, .. 
}) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_global(privacy_opt); ctx.add_symbol(ASymbolKind::ComFunc, hash, name, scope); } } PStmt::RegCmd(_) => {} PStmt::Cmd(PCmdStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::PluginCmd, hash, name, scope); } } PStmt::Module(PModuleStmt { hash, keyword, name_opt, fields, stmts, behind, .. }) => { let module = AModule::new(ctx.doc, &mut ctx.module_len, name_opt); ctx.modules.insert( module.clone(), AModuleData { keyword_loc: keyword.body.loc.clone(), content_loc: hash.body.loc.unite(&behind), }, ); let parent_scope = replace( &mut ctx.scope, ALocalScope { module_opt: Some(module), deffunc_opt: None, }, ); if let Some(name) = name_opt { ctx.add_symbol(ASymbolKind::Module, hash, name, AScope::Global); } for field in fields.iter().filter_map(|param| param.name_opt.as_ref()) { ctx.add_symbol(ASymbolKind::Field, field, field, ctx.module_scope()); } for stmt in stmts { on_stmt(stmt, ctx); } ctx.scope = parent_scope; } PStmt::Global(_) => {} PStmt::Include(_) => {} PStmt::UnknownPreProc(_) => {} } } pub(crate) struct PreprocAnalysisResult { pub(crate) symbols: Vec<ASymbolData>, pub(crate) modules: HashMap<AModule, AModuleData>, } pub(crate) fn analyze_preproc(doc: DocId, root: &PRoot) -> PreprocAnalysisResul
t { let mut ctx = Ctx::default(); ctx.doc = doc; for stmt in &root.stmts { on_stmt(stmt, &mut ctx); } let Ctx { symbols, modules, .. } = ctx; PreprocAnalysisResult { symbols, modules } }
function_block-function_prefixed
[ { "content": "fn add_symbol(kind: ASymbolKind, name: &PToken, def_site: bool, ctx: &mut Ctx) {\n\n // 新しいシンボルを登録する。\n\n let symbol = ASymbol::new(ctx.symbols.len());\n\n\n\n let mut symbol_data = ASymbolData {\n\n kind,\n\n name: name.body.text.clone(),\n\n def_sites: vec![],\n\n ...
Rust
src/history.rs
nuta/nsh
4e90833e8d205d5311fbb118076568b810557c84
use crate::fuzzy::FuzzyVec; use crate::theme::ThemeColor; use std::collections::HashMap; use std::fs::{File, OpenOptions}; use std::io::{BufRead, BufReader, Write}; use std::path::{Path, PathBuf}; pub struct History { path: PathBuf, history: FuzzyVec, path2cwd: HashMap<String, PathBuf>, } impl History { pub fn new(history_file: &Path) -> History { let mut warned = false; let mut path2cwd = HashMap::new(); let mut history = FuzzyVec::new(); if let Ok(file) = File::open(history_file) { for (i, line) in BufReader::new(file).lines().enumerate() { if let Ok(line) = line { let cwd = line.split('\t').nth(1); let cmd = line.split('\t').nth(2); match (cwd, cmd, warned) { (Some(cwd), Some(cmd), _) => { path2cwd.insert(cmd.to_string(), PathBuf::from(cwd)); history.append(cmd.to_string()); } (_, _, false) => { print_err!( "nsh: warning: failed to parse ~/.nsh_history: at line {}", i + 1 ); warned = true; } (_, _, _) => (), } } } } History { path: history_file.to_owned(), history, path2cwd, } } pub fn len(&self) -> usize { self.history.len() } pub fn nth_last(&self, nth: usize) -> Option<String> { self.history.nth_last(nth) } pub fn search(&self, query: &str, filter_by_cwd: bool) -> Vec<(Option<ThemeColor>, &str)> { if filter_by_cwd { let cwd = std::env::current_dir().unwrap(); self.history .search(query) .iter() .filter(|(_, cmd)| match self.path2cwd.get(*cmd) { Some(path) if *path == cwd => true, Some(path) => { info!("path='{}' {}", path.display(), cwd.display()); false } _ => false, }) .cloned() .collect() } else { self.history.search(query) } } pub fn append(&mut self, cmd: &str) { if cmd.is_empty() { return; } if cmd.len() < 8 { return; } if let Some(last) = self.history.nth_last(0) { if last.as_str() == cmd { return; } } let cwd = std::env::current_dir().unwrap(); if let Ok(mut file) = OpenOptions::new().append(true).open(&self.path) { let time = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) .expect("failed to get the UNIX timestamp") .as_secs() as 
usize; let dir = cwd.to_str().unwrap().to_owned(); file.write(format!("{}\t{}\t{}\n", time, dir, cmd).as_bytes()) .ok(); } self.history.append(cmd.to_string()); self.path2cwd.insert(cmd.to_string(), cwd); } } pub struct HistorySelector { offset: usize, input: String, } impl HistorySelector { pub fn new() -> HistorySelector { HistorySelector { offset: 0, input: String::new(), } } pub fn reset(&mut self) { self.offset = 0; } pub fn current(&self, history: &History) -> String { if self.offset == 0 { self.input.clone() } else { history.nth_last(self.offset - 1).unwrap() } } pub fn prev(&mut self, history: &History, input: &str) { if self.offset == 0 { self.input = input.to_string(); } let hist_len = history.len(); self.offset += 1; if self.offset >= hist_len { self.offset = hist_len; } } pub fn next(&mut self) { if self.offset > 0 { self.offset -= 1; } } }
use crate::fuzzy::FuzzyVec; use crate::theme::ThemeColor; use std::collections::HashMap; use std::fs::{File, OpenOptions}; use std::io::{BufRead, BufReader, Write}; use std::path::{Path, PathBuf}; pub struct History { path: PathBuf, history: FuzzyVec, path2cwd: HashMap<String, PathBuf>, } impl History { pub fn new(history_file: &Path) -> History { let mut warned = false; let mut path2cwd = HashMap::new(); let mut history = FuzzyVec::new(); if let Ok(file) = File::open(history_file) { for (i, line) in BufReader::new(file).lines().enumerate() { if let Ok(line) = line { let cwd = line.split('\t').nth(1); let cmd = line.split('\t').nth(2); match (cwd, cmd, warned) { (Some(cwd), Some(cmd), _) => { path2cwd.insert(cmd.to_string(), PathBuf::from(cwd)); history.append(cmd.to_string()); } (_, _, false) => { print_err!( "nsh: warning: failed to parse ~/.nsh_history: at line {}", i + 1 ); warned = true; } (_, _, _) => (), } } } } History { path: history_file.to_owned(), history, path2cwd, } } pub fn len(&self) -> usize { self.history.len() } pub fn nth_last(&self, nth: usize) -> Option<String> { self.history.nth_last(nth) } pub fn search(&self, query: &str, filter_by_cwd: bool) -> Vec<(Option<ThemeColor>, &str)> { if filter_by_cwd { let cwd = std::env::current_dir().unwrap(); self.history .search(query) .iter() .filter(|(_, cmd)| match self.path2cwd.get(*cmd) { Some(path) if *path == cwd => true, Some(path) => { info!("path='{}' {}", path.display(), cwd.display()); false } _ => false, }) .cloned() .collect() } else { self.history.search(query) } } pub fn append(&mut self, cmd: &str) { if cmd.is_empty() { return; } if cmd.len() < 8 { return; } if let Some(last) = self.history.nth_last(0) { if last.as_str() == cmd { return; } } let cwd = std::env::current_dir().unwrap(); if let Ok(mut file) = OpenOptions::new().append(true).open(&self.path) { let time = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) .expect("failed to get the UNIX timestamp") .as_secs() as 
usize; let dir = cwd.to_str().unwrap().to_owned(); file.write(format!("{}\t{}\t{}\n", time, dir, cmd).as_bytes()) .ok(); } self.history.append(cmd.to_string()); self.path2cwd.insert(cmd.to_string(), cwd); } } pub struct HistorySelector { offset: usize, input: String, } impl HistorySelector {
pub fn reset(&mut self) { self.offset = 0; } pub fn current(&self, history: &History) -> String { if self.offset == 0 { self.input.clone() } else { history.nth_last(self.offset - 1).unwrap() } } pub fn prev(&mut self, history: &History, input: &str) { if self.offset == 0 { self.input = input.to_string(); } let hist_len = history.len(); self.offset += 1; if self.offset >= hist_len { self.offset = hist_len; } } pub fn next(&mut self) { if self.offset > 0 { self.offset -= 1; } } }
pub fn new() -> HistorySelector { HistorySelector { offset: 0, input: String::new(), } }
function_block-full_function
[ { "content": "fn path_completion(pattern: &str, only_dirs: bool) -> FuzzyVec {\n\n let home_dir = dirs::home_dir().unwrap();\n\n let current_dir = std::env::current_dir().unwrap();\n\n let mut dir = if pattern.is_empty() {\n\n current_dir.clone()\n\n } else if let Some(pattern) = pattern.stri...
Rust
shell_automaton/src/peer/message/write/peer_message_write_effects.rs
simplestaking/tezos-rs
d859dff0a8db4f5adb4885e4885217d5284f7861
use std::net::SocketAddr; use tezos_encoding::binary_writer::BinaryWriterError; use tezos_messages::p2p::binary_message::BinaryWrite; use tezos_messages::p2p::encoding::peer::{PeerMessage, PeerMessageResponse}; use crate::peer::binary_message::write::{ PeerBinaryMessageWriteSetContentAction, PeerBinaryMessageWriteState, }; use crate::peer::message::write::{PeerMessageWriteErrorAction, PeerMessageWriteSuccessAction}; use crate::peers::graylist::{PeerGraylistReason, PeersGraylistAddressAction}; use crate::service::{Service, StatisticsService}; use crate::{Action, ActionId, ActionWithMeta, State, Store}; use super::{PeerMessageWriteError, PeerMessageWriteNextAction}; fn binary_message_write_init<S: Service>( store: &mut Store<S>, address: SocketAddr, message: &PeerMessageResponse, encoded_message: Result<Vec<u8>, BinaryWriterError>, ) -> bool { match encoded_message { Ok(bytes) => store.dispatch(PeerBinaryMessageWriteSetContentAction { address, message: bytes, }), Err(err) => store.dispatch(PeerMessageWriteErrorAction { address, error: PeerMessageWriteError::Encode(format!( "error: {:?}, message: {:?}", err, message )), }), } } fn stats_message_write_start( _: &State, stats_service: Option<&mut StatisticsService>, message: &PeerMessage, action_id: ActionId, ) { if let Some(stats) = stats_service { let time: u64 = action_id.into(); match message { PeerMessage::GetBlockHeaders(m) => m.get_block_headers().iter().for_each(|b| { stats.block_header_download_start(b, time); }), PeerMessage::GetOperationsForBlocks(m) => m .get_operations_for_blocks() .iter() .for_each(|b| stats.block_operations_download_start(b.block_hash(), time)), _ => {} } } } pub fn peer_message_write_effects<S>(store: &mut Store<S>, action: &ActionWithMeta) where S: Service, { match &action.action { Action::PeerMessageWriteNext(content) => { let peer = match store.state.get().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(v) => v, None => return, }, None => return, 
}; if let PeerBinaryMessageWriteState::Init { .. } = &peer.message_write.current { if let Some(next_message) = peer.message_write.queue.front() { stats_message_write_start( store.state.get(), store.service.statistics(), next_message.message(), action.id, ); let message_encode_result = next_message.as_bytes(); let next_message = next_message.clone(); binary_message_write_init( store, content.address, &next_message, message_encode_result, ); } } } Action::PeerMessageWriteInit(content) => { let peer = match store.state.get().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(v) => v, None => return, }, None => return, }; if let PeerBinaryMessageWriteState::Init { .. } = &peer.message_write.current { stats_message_write_start( store.state.get(), store.service.statistics(), content.message.message(), action.id, ); let message = content.message.clone(); binary_message_write_init( store, content.address, &message, content.message.as_bytes(), ); } } Action::PeerBinaryMessageWriteReady(content) => { let peer = match store.state().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(handshaked) => handshaked, None => return, }, None => return, }; if let PeerBinaryMessageWriteState::Ready { .. } = &peer.message_write.current { store.dispatch(PeerMessageWriteSuccessAction { address: content.address, }); } } Action::PeerMessageWriteSuccess(content) => { store.dispatch(PeerMessageWriteNextAction { address: content.address, }); } Action::PeerMessageWriteError(content) => { store.dispatch(PeersGraylistAddressAction { address: content.address, reason: PeerGraylistReason::MessageWriteError(content.error.clone()), }); } _ => {} } }
use std::net::SocketAddr; use tezos_encoding::binary_writer::BinaryWriterError; use tezos_messages::p2p::binary_message::BinaryWrite; use tezos_messages::p2p::encoding::peer::{PeerMessage, PeerMessageResponse}; use crate::peer::binary_message::write::{ PeerBinaryMessageWriteSetContentAction, PeerBinaryMessageWriteState, }; use crate::peer::message::write::{PeerMessageWriteErrorAction, PeerMessageWriteSuccessAction}; use crate::peers::graylist::{PeerGraylistReason, PeersGraylistAddressAction}; use crate::service::{Service, StatisticsService}; use crate::{Action, ActionId, ActionWithMeta, State, Store}; use super::{PeerMessageWriteError, PeerMessageWriteNextAction}; fn binary_message_write_init<S: Service>( store: &mut Store<S>, address: SocketAddr, message: &PeerMessageResponse, encoded_message: Result<Vec<u8>, BinaryWriterError>, ) -> bool { match encoded_message { Ok(bytes) => store.dispatch(PeerBinaryMessageWriteSetContentAction { address, message: bytes, }), Err(err) => store.dispatch(PeerMessageWriteErrorAction { address, error: PeerMessageWriteError::Encode(format!( "error: {:?}, message: {:?}", err, message )), }), } } fn stats_message_write_start( _: &State, stats_service: Option<&mut StatisticsService>, message: &PeerMessage, action_id: ActionId, ) { if let Some(stats) = stats_service { let time: u64 = action_id.into(); match message { PeerMessage::GetBlockHeaders(m) => m.get_block_headers().iter().for_each(|b| { stats.block_header_download_start(b, time); }), PeerMessage::GetOperationsForBlocks(m) => m .get_operations_for_blocks() .iter() .for_each(|b| stats.block_operations_download_start(b.block_hash(), time)), _ => {} } } } pub fn peer_message_write_effects<S>(store: &mut Store<S>, action: &ActionWithMeta) where S: Service, { match &action.action { Action::PeerMessageWriteNext(content) => { let peer = match store.state.get().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(v) => v, None => return, }, None => return, 
}; if let PeerBinaryMessageWriteState::Init { .. } = &peer.message_write.current { if let Some(next_message) = peer.message_write.queue.front() { stats_message_write_start( store.state.get(), store.service.statistics(), next_message.message(), action.id, ); let message_encode_result = next_message.as_bytes(); let next_message = next_message.clone();
; } } } Action::PeerMessageWriteInit(content) => { let peer = match store.state.get().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(v) => v, None => return, }, None => return, }; if let PeerBinaryMessageWriteState::Init { .. } = &peer.message_write.current { stats_message_write_start( store.state.get(), store.service.statistics(), content.message.message(), action.id, ); let message = content.message.clone(); binary_message_write_init( store, content.address, &message, content.message.as_bytes(), ); } } Action::PeerBinaryMessageWriteReady(content) => { let peer = match store.state().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(handshaked) => handshaked, None => return, }, None => return, }; if let PeerBinaryMessageWriteState::Ready { .. } = &peer.message_write.current { store.dispatch(PeerMessageWriteSuccessAction { address: content.address, }); } } Action::PeerMessageWriteSuccess(content) => { store.dispatch(PeerMessageWriteNextAction { address: content.address, }); } Action::PeerMessageWriteError(content) => { store.dispatch(PeersGraylistAddressAction { address: content.address, reason: PeerGraylistReason::MessageWriteError(content.error.clone()), }); } _ => {} } }
binary_message_write_init( store, content.address, &next_message, message_encode_result, )
call_expression
[ { "content": "pub fn peer_message_read_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerMessageReadInit(content) => {\n\n let peer = match state\n\n .peers\n\n .list\n\n .get_mut(&content.address)\n...
Rust
src/uu/seq/src/extendedbigdecimal.rs
353fc443/coreutils
ec386fa460e4fe4dfb7e6a0ec0ddcfabe0c41985
use std::cmp::Ordering; use std::fmt::Display; use std::ops::Add; use bigdecimal::BigDecimal; use num_bigint::BigInt; use num_bigint::ToBigInt; use num_traits::One; use num_traits::Zero; use crate::extendedbigint::ExtendedBigInt; #[derive(Debug, Clone)] pub enum ExtendedBigDecimal { BigDecimal(BigDecimal), Infinity, MinusInfinity, MinusZero, Nan, } fn ceil(x: BigDecimal) -> BigInt { if x.is_integer() { x.to_bigint().unwrap() } else { (x + BigDecimal::one().half()).round(0).to_bigint().unwrap() } } fn floor(x: BigDecimal) -> BigInt { if x.is_integer() { x.to_bigint().unwrap() } else { (x - BigDecimal::one().half()).round(0).to_bigint().unwrap() } } impl ExtendedBigDecimal { pub fn ceil(self) -> ExtendedBigInt { match self { ExtendedBigDecimal::BigDecimal(x) => ExtendedBigInt::BigInt(ceil(x)), other => From::from(other), } } pub fn floor(self) -> ExtendedBigInt { match self { ExtendedBigDecimal::BigDecimal(x) => ExtendedBigInt::BigInt(floor(x)), other => From::from(other), } } } impl From<ExtendedBigInt> for ExtendedBigDecimal { fn from(big_int: ExtendedBigInt) -> Self { match big_int { ExtendedBigInt::BigInt(n) => Self::BigDecimal(BigDecimal::from(n)), ExtendedBigInt::Infinity => ExtendedBigDecimal::Infinity, ExtendedBigInt::MinusInfinity => ExtendedBigDecimal::MinusInfinity, ExtendedBigInt::MinusZero => ExtendedBigDecimal::MinusZero, ExtendedBigInt::Nan => ExtendedBigDecimal::Nan, } } } impl Display for ExtendedBigDecimal { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { ExtendedBigDecimal::BigDecimal(x) => { let (n, p) = x.as_bigint_and_exponent(); match p { 0 => ExtendedBigDecimal::BigDecimal(BigDecimal::new(n * 10, 1)).fmt(f), _ => x.fmt(f), } } ExtendedBigDecimal::Infinity => f32::INFINITY.fmt(f), ExtendedBigDecimal::MinusInfinity => f32::NEG_INFINITY.fmt(f), ExtendedBigDecimal::MinusZero => { (0.0f32).fmt(f) } ExtendedBigDecimal::Nan => "nan".fmt(f), } } } impl Zero for ExtendedBigDecimal { fn zero() -> Self { 
ExtendedBigDecimal::BigDecimal(BigDecimal::zero()) } fn is_zero(&self) -> bool { match self { Self::BigDecimal(n) => n.is_zero(), Self::MinusZero => true, _ => false, } } } impl Add for ExtendedBigDecimal { type Output = Self; fn add(self, other: Self) -> Self { match (self, other) { (Self::BigDecimal(m), Self::BigDecimal(n)) => Self::BigDecimal(m.add(n)), (Self::BigDecimal(_), Self::MinusInfinity) => Self::MinusInfinity, (Self::BigDecimal(_), Self::Infinity) => Self::Infinity, (Self::BigDecimal(_), Self::Nan) => Self::Nan, (Self::BigDecimal(m), Self::MinusZero) => Self::BigDecimal(m), (Self::Infinity, Self::BigDecimal(_)) => Self::Infinity, (Self::Infinity, Self::Infinity) => Self::Infinity, (Self::Infinity, Self::MinusZero) => Self::Infinity, (Self::Infinity, Self::MinusInfinity) => Self::Nan, (Self::Infinity, Self::Nan) => Self::Nan, (Self::MinusInfinity, Self::BigDecimal(_)) => Self::MinusInfinity, (Self::MinusInfinity, Self::MinusInfinity) => Self::MinusInfinity, (Self::MinusInfinity, Self::MinusZero) => Self::MinusInfinity, (Self::MinusInfinity, Self::Infinity) => Self::Nan, (Self::MinusInfinity, Self::Nan) => Self::Nan, (Self::Nan, _) => Self::Nan, (Self::MinusZero, other) => other, } } } impl PartialEq for ExtendedBigDecimal { fn eq(&self, other: &Self) -> bool { match (self, other) { (Self::BigDecimal(m), Self::BigDecimal(n)) => m.eq(n), (Self::BigDecimal(_), Self::MinusInfinity) => false, (Self::BigDecimal(_), Self::Infinity) => false, (Self::BigDecimal(_), Self::Nan) => false, (Self::BigDecimal(_), Self::MinusZero) => false, (Self::Infinity, Self::BigDecimal(_)) => false, (Self::Infinity, Self::Infinity) => true, (Self::Infinity, Self::MinusZero) => false, (Self::Infinity, Self::MinusInfinity) => false, (Self::Infinity, Self::Nan) => false, (Self::MinusInfinity, Self::BigDecimal(_)) => false, (Self::MinusInfinity, Self::Infinity) => false, (Self::MinusInfinity, Self::MinusZero) => false, (Self::MinusInfinity, Self::MinusInfinity) => true, 
(Self::MinusInfinity, Self::Nan) => false, (Self::Nan, _) => false, (Self::MinusZero, Self::BigDecimal(_)) => false, (Self::MinusZero, Self::Infinity) => false, (Self::MinusZero, Self::MinusZero) => true, (Self::MinusZero, Self::MinusInfinity) => false, (Self::MinusZero, Self::Nan) => false, } } } impl PartialOrd for ExtendedBigDecimal { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { match (self, other) { (Self::BigDecimal(m), Self::BigDecimal(n)) => m.partial_cmp(n), (Self::BigDecimal(_), Self::MinusInfinity) => Some(Ordering::Greater), (Self::BigDecimal(_), Self::Infinity) => Some(Ordering::Less), (Self::BigDecimal(_), Self::Nan) => None, (Self::BigDecimal(m), Self::MinusZero) => m.partial_cmp(&BigDecimal::zero()), (Self::Infinity, Self::BigDecimal(_)) => Some(Ordering::Greater), (Self::Infinity, Self::Infinity) => Some(Ordering::Equal), (Self::Infinity, Self::MinusZero) => Some(Ordering::Greater), (Self::Infinity, Self::MinusInfinity) => Some(Ordering::Greater), (Self::Infinity, Self::Nan) => None, (Self::MinusInfinity, Self::BigDecimal(_)) => Some(Ordering::Less), (Self::MinusInfinity, Self::Infinity) => Some(Ordering::Less), (Self::MinusInfinity, Self::MinusZero) => Some(Ordering::Less), (Self::MinusInfinity, Self::MinusInfinity) => Some(Ordering::Equal), (Self::MinusInfinity, Self::Nan) => None, (Self::Nan, _) => None, (Self::MinusZero, Self::BigDecimal(n)) => BigDecimal::zero().partial_cmp(n), (Self::MinusZero, Self::Infinity) => Some(Ordering::Less), (Self::MinusZero, Self::MinusZero) => Some(Ordering::Equal), (Self::MinusZero, Self::MinusInfinity) => Some(Ordering::Greater), (Self::MinusZero, Self::Nan) => None, } } } #[cfg(test)] mod tests { use bigdecimal::BigDecimal; use num_traits::Zero; use crate::extendedbigdecimal::ExtendedBigDecimal; #[test] fn test_addition_infinity() { let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero()); let summand2 = ExtendedBigDecimal::Infinity; assert_eq!(summand1 + summand2, 
ExtendedBigDecimal::Infinity); } #[test] fn test_addition_minus_infinity() { let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero()); let summand2 = ExtendedBigDecimal::MinusInfinity; assert_eq!(summand1 + summand2, ExtendedBigDecimal::MinusInfinity); } #[test] fn test_addition_nan() { let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero()); let summand2 = ExtendedBigDecimal::Nan; let sum = summand1 + summand2; match sum { ExtendedBigDecimal::Nan => (), _ => unreachable!(), } } #[test] fn test_display() { assert_eq!( format!("{}", ExtendedBigDecimal::BigDecimal(BigDecimal::zero())), "0.0" ); assert_eq!(format!("{}", ExtendedBigDecimal::Infinity), "inf"); assert_eq!(format!("{}", ExtendedBigDecimal::MinusInfinity), "-inf"); assert_eq!(format!("{}", ExtendedBigDecimal::Nan), "nan"); } }
use std::cmp::Ordering; use std::fmt::Display; use std::ops::Add; use bigdecimal::BigDecimal; use num_bigint::BigInt; use num_bigint::ToBigInt; use num_traits::One; use num_traits::Zero; use crate::extendedbigint::ExtendedBigInt; #[derive(Debug, Clone)] pub enum ExtendedBigDecimal { BigDecimal(BigDecimal), Infinity, MinusInfinity, MinusZero, Nan, } fn ceil(x: BigDecimal) -> BigInt { if x.is_integer() { x.to_bigint().unwrap() } else { (x + BigDecimal::one().half()).round(0).to_bigint().unwrap() } } fn floor(x: BigDecimal) -> BigInt { if x.is_integer() { x.to_bigint().unwrap() } else { (x - BigDecimal::one().half()).round(0).to_bigint().unwrap() } } impl ExtendedBigDecimal { pub fn ceil(self) -> ExtendedBigInt { match self { ExtendedBigDecimal::BigDecimal(x) => ExtendedBigInt::BigInt(ceil(x)), other => From::from(other), } } pub fn floor(self) -> ExtendedBigInt { match self { ExtendedBigDecimal::BigDecimal(x) => ExtendedBigInt::BigInt(floor(x)), other => From::from(other), } } } impl From<ExtendedBigInt> for ExtendedBigDecimal { fn from(big_int: ExtendedBigInt) -> Self { match big_int { ExtendedBigInt::BigInt(n) => Self::BigDecimal(BigDecimal::from(n)), ExtendedBigInt::Infinity => ExtendedBigDecimal::Infinity, ExtendedBigInt::MinusInfinity => ExtendedBigDecimal::MinusInfinity, ExtendedBigInt::MinusZero => ExtendedBigDecimal::MinusZero, ExtendedBigInt::Nan => ExtendedBigDecimal::Nan, } } } impl Display for ExtendedBigDecimal { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { ExtendedBigDecimal::BigDecimal(x) => { let (n, p) = x.as_bigint_and_exponent(); match p { 0 => ExtendedBigDecimal::BigDecimal(BigDecimal::new(n * 10, 1)).fmt(f), _ => x.fmt(f), } } ExtendedBigDecimal::Infinity => f32::INFINITY.fmt(f), ExtendedBigDecimal::MinusInfinity => f32::NEG_INFINITY.fmt(f), ExtendedBigDecimal::MinusZero => { (0.0f32).fmt(f) } ExtendedBigDecimal::Nan => "nan".fmt(f), } } } impl Zero for ExtendedBigDecimal { fn zero() -> Self { 
ExtendedBigDecimal::BigDecimal(BigDecimal::zero()) }
} impl Add for ExtendedBigDecimal { type Output = Self; fn add(self, other: Self) -> Self { match (self, other) { (Self::BigDecimal(m), Self::BigDecimal(n)) => Self::BigDecimal(m.add(n)), (Self::BigDecimal(_), Self::MinusInfinity) => Self::MinusInfinity, (Self::BigDecimal(_), Self::Infinity) => Self::Infinity, (Self::BigDecimal(_), Self::Nan) => Self::Nan, (Self::BigDecimal(m), Self::MinusZero) => Self::BigDecimal(m), (Self::Infinity, Self::BigDecimal(_)) => Self::Infinity, (Self::Infinity, Self::Infinity) => Self::Infinity, (Self::Infinity, Self::MinusZero) => Self::Infinity, (Self::Infinity, Self::MinusInfinity) => Self::Nan, (Self::Infinity, Self::Nan) => Self::Nan, (Self::MinusInfinity, Self::BigDecimal(_)) => Self::MinusInfinity, (Self::MinusInfinity, Self::MinusInfinity) => Self::MinusInfinity, (Self::MinusInfinity, Self::MinusZero) => Self::MinusInfinity, (Self::MinusInfinity, Self::Infinity) => Self::Nan, (Self::MinusInfinity, Self::Nan) => Self::Nan, (Self::Nan, _) => Self::Nan, (Self::MinusZero, other) => other, } } } impl PartialEq for ExtendedBigDecimal { fn eq(&self, other: &Self) -> bool { match (self, other) { (Self::BigDecimal(m), Self::BigDecimal(n)) => m.eq(n), (Self::BigDecimal(_), Self::MinusInfinity) => false, (Self::BigDecimal(_), Self::Infinity) => false, (Self::BigDecimal(_), Self::Nan) => false, (Self::BigDecimal(_), Self::MinusZero) => false, (Self::Infinity, Self::BigDecimal(_)) => false, (Self::Infinity, Self::Infinity) => true, (Self::Infinity, Self::MinusZero) => false, (Self::Infinity, Self::MinusInfinity) => false, (Self::Infinity, Self::Nan) => false, (Self::MinusInfinity, Self::BigDecimal(_)) => false, (Self::MinusInfinity, Self::Infinity) => false, (Self::MinusInfinity, Self::MinusZero) => false, (Self::MinusInfinity, Self::MinusInfinity) => true, (Self::MinusInfinity, Self::Nan) => false, (Self::Nan, _) => false, (Self::MinusZero, Self::BigDecimal(_)) => false, (Self::MinusZero, Self::Infinity) => false, (Self::MinusZero, 
Self::MinusZero) => true, (Self::MinusZero, Self::MinusInfinity) => false, (Self::MinusZero, Self::Nan) => false, } } } impl PartialOrd for ExtendedBigDecimal { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { match (self, other) { (Self::BigDecimal(m), Self::BigDecimal(n)) => m.partial_cmp(n), (Self::BigDecimal(_), Self::MinusInfinity) => Some(Ordering::Greater), (Self::BigDecimal(_), Self::Infinity) => Some(Ordering::Less), (Self::BigDecimal(_), Self::Nan) => None, (Self::BigDecimal(m), Self::MinusZero) => m.partial_cmp(&BigDecimal::zero()), (Self::Infinity, Self::BigDecimal(_)) => Some(Ordering::Greater), (Self::Infinity, Self::Infinity) => Some(Ordering::Equal), (Self::Infinity, Self::MinusZero) => Some(Ordering::Greater), (Self::Infinity, Self::MinusInfinity) => Some(Ordering::Greater), (Self::Infinity, Self::Nan) => None, (Self::MinusInfinity, Self::BigDecimal(_)) => Some(Ordering::Less), (Self::MinusInfinity, Self::Infinity) => Some(Ordering::Less), (Self::MinusInfinity, Self::MinusZero) => Some(Ordering::Less), (Self::MinusInfinity, Self::MinusInfinity) => Some(Ordering::Equal), (Self::MinusInfinity, Self::Nan) => None, (Self::Nan, _) => None, (Self::MinusZero, Self::BigDecimal(n)) => BigDecimal::zero().partial_cmp(n), (Self::MinusZero, Self::Infinity) => Some(Ordering::Less), (Self::MinusZero, Self::MinusZero) => Some(Ordering::Equal), (Self::MinusZero, Self::MinusInfinity) => Some(Ordering::Greater), (Self::MinusZero, Self::Nan) => None, } } } #[cfg(test)] mod tests { use bigdecimal::BigDecimal; use num_traits::Zero; use crate::extendedbigdecimal::ExtendedBigDecimal; #[test] fn test_addition_infinity() { let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero()); let summand2 = ExtendedBigDecimal::Infinity; assert_eq!(summand1 + summand2, ExtendedBigDecimal::Infinity); } #[test] fn test_addition_minus_infinity() { let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero()); let summand2 = ExtendedBigDecimal::MinusInfinity; 
assert_eq!(summand1 + summand2, ExtendedBigDecimal::MinusInfinity); } #[test] fn test_addition_nan() { let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero()); let summand2 = ExtendedBigDecimal::Nan; let sum = summand1 + summand2; match sum { ExtendedBigDecimal::Nan => (), _ => unreachable!(), } } #[test] fn test_display() { assert_eq!( format!("{}", ExtendedBigDecimal::BigDecimal(BigDecimal::zero())), "0.0" ); assert_eq!(format!("{}", ExtendedBigDecimal::Infinity), "inf"); assert_eq!(format!("{}", ExtendedBigDecimal::MinusInfinity), "-inf"); assert_eq!(format!("{}", ExtendedBigDecimal::Nan), "nan"); } }
fn is_zero(&self) -> bool { match self { Self::BigDecimal(n) => n.is_zero(), Self::MinusZero => true, _ => false, } }
function_block-full_function
[ { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(mut args: impl uucore::Args) -> UResult<()> {\n\n let program = args.next().unwrap_or_else(|| OsString::from(\"test\"));\n\n let binary_name = uucore::util_name();\n\n let mut args: Vec<_> = args.collect();\n\n\n\n if binary_name.ends_with('...
Rust
gears_core/src/gear/mod.rs
XBagon/gears
5c42708dece4f0a288b2415a8efa23d3a35a5964
use crate::gear::special::GearSpecial; use crate::gear::{ command::{GearCommand, GearGenericCommand}, compound::GearCompound, internal::GearInternal, special::{io::Input, io::Output, literal::Literal}, }; use crate::ty::*; use crate::util::LiftSlotMap; use enum_dispatch::enum_dispatch; use slotmap::{new_key_type, SlotMap}; use thiserror::Error; pub mod command; pub mod compound; pub mod internal; pub mod special; new_key_type! { pub struct GearId; } impl Geared for TemplateGearId { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> { register.template_gears[*self].evaluate(register, input) } } new_key_type! { pub struct TemplateGearId; } type GearMap = LiftSlotMap<GearId, Gear>; type TemplateGearMap = SlotMap<TemplateGearId, Gear>; pub struct GearRegister { pub gears: GearMap, pub template_gears: TemplateGearMap, pub internal: internal::Gears, pub special: special::Gears, pub command: command::Gears, } impl GearRegister { pub fn init() -> Self { let mut template_gears = SlotMap::with_key(); Self { internal: internal::Gears::init(&mut template_gears), special: special::Gears::init(&mut template_gears), command: command::Gears::init(&mut template_gears), gears: LiftSlotMap::with_key(), template_gears, } } pub fn register(&mut self, gear: Gear) -> GearId { self.gears.insert(gear) } pub fn register_template(&mut self, gear: Gear) -> TemplateGearId { self.template_gears.insert(gear) } pub fn duplicate(&mut self, gear_id: GearId) -> GearId { let clone = self.gears[gear_id].clone(); self.gears.insert(clone) } pub fn instantiate(&mut self, template_gear_id: TemplateGearId) -> GearId { self.register(self.gear_from_template(template_gear_id)) } pub fn instantiator(&mut self, template_gear_id: TemplateGearId) -> GearBuilder { GearBuilder { gear: self.gear_from_template(template_gear_id), register: self, } } pub fn builder(&mut self, implementation: GearImplementation) -> GearBuilder { let gear = Gear { name: String::new(), inputs: 
vec![], outputs: vec![], implementation, }; GearBuilder { gear, register: self, } } fn gear_from_template(&self, template_gear_id: TemplateGearId) -> Gear { let template = &self.template_gears[template_gear_id]; Gear { name: template.name.clone(), inputs: template.inputs.clone(), outputs: template.outputs.clone(), implementation: GearImplementation::Template(template_gear_id), } } pub fn get_mut_implementation(&mut self, gear_id: GearId) -> Option<&mut GearImplementation> { let mut gear = &mut self.gears[gear_id]; if let GearImplementation::Template(template_gear_id) = gear.implementation { let template_gear = &self.template_gears[template_gear_id]; gear.implementation = template_gear.implementation.clone(); } Some(&mut gear.implementation) } pub fn get_template_gear_id(&self, gear_id: GearId) -> Option<TemplateGearId> { if let GearImplementation::Template(template_gear_id) = self.gears[gear_id].implementation { Some(template_gear_id) } else { None } } pub fn evaluate(&self, gear_id: GearId, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> { self.gears[gear_id].evaluate(self, input) } } #[must_use] pub struct GearBuilder<'a> { register: &'a mut GearRegister, pub gear: Gear, } impl<'a> GearBuilder<'a> { pub fn instantiate(self) -> GearId { self.register.register(self.gear) } pub fn templatize(self) -> TemplateGearId { self.register.register_template(self.gear) } pub fn name(mut self, name: String) -> Self { self.gear.name = name; self } pub fn input(mut self, io_info: IOInformation) -> Self { self.gear.inputs.push(io_info); self } pub fn output(mut self, io_info: IOInformation) -> Self { self.gear.outputs.push(io_info); self } } impl Default for GearRegister { fn default() -> Self { Self::init() } } #[derive(Clone)] pub struct Gear { pub name: String, pub inputs: Vec<IOInformation>, pub outputs: Vec<IOInformation>, pub implementation: GearImplementation, } impl Geared for Gear { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> 
Result<Vec<TypedValue>> { self.implementation.evaluate(register, input) } } #[derive(Clone)] pub struct IOInformation { pub name: String, pub ty: TypeDiscriminant, } impl IOInformation { pub fn new(name: String, ty: TypeDiscriminant) -> Self { IOInformation { name, ty } } } #[enum_dispatch] #[derive(Clone)] pub enum GearImplementation { GearInternal, GearCompound, GearSpecial, GearCommand, GearGenericCommand, Template(TemplateGearId), } #[derive(Error, Debug)] pub enum Error { #[error("Error occurred in evaluation")] GearInternalError(#[from] Box<dyn std::error::Error>), #[error("IOError occured")] IOError(#[from] std::io::Error), #[error("IOError occured")] FromUTF8Error(#[from] std::string::FromUtf8Error), #[error("This `GearSpecial` isn't evaluable")] NonEvaluable, #[error("Terminated by signal: {0}")] TerminatedBySignal(i32), } pub type Result<T> = std::result::Result<T, Error>; #[enum_dispatch(GearImplementation, GearSpecial)] pub trait Geared { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>>; }
use crate::gear::special::GearSpecial; use crate::gear::{ command::{GearCommand, GearGenericCommand}, compound::GearCompound, internal::GearInternal, special::{io::Input, io::Output, literal::Literal}, }; use crate::ty::*; use crate::util::LiftSlotMap; use enum_dispatch::enum_dispatch; use slotmap::{new_key_type, SlotMap}; use thiserror::Error; pub mod command; pub mod compound; pub mod internal; pub mod special; new_key_type! { pub struct GearId; } impl Geared for TemplateGearId { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> { register.template_gears[*self].evaluate(register, input) } } new_key_type! { pub struct TemplateGearId; } type GearMap = LiftSlotMap<GearId, Gear>; type TemplateGearMap = SlotMap<TemplateGearId, Gear>; pub struct GearRegister { pub gears: GearMap, pub template_gears: TemplateGearMap, pub internal: internal::Gears, pub special: special::Gears, pub command: command::Gears, } impl GearRegister {
pub fn register(&mut self, gear: Gear) -> GearId { self.gears.insert(gear) } pub fn register_template(&mut self, gear: Gear) -> TemplateGearId { self.template_gears.insert(gear) } pub fn duplicate(&mut self, gear_id: GearId) -> GearId { let clone = self.gears[gear_id].clone(); self.gears.insert(clone) } pub fn instantiate(&mut self, template_gear_id: TemplateGearId) -> GearId { self.register(self.gear_from_template(template_gear_id)) } pub fn instantiator(&mut self, template_gear_id: TemplateGearId) -> GearBuilder { GearBuilder { gear: self.gear_from_template(template_gear_id), register: self, } } pub fn builder(&mut self, implementation: GearImplementation) -> GearBuilder { let gear = Gear { name: String::new(), inputs: vec![], outputs: vec![], implementation, }; GearBuilder { gear, register: self, } } fn gear_from_template(&self, template_gear_id: TemplateGearId) -> Gear { let template = &self.template_gears[template_gear_id]; Gear { name: template.name.clone(), inputs: template.inputs.clone(), outputs: template.outputs.clone(), implementation: GearImplementation::Template(template_gear_id), } } pub fn get_mut_implementation(&mut self, gear_id: GearId) -> Option<&mut GearImplementation> { let mut gear = &mut self.gears[gear_id]; if let GearImplementation::Template(template_gear_id) = gear.implementation { let template_gear = &self.template_gears[template_gear_id]; gear.implementation = template_gear.implementation.clone(); } Some(&mut gear.implementation) } pub fn get_template_gear_id(&self, gear_id: GearId) -> Option<TemplateGearId> { if let GearImplementation::Template(template_gear_id) = self.gears[gear_id].implementation { Some(template_gear_id) } else { None } } pub fn evaluate(&self, gear_id: GearId, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> { self.gears[gear_id].evaluate(self, input) } } #[must_use] pub struct GearBuilder<'a> { register: &'a mut GearRegister, pub gear: Gear, } impl<'a> GearBuilder<'a> { pub fn instantiate(self) -> GearId { 
self.register.register(self.gear) } pub fn templatize(self) -> TemplateGearId { self.register.register_template(self.gear) } pub fn name(mut self, name: String) -> Self { self.gear.name = name; self } pub fn input(mut self, io_info: IOInformation) -> Self { self.gear.inputs.push(io_info); self } pub fn output(mut self, io_info: IOInformation) -> Self { self.gear.outputs.push(io_info); self } } impl Default for GearRegister { fn default() -> Self { Self::init() } } #[derive(Clone)] pub struct Gear { pub name: String, pub inputs: Vec<IOInformation>, pub outputs: Vec<IOInformation>, pub implementation: GearImplementation, } impl Geared for Gear { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> { self.implementation.evaluate(register, input) } } #[derive(Clone)] pub struct IOInformation { pub name: String, pub ty: TypeDiscriminant, } impl IOInformation { pub fn new(name: String, ty: TypeDiscriminant) -> Self { IOInformation { name, ty } } } #[enum_dispatch] #[derive(Clone)] pub enum GearImplementation { GearInternal, GearCompound, GearSpecial, GearCommand, GearGenericCommand, Template(TemplateGearId), } #[derive(Error, Debug)] pub enum Error { #[error("Error occurred in evaluation")] GearInternalError(#[from] Box<dyn std::error::Error>), #[error("IOError occured")] IOError(#[from] std::io::Error), #[error("IOError occured")] FromUTF8Error(#[from] std::string::FromUtf8Error), #[error("This `GearSpecial` isn't evaluable")] NonEvaluable, #[error("Terminated by signal: {0}")] TerminatedBySignal(i32), } pub type Result<T> = std::result::Result<T, Error>; #[enum_dispatch(GearImplementation, GearSpecial)] pub trait Geared { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>>; }
pub fn init() -> Self { let mut template_gears = SlotMap::with_key(); Self { internal: internal::Gears::init(&mut template_gears), special: special::Gears::init(&mut template_gears), command: command::Gears::init(&mut template_gears), gears: LiftSlotMap::with_key(), template_gears, } }
function_block-function_prefix_line
[ { "content": "use super::*;\n\n\n\npub mod io;\n\npub mod literal;\n\n\n\npub struct Gears {\n\n pub io: io::Gears,\n\n}\n\n\n\nimpl Gears {\n\n pub fn init(template_gears: &mut TemplateGearMap) -> Self {\n\n Self {\n\n io: io::Gears::init(template_gears),\n\n }\n\n }\n\n}\n\n\...
Rust
atomics/src/atomic_primitive/atomic_f32.rs
BrianMcDonaldWS/rpc-perf
e36466ce611d151757cf4e8dcfebb1f7f32263d7
use crate::{AtomicPrimitive, Ordering}; #[cfg(feature = "serde")] use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; pub struct AtomicF32 { pub(crate) inner: core::sync::atomic::AtomicU32, } impl AtomicPrimitive for AtomicF32 { type Primitive = f32; fn new(value: Self::Primitive) -> Self { let value = unsafe { std::mem::transmute(value) }; Self { inner: core::sync::atomic::AtomicU32::new(value), } } fn get_mut(&mut self) -> &mut Self::Primitive { unsafe { &mut *(self.inner.get_mut() as *mut u32 as *mut f32) } } fn into_inner(self) -> Self::Primitive { f32::from_bits(self.inner.into_inner()) } fn load(&self, order: Ordering) -> Self::Primitive { f32::from_bits(self.inner.load(order)) } fn store(&self, value: Self::Primitive, order: Ordering) { self.inner.store(value.to_bits(), order) } fn swap(&self, value: Self::Primitive, order: Ordering) -> Self::Primitive { f32::from_bits(self.inner.swap(value.to_bits(), order)) } fn compare_and_swap( &self, current: Self::Primitive, new: Self::Primitive, order: Ordering, ) -> Self::Primitive { f32::from_bits( self.inner .compare_and_swap(current.to_bits(), new.to_bits(), order), ) } fn compare_exchange( &self, current: Self::Primitive, new: Self::Primitive, success: Ordering, failure: Ordering, ) -> Result<Self::Primitive, Self::Primitive> { self.inner .compare_exchange(current.to_bits(), new.to_bits(), success, failure) .map(f32::from_bits) .map_err(f32::from_bits) } fn compare_exchange_weak( &self, current: Self::Primitive, new: Self::Primitive, success: Ordering, failure: Ordering, ) -> Result<Self::Primitive, Self::Primitive> { self.inner .compare_exchange_weak(current.to_bits(), new.to_bits(), success, failure) .map(f32::from_bits) .map_err(f32::from_bits) } } impl Default for AtomicF32 { fn default() -> Self { Self::new(Default::default()) } } impl PartialEq for AtomicF32 { fn eq(&self, other: &Self) -> bool { self.load(Ordering::SeqCst) == other.load(Ordering::SeqCst) } } impl Eq for AtomicF32 {} 
impl std::fmt::Debug for AtomicF32 { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self.load(Ordering::Relaxed)) } } #[cfg(feature = "serde")] struct AtomicF32Visitor; #[cfg(feature = "serde")] impl<'de> Visitor<'de> for AtomicF32Visitor { type Value = AtomicF32; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("a 32bit floating point number") } fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_i16<E>(self, value: i16) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_i32<E>(self, value: i32) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = i16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = i16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_u8<E>(self, value: u8) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_u16<E>(self, value: u16) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = u16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = u16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { 
Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_f32<E>(self, value: f32) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(value)) } fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E> where E: serde::de::Error, { if value >= f64::from(std::f32::MIN) && value <= f64::from(std::f32::MAX) { Ok(Self::Value::new(value as f32)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } } #[cfg(feature = "serde")] impl<'de> Deserialize<'de> for AtomicF32 { fn deserialize<D>(deserializer: D) -> Result<AtomicF32, D::Error> where D: Deserializer<'de>, { deserializer.deserialize_any(AtomicF32Visitor) } } #[cfg(feature = "serde")] impl Serialize for AtomicF32 { #[inline] fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_some(&self.load(Ordering::SeqCst)) } } #[cfg(test)] mod tests { use super::*; #[test] fn load() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.load(Ordering::SeqCst), 0.0); } #[test] fn store() { let atomic = AtomicF32::new(0.0); atomic.store(3.14, Ordering::SeqCst); assert_eq!(atomic.into_inner(), 3.14); } #[test] fn swap() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.swap(3.14, Ordering::SeqCst), 0.0); } #[test] fn compare_and_swap() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.compare_and_swap(0.0, 3.14, Ordering::SeqCst), 0.0); assert_eq!(atomic.compare_and_swap(0.0, 42.0, Ordering::SeqCst), 3.14); } #[test] fn compare_exchange() { let atomic = AtomicF32::new(0.0); assert_eq!( atomic.compare_exchange(0.0, 3.14, Ordering::SeqCst, Ordering::SeqCst), Ok(0.0) ); assert_eq!( atomic.compare_exchange(0.0, 42.0, Ordering::SeqCst, Ordering::SeqCst), Err(3.14) ); } #[test] fn compare_exchange_weak() { let atomic = AtomicF32::new(0.0); loop { if atomic .compare_exchange(0.0, 3.14, Ordering::SeqCst, Ordering::SeqCst) .is_ok() { break; } } assert_eq!(atomic.into_inner(), 3.14); } }
use crate::{AtomicPrimitive, Ordering}; #[cfg(feature = "serde")] use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; pub struct AtomicF32 { pub(crate) inner: core::sync::atomic::AtomicU32, } impl AtomicPrimitive for AtomicF32 { type Primitive = f32; fn new(value: Self::Primitive) -> Self { let value = unsafe { std::mem::transmute(value) }; Self { inner: core::sync::atomic::AtomicU32::new(value), } } fn get_mut(&mut self) -> &mut Self::Primitive { unsafe { &mut *(self.inner.get_mut() as *mut u32 as *mut f32) } } fn into_inner(self) -> Self::Primitive { f32::from_bits(self.inner.into_inner()) } fn load(&self, order: Ordering) -> Self::Primitive { f32::from_bits(self.inner.load(order)) } fn store(&self, value: Self::Primitive, order: Ordering) { self.inner.store(value.to_bits(), order) } fn swap(&self, value: Self::Primitive, order: Ordering) -> Self::Primitive { f32::from_bits(self.inner.swap(value.to_bits(), order)) } fn compare_and_swap( &self, current: Self::Primitive, new: Self::Primitive, order: Ordering, ) -> Self::Primitive { f32::from_bits( self.inner .compare_and_swap(current.to_bits(), new.to_bits(), order), ) } fn compare_exchange( &self, current: Self::Primitive, new: Self::Primitive, success: Ordering, failure: Ordering, ) -> Result<Self::Primitive, Self::Primitive> { self.inner .compare_exchange(current.to_bits(), new.to_bits(), success, failure) .map(f32::from_bits) .map_err(f32::from_bits) } fn compare_exchange_weak( &self, current: Self::Primitive, new: Self::Primitive, success: Ordering, failure: Ordering, ) -> Result<Self::Primitive, Self::Primitive> { self.inner .compare_exchange_weak(current.to_bits(), new.to_bits(), success, failure) .map(f32::from_bits) .map_err(f32::from_bits) } } impl Default for AtomicF32 { fn default() -> Self { Self::new(Default::default()) } } impl PartialEq for AtomicF32 { fn eq(&self, other: &Self) -> bool { self.load(Ordering::SeqCst) == other.load(Ordering::SeqCst) } } impl Eq for AtomicF32 {} 
impl std::fmt::Debug for AtomicF32 { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self.load(Ordering::Relaxed)) } } #[cfg(feature = "serde")] struct AtomicF32Visitor; #[cfg(feature = "serde")] impl<'de> Visitor<'de> for AtomicF32Visitor { type Value = AtomicF32; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("a 32bit floating point number") } fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_i16<E>(self, value: i16) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_i32<E>(self, value: i32) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = i16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom;
} fn visit_u8<E>(self, value: u8) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_u16<E>(self, value: u16) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = u16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = u16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_f32<E>(self, value: f32) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(value)) } fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E> where E: serde::de::Error, { if value >= f64::from(std::f32::MIN) && value <= f64::from(std::f32::MAX) { Ok(Self::Value::new(value as f32)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } } #[cfg(feature = "serde")] impl<'de> Deserialize<'de> for AtomicF32 { fn deserialize<D>(deserializer: D) -> Result<AtomicF32, D::Error> where D: Deserializer<'de>, { deserializer.deserialize_any(AtomicF32Visitor) } } #[cfg(feature = "serde")] impl Serialize for AtomicF32 { #[inline] fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_some(&self.load(Ordering::SeqCst)) } } #[cfg(test)] mod tests { use super::*; #[test] fn load() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.load(Ordering::SeqCst), 0.0); } #[test] fn store() { let atomic = AtomicF32::new(0.0); atomic.store(3.14, Ordering::SeqCst); assert_eq!(atomic.into_inner(), 3.14); } #[test] fn swap() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.swap(3.14, 
Ordering::SeqCst), 0.0); } #[test] fn compare_and_swap() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.compare_and_swap(0.0, 3.14, Ordering::SeqCst), 0.0); assert_eq!(atomic.compare_and_swap(0.0, 42.0, Ordering::SeqCst), 3.14); } #[test] fn compare_exchange() { let atomic = AtomicF32::new(0.0); assert_eq!( atomic.compare_exchange(0.0, 3.14, Ordering::SeqCst, Ordering::SeqCst), Ok(0.0) ); assert_eq!( atomic.compare_exchange(0.0, 42.0, Ordering::SeqCst, Ordering::SeqCst), Err(3.14) ); } #[test] fn compare_exchange_weak() { let atomic = AtomicF32::new(0.0); loop { if atomic .compare_exchange(0.0, 3.14, Ordering::SeqCst, Ordering::SeqCst) .is_ok() { break; } } assert_eq!(atomic.into_inner(), 3.14); } }
if let Ok(value) = i16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) }
if_condition
[ { "content": "/// Counter primitives are capable of saturating addition and subtraction\n\npub trait CounterPrimitive: PartialEq + Copy + Default {\n\n /// Perform saturating addition on this `CounterPrimitive`\n\n fn saturating_add(self, rhs: Self) -> Self;\n\n /// Perform saturating subtraction on th...
Rust
rita/src/rita_exit/database/sms.rs
darkdrgn2k/althea_rs
a331a1b73339f770c1d2baa1c1e75e825493ed8f
use crate::rita_exit::database::database_tools::text_sent; use crate::rita_exit::database::database_tools::verify_client; use crate::rita_exit::database::get_exit_info; use crate::rita_exit::database::struct_tools::texts_sent; use althea_types::{ExitClientDetails, ExitClientIdentity, ExitState}; use diesel; use diesel::prelude::*; use failure::Error; use phonenumber::PhoneNumber; use reqwest; use settings::exit::PhoneVerifSettings; use std::time::Duration; #[derive(Serialize)] pub struct SmsCheck { api_key: String, verification_code: String, phone_number: String, country_code: String, } fn check_text(number: String, code: String, api_key: String) -> Result<bool, Error> { trace!("About to check text message status for {}", number); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .build()?; let res = client .get("https://api.authy.com/protected/json/phones/verification/check") .form(&SmsCheck { api_key, verification_code: code, phone_number: number.national().to_string(), country_code: number.code().value().to_string(), }) .send()?; Ok(res.status().is_success()) } #[derive(Serialize)] pub struct SmsRequest { api_key: String, via: String, phone_number: String, country_code: String, } fn send_text(number: String, api_key: String) -> Result<(), Error> { info!("Sending message for {}", number); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .build()?; let res = client .post("https://api.authy.com/protected/json/phones/verification/start") .form(&SmsRequest { api_key, via: "sms".to_string(), phone_number: number.national().to_string(), country_code: number.code().value().to_string(), }) .send()?; if res.status().is_success() { Ok(()) } else { bail!("SMS API failure! 
Maybe bad number?") } } pub fn handle_sms_registration( client: &ExitClientIdentity, their_record: &exit_db::models::Client, api_key: String, conn: &PgConnection, ) -> Result<ExitState, Error> { trace!("Handling phone registration for {:?}", client); let text_num = texts_sent(their_record); let sent_more_than_allowed_texts = text_num > 10; match ( client.reg_details.phone.clone(), client.reg_details.phone_code.clone(), sent_more_than_allowed_texts, ) { (Some(number), Some(code), true) => { if check_text(number, code, api_key)? { verify_client(&client, true, conn)?; Ok(ExitState::Registered { our_details: ExitClientDetails { client_internal_ip: their_record.internal_ip.parse()?, }, general_details: get_exit_info(), message: "Registration OK".to_string(), }) } else { Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } } (Some(_number), None, true) => Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }), (Some(number), None, false) => { send_text(number, api_key)?; text_sent(&client, &conn, text_num)?; Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } (Some(number), Some(code), false) => { if check_text(number, code, api_key)? 
{ verify_client(&client, true, conn)?; Ok(ExitState::Registered { our_details: ExitClientDetails { client_internal_ip: their_record.internal_ip.parse()?, }, general_details: get_exit_info(), message: "Registration OK".to_string(), }) } else { Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } } (None, _, _) => Ok(ExitState::Denied { message: "This exit requires a phone number to register!".to_string(), }), } } #[derive(Serialize)] pub struct SmsNotification { #[serde(rename = "To")] to: String, #[serde(rename = "From")] from: String, #[serde(rename = "Body")] body: String, } pub fn send_low_balance_sms(number: &str, phone: PhoneVerifSettings) -> Result<(), Error> { info!("Sending low balance message for {}", number); let url = format!( "https://api.twilio.com/2010-04-01/Accounts/{}/Messages.json", phone.twillio_account_id ); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .build()?; let res = client .post(&url) .basic_auth(phone.twillio_account_id, Some(phone.twillio_auth_token)) .form(&SmsNotification { to: number.to_string(), from: phone.notification_number, body: phone.balance_notification_body, }) .send()?; if res.status().is_success() { Ok(()) } else { bail!("SMS API failure! Maybe bad number?") } }
use crate::rita_exit::database::database_tools::text_sent; use crate::rita_exit::database::database_tools::verify_client; use crate::rita_exit::database::get_exit_info; use crate::rita_exit::database::struct_tools::texts_sent; use althea_types::{ExitClientDetails, ExitClientIdentity, ExitState}; use diesel; use diesel::prelude::*; use failure::Error; use phonenumber::PhoneNumber; use reqwest; use settings::exit::PhoneVerifSettings; use std::time::Duration; #[derive(Serialize)] pub struct SmsCheck { api_key: String, verification_code: String, phone_number: String, country_code: String, } fn check_text(number: String, code: String, api_key: String) -> Result<bool, Error> { trace!("About to check text message status for {}", number); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .build()?; let res = client .get("https://api.authy.com/protected/json/phones/verification/check") .form(&SmsCheck { api_key, verification_code: code, phone_number: number.national().to_string(), country_code: number.code().value().to_string(), }) .send()?; Ok(res.status().is_success()) } #[derive(Serialize)] pub struct SmsRequest { api_key: String, via: String, phone_number: String, country_code: String, } fn send_text(number: String, api_key: String) -> Result<(), Error> { info!("Sending message for {}", number); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .build()?; let res = client .post("https://api.authy.com/protected/json/phones/verification/start") .form(&SmsRequest { api_key, via: "sms".to_string(), phone_number: number.national().to_string(), country_code: number.code().value().to_string(), }) .send()?; if res.status().is_success() { Ok(()) } else { bail!("SMS API failure! 
Maybe bad number?") } } pub fn handle_sms_registration( client: &ExitClientIdentity, their_record: &exit_db::models::Client, api_key: String, conn: &PgConnection, ) -> Result<ExitState, Error> { trace!("Handling phone registration for {:?}", client); let text_num = texts_sent(their_record); let sent_more_than_allowed_texts = text_num > 10; match ( client.reg_details.phone.clone(), client.reg_details.phone_code.clone(), sent_more_than_allowed_texts, ) { (Some(number), Some(code), true) => { if check_text(number, code, api_key)? { verify_client(&client, true, conn)?; Ok(ExitState::Registered { our_details: ExitClientDetails { client_internal_ip: their_record.internal_ip.parse()?, }, general_details: get_exit_info(), message: "Registration OK".to_string(), }) } else { Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } } (Some(_number), None, true) => Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }), (Some(number), None, false) => { send_text(number, api_key)?; text_sent(&client, &conn, text_num)?; Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } (Some(number), Some(code), false) => { if check_text(number, code, api_key)? 
{ verify_client(&client, true, conn)?; Ok(ExitState::Registered { our_details: ExitClientDetails { client_internal_ip: their_record.internal_ip.parse()?, }, general_details: get_exit_info(), message: "Registration OK".to_string(), }) } else { Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } } (None, _, _) => Ok(ExitState::Denied { message: "This exit requires a phone number to register!".to_string(), }), } } #[derive(Serialize)] pub struct SmsNotification { #[serde(rename = "To")] to: String, #[serde(rename = "From")] from: String, #[serde(rename = "Body")] body: String, } pub fn send_low_balance_sms(number: &str, phone: PhoneVerifSettings) -> Result<(), Error> { info!("Sending low balance message for {}", number); let url = format!( "https://api.twilio.com/2010-04-01/Accounts/{}/Messages.json", phone.twillio_account_id ); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .
build()?; let res = client .post(&url) .basic_auth(phone.twillio_account_id, Some(phone.twillio_auth_token)) .form(&SmsNotification { to: number.to_string(), from: phone.notification_number, body: phone.balance_notification_body, }) .send()?; if res.status().is_success() { Ok(()) } else { bail!("SMS API failure! Maybe bad number?") } }
function_block-function_prefix_line
[]
Rust
src/stm32f407.rs
SweGecko/vectrex-cart
8f24cfab5c53c418487d872878d98afc0fdbe5ec
/* * Custom wrappers for some CPU registers */ use volatile_register::{RO, RW, WO}; #[repr(C)] pub struct Syscfg { _fsmc: u32, _pmc: u32, pub exticr1: RW<u16>, _reserved: u16, } const SYSCFG_ADDR: u32 = 0x4001_3800; impl Syscfg { pub fn syscfg() -> &'static mut Syscfg { unsafe { &mut *(SYSCFG_ADDR as *mut Syscfg) } } } #[repr(C)] pub struct Exti { pub imr: RW<u32>, _emr: u32, pub rtsr: RW<u32>, pub ftsr: RW<u32>, _swier: u32, pub pr: RW<u32>, } const EXTI_ADDR: u32 = 0x4001_3C00; impl Exti { pub fn exti() -> &'static mut Exti { unsafe { &mut *(EXTI_ADDR as *mut Exti) } } } #[repr(C)] pub struct Gpio { pub moder0: RW<u16>, pub moder1: RW<u16>, pub otyper0: RW<u8>, pub otyper1: RW<u8>, _reserved1: u16, pub ospeedr0: RW<u16>, pub ospeedr1: RW<u16>, pub pupdr0: RW<u16>, pub pupdr1: RW<u16>, pub idr: RO<u16>, _reserved2: u16, pub odr0: WO<u8>, pub odr1: WO<u8>, _reserved3: u16, pub bsrr: WO<u32>, _lckr: u32, _afrl: u32, _afrh: u32, } impl Gpio { pub fn gpioa() -> &'static mut Gpio { unsafe { &mut *(0x4002_0000 as *mut Gpio) } } pub fn gpiob() -> &'static mut Gpio { unsafe { &mut *(0x4002_0400 as *mut Gpio) } } pub fn gpiod() -> &'static mut Gpio { unsafe { &mut *(0x4002_0C00 as *mut Gpio) } } pub fn gpioe() -> &'static mut Gpio { unsafe { &mut *(0x4002_1000 as *mut Gpio) } } } /* macro_rules! rpt { ( 0; $x:block ) => {}; ( 1; $x:block ) => { $x; }; ( 2; $x:block ) => { $x; $x; }; ( 3; $x:block ) => { rpt!(2; $x); $x; }; ( 4; $x:block) => { rpt!(2; $x); rpt!(2; $x); }; ( 5; $x:block ) => { rpt!(4; $x); $x; }; ( 6 ; $x:block) => { rpt!(4; $x); rpt!(2; $x); }; ( 7 ; $x:block) => { rpt!(6; $x); $x; }; ( 8 ; $x:block) => { rpt!(6; $x); rpt!(2; $x); }; ( 9; $x:block ) => { rpt!(8; $x); $x; }; ( 10; $x:block ) => { rpt!(8; $x); rpt!(2; $x); }; // Repeat $block n*m times (0 <= n <= 10, 0 <= m <= 10) ( $n:tt; $m:tt; $block:block ) => { rpt!($n; { rpt!($m; $block); }); }; } macro_rules! 
rpt_nop { // Nop n times (0 <= n <= 10) ( $n:tt ) => { rpt!($n; { asm::nop() }) }; // Nop n*m times (0 <= n <= 10, 0 <= m <= 10) ( $n:tt; $m:tt ) => { rpt!($n; $m; { asm::nop() }) }; } */
/* * Custom wrappers for some CPU registers */ use volatile_register::{RO, RW, WO}; #[repr(C)] pub struct Syscfg { _fsmc: u32, _pmc: u32, pub exticr1: RW<u16>, _reserved: u16, } const SYSCFG_ADDR: u32 = 0x4001_3800; impl Syscfg { pub fn syscfg() -> &'static mut Syscfg { unsafe { &mut *(SYSCFG_ADDR as *mut Syscfg) } } } #[repr(C)] pub struct Exti { pub imr: RW<u32>, _emr: u32, pub rtsr: RW<u32>, pub ftsr: RW<u32>, _swier: u32, pub pr: RW<u32>, } const EXTI_ADDR: u32 = 0x4001_3C00; impl Exti { pub fn exti() -> &'static mut Exti { unsafe { &mut *(EXTI_ADDR as *mut Exti) } } } #[repr(C)] pub struct Gpio { pub moder0: RW<u16>, pub moder1: RW<u16>, pub otyper0: RW<u8>, pub otyper1: RW<u8>, _reserved1: u16, pub ospeedr0: RW<u16>, pub ospeedr1: RW<u16>, pub pupdr0: RW<u16>, pub pupdr1: RW<u16>, pub idr: RO<u16>, _reserved2: u16, pub odr0: WO<u8>, pub odr1: WO<u8>, _reserved3: u16, pub bsrr: WO<u32>, _lckr: u32, _afrl: u32, _afrh: u32, } impl Gpio { pub fn gpioa() -> &'static mut Gpio { unsafe { &mut *(0x4002_0000 as *mut Gpio) } } pub fn gpiob() -> &'static mut Gpio { unsafe { &mut *(0x4002_0400 as *mut Gpio) } } pub fn gpiod() -> &'static mut Gpio { unsafe { &mut *(0x4002_0C00 as *mut Gpio) } } pub fn gpioe() -> &'static mut Gpio { unsafe { &mut *(0x4002_1000 as *mut Gpio) } } } /* macro_rules! rpt { ( 0; $x:block )
<= 10, 0 <= m <= 10) ( $n:tt; $m:tt ) => { rpt!($n; $m; { asm::nop() }) }; } */
=> {}; ( 1; $x:block ) => { $x; }; ( 2; $x:block ) => { $x; $x; }; ( 3; $x:block ) => { rpt!(2; $x); $x; }; ( 4; $x:block) => { rpt!(2; $x); rpt!(2; $x); }; ( 5; $x:block ) => { rpt!(4; $x); $x; }; ( 6 ; $x:block) => { rpt!(4; $x); rpt!(2; $x); }; ( 7 ; $x:block) => { rpt!(6; $x); $x; }; ( 8 ; $x:block) => { rpt!(6; $x); rpt!(2; $x); }; ( 9; $x:block ) => { rpt!(8; $x); $x; }; ( 10; $x:block ) => { rpt!(8; $x); rpt!(2; $x); }; // Repeat $block n*m times (0 <= n <= 10, 0 <= m <= 10) ( $n:tt; $m:tt; $block:block ) => { rpt!($n; { rpt!($m; $block); }); }; } macro_rules! rpt_nop { // Nop n times (0 <= n <= 10) ( $n:tt ) => { rpt!($n; { asm::nop() }) }; // Nop n*m times (0 <= n
random
[ { "content": "#[derive(Debug, PartialEq, PartialOrd, Eq, Ord)]\n\nstruct CartHdr {\n\n name: Vec<u8>,\n\n year: u32,\n\n path: PathBuf,\n\n}\n\n\n", "file_path": "build.rs", "rank": 0, "score": 29200.684055384212 }, { "content": "fn main() {\n\n // Put the linker script somewhere...
Rust
src/api/context/propagation/composite_propagator.rs
bnjjj/opentelemetry-rust
8a828a81b3c9750ce4ccebe47d104e69929e8ee6
use crate::api::{self, HttpTextFormat}; use std::fmt::Debug; #[derive(Debug)] pub struct HttpTextCompositePropagator { propagators: Vec<Box<dyn HttpTextFormat + Send + Sync>>, } impl HttpTextCompositePropagator { pub fn new(propagators: Vec<Box<dyn HttpTextFormat + Send + Sync>>) -> Self { HttpTextCompositePropagator { propagators } } } impl HttpTextFormat for HttpTextCompositePropagator { fn inject_context(&self, context: &api::Context, carrier: &mut dyn api::Carrier) { for propagator in &self.propagators { propagator.inject_context(context, carrier) } } fn extract_with_context(&self, cx: &api::Context, carrier: &dyn api::Carrier) -> api::Context { self.propagators .iter() .fold(cx.clone(), |current_cx, propagator| { propagator.extract_with_context(&current_cx, carrier) }) } } #[cfg(test)] mod tests { use super::*; use crate::api::trace::b3_propagator::B3Encoding; use crate::api::TraceContextExt; use crate::api::{B3Propagator, Context, SpanContext, SpanId, TraceContextPropagator, TraceId}; use std::collections::HashMap; fn test_data() -> Vec<(&'static str, &'static str)> { vec![ ( "b3", "00000000000000000000000000000001-0000000000000001-0", ), ( "traceparent", "00-00000000000000000000000000000001-0000000000000001-00", ), ] } #[derive(Debug)] struct TestSpan(api::SpanContext); impl api::Span for TestSpan { fn add_event_with_timestamp( &self, _name: String, _timestamp: std::time::SystemTime, _attributes: Vec<api::KeyValue>, ) { } fn span_context(&self) -> api::SpanContext { self.0.clone() } fn is_recording(&self) -> bool { false } fn set_attribute(&self, _attribute: api::KeyValue) {} fn set_status(&self, _code: api::StatusCode, _message: String) {} fn update_name(&self, _new_name: String) {} fn end(&self) {} } #[test] fn inject_multiple_propagators() { let b3 = B3Propagator::with_encoding(B3Encoding::SingleHeader); let trace_context = TraceContextPropagator::new(); let composite_propagator = HttpTextCompositePropagator { propagators: vec![Box::new(b3), 
Box::new(trace_context)], }; let cx = Context::default().with_span(TestSpan(SpanContext::new( TraceId::from_u128(1), SpanId::from_u64(1), 0, false, ))); let mut carrier = HashMap::new(); composite_propagator.inject_context(&cx, &mut carrier); for (header_name, header_value) in test_data() { assert_eq!(carrier.get(header_name), Some(&header_value.to_string())); } } #[test] fn extract_multiple_propagators() { let b3 = B3Propagator::with_encoding(B3Encoding::SingleHeader); let trace_context = TraceContextPropagator::new(); let composite_propagator = HttpTextCompositePropagator { propagators: vec![Box::new(b3), Box::new(trace_context)], }; for (header_name, header_value) in test_data() { let mut carrier = HashMap::new(); carrier.insert(header_name.to_string(), header_value.to_string()); assert_eq!( composite_propagator.extract(&carrier).remote_span_context(), Some(&SpanContext::new( TraceId::from_u128(1), SpanId::from_u64(1), 0, true, )) ); } } }
use crate::api::{self, HttpTextFormat}; use std::fmt::Debug; #[derive(Debug)] pub struct HttpTextCompositePropagator { propagators: Vec<Box<dyn HttpTextFormat + Send + Sync>>, } impl HttpTextCompositePropagator { pub fn new(propagators: Vec<Box<dyn HttpTextFormat + Send + Sync>>) -> Self { HttpTextCompositePropagator { propagators } } } impl HttpTextFormat for HttpTextCompositePropagator { fn inject_context(&self, context: &api::Context, carrier: &mut dyn api::Carrier) { for propagator in &self.propagators { propagator.inject_context(context, carrier) } } fn extract_with_context(&self, cx: &api::Context, carrier: &dyn api::Carrier) -> api::Context { self.propagators .iter() .fold(cx.clone(), |current_cx, propagator| { propagator.extract_with_context(&current_cx, carrier) }) } } #[cfg(test)] mod tests { use super::*; use crate::api::trace::b3_propagator::B3Encoding; use crate::api::TraceContextExt; use crate::api::{B3Propagator, Context, SpanContext, SpanId, TraceContextPropagator, TraceId}; use std::collections::HashMap; fn test_data() -> Vec<(&'static str, &'static str)> { vec![ ( "b3", "00000000000000000000000000000001-0000000000000001-0", ), ( "traceparent", "00-00000000000000000000000000000001-0000000000000001-00", ), ] } #[derive(Debug)] struct TestSpan(api::SpanContext); impl api::Span for TestSpan { fn add_event_with_timestamp( &self, _name: String, _timestamp: std::time::SystemTime, _attributes: Vec<api::KeyValue>, ) { } fn span_context(&self) -> api::SpanContext { self.0.clone() } fn is_recording(&self) -> bool { false } fn set_attribute(&self, _attribute: api::KeyValue) {} fn set_status(&self, _code: api::StatusCode, _message: String) {} fn update_name(&self, _new_name: String) {} fn end(&self) {} } #[test] fn inject_multiple_propagators() { let b3 = B3Propagator::with_encoding(B3Encoding::SingleHeader); let trace_context = TraceContextPropagator::new(); let composite_propagato
ace_context = TraceContextPropagator::new(); let composite_propagator = HttpTextCompositePropagator { propagators: vec![Box::new(b3), Box::new(trace_context)], }; for (header_name, header_value) in test_data() { let mut carrier = HashMap::new(); carrier.insert(header_name.to_string(), header_value.to_string()); assert_eq!( composite_propagator.extract(&carrier).remote_span_context(), Some(&SpanContext::new( TraceId::from_u128(1), SpanId::from_u64(1), 0, true, )) ); } } }
r = HttpTextCompositePropagator { propagators: vec![Box::new(b3), Box::new(trace_context)], }; let cx = Context::default().with_span(TestSpan(SpanContext::new( TraceId::from_u128(1), SpanId::from_u64(1), 0, false, ))); let mut carrier = HashMap::new(); composite_propagator.inject_context(&cx, &mut carrier); for (header_name, header_value) in test_data() { assert_eq!(carrier.get(header_name), Some(&header_value.to_string())); } } #[test] fn extract_multiple_propagators() { let b3 = B3Propagator::with_encoding(B3Encoding::SingleHeader); let tr
random
[ { "content": "/// Sets the given [`HttpTextFormat`] propagator as the current global propagator.\n\n///\n\n/// [`HttpTextFormat`]: ../api/context/propagation/trait.HttpTextFormat.html\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use opentelemetry::{api, global};\n\n///\n\n/// // create your http text propa...
Rust
src/hubris/elaborate/pattern_matching/mod.rs
jroesch/hubris
0333d4d26b7d66924acfe065c94d914e0ba011e4
use super::super::ast::{self}; use super::super::core::{self, Term}; use super::{LocalElabCx, Error}; use std::collections::HashMap; struct PatternMatchCx<'ecx, 'cx: 'ecx> { elab_cx: &'ecx mut LocalElabCx<'cx>, } mod renamer; mod simplify; use self::simplify::*; impl<'ecx, 'cx: 'ecx>PatternMatchCx<'ecx, 'cx> { fn new(elab_cx: &'ecx mut LocalElabCx<'cx>) -> PatternMatchCx<'ecx, 'cx> { PatternMatchCx { elab_cx: elab_cx, } } #[inline] fn enter_pattern_scope<F, R>(&mut self, name_and_type: Vec<(ast::Name, core::Term)>, body: F) -> Result<R, Error> where F: FnOnce(&mut PatternMatchCx, Vec<core::Name>) -> Result<R, Error> { let mut locals = vec![]; let old_context = self.elab_cx.locals.clone(); let old_locals_in_order = self.elab_cx.locals_in_order.clone(); for (name, ty) in name_and_type { let repr = match name.clone().repr { ast::NameKind::Qualified(..) => panic!(), ast::NameKind::Unqualified(s) => s, ast::NameKind::Placeholder => "_".to_string(), }; let local = self.elab_cx.cx.ty_cx.local_with_repr_and_mode(repr, ty, core::BindingMode::Explicit); self.elab_cx.locals.insert(name, local.clone()); self.elab_cx.locals_in_order.push(local.clone()); locals.push(local); } let result = try!(body(self, locals)); self.elab_cx.locals = old_context; self.elab_cx.locals_in_order = old_locals_in_order; Ok(result) } fn elaborate_simple_match(&mut self, simple_match: SimpleMatch) -> Result<core::Term, Error> { let SimpleMatch { scrutinee, cases, pattern_type, } = simple_match; let escrutinee = try!(self.elab_cx.elaborate_term(scrutinee)); let scrutinee_ty = try!(self.elab_cx.cx.ty_cx.type_check_term(&escrutinee, None)).1; let (inductive_ty, args) = scrutinee_ty.uncurry(); let inductive_ty = match inductive_ty { Term::Var { name } => name, other => panic!("{}", other), }; let datatype = match self.elab_cx.cx.ty_cx.types.get(&inductive_ty) { None => panic!("can't fine dt decl"), Some(dt) => dt.clone(), }; let ctor_map : HashMap<_, _> = datatype.ctors .clone() .into_iter() .collect(); 
let cases : Vec<_> = try!(cases.into_iter() .map(|c| self.elaborate_simple_case(c, &scrutinee_ty, &ctor_map)) .collect()); match pattern_type { PatternType::Cases => { let cases_on = inductive_ty.in_scope("cases_on".to_string()).unwrap(); let head = try!(self.elab_cx.apply_implicit_args(cases_on.to_term())); let mut args = vec![escrutinee]; args.extend(cases.into_iter()); let result = Term::apply_all(head, args); debug!("elaborated_match: {}", result); Ok(result) } } } fn simple_pattern_binders(&mut self, simple_pattern: SimplePattern, scrutinee_ty: &core::Term, ctor_map: &HashMap<core::Name, core::Term>) -> Result<Vec<(ast::Name, core::Term)>, Error> { match simple_pattern { SimplePattern::Name(n) => { let elab_name = try!(self.elab_cx.cx.elaborate_global_name(n.clone())); match ctor_map.get(&elab_name) { None => return Ok(vec![(n, scrutinee_ty.clone())]), Some(ctor_ty) => { return Ok(vec![]); } } } SimplePattern::Constructor(ctor, args) => { let elab_name = try!(self.elab_cx.cx.elaborate_global_name(ctor.clone())); match ctor_map.get(&elab_name) { None => panic!("not the right"), Some(ctor_ty) => { debug!("{:?}", ctor_ty.binders()); let (inductive_ty, i_args) = scrutinee_ty.uncurry(); let mut ctor_ty = ctor_ty.clone(); for arg in i_args { match ctor_ty { Term::Forall { term, .. 
} => { debug!("arg {}", arg); ctor_ty = term.instantiate(&arg); } _ => panic!() } } debug!("ctor_ty {}", ctor_ty); let binders = ctor_ty.binders() .unwrap_or(vec![]) .iter() .cloned() .zip(args.into_iter()) .map(|(t, n)| { (n, t.clone()) }).collect(); return Ok(binders); } } } } } fn elaborate_simple_case(&mut self, simple_case: SimpleCase, scrutinee_ty: &core::Term, ctor_map: &HashMap<core::Name, core::Term>) -> Result<core::Term, Error> { let SimpleCase { pattern, rhs, } = simple_case; debug!("pattern: {} rhs: {}", pattern, rhs); let binders = try!(self.simple_pattern_binders( pattern, scrutinee_ty, ctor_map)); for &(ref n, ref ty) in &binders { debug!("{} {}", n, ty); } self.enter_pattern_scope(binders, move |pat_cx, names| { match rhs { SimpleMatchArm::Term(rhs) => Ok(Term::abstract_lambda(names, try!(pat_cx.elab_cx.elaborate_term(rhs)))), SimpleMatchArm::Match(mat) => pat_cx.elaborate_simple_match(mat) } }) } } pub fn elaborate_pattern_match<'ecx>( elab_cx: &mut LocalElabCx<'ecx>, scrutinee: ast::Term, cases: Vec<ast::Case>) -> Result<Term, Error> { let mut pmcx = PatternMatchCx::new(elab_cx); let simplified_match = simplify_match(scrutinee, cases); debug!("simplified_match: {}", simplified_match); pmcx.elaborate_simple_match(simplified_match) }
use super::super::ast::{self}; use super::super::core::{self, Term}; use super::{LocalElabCx, Error}; use std::collections::HashMap; struct PatternMatchCx<'ecx, 'cx: 'ecx> { elab_cx: &'ecx mut LocalElabCx<'cx>, } mod renamer; mod simplify; use self::simplify::*; impl<'ecx, 'cx: 'ecx>PatternMatchCx<'ecx, 'cx> { fn new(elab_cx: &'ecx mut LocalElabCx<'cx>) -> PatternMatchCx<'ecx, 'cx> { PatternMatchCx { elab_cx: elab_cx, } } #[inline] fn enter_pattern_scope<F, R>(&mut self, name_and_type: Vec<(ast::Name, core::Term)>, body: F) -> Result<R, Error> where F: FnOnce(&mut PatternMatchCx, Vec<core::Name>) -> Result<R, Error> { let mut locals = vec![]; let old_context = self.elab_cx.locals.clone(); let old_locals_in_order = self.elab_cx.locals_in_order.clone(); for (name, ty) in name_and_type { let repr = match name.clone().repr { ast::NameKind::Qualified(..) => panic!(), ast::NameKind::Unqualified(s) => s, ast::NameKind::Placeholder => "_".to_string(), }; let local = self.elab_cx.cx.ty_cx.local_with_repr_and_mode(repr, ty, core::BindingMode::Explicit); self.elab_cx.locals.insert(name, local.clone()); self.elab_cx.locals_in_order.push(local.clone()); locals.push(local); } let result = try!(body(self, locals)); self.elab_cx.locals = old_context; self.elab_cx.locals_in_order = old_locals_in_order; Ok(result) } fn elaborate_simple_match(&mut self, simple_match: SimpleMatch) -> Result<core::Term, Error> { let SimpleMatch { scrutinee, cases, pattern_type, } = simple_match; let escrutinee = try!(self.elab_cx.elaborate_term(scrutinee)); let scrutinee_ty = try!(self.elab_cx.cx.ty_cx.type_check_term(&escrutinee, None)).1; let (inductive_ty, args) = scrutinee_ty.uncurry(); let inductive_ty = match inductive_ty { Term::Var { name } => name, other => panic!("{}", other), }; let datatype = match self.elab_cx.cx.ty_cx.types.get(&inductive_ty) { None => panic!("can't fine dt decl"), Some(dt) => dt.clone(), }; let ctor_map : HashMap<_, _> = datatype.ctors .clone() .into_iter() .collect(); 
let cases : Vec<_> = try!(cases.into_iter() .map(|c| self.elaborate_simple_case(c, &scrutinee_ty, &ctor_map)) .collect()); match pattern_type { PatternType::Cases => { let cases_on = inductive_ty.in_scope("cases_on".to_string()).unwrap(); let head = try!(self.elab_cx.apply_implicit_args(cases_on.to_term())); let mut args = vec![escrutinee]; args.extend(cases.into_iter()); let result = Term::apply_all(head, args); debug!("elaborated_match: {}", result); Ok(result) } } } fn simple_pattern_binders(&mut self, simple_pattern: SimplePattern, scrutinee_ty: &core::Term, ctor_map: &HashMap<core::Name, core::Term>) -> Result<Vec<(ast::Name, core::Term)>, Error> { match simple_pattern { SimplePattern::Name(n) => { let elab_name = try!(self.elab_cx.cx.elaborate_global_name(n.clone())); match ctor_map.get(&elab_name) { None => return Ok(vec![(n, scrutinee_ty.clone())]), Some(ctor_ty) => { return Ok(vec![]); } } } SimplePattern::Constructor(ctor, args) => { let elab_name = try!(self.elab_cx.cx.elaborate_global_name(ctor.clone())); match ctor_map.get(&elab_name) { None => panic!("not the right"), Some(ctor_ty) => { debug!("{:?}", ctor_ty.binders()); let (inductive_ty, i_args) = scrutinee_ty.uncurry(); let mut ctor_ty = ctor_ty.clone(); for arg in i_args { match ctor_ty { Term::Forall { term, .. } => { debug!("arg {}", arg); ctor_ty = term.instantiate(&arg); } _ => panic!() } } debug!("ctor_ty {}", ctor_ty); let binders = ctor_ty.binders() .unwrap_or(vec![]) .iter() .cloned() .zip(args.into_iter()) .
fn elaborate_simple_case(&mut self, simple_case: SimpleCase, scrutinee_ty: &core::Term, ctor_map: &HashMap<core::Name, core::Term>) -> Result<core::Term, Error> { let SimpleCase { pattern, rhs, } = simple_case; debug!("pattern: {} rhs: {}", pattern, rhs); let binders = try!(self.simple_pattern_binders( pattern, scrutinee_ty, ctor_map)); for &(ref n, ref ty) in &binders { debug!("{} {}", n, ty); } self.enter_pattern_scope(binders, move |pat_cx, names| { match rhs { SimpleMatchArm::Term(rhs) => Ok(Term::abstract_lambda(names, try!(pat_cx.elab_cx.elaborate_term(rhs)))), SimpleMatchArm::Match(mat) => pat_cx.elaborate_simple_match(mat) } }) } } pub fn elaborate_pattern_match<'ecx>( elab_cx: &mut LocalElabCx<'ecx>, scrutinee: ast::Term, cases: Vec<ast::Case>) -> Result<Term, Error> { let mut pmcx = PatternMatchCx::new(elab_cx); let simplified_match = simplify_match(scrutinee, cases); debug!("simplified_match: {}", simplified_match); pmcx.elaborate_simple_match(simplified_match) }
map(|(t, n)| { (n, t.clone()) }).collect(); return Ok(binders); } } } } }
function_block-function_prefix_line
[ { "content": "pub fn simplify_match(scrutinee: ast::Term, cases: Vec<ast::Case>) -> SimpleMatch {\n\n let mut simple_cases = vec![];\n\n\n\n for case in cases {\n\n let rhs = SimpleMatchArm::Term(case.rhs);\n\n simple_cases.push(simplify_pattern(case.pattern, rhs));\n\n }\n\n\n\n let s...
Rust
src/model/category_lookup.rs
creinig/naday
975eb02a0e2e71bfe63ed8efd29141ddc5d77f7d
use super::Category; use anyhow::{bail, Result}; use std::collections::HashMap; use std::rc::Rc; #[derive(Debug)] pub struct CategoryLookup { categories: HashMap<String, Rc<Category>>, by_name_or_alias: HashMap<String, Rc<Category>>, } impl CategoryLookup { pub fn new() -> CategoryLookup { CategoryLookup { categories: HashMap::new(), by_name_or_alias: HashMap::new(), } } pub fn add(&mut self, category: Category) -> Result<()> { if self.categories.contains_key(&category.name.to_lowercase()) { return Ok(()); } for name in category.all_names() { if self.by_name_or_alias.contains_key(&name.to_lowercase()) { bail!( "Duplicate category key: '{}' is used by '{}' and '{}'", name, category.name, self.find(name).unwrap().name ); } } let cat_rc = Rc::new(category); self.categories .insert(String::from(&cat_rc.name).to_lowercase(), cat_rc.clone()); for name in cat_rc.all_names() { self.by_name_or_alias .insert(name.to_string().to_lowercase(), cat_rc.clone()); } Ok(()) } pub fn find<S: AsRef<str>>(&self, alias_or_name: S) -> Option<Rc<Category>> { let lc = alias_or_name.as_ref().to_lowercase(); match self.by_name_or_alias.get(&lc) { Some(cat) => Some(cat.clone()), None => None, } } #[cfg(test)] pub fn len(&self) -> usize { self.categories.len() } pub fn iter(&self) -> std::collections::hash_map::Values<'_, String, Rc<Category>> { self.categories.values() } } #[cfg(test)] mod tests { use super::*; #[test] fn basic_add_and_find() { let mut lookup = CategoryLookup::new(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Situps", 1.0, vec!["su", "si"])) .unwrap(); lookup .add(Category::new("Burpees", 1.5, vec!["bu", "oof"])) .unwrap(); assert_eq!("Burpees", &(lookup.find("Burpees").unwrap().name)); assert_eq!("Burpees", &(lookup.find("bUrPeEs").unwrap().name)); assert_eq!("Burpees", &(lookup.find("oof").unwrap().name)); assert_eq!("Burpees", &(lookup.find("OOF").unwrap().name)); assert_eq!("Situps", 
&(lookup.find("su").unwrap().name)); assert_eq!("Pushups", &(lookup.find("push").unwrap().name)); } #[test] fn duplicates() { let mut lookup = CategoryLookup::new(); assert_eq!(0, lookup.len()); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); assert_eq!(1, lookup.len()); } #[test] fn duplicate_alias() { let mut lookup = CategoryLookup::new(); assert_eq!(0, lookup.len()); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Pushdowns", 1.0, vec!["pd", "push"])) .expect_err("Should return an error for duplicate key"); } }
use super::Category; use anyhow::{bail, Result}; use std::collections::HashMap; use std::rc::Rc; #[derive(Debug)] pub struct CategoryLookup { categories: HashMap<String, Rc<Category>>, by_name_or_alias: HashMap<String, Rc<Category>>, } impl CategoryLookup { pub fn new() -> CategoryLookup { CategoryLookup { categories: HashMap::new(), by_name_or_alias: HashMap::new(), } } pub fn add(&mut self, category: Category) -> Result<()> { if self.categories.contains_key(&category.name.to_lowercase()) { return Ok(()); } for name in category.all_names() { if self.by_name_or_alias.contains_key(&name.to_lowercase()) { bail!( "Duplicate category key: '{}' is used by '{}' and '{}'", name, category.name, self.find(name).unwrap().name ); } } let cat_rc = Rc::new(category); self.categories .insert(String::from(&cat_rc.name).to_lowercase(), cat_rc.clone()); for name in cat_rc.all_names() { self.by_name_or_alias .insert(name.to_string().to_low
nwrap(); lookup .add(Category::new("Pushdowns", 1.0, vec!["pd", "push"])) .expect_err("Should return an error for duplicate key"); } }
ercase(), cat_rc.clone()); } Ok(()) } pub fn find<S: AsRef<str>>(&self, alias_or_name: S) -> Option<Rc<Category>> { let lc = alias_or_name.as_ref().to_lowercase(); match self.by_name_or_alias.get(&lc) { Some(cat) => Some(cat.clone()), None => None, } } #[cfg(test)] pub fn len(&self) -> usize { self.categories.len() } pub fn iter(&self) -> std::collections::hash_map::Values<'_, String, Rc<Category>> { self.categories.values() } } #[cfg(test)] mod tests { use super::*; #[test] fn basic_add_and_find() { let mut lookup = CategoryLookup::new(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Situps", 1.0, vec!["su", "si"])) .unwrap(); lookup .add(Category::new("Burpees", 1.5, vec!["bu", "oof"])) .unwrap(); assert_eq!("Burpees", &(lookup.find("Burpees").unwrap().name)); assert_eq!("Burpees", &(lookup.find("bUrPeEs").unwrap().name)); assert_eq!("Burpees", &(lookup.find("oof").unwrap().name)); assert_eq!("Burpees", &(lookup.find("OOF").unwrap().name)); assert_eq!("Situps", &(lookup.find("su").unwrap().name)); assert_eq!("Pushups", &(lookup.find("push").unwrap().name)); } #[test] fn duplicates() { let mut lookup = CategoryLookup::new(); assert_eq!(0, lookup.len()); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); assert_eq!(1, lookup.len()); } #[test] fn duplicate_alias() { let mut lookup = CategoryLookup::new(); assert_eq!(0, lookup.len()); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .u
random
[ { "content": "/// Read all categories and return a populated lookup structure\n\npub fn read_categories(cfg: &Config) -> Result<CategoryLookup> {\n\n let categories = category::read_categories(cfg)?;\n\n let mut lookup = CategoryLookup::new();\n\n\n\n for category in categories {\n\n lookup.add(...
Rust
program/src/orderbook.rs
solindex/orderbook
ed5e4246aa8ec5b5eef84aa0982bc50622cb9b5d
use crate::{ critbit::{LeafNode, Node, NodeHandle, Slab}, error::AoError, processor::new_order, state::{Event, EventQueue, SelfTradeBehavior, Side}, utils::{fp32_div, fp32_mul}, }; use borsh::{BorshDeserialize, BorshSerialize}; use solana_program::{account_info::AccountInfo, msg, program_error::ProgramError}; #[derive(BorshSerialize, BorshDeserialize, Debug)] pub struct OrderSummary { pub posted_order_id: Option<u128>, #[allow(missing_docs)] pub total_base_qty: u64, #[allow(missing_docs)] pub total_quote_qty: u64, #[allow(missing_docs)] pub total_base_qty_posted: u64, } pub const ORDER_SUMMARY_SIZE: u32 = 41; pub(crate) struct OrderBookState<'a> { bids: Slab<'a>, asks: Slab<'a>, callback_id_len: usize, } impl<'ob> OrderBookState<'ob> { pub(crate) fn new_safe( bids_account: &AccountInfo<'ob>, asks_account: &AccountInfo<'ob>, callback_info_len: usize, callback_id_len: usize, ) -> Result<Self, ProgramError> { let bids = Slab::new_from_acc_info(bids_account, callback_info_len); let asks = Slab::new_from_acc_info(asks_account, callback_info_len); if !(bids.check(Side::Bid) && asks.check(Side::Ask)) { return Err(ProgramError::InvalidAccountData); } Ok(Self { bids, asks, callback_id_len, }) } pub fn find_bbo(&self, side: Side) -> Option<NodeHandle> { match side { Side::Bid => self.bids.find_max(), Side::Ask => self.asks.find_min(), } } #[cfg(feature = "no-entrypoint")] pub fn get_spread(&self) -> (Option<u64>, Option<u64>) { let best_bid_price = self .bids .find_max() .map(|h| self.bids.get_node(h).unwrap().as_leaf().unwrap().price()); let best_ask_price = self .asks .find_max() .map(|h| self.asks.get_node(h).unwrap().as_leaf().unwrap().price()); (best_bid_price, best_ask_price) } pub fn get_tree(&mut self, side: Side) -> &mut Slab<'ob> { match side { Side::Bid => &mut self.bids, Side::Ask => &mut self.asks, } } pub(crate) fn commit_changes(&self) { self.bids.write_header(); self.asks.write_header(); } pub(crate) fn new_order( &mut self, params: new_order::Params, 
event_queue: &mut EventQueue, min_base_order_size: u64, ) -> Result<OrderSummary, AoError> { let new_order::Params { max_base_qty, max_quote_qty, side, limit_price, callback_info, post_only, post_allowed, self_trade_behavior, mut match_limit, } = params; let mut base_qty_remaining = max_base_qty; let mut quote_qty_remaining = max_quote_qty; let mut crossed = true; loop { if match_limit == 0 { break; } let best_bo_h = match self.find_bbo(side.opposite()) { None => { crossed = false; break; } Some(h) => h, }; let mut best_bo_ref = self .get_tree(side.opposite()) .get_node(best_bo_h) .unwrap() .as_leaf() .unwrap() .to_owned(); let trade_price = best_bo_ref.price(); crossed = match side { Side::Bid => limit_price >= trade_price, Side::Ask => limit_price <= trade_price, }; if post_only || !crossed { break; } let offer_size = best_bo_ref.base_quantity; let base_trade_qty = offer_size .min(base_qty_remaining) .min(fp32_div(quote_qty_remaining, best_bo_ref.price())); if base_trade_qty == 0 { break; } if self_trade_behavior != SelfTradeBehavior::DecrementTake { let order_would_self_trade = &callback_info[..self.callback_id_len] == (&self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) as &[u8]); if order_would_self_trade { let best_offer_id = best_bo_ref.order_id(); let cancelled_provide_base_qty; match self_trade_behavior { SelfTradeBehavior::CancelProvide => { cancelled_provide_base_qty = std::cmp::min(base_qty_remaining, best_bo_ref.base_quantity); } SelfTradeBehavior::AbortTransaction => return Err(AoError::WouldSelfTrade), SelfTradeBehavior::DecrementTake => unreachable!(), }; let remaining_provide_base_qty = best_bo_ref.base_quantity - cancelled_provide_base_qty; let delete = remaining_provide_base_qty == 0; let provide_out = Event::Out { side: side.opposite(), delete, order_id: best_offer_id, base_size: cancelled_provide_base_qty, callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as 
usize) .to_owned(), }; event_queue .push_back(provide_out) .map_err(|_| AoError::EventQueueFull)?; if delete { self.get_tree(side.opposite()) .remove_by_key(best_offer_id) .unwrap(); } else { best_bo_ref.set_base_quantity(remaining_provide_base_qty); self.get_tree(side.opposite()) .write_node(&Node::Leaf(best_bo_ref), best_bo_h); } continue; } } let quote_maker_qty = fp32_mul(base_trade_qty, trade_price); let maker_fill = Event::Fill { taker_side: side, maker_callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) .to_owned(), taker_callback_info: callback_info.clone(), maker_order_id: best_bo_ref.order_id(), quote_size: quote_maker_qty, base_size: base_trade_qty, }; event_queue .push_back(maker_fill) .map_err(|_| AoError::EventQueueFull)?; best_bo_ref.set_base_quantity(best_bo_ref.base_quantity - base_trade_qty); base_qty_remaining -= base_trade_qty; quote_qty_remaining -= quote_maker_qty; if best_bo_ref.base_quantity <= min_base_order_size { let best_offer_id = best_bo_ref.order_id(); let cur_side = side.opposite(); let out_event = Event::Out { side: cur_side, order_id: best_offer_id, base_size: best_bo_ref.base_quantity, callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) .to_owned(), delete: true, }; self.get_tree(cur_side) .remove_by_key(best_offer_id) .unwrap(); event_queue .push_back(out_event) .map_err(|_| AoError::EventQueueFull)?; } else { self.get_tree(side.opposite()) .write_node(&Node::Leaf(best_bo_ref), best_bo_h); } match_limit -= 1; } let base_qty_to_post = std::cmp::min( fp32_div(quote_qty_remaining, limit_price), base_qty_remaining, ); if crossed || !post_allowed || base_qty_to_post <= min_base_order_size { return Ok(OrderSummary { posted_order_id: None, total_base_qty: max_base_qty - base_qty_remaining, total_quote_qty: max_quote_qty - quote_qty_remaining, total_base_qty_posted: 0, }); } let new_leaf_order_id = 
event_queue.gen_order_id(limit_price, side); let callback_info_offset = self .get_tree(side) .write_callback_info(&callback_info) .unwrap(); let new_leaf = Node::Leaf(LeafNode { key: new_leaf_order_id, callback_info_pt: callback_info_offset, base_quantity: base_qty_to_post, }); let insert_result = self.get_tree(side).insert_leaf(&new_leaf); if let Err(AoError::SlabOutOfSpace) = insert_result { msg!("Orderbook is full! booting lest aggressive orders..."); let order = match side { Side::Bid => self.get_tree(Side::Bid).remove_min().unwrap(), Side::Ask => self.get_tree(Side::Ask).remove_max().unwrap(), }; let l = order.as_leaf().unwrap(); let out = Event::Out { side: Side::Bid, delete: true, order_id: l.order_id(), base_size: l.base_quantity, callback_info: self .get_tree(side) .get_callback_info(l.callback_info_pt as usize) .to_owned(), }; event_queue .push_back(out) .map_err(|_| AoError::EventQueueFull)?; self.get_tree(side).insert_leaf(&new_leaf).unwrap(); } else { insert_result.unwrap(); } base_qty_remaining -= base_qty_to_post; quote_qty_remaining -= fp32_mul(base_qty_to_post, limit_price); Ok(OrderSummary { posted_order_id: Some(new_leaf_order_id), total_base_qty: max_base_qty - base_qty_remaining, total_quote_qty: max_quote_qty - quote_qty_remaining, total_base_qty_posted: base_qty_to_post, }) } pub fn is_empty(&self) -> bool { self.asks.root().is_none() && self.bids.root().is_none() } }
use crate::{ critbit::{LeafNode, Node, NodeHandle, Slab}, error::AoError, processor::new_order, state::{Event, EventQueue, SelfTradeBehavior, Side}, utils::{fp32_div, fp32_mul}, }; use borsh::{BorshDeserialize, BorshSerialize}; use solana_program::{account_info::AccountInfo, msg, program_error::ProgramError}; #[derive(BorshSerialize, BorshDeserialize, Debug)] pub struct OrderSummary { pub posted_order_id: Option<u128>, #[allow(missing_docs)] pub total_base_qty: u64, #[allow(missing_docs)] pub total_quote_qty: u64, #[allow(missing_docs)] pub total_base_qty_posted: u64, } pub const ORDER_SUMMARY_SIZE: u32 = 41; pub(crate) struct OrderBookState<'a> { bids: Slab<'a>, asks: Slab<'a>, callback_id_len: usize, } impl<'ob> OrderBookState<'ob> { pub(crate) fn new_safe( bids_account: &AccountInfo<'ob>, asks_account: &AccountInfo<'ob>, callback_info_len: usize, callback_id_len: usize, ) -> Result<Self, ProgramError> { let bids = Slab::new_from_acc_info(bids_account, callback_info_len); let asks = Slab::new_from_acc_info(asks_account, callback_info_len); if !(bids.check(Side::Bid) && asks.check(Side::Ask)) { return Err(ProgramError::InvalidAccountData); } Ok(Self { bids, asks, callback_id_len, }) } pub fn find_bbo(&self, side: Side) -> Option<NodeHandle> { match side { Side::Bid => self.bids.find_max(), Side::Ask => self.asks.find_min(), } } #[cfg(feature = "no-entrypoint")] pub fn get_spread(&self) -> (Option<u64>, Option<u64>) { let best_bid_price = self .bids .find_max() .map(|h| self.bids.get_node(h).unwrap().as_leaf().unwrap().price()); let best_ask_price = self .asks .find_max() .map(|h| self.asks.get_node(h).unwrap().as_leaf().unwrap().price()); (best_bid_price, best_ask_price) } pub fn get_tree(&mut self, side: Side) -> &mut Slab<'ob> { match side { Side::Bid => &mut self.bids, Side::Ask => &mut self.asks, } } pub(crate) fn commit_changes(&self) { self.bids.write_header(); self.asks.write_header(); } pub(crate) fn new_order( &mut self, params: new_order::Params, 
event_queue: &mut EventQueue, min_base_order_size: u64, ) -> Result<OrderSummary, AoError> { let new_order::Params { max_base_qty, max_quote_qty, side, limit_price, callback_info, post_only, post_allowed, self_trade_behavior, mut match_limit, } = params; let mut base_qty_remaining = max_base_qty; let mut quote_qty_remaining = max_quote_qty; let mut crossed = true; loop { if match_limit == 0 { break; } let best_bo_h = match self.find_bbo(side.opposite()) { None => { crossed = false; break; } Some(h) => h, }; let mut best_bo_ref = self .get_tree(side.opposite()) .get_node(best_bo_h) .unwrap() .as_leaf() .unwrap() .to_owned(); let trade_price = best_bo_ref.price(); crossed =
; if post_only || !crossed { break; } let offer_size = best_bo_ref.base_quantity; let base_trade_qty = offer_size .min(base_qty_remaining) .min(fp32_div(quote_qty_remaining, best_bo_ref.price())); if base_trade_qty == 0 { break; } if self_trade_behavior != SelfTradeBehavior::DecrementTake { let order_would_self_trade = &callback_info[..self.callback_id_len] == (&self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) as &[u8]); if order_would_self_trade { let best_offer_id = best_bo_ref.order_id(); let cancelled_provide_base_qty; match self_trade_behavior { SelfTradeBehavior::CancelProvide => { cancelled_provide_base_qty = std::cmp::min(base_qty_remaining, best_bo_ref.base_quantity); } SelfTradeBehavior::AbortTransaction => return Err(AoError::WouldSelfTrade), SelfTradeBehavior::DecrementTake => unreachable!(), }; let remaining_provide_base_qty = best_bo_ref.base_quantity - cancelled_provide_base_qty; let delete = remaining_provide_base_qty == 0; let provide_out = Event::Out { side: side.opposite(), delete, order_id: best_offer_id, base_size: cancelled_provide_base_qty, callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) .to_owned(), }; event_queue .push_back(provide_out) .map_err(|_| AoError::EventQueueFull)?; if delete { self.get_tree(side.opposite()) .remove_by_key(best_offer_id) .unwrap(); } else { best_bo_ref.set_base_quantity(remaining_provide_base_qty); self.get_tree(side.opposite()) .write_node(&Node::Leaf(best_bo_ref), best_bo_h); } continue; } } let quote_maker_qty = fp32_mul(base_trade_qty, trade_price); let maker_fill = Event::Fill { taker_side: side, maker_callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) .to_owned(), taker_callback_info: callback_info.clone(), maker_order_id: best_bo_ref.order_id(), quote_size: quote_maker_qty, base_size: base_trade_qty, }; event_queue .push_back(maker_fill) .map_err(|_| 
AoError::EventQueueFull)?; best_bo_ref.set_base_quantity(best_bo_ref.base_quantity - base_trade_qty); base_qty_remaining -= base_trade_qty; quote_qty_remaining -= quote_maker_qty; if best_bo_ref.base_quantity <= min_base_order_size { let best_offer_id = best_bo_ref.order_id(); let cur_side = side.opposite(); let out_event = Event::Out { side: cur_side, order_id: best_offer_id, base_size: best_bo_ref.base_quantity, callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) .to_owned(), delete: true, }; self.get_tree(cur_side) .remove_by_key(best_offer_id) .unwrap(); event_queue .push_back(out_event) .map_err(|_| AoError::EventQueueFull)?; } else { self.get_tree(side.opposite()) .write_node(&Node::Leaf(best_bo_ref), best_bo_h); } match_limit -= 1; } let base_qty_to_post = std::cmp::min( fp32_div(quote_qty_remaining, limit_price), base_qty_remaining, ); if crossed || !post_allowed || base_qty_to_post <= min_base_order_size { return Ok(OrderSummary { posted_order_id: None, total_base_qty: max_base_qty - base_qty_remaining, total_quote_qty: max_quote_qty - quote_qty_remaining, total_base_qty_posted: 0, }); } let new_leaf_order_id = event_queue.gen_order_id(limit_price, side); let callback_info_offset = self .get_tree(side) .write_callback_info(&callback_info) .unwrap(); let new_leaf = Node::Leaf(LeafNode { key: new_leaf_order_id, callback_info_pt: callback_info_offset, base_quantity: base_qty_to_post, }); let insert_result = self.get_tree(side).insert_leaf(&new_leaf); if let Err(AoError::SlabOutOfSpace) = insert_result { msg!("Orderbook is full! 
booting lest aggressive orders..."); let order = match side { Side::Bid => self.get_tree(Side::Bid).remove_min().unwrap(), Side::Ask => self.get_tree(Side::Ask).remove_max().unwrap(), }; let l = order.as_leaf().unwrap(); let out = Event::Out { side: Side::Bid, delete: true, order_id: l.order_id(), base_size: l.base_quantity, callback_info: self .get_tree(side) .get_callback_info(l.callback_info_pt as usize) .to_owned(), }; event_queue .push_back(out) .map_err(|_| AoError::EventQueueFull)?; self.get_tree(side).insert_leaf(&new_leaf).unwrap(); } else { insert_result.unwrap(); } base_qty_remaining -= base_qty_to_post; quote_qty_remaining -= fp32_mul(base_qty_to_post, limit_price); Ok(OrderSummary { posted_order_id: Some(new_leaf_order_id), total_base_qty: max_base_qty - base_qty_remaining, total_quote_qty: max_quote_qty - quote_qty_remaining, total_base_qty_posted: base_qty_to_post, }) } pub fn is_empty(&self) -> bool { self.asks.root().is_none() && self.bids.root().is_none() } }
match side { Side::Bid => limit_price >= trade_price, Side::Ask => limit_price <= trade_price, }
if_condition
[ { "content": "#[wasm_bindgen]\n\npub fn find_max(data: &mut [u8], callback_info_len: u64, slot_size: u64) -> Option<u32> {\n\n let slab = Slab::new(\n\n Rc::new(RefCell::new(data)),\n\n callback_info_len as usize,\n\n slot_size as usize,\n\n );\n\n slab.find_max()\n\n}\n\n\n", ...
Rust
glib/src/char.rs
YaLTeR/gtk-rs
b10a29d60458d33642c05421b0ece8d67582229e
use crate::translate::FromGlib; use crate::translate::ToGlib; use libc::{c_char, c_uchar}; #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct Char(pub c_char); impl Char { pub fn new(c: char) -> Option<Char> { if c as u32 > 255 { None } else { Some(Char(c as c_char)) } } } impl From<Char> for char { fn from(c: Char) -> char { c.0 as u8 as char } } #[doc(hidden)] impl FromGlib<c_char> for Char { unsafe fn from_glib(value: c_char) -> Self { Char(value) } } #[doc(hidden)] impl ToGlib for Char { type GlibType = c_char; fn to_glib(&self) -> c_char { self.0 } } #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct UChar(pub c_uchar); impl UChar { pub fn new(c: char) -> Option<UChar> { if c as u32 > 255 { None } else { Some(UChar(c as c_uchar)) } } } impl From<UChar> for char { fn from(c: UChar) -> char { c.0 as char } } #[doc(hidden)] impl FromGlib<c_uchar> for UChar { unsafe fn from_glib(value: c_uchar) -> Self { UChar(value) } } #[doc(hidden)] impl ToGlib for UChar { type GlibType = c_uchar; fn to_glib(&self) -> c_uchar { self.0 } } #[cfg(test)] mod tests { use super::*; use crate::translate::from_glib; #[test] fn converts_single_byte_chars() { assert_eq!(Char::new(0 as char), Some(Char(0_i8))); assert_eq!(UChar::new(0 as char), Some(UChar(0_u8))); assert_eq!(UChar::new(255 as char), Some(UChar(255_u8))); assert_eq!(UChar::new('ñ'), Some(UChar(241_u8))); } #[test] fn refuses_multibyte_chars() { assert_eq!(Char::new('☔'), None); assert_eq!(UChar::new('☔'), None); } #[test] fn into_i8() { assert_eq!(Char::new('A').unwrap().to_glib(), 65_i8); } #[test] fn into_u8() { assert_eq!(UChar::new('A').unwrap().to_glib(), 65_u8); } #[test] fn into_char() { assert_eq!(char::from(Char(65_i8)), 'A'); assert_eq!('ñ', UChar(241_u8).into()); } #[test] fn convert_from_glib() { assert_eq!(Char(65_i8), unsafe { from_glib(65_i8) }); assert_eq!(UChar(241_u8), unsafe { from_glib(241_u8) }); } }
use crate::translate::FromGlib; use crate::translate::ToGlib; use libc::{c_char, c_uchar}; #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct Char(pub c_char); impl Char { pub fn new(c: char) -> Option<Char> { if c as u32 > 255 { None
har) -> char { c.0 as char } } #[doc(hidden)] impl FromGlib<c_uchar> for UChar { unsafe fn from_glib(value: c_uchar) -> Self { UChar(value) } } #[doc(hidden)] impl ToGlib for UChar { type GlibType = c_uchar; fn to_glib(&self) -> c_uchar { self.0 } } #[cfg(test)] mod tests { use super::*; use crate::translate::from_glib; #[test] fn converts_single_byte_chars() { assert_eq!(Char::new(0 as char), Some(Char(0_i8))); assert_eq!(UChar::new(0 as char), Some(UChar(0_u8))); assert_eq!(UChar::new(255 as char), Some(UChar(255_u8))); assert_eq!(UChar::new('ñ'), Some(UChar(241_u8))); } #[test] fn refuses_multibyte_chars() { assert_eq!(Char::new('☔'), None); assert_eq!(UChar::new('☔'), None); } #[test] fn into_i8() { assert_eq!(Char::new('A').unwrap().to_glib(), 65_i8); } #[test] fn into_u8() { assert_eq!(UChar::new('A').unwrap().to_glib(), 65_u8); } #[test] fn into_char() { assert_eq!(char::from(Char(65_i8)), 'A'); assert_eq!('ñ', UChar(241_u8).into()); } #[test] fn convert_from_glib() { assert_eq!(Char(65_i8), unsafe { from_glib(65_i8) }); assert_eq!(UChar(241_u8), unsafe { from_glib(241_u8) }); } }
} else { Some(Char(c as c_char)) } } } impl From<Char> for char { fn from(c: Char) -> char { c.0 as u8 as char } } #[doc(hidden)] impl FromGlib<c_char> for Char { unsafe fn from_glib(value: c_char) -> Self { Char(value) } } #[doc(hidden)] impl ToGlib for Char { type GlibType = c_char; fn to_glib(&self) -> c_char { self.0 } } #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct UChar(pub c_uchar); impl UChar { pub fn new(c: char) -> Option<UChar> { if c as u32 > 255 { None } else { Some(UChar(c as c_uchar)) } } } impl From<UChar> for char { fn from(c: UC
random
[ { "content": "#[doc(alias = \"gdk_keyval_convert_case\")]\n\npub fn keyval_convert_case(symbol: u32) -> (u32, u32) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut lower = mem::MaybeUninit::uninit();\n\n let mut upper = mem::MaybeUninit::uninit();\n\n ffi::gdk_keyval_...
Rust
iota-conversion/unit_converter.rs
zesterer/bee-p
375357bdfe8f670e4d26b62a7683d97f339f056f
use super::iota_units::IotaUnits; pub fn convert_units(amount: u64, from: IotaUnits, to: IotaUnits) -> u64 { let amount_in_source = amount * 10_u64.pow(u32::from(from.value())); convert_units_helper(amount_in_source, to) } fn convert_units_helper(amount: u64, to: IotaUnits) -> u64 { amount / 10_u64.pow(u32::from(to.value())) } pub fn convert_raw_iota_amount_to_display_text(amount: u64, extended: bool) -> String { let unit = find_optimal_iota_unit_to_display(amount); let amount_in_display_unit = convert_amount_to(amount, unit); create_amount_with_unit_display_text(amount_in_display_unit, unit, extended) } fn create_amount_with_unit_display_text(amount: f64, unit: IotaUnits, extended: bool) -> String { if unit == IotaUnits::Iota { format!("{} {}", amount, unit.unit()) } else if extended { format!("{:.15} {}", amount, unit.unit()) } else { format!("{:.2} {}", amount, unit.unit()) } } pub fn convert_amount_to(amount: u64, target: IotaUnits) -> f64 { amount as f64 / 10_u64.pow(u32::from(target.value())) as f64 } pub fn find_optimal_iota_unit_to_display(amount: u64) -> IotaUnits { let length = amount.to_string().len(); if length >= 1 && length <= 3 { IotaUnits::Iota } else if length > 3 && length <= 6 { IotaUnits::KiloIota } else if length > 6 && length <= 9 { IotaUnits::MegaIota } else if length > 9 && length <= 12 { IotaUnits::GigaIota } else if length > 12 && length <= 15 { IotaUnits::TeraIota } else if length > 15 && length <= 18 { IotaUnits::PetaIota } else { panic!("Invalid number") } } #[cfg(test)] mod tests { use super::*; #[test] fn test_convert_unit_i_to_ki() { assert_eq!(1, convert_units(1000, IotaUnits::Iota, IotaUnits::KiloIota)); } #[test] fn test_convert_unit_ki_to_mi() { assert_eq!( 1, convert_units(1000, IotaUnits::KiloIota, IotaUnits::MegaIota) ); } #[test] fn test_convert_unit_mi_to_gi() { assert_eq!( 1, convert_units(1000, IotaUnits::MegaIota, IotaUnits::GigaIota) ); } #[test] fn test_convert_unit_gi_to_ti() { assert_eq!( 1, convert_units(1000, 
IotaUnits::GigaIota, IotaUnits::TeraIota) ); } #[test] fn test_convert_unit_ti_to_pi() { assert_eq!( 1, convert_units(1000, IotaUnits::TeraIota, IotaUnits::PetaIota) ); } #[test] fn test_find_optimize_unit_to_display() { assert_eq!(find_optimal_iota_unit_to_display(1), IotaUnits::Iota); assert_eq!(find_optimal_iota_unit_to_display(1000), IotaUnits::KiloIota); assert_eq!( find_optimal_iota_unit_to_display(1000000), IotaUnits::MegaIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000), IotaUnits::GigaIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000000), IotaUnits::TeraIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000000000), IotaUnits::PetaIota ); } #[test] fn test_convert_raw_iota_amount_to_display_text() { assert_eq!(convert_raw_iota_amount_to_display_text(1, false), "1 i"); assert_eq!( convert_raw_iota_amount_to_display_text(1000, false), "1.00 Ki" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000, false), "1.00 Mi" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000000, false), "1.00 Gi" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000000000, false), "1.00 Ti" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000000000000, false), "1.00 Pi" ); assert_eq!( convert_raw_iota_amount_to_display_text(1900000000000002, true), "1.900000000000002 Pi" ); } }
use super::iota_units::IotaUnits; pub fn convert_units(amount: u64, from: IotaUnits, to: IotaUnits) -> u64 { let amount_in_source = amount * 10_u64.pow(u32::from(from.value())); convert_units_helper(amount_in_source, to) } fn convert_units_helper(amount: u64, to: IotaUnits) -> u64 { amount / 10_u64.pow(u32::from(to.value())) } pub fn convert_raw_iota_amount_to_display_text(amount: u64, extended: bool) -> String { let unit = find_optimal_iota_unit_to_display(amount); let amount_in_display_unit = convert_amount_to(amount, unit); create_amount_with_unit_display_text(amount_in_display_unit, unit, extended) } fn create_amount_with_unit_display_text(amount: f64, unit: IotaUnits, extended: bool) -> String { if unit == IotaUnits::Iota { format!("{} {}", amount, unit.unit()) } else if extended { format!("{:.15} {}", amount, unit.unit()) } else { format!("{:.2} {}", amount, unit.unit()) } } pub fn convert_amount_to(amount: u64, target: IotaUnits) -> f64 { amount as f64 / 10_u64.pow(u32::from(target.value())) as f64 } pub fn find_optimal_iota_unit_to_display(amount: u64) -> IotaUnits { let length = amount.to_string().len(); if length >= 1 && length <= 3 { IotaUnits::Iota } else if length > 3 && length <= 6 { IotaUnits::KiloIota } else if length > 6 && length <= 9 { IotaUnits::MegaIota } else if length > 9 && length <= 12 { IotaUnits::GigaIota } else if length > 12 && length <= 15 { IotaUnits::TeraIota } else if length > 15 && length <= 18 { IotaUnits::PetaIota } else { panic!("Invalid number") } } #[cfg(test)] mod tests { use super::*; #[test] fn test_convert_unit_i_to_ki() { assert_eq!(1, convert_units(1000, IotaUnits::Iota, IotaUnits::KiloIota)); } #[test] fn test_convert_unit_ki_to_mi() { assert_eq!( 1, convert_units(1000, IotaUnits::KiloIota, IotaUnits::MegaIota) ); } #[test] fn test_convert_unit_mi_to_gi() { assert_eq!( 1, convert_units(1000, IotaUnits::MegaIota, IotaUnits::GigaIota) ); } #[test] fn test_convert_unit_gi_to_ti() { assert_eq!( 1, convert_units(1000, 
IotaUnits::GigaIota, IotaUnits::TeraIota) ); } #[test] fn test_convert_unit_ti_to_pi() { assert_eq!( 1, convert_units(1000, IotaUnits::TeraIota, IotaUnits::PetaIota) ); } #[tes
_display_text(1000000000000, false), "1.00 Ti" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000000000000, false), "1.00 Pi" ); assert_eq!( convert_raw_iota_amount_to_display_text(1900000000000002, true), "1.900000000000002 Pi" ); } }
t] fn test_find_optimize_unit_to_display() { assert_eq!(find_optimal_iota_unit_to_display(1), IotaUnits::Iota); assert_eq!(find_optimal_iota_unit_to_display(1000), IotaUnits::KiloIota); assert_eq!( find_optimal_iota_unit_to_display(1000000), IotaUnits::MegaIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000), IotaUnits::GigaIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000000), IotaUnits::TeraIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000000000), IotaUnits::PetaIota ); } #[test] fn test_convert_raw_iota_amount_to_display_text() { assert_eq!(convert_raw_iota_amount_to_display_text(1, false), "1 i"); assert_eq!( convert_raw_iota_amount_to_display_text(1000, false), "1.00 Ki" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000, false), "1.00 Mi" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000000, false), "1.00 Gi" ); assert_eq!( convert_raw_iota_amount_to
random
[ { "content": "/// Converts a tryte-encoded string into a UTF-8 string containing ascii characters\n\npub fn to_string(input_trytes: &str) -> Result<String> {\n\n ensure!(\n\n input_trytes.len() % 2 == 0,\n\n iota_constants::INVALID_TRYTES_INPUT_ERROR\n\n );\n\n let mut tmp = String::new()...
Rust
src/core/string.rs
phR0ze/rs
33573ef35ec6964f4aa15340941636fb1a77f6ed
use crate::errors::*; use std::{ffi::OsStr, path::Path, str}; pub trait StringExt { fn size(&self) -> usize; fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String; } impl StringExt for str { fn size(&self) -> usize { self.chars().count() } fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String { let target = suffix.into(); match self.ends_with(&target) { true => self[..self.len() - target.len()].to_owned(), _ => self.to_owned(), } } } impl StringExt for String { fn size(&self) -> usize { self.chars().count() } fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String { let target = suffix.into(); match self.ends_with(&target) { true => self[..self.len() - target.len()].to_owned(), _ => self.to_owned(), } } } pub trait ToStringExt { fn to_string(&self) -> FuResult<String>; } impl ToStringExt for Path { fn to_string(&self) -> FuResult<String> { let _str = self.to_str().ok_or_else(|| PathError::failed_to_string(self))?; Ok(String::from(_str)) } } impl ToStringExt for OsStr { fn to_string(&self) -> FuResult<String> { Ok(String::from(self.to_str().ok_or(StringError::FailedToString)?)) } } #[cfg(test)] mod tests { use crate::prelude::*; use std::{ ffi::OsStr, path::{Path, PathBuf}, }; #[test] fn test_str_size() { assert_eq!("foo".size(), 3); assert_eq!("ƒoo".len(), 4); assert_eq!("ƒoo".size(), 3); } #[test] fn test_string_size() { assert_eq!("foo".to_string().size(), 3); assert_eq!("ƒoo".to_string().len(), 4); assert_eq!("ƒoo".to_string().size(), 3); } #[test] fn test_str_trim_suffix() { assert_eq!("foo".trim_suffix("oo"), "f".to_string()); assert_eq!("ƒoo".trim_suffix("o"), "ƒo".to_string()); } #[test] fn test_string_trim_suffix() { assert_eq!("foo".to_string().trim_suffix("oo"), "f".to_string()); assert_eq!("ƒoo".to_string().trim_suffix("o"), "ƒo".to_string()); } #[test] fn test_osstr_to_string() { assert_eq!(OsStr::new("foo").to_string().unwrap(), "foo".to_string()); } #[test] fn test_path_to_string() { assert_eq!(Path::new("/foo").to_string().unwrap(), 
"/foo".to_string()); assert_eq!(PathBuf::from("/foo").to_string().unwrap(), "/foo".to_string()); } }
use crate::errors::*; use std::{ffi::OsStr, path::Path, str}; pub trait StringExt { fn size(&self) -> usize; fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String; } impl StringExt for str { fn size(&self) -> usize { self.chars().count() } fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String { let target = suffix.into();
} } impl StringExt for String { fn size(&self) -> usize { self.chars().count() } fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String { let target = suffix.into(); match self.ends_with(&target) { true => self[..self.len() - target.len()].to_owned(), _ => self.to_owned(), } } } pub trait ToStringExt { fn to_string(&self) -> FuResult<String>; } impl ToStringExt for Path { fn to_string(&self) -> FuResult<String> { let _str = self.to_str().ok_or_else(|| PathError::failed_to_string(self))?; Ok(String::from(_str)) } } impl ToStringExt for OsStr { fn to_string(&self) -> FuResult<String> { Ok(String::from(self.to_str().ok_or(StringError::FailedToString)?)) } } #[cfg(test)] mod tests { use crate::prelude::*; use std::{ ffi::OsStr, path::{Path, PathBuf}, }; #[test] fn test_str_size() { assert_eq!("foo".size(), 3); assert_eq!("ƒoo".len(), 4); assert_eq!("ƒoo".size(), 3); } #[test] fn test_string_size() { assert_eq!("foo".to_string().size(), 3); assert_eq!("ƒoo".to_string().len(), 4); assert_eq!("ƒoo".to_string().size(), 3); } #[test] fn test_str_trim_suffix() { assert_eq!("foo".trim_suffix("oo"), "f".to_string()); assert_eq!("ƒoo".trim_suffix("o"), "ƒo".to_string()); } #[test] fn test_string_trim_suffix() { assert_eq!("foo".to_string().trim_suffix("oo"), "f".to_string()); assert_eq!("ƒoo".to_string().trim_suffix("o"), "ƒo".to_string()); } #[test] fn test_osstr_to_string() { assert_eq!(OsStr::new("foo").to_string().unwrap(), "foo".to_string()); } #[test] fn test_path_to_string() { assert_eq!(Path::new("/foo").to_string().unwrap(), "/foo".to_string()); assert_eq!(PathBuf::from("/foo").to_string().unwrap(), "/foo".to_string()); } }
match self.ends_with(&target) { true => self[..self.len() - target.len()].to_owned(), _ => self.to_owned(), }
if_condition
[ { "content": "/// Set the timezone from the given value\n\npub fn set_timezone(tz: &str) {\n\n env::set_var(\"TZ\", tz);\n\n unsafe {\n\n c::tzset();\n\n }\n\n}\n\n\n\n// libc types specific to `time` not exposed by base libc crate\n\nmod c {\n\n extern \"C\" {\n\n // `gmtime_r` conver...
Rust
cranelift/native/src/lib.rs
yuyang-ok/wasmtime
67c0d55fbb1f86af0595815e3a9c7f39593f3bd0
#![deny( missing_docs, trivial_numeric_casts, unused_extern_crates, )] /* riscv64gc backend have to use is_riscv_feature_detected which is unstable. */ #![feature(stdsimd)] #![warn(unused_import_braces)] #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))] #![cfg_attr(feature = "cargo-clippy", allow(clippy::new_without_default))] #![cfg_attr( feature = "cargo-clippy", warn( clippy::float_arithmetic, clippy::mut_mut, clippy::nonminimal_bool, clippy::map_unwrap_or, clippy::clippy::print_stdout, clippy::unicode_not_nfc, clippy::use_self ) )] use cranelift_codegen::isa; use target_lexicon::Triple; pub fn builder() -> Result<isa::Builder, &'static str> { builder_with_options(true) } pub fn builder_with_options(infer_native_flags: bool) -> Result<isa::Builder, &'static str> { let mut isa_builder = isa::lookup(Triple::host()).map_err(|err| match err { isa::LookupError::SupportDisabled => "support for architecture disabled at compile time", isa::LookupError::Unsupported => "unsupported architecture", })?; #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] { use cranelift_codegen::settings::Configurable; if !std::is_x86_feature_detected!("sse2") { return Err("x86 support requires SSE2"); } if !infer_native_flags { return Ok(isa_builder); } isa_builder.set("has_sse3", "false").unwrap(); isa_builder.set("has_ssse3", "false").unwrap(); isa_builder.set("has_sse41", "false").unwrap(); isa_builder.set("has_sse42", "false").unwrap(); if std::is_x86_feature_detected!("sse3") { isa_builder.enable("has_sse3").unwrap(); } if std::is_x86_feature_detected!("ssse3") { isa_builder.enable("has_ssse3").unwrap(); } if std::is_x86_feature_detected!("sse4.1") { isa_builder.enable("has_sse41").unwrap(); } if std::is_x86_feature_detected!("sse4.2") { isa_builder.enable("has_sse42").unwrap(); } if std::is_x86_feature_detected!("popcnt") { isa_builder.enable("has_popcnt").unwrap(); } if std::is_x86_feature_detected!("avx") { 
isa_builder.enable("has_avx").unwrap(); } if std::is_x86_feature_detected!("avx2") { isa_builder.enable("has_avx2").unwrap(); } if std::is_x86_feature_detected!("bmi1") { isa_builder.enable("has_bmi1").unwrap(); } if std::is_x86_feature_detected!("bmi2") { isa_builder.enable("has_bmi2").unwrap(); } if std::is_x86_feature_detected!("avx512bitalg") { isa_builder.enable("has_avx512bitalg").unwrap(); } if std::is_x86_feature_detected!("avx512dq") { isa_builder.enable("has_avx512dq").unwrap(); } if std::is_x86_feature_detected!("avx512f") { isa_builder.enable("has_avx512f").unwrap(); } if std::is_x86_feature_detected!("avx512vl") { isa_builder.enable("has_avx512vl").unwrap(); } if std::is_x86_feature_detected!("avx512vbmi") { isa_builder.enable("has_avx512vbmi").unwrap(); } if std::is_x86_feature_detected!("lzcnt") { isa_builder.enable("has_lzcnt").unwrap(); } } #[cfg(target_arch = "aarch64")] { use cranelift_codegen::settings::Configurable; if !infer_native_flags { return Ok(isa_builder); } if std::arch::is_aarch64_feature_detected!("lse") { isa_builder.enable("has_lse").unwrap(); } } #[cfg(target_arch = "riscv64")] { use cranelift_codegen::settings::Configurable; if !infer_native_flags { return Ok(isa_builder); } if std::arch::is_riscv_feature_detected!("m") { isa_builder.enable("has_extension_m").unwrap(); } if std::arch::is_riscv_feature_detected!("a") { isa_builder.enable("has_extension_a").unwrap(); } if std::arch::is_riscv_feature_detected!("f") { isa_builder.enable("has_extension_f").unwrap(); } if std::arch::is_riscv_feature_detected!("d") { isa_builder.enable("has_extension_d").unwrap(); } if std::arch::is_riscv_feature_detected!("v") { isa_builder.enable("has_extension_v").unwrap(); } if std::arch::is_riscv_feature_detected!("zba") { isa_builder.enable("has_extendion_zba").unwrap(); } if std::arch::is_riscv_feature_detected!("zbb") { isa_builder.enable("has_extendion_zbb").unwrap(); } if std::arch::is_riscv_feature_detected!("zbc") { 
isa_builder.enable("has_extendion_zbc").unwrap(); } if std::arch::is_riscv_feature_detected!("zbs") { isa_builder.enable("has_extendion_zbs").unwrap(); } if std::arch::is_riscv_feature_detected!("zbkb") { isa_builder.enable("has_extendion_zbkb").unwrap(); } } #[cfg(all(target_arch = "s390x", target_os = "linux"))] { use cranelift_codegen::settings::Configurable; if !infer_native_flags { return Ok(isa_builder); } let v = unsafe { libc::getauxval(libc::AT_HWCAP) }; const HWCAP_S390X_VXRS_EXT2: libc::c_ulong = 32768; if (v & HWCAP_S390X_VXRS_EXT2) != 0 { isa_builder.enable("has_vxrs_ext2").unwrap(); isa_builder.enable("has_mie2").unwrap(); } } drop(&mut isa_builder); drop(infer_native_flags); Ok(isa_builder) } #[cfg(test)] mod tests { use super::builder; use cranelift_codegen::isa::CallConv; use cranelift_codegen::settings; #[test] fn test() { if let Ok(isa_builder) = builder() { let flag_builder = settings::builder(); let isa = isa_builder .finish(settings::Flags::new(flag_builder)) .unwrap(); if cfg!(all(target_os = "macos", target_arch = "aarch64")) { assert_eq!(isa.default_call_conv(), CallConv::AppleAarch64); } else if cfg!(any(unix, target_os = "nebulet")) { assert_eq!(isa.default_call_conv(), CallConv::SystemV); } else if cfg!(windows) { assert_eq!(isa.default_call_conv(), CallConv::WindowsFastcall); } if cfg!(target_pointer_width = "64") { assert_eq!(isa.pointer_bits(), 64); } else if cfg!(target_pointer_width = "32") { assert_eq!(isa.pointer_bits(), 32); } else if cfg!(target_pointer_width = "16") { assert_eq!(isa.pointer_bits(), 16); } } } } pub const VERSION: &str = env!("CARGO_PKG_VERSION");
#![deny( missing_docs, trivial_numeric_casts, unused_extern_crates, )] /* riscv64gc backend have to use is_riscv_feature_detected which is unstable. */ #![feature(stdsimd)] #![warn(unused_import_braces)] #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))] #![cfg_attr(feature = "cargo-clippy", allow(clippy::new_without_default))] #![cfg_attr( feature = "cargo-clippy", warn( clippy::float_arithmetic, clippy::mut_mut, clippy::nonminimal_bool, clippy::map_unwrap_or, clippy::clippy::print_stdout, clippy::unicode_not_nfc, clippy::use_self ) )] use cranelift_codegen::isa; use target_lexicon::Triple; pub fn builder() -> Result<isa::Builder, &'static str> { builder_with_options(true) } pub fn builder_with_options(infer_native_flags: bool) -> Result<isa::Builder, &'static str> { let mut isa_builder = isa::lookup(Triple::host()).map_err(|err| match err { isa::LookupError::SupportDisabled => "support for architecture disabled at compile time", isa::LookupError::Unsupported => "unsupported architecture", })?; #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] { use cranelift_codegen::settings::Configurable; if !std::is_x86_feature_detected!("sse2") { return Err("x86 support requires SSE2"); } if !infer_native_flags { return Ok(isa_builder); } isa_builder.set("has_sse3", "false").unwrap(); isa_builder.set("has_ssse3", "false").unwrap(); isa_builder.set("has_sse41", "false").unwrap(); isa_builder.set("has_sse42", "false").unwrap(); if std::is_x86_feature_detected!("sse3") { isa_builder.enable("has_sse3").unwrap(); } if std::is_x86_feature_detected!("ssse3") { isa_builder.enable("has_ssse3").unwrap(); } if std::is_x86_feature_detected!("sse4.1") { isa_builder.enable("has_sse41").unwrap(); } if std::is_x86_feature_detected!("sse4.2") { isa_builder.enable("has_sse42").unwrap(); } if std::is_x86_feature_detected!("popcnt") { isa_builder.enable("has_popcnt").unwrap(); } if std::is_x86_feature_detected!("avx") { 
isa_builder.enable("has_avx").unwrap(); } if std::is_x86_feature_detected!("avx2") { isa_builder.enable("has_avx2").unwrap(); } if std::is_x86_feature_detected!("bmi1") { isa_builder.enable("has_bmi1").unwrap(); } if std::is_x86_feature_detected!("bmi2") { isa_builder.enable("has_bmi2").unwrap(); } if std::is_x86_feature_detected!("avx512bitalg") { isa_builder.enable("has_avx512bitalg").unwrap(); } if std::is_x86_feature_detected!("avx512dq") {
if !infer_native_flags { return Ok(isa_builder); } let v = unsafe { libc::getauxval(libc::AT_HWCAP) }; const HWCAP_S390X_VXRS_EXT2: libc::c_ulong = 32768; if (v & HWCAP_S390X_VXRS_EXT2) != 0 { isa_builder.enable("has_vxrs_ext2").unwrap(); isa_builder.enable("has_mie2").unwrap(); } } drop(&mut isa_builder); drop(infer_native_flags); Ok(isa_builder) } #[cfg(test)] mod tests { use super::builder; use cranelift_codegen::isa::CallConv; use cranelift_codegen::settings; #[test] fn test() { if let Ok(isa_builder) = builder() { let flag_builder = settings::builder(); let isa = isa_builder .finish(settings::Flags::new(flag_builder)) .unwrap(); if cfg!(all(target_os = "macos", target_arch = "aarch64")) { assert_eq!(isa.default_call_conv(), CallConv::AppleAarch64); } else if cfg!(any(unix, target_os = "nebulet")) { assert_eq!(isa.default_call_conv(), CallConv::SystemV); } else if cfg!(windows) { assert_eq!(isa.default_call_conv(), CallConv::WindowsFastcall); } if cfg!(target_pointer_width = "64") { assert_eq!(isa.pointer_bits(), 64); } else if cfg!(target_pointer_width = "32") { assert_eq!(isa.pointer_bits(), 32); } else if cfg!(target_pointer_width = "16") { assert_eq!(isa.pointer_bits(), 16); } } } } pub const VERSION: &str = env!("CARGO_PKG_VERSION");
isa_builder.enable("has_avx512dq").unwrap(); } if std::is_x86_feature_detected!("avx512f") { isa_builder.enable("has_avx512f").unwrap(); } if std::is_x86_feature_detected!("avx512vl") { isa_builder.enable("has_avx512vl").unwrap(); } if std::is_x86_feature_detected!("avx512vbmi") { isa_builder.enable("has_avx512vbmi").unwrap(); } if std::is_x86_feature_detected!("lzcnt") { isa_builder.enable("has_lzcnt").unwrap(); } } #[cfg(target_arch = "aarch64")] { use cranelift_codegen::settings::Configurable; if !infer_native_flags { return Ok(isa_builder); } if std::arch::is_aarch64_feature_detected!("lse") { isa_builder.enable("has_lse").unwrap(); } } #[cfg(target_arch = "riscv64")] { use cranelift_codegen::settings::Configurable; if !infer_native_flags { return Ok(isa_builder); } if std::arch::is_riscv_feature_detected!("m") { isa_builder.enable("has_extension_m").unwrap(); } if std::arch::is_riscv_feature_detected!("a") { isa_builder.enable("has_extension_a").unwrap(); } if std::arch::is_riscv_feature_detected!("f") { isa_builder.enable("has_extension_f").unwrap(); } if std::arch::is_riscv_feature_detected!("d") { isa_builder.enable("has_extension_d").unwrap(); } if std::arch::is_riscv_feature_detected!("v") { isa_builder.enable("has_extension_v").unwrap(); } if std::arch::is_riscv_feature_detected!("zba") { isa_builder.enable("has_extendion_zba").unwrap(); } if std::arch::is_riscv_feature_detected!("zbb") { isa_builder.enable("has_extendion_zbb").unwrap(); } if std::arch::is_riscv_feature_detected!("zbc") { isa_builder.enable("has_extendion_zbc").unwrap(); } if std::arch::is_riscv_feature_detected!("zbs") { isa_builder.enable("has_extendion_zbs").unwrap(); } if std::arch::is_riscv_feature_detected!("zbkb") { isa_builder.enable("has_extendion_zbkb").unwrap(); } } #[cfg(all(target_arch = "s390x", target_os = "linux"))] { use cranelift_codegen::settings::Configurable;
random
[ { "content": "// Configure the test suite environment.\n\n// Test programs use these environment variables to determine what behavior\n\n// is expected: different errnos are expected on windows, mac, and other unixes,\n\n// and other filesystem operations are supported or not.\n\npub fn test_suite_environment()...
Rust
x86_64/src/smbios.rs
Gnurou/crosvm
307168a1eb35dda5b71cdef1d534882c893ef686
use std::mem; use std::result; use std::slice; use std::fs::OpenOptions; use std::io::prelude::*; use std::path::{Path, PathBuf}; use data_model::DataInit; use remain::sorted; use thiserror::Error; use vm_memory::{GuestAddress, GuestMemory}; #[sorted] #[derive(Error, Debug)] pub enum Error { #[error("The SMBIOS table has too little address space to be stored")] AddressOverflow, #[error("Failure while zeroing out the memory for the SMBIOS table")] Clear, #[error("Failure to verify host SMBIOS entry checksum")] InvalidChecksum, #[error("Failure to read host SMBIOS data")] InvalidInput, #[error("Failure while reading SMBIOS data file")] IoFailed, #[error("There was too little guest memory to store the SMBIOS table")] NotEnoughMemory, #[error("Failure to write additional data to memory")] WriteData, #[error("Failure to write SMBIOS entrypoint structure")] WriteSmbiosEp, } pub type Result<T> = result::Result<T, Error>; const SMBIOS_START: u64 = 0xf0000; const SM2_MAGIC_IDENT: &[u8; 4usize] = b"_SM_"; const SM3_MAGIC_IDENT: &[u8; 5usize] = b"_SM3_"; const BIOS_INFORMATION: u8 = 0; const SYSTEM_INFORMATION: u8 = 1; const END_OF_TABLE: u8 = 127; const PCI_SUPPORTED: u64 = 1 << 7; const IS_VIRTUAL_MACHINE: u8 = 1 << 4; fn compute_checksum<T: Copy>(v: &T) -> u8 { let v_slice = unsafe { slice::from_raw_parts(v as *const T as *const u8, mem::size_of::<T>()) }; let mut checksum: u8 = 0; for i in v_slice.iter() { checksum = checksum.wrapping_add(*i); } (!checksum).wrapping_add(1) } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios23Intermediate { pub signature: [u8; 5usize], pub checksum: u8, pub length: u16, pub address: u32, pub count: u16, pub revision: u8, } unsafe impl data_model::DataInit for Smbios23Intermediate {} impl Clone for Smbios23Intermediate { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios23Entrypoint { pub signature: [u8; 4usize], pub checksum: u8, pub length: u8, pub majorver: u8, pub minorver: u8, pub 
max_size: u16, pub revision: u8, pub reserved: [u8; 5usize], pub dmi: Smbios23Intermediate, } unsafe impl data_model::DataInit for Smbios23Entrypoint {} impl Clone for Smbios23Entrypoint { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios30Entrypoint { pub signature: [u8; 5usize], pub checksum: u8, pub length: u8, pub majorver: u8, pub minorver: u8, pub docrev: u8, pub revision: u8, pub reserved: u8, pub max_size: u32, pub physptr: u64, } unsafe impl data_model::DataInit for Smbios30Entrypoint {} impl Clone for Smbios30Entrypoint { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct SmbiosBiosInfo { pub typ: u8, pub length: u8, pub handle: u16, pub vendor: u8, pub version: u8, pub start_addr: u16, pub release_date: u8, pub rom_size: u8, pub characteristics: u64, pub characteristics_ext1: u8, pub characteristics_ext2: u8, } impl Clone for SmbiosBiosInfo { fn clone(&self) -> Self { *self } } unsafe impl data_model::DataInit for SmbiosBiosInfo {} #[repr(packed)] #[derive(Default, Copy)] pub struct SmbiosSysInfo { pub typ: u8, pub length: u8, pub handle: u16, pub manufacturer: u8, pub product_name: u8, pub version: u8, pub serial_number: u8, pub uuid: [u8; 16usize], pub wake_up_type: u8, pub sku: u8, pub family: u8, } impl Clone for SmbiosSysInfo { fn clone(&self) -> Self { *self } } unsafe impl data_model::DataInit for SmbiosSysInfo {} fn write_and_incr<T: DataInit>( mem: &GuestMemory, val: T, mut curptr: GuestAddress, ) -> Result<GuestAddress> { mem.write_obj_at_addr(val, curptr) .map_err(|_| Error::WriteData)?; curptr = curptr .checked_add(mem::size_of::<T>() as u64) .ok_or(Error::NotEnoughMemory)?; Ok(curptr) } fn write_string(mem: &GuestMemory, val: &str, mut curptr: GuestAddress) -> Result<GuestAddress> { for c in val.as_bytes().iter() { curptr = write_and_incr(mem, *c, curptr)?; } curptr = write_and_incr(mem, 0_u8, curptr)?; Ok(curptr) } fn setup_smbios_from_file(mem: &GuestMemory, 
path: &Path) -> Result<()> { let mut sme_path = PathBuf::from(path); sme_path.push("smbios_entry_point"); let mut sme = Vec::new(); OpenOptions::new() .read(true) .open(&sme_path) .map_err(|_| Error::IoFailed)? .read_to_end(&mut sme) .map_err(|_| Error::IoFailed)?; let mut dmi_path = PathBuf::from(path); dmi_path.push("DMI"); let mut dmi = Vec::new(); OpenOptions::new() .read(true) .open(&dmi_path) .map_err(|_| Error::IoFailed)? .read_to_end(&mut dmi) .map_err(|_| Error::IoFailed)?; if sme.len() == mem::size_of::<Smbios30Entrypoint>() && sme.starts_with(SM3_MAGIC_IDENT) { let mut smbios_ep = Smbios30Entrypoint::default(); smbios_ep.as_mut_slice().copy_from_slice(&sme); let physptr = GuestAddress(SMBIOS_START) .checked_add(mem::size_of::<Smbios30Entrypoint>() as u64) .ok_or(Error::NotEnoughMemory)?; mem.write_at_addr(&dmi, physptr) .map_err(|_| Error::NotEnoughMemory)?; smbios_ep.physptr = physptr.offset(); smbios_ep.checksum = 0; smbios_ep.checksum = compute_checksum(&smbios_ep); mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START)) .map_err(|_| Error::NotEnoughMemory)?; return Ok(()); } if sme.len() == mem::size_of::<Smbios23Entrypoint>() && sme.starts_with(SM2_MAGIC_IDENT) { let mut smbios_ep = Smbios23Entrypoint::default(); smbios_ep.as_mut_slice().copy_from_slice(&sme); let physptr = GuestAddress(SMBIOS_START) .checked_add(mem::size_of::<Smbios23Entrypoint>() as u64) .ok_or(Error::NotEnoughMemory)?; mem.write_at_addr(&dmi, physptr) .map_err(|_| Error::NotEnoughMemory)?; smbios_ep.dmi.address = physptr.offset() as u32; smbios_ep.dmi.checksum = 0; smbios_ep.dmi.checksum = compute_checksum(&smbios_ep.dmi); smbios_ep.checksum = 0; smbios_ep.checksum = compute_checksum(&smbios_ep); mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START)) .map_err(|_| Error::WriteSmbiosEp)?; return Ok(()); } Err(Error::InvalidInput) } pub fn setup_smbios(mem: &GuestMemory, dmi_path: Option<PathBuf>) -> Result<()> { if let Some(dmi_path) = dmi_path { return 
setup_smbios_from_file(mem, &dmi_path); } let physptr = GuestAddress(SMBIOS_START) .checked_add(mem::size_of::<Smbios30Entrypoint>() as u64) .ok_or(Error::NotEnoughMemory)?; let mut curptr = physptr; let mut handle = 0; { handle += 1; let smbios_biosinfo = SmbiosBiosInfo { typ: BIOS_INFORMATION, length: mem::size_of::<SmbiosBiosInfo>() as u8, handle, vendor: 1, version: 2, characteristics: PCI_SUPPORTED, characteristics_ext2: IS_VIRTUAL_MACHINE, ..Default::default() }; curptr = write_and_incr(mem, smbios_biosinfo, curptr)?; curptr = write_string(mem, "crosvm", curptr)?; curptr = write_string(mem, "0", curptr)?; curptr = write_and_incr(mem, 0_u8, curptr)?; } { handle += 1; let smbios_sysinfo = SmbiosSysInfo { typ: SYSTEM_INFORMATION, length: mem::size_of::<SmbiosSysInfo>() as u8, handle, manufacturer: 1, product_name: 2, ..Default::default() }; curptr = write_and_incr(mem, smbios_sysinfo, curptr)?; curptr = write_string(mem, "ChromiumOS", curptr)?; curptr = write_string(mem, "crosvm", curptr)?; curptr = write_and_incr(mem, 0u8, curptr)?; } { handle += 1; let smbios_sysinfo = SmbiosSysInfo { typ: END_OF_TABLE, length: mem::size_of::<SmbiosSysInfo>() as u8, handle, ..Default::default() }; curptr = write_and_incr(mem, smbios_sysinfo, curptr)?; curptr = write_and_incr(mem, 0_u8, curptr)?; } { let mut smbios_ep = Smbios30Entrypoint::default(); smbios_ep.signature = *SM3_MAGIC_IDENT; smbios_ep.length = mem::size_of::<Smbios30Entrypoint>() as u8; smbios_ep.majorver = 0x03; smbios_ep.minorver = 0x02; smbios_ep.docrev = 0x00; smbios_ep.revision = 0x01; smbios_ep.max_size = curptr.offset_from(physptr) as u32; smbios_ep.physptr = physptr.offset(); smbios_ep.checksum = compute_checksum(&smbios_ep); mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START)) .map_err(|_| Error::WriteSmbiosEp)?; } Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn struct_size() { assert_eq!( mem::size_of::<Smbios23Entrypoint>(), 0x1fusize, concat!("Size of: ", 
stringify!(Smbios23Entrypoint)) ); assert_eq!( mem::size_of::<Smbios30Entrypoint>(), 0x18usize, concat!("Size of: ", stringify!(Smbios30Entrypoint)) ); assert_eq!( mem::size_of::<SmbiosBiosInfo>(), 0x14usize, concat!("Size of: ", stringify!(SmbiosBiosInfo)) ); assert_eq!( mem::size_of::<SmbiosSysInfo>(), 0x1busize, concat!("Size of: ", stringify!(SmbiosSysInfo)) ); } #[test] fn entrypoint_checksum() { let mem = GuestMemory::new(&[(GuestAddress(SMBIOS_START), 4096)]).unwrap(); setup_smbios(&mem, None).unwrap(); let smbios_ep: Smbios30Entrypoint = mem.read_obj_from_addr(GuestAddress(SMBIOS_START)).unwrap(); assert_eq!(compute_checksum(&smbios_ep), 0); } }
use std::mem; use std::result; use std::slice; use std::fs::OpenOptions; use std::io::prelude::*; use std::path::{Path, PathBuf}; use data_model::DataInit; use remain::sorted; use thiserror::Error; use vm_memory::{GuestAddress, GuestMemory}; #[sorted] #[derive(Error, Debug)] pub enum Error { #[error("The SMBIOS table has too little address space to be stored")] AddressOverflow, #[error("Failure while zeroing out the memory for the SMBIOS table")] Clear, #[error("Failure to verify host SMBIOS entry checksum")] InvalidChecksum, #[error("Failure to read host SMBIOS data")] InvalidInput, #[error("Failure while reading SMBIOS data file")] IoFailed, #[error("There was too little guest memory to store the SMBIOS table")] NotEnoughMemory, #[error("Failure to write additional data to memory")] WriteData, #[error("Failure to write SMBIOS entrypoint structure")] WriteSmbiosEp, } pub type Result<T> = result::Result<T, Error>; const SMBIOS_START: u64 = 0xf0000; const SM2_MAGIC_IDENT: &[u8; 4usize] = b"_SM_"; const SM3_MAGIC_IDENT: &[u8; 5usize] = b"_SM3_"; const BIOS_INFORMATION: u8 = 0; const SYSTEM_INFORMATION: u8 = 1; const END_OF_TABLE: u8 = 127; const PCI_SUPPORTED: u64 = 1 << 7; const IS_VIRTUAL_MACHINE: u8 = 1 << 4; fn compute_checksum<T: Copy>(v: &T) -> u8 { let v_slice = unsafe { slice::from_raw_parts(v as *const T as *const u8, mem::size_of::<T>()) }; let mut checksum: u8 = 0; for i in v_slice.iter() { checksum = checksum.wrapping_add(*i); } (!checksum).wrapping_add(1) } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios23Intermediate { pub signature: [u8; 5usize], pub checksum: u8, pub length: u16, pub address: u32, pub count: u16, pub revision: u8, } unsafe impl data_model::DataInit for Smbios23Intermediate {} impl Clone for Smbios23Intermediate { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios23Entrypoint { pub signature: [u8; 4usize], pub checksum: u8, pub length: u8, pub majorver: u8, pub minorver: u8, pub 
max_size: u16, pub revision: u8, pub reserved: [u8; 5usize], pub dmi: Smbios23Intermediate, } unsafe impl data_model::DataInit for Smbios23Entrypoint {} impl Clone for Smbios23Entrypoint { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios30Entrypoint { pub signature: [u8; 5usize], pub checksum: u8, pub length: u8, pub majorver: u8, pub minorver: u8, pub docrev: u8, pub revision: u8, pub reserved: u8, pub max_size: u32, pub physptr: u64, } unsafe impl data_model::DataInit for Smbios30Entrypoint {} impl Clone for Smbios30Entrypoint { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct SmbiosBiosInfo { pub typ: u8, pub length: u8, pub handle: u16, pub vendor: u8, pub version: u8, pub start_addr: u16, pub release_date: u8, pub rom_size: u8, pub characteristics: u64, pub characteristics_ext1: u8, pub characteristics_ext2: u8, } impl Clone for SmbiosBiosInfo { fn clone(&self) -> Self { *self } } unsafe impl data_model::DataInit for SmbiosBiosInfo {} #[repr(packed)] #[derive(Default, Copy)] pub struct SmbiosSysInfo { pub typ: u8, pub length: u8, pub handle: u16, pub manufacturer: u8, pub product_name: u8, pub version: u8, pub serial_number: u8, pub uuid: [u8; 16usize], pub wake_up_type: u8, pub sku: u8, pub family: u8, } impl Clone for SmbiosSysInfo { fn clone(&self) -> Self { *self } } unsafe impl data_model::DataInit for SmbiosSysInfo {} fn write_and_incr<T: DataInit>( mem: &GuestMemory, val: T, mut curptr: GuestAddress, ) -> Result<GuestAddress> { mem.write_obj_at_addr(val, curpt
fn write_string(mem: &GuestMemory, val: &str, mut curptr: GuestAddress) -> Result<GuestAddress> { for c in val.as_bytes().iter() { curptr = write_and_incr(mem, *c, curptr)?; } curptr = write_and_incr(mem, 0_u8, curptr)?; Ok(curptr) } fn setup_smbios_from_file(mem: &GuestMemory, path: &Path) -> Result<()> { let mut sme_path = PathBuf::from(path); sme_path.push("smbios_entry_point"); let mut sme = Vec::new(); OpenOptions::new() .read(true) .open(&sme_path) .map_err(|_| Error::IoFailed)? .read_to_end(&mut sme) .map_err(|_| Error::IoFailed)?; let mut dmi_path = PathBuf::from(path); dmi_path.push("DMI"); let mut dmi = Vec::new(); OpenOptions::new() .read(true) .open(&dmi_path) .map_err(|_| Error::IoFailed)? .read_to_end(&mut dmi) .map_err(|_| Error::IoFailed)?; if sme.len() == mem::size_of::<Smbios30Entrypoint>() && sme.starts_with(SM3_MAGIC_IDENT) { let mut smbios_ep = Smbios30Entrypoint::default(); smbios_ep.as_mut_slice().copy_from_slice(&sme); let physptr = GuestAddress(SMBIOS_START) .checked_add(mem::size_of::<Smbios30Entrypoint>() as u64) .ok_or(Error::NotEnoughMemory)?; mem.write_at_addr(&dmi, physptr) .map_err(|_| Error::NotEnoughMemory)?; smbios_ep.physptr = physptr.offset(); smbios_ep.checksum = 0; smbios_ep.checksum = compute_checksum(&smbios_ep); mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START)) .map_err(|_| Error::NotEnoughMemory)?; return Ok(()); } if sme.len() == mem::size_of::<Smbios23Entrypoint>() && sme.starts_with(SM2_MAGIC_IDENT) { let mut smbios_ep = Smbios23Entrypoint::default(); smbios_ep.as_mut_slice().copy_from_slice(&sme); let physptr = GuestAddress(SMBIOS_START) .checked_add(mem::size_of::<Smbios23Entrypoint>() as u64) .ok_or(Error::NotEnoughMemory)?; mem.write_at_addr(&dmi, physptr) .map_err(|_| Error::NotEnoughMemory)?; smbios_ep.dmi.address = physptr.offset() as u32; smbios_ep.dmi.checksum = 0; smbios_ep.dmi.checksum = compute_checksum(&smbios_ep.dmi); smbios_ep.checksum = 0; smbios_ep.checksum = compute_checksum(&smbios_ep); 
mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START)) .map_err(|_| Error::WriteSmbiosEp)?; return Ok(()); } Err(Error::InvalidInput) } pub fn setup_smbios(mem: &GuestMemory, dmi_path: Option<PathBuf>) -> Result<()> { if let Some(dmi_path) = dmi_path { return setup_smbios_from_file(mem, &dmi_path); } let physptr = GuestAddress(SMBIOS_START) .checked_add(mem::size_of::<Smbios30Entrypoint>() as u64) .ok_or(Error::NotEnoughMemory)?; let mut curptr = physptr; let mut handle = 0; { handle += 1; let smbios_biosinfo = SmbiosBiosInfo { typ: BIOS_INFORMATION, length: mem::size_of::<SmbiosBiosInfo>() as u8, handle, vendor: 1, version: 2, characteristics: PCI_SUPPORTED, characteristics_ext2: IS_VIRTUAL_MACHINE, ..Default::default() }; curptr = write_and_incr(mem, smbios_biosinfo, curptr)?; curptr = write_string(mem, "crosvm", curptr)?; curptr = write_string(mem, "0", curptr)?; curptr = write_and_incr(mem, 0_u8, curptr)?; } { handle += 1; let smbios_sysinfo = SmbiosSysInfo { typ: SYSTEM_INFORMATION, length: mem::size_of::<SmbiosSysInfo>() as u8, handle, manufacturer: 1, product_name: 2, ..Default::default() }; curptr = write_and_incr(mem, smbios_sysinfo, curptr)?; curptr = write_string(mem, "ChromiumOS", curptr)?; curptr = write_string(mem, "crosvm", curptr)?; curptr = write_and_incr(mem, 0u8, curptr)?; } { handle += 1; let smbios_sysinfo = SmbiosSysInfo { typ: END_OF_TABLE, length: mem::size_of::<SmbiosSysInfo>() as u8, handle, ..Default::default() }; curptr = write_and_incr(mem, smbios_sysinfo, curptr)?; curptr = write_and_incr(mem, 0_u8, curptr)?; } { let mut smbios_ep = Smbios30Entrypoint::default(); smbios_ep.signature = *SM3_MAGIC_IDENT; smbios_ep.length = mem::size_of::<Smbios30Entrypoint>() as u8; smbios_ep.majorver = 0x03; smbios_ep.minorver = 0x02; smbios_ep.docrev = 0x00; smbios_ep.revision = 0x01; smbios_ep.max_size = curptr.offset_from(physptr) as u32; smbios_ep.physptr = physptr.offset(); smbios_ep.checksum = compute_checksum(&smbios_ep); 
mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START)) .map_err(|_| Error::WriteSmbiosEp)?; } Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn struct_size() { assert_eq!( mem::size_of::<Smbios23Entrypoint>(), 0x1fusize, concat!("Size of: ", stringify!(Smbios23Entrypoint)) ); assert_eq!( mem::size_of::<Smbios30Entrypoint>(), 0x18usize, concat!("Size of: ", stringify!(Smbios30Entrypoint)) ); assert_eq!( mem::size_of::<SmbiosBiosInfo>(), 0x14usize, concat!("Size of: ", stringify!(SmbiosBiosInfo)) ); assert_eq!( mem::size_of::<SmbiosSysInfo>(), 0x1busize, concat!("Size of: ", stringify!(SmbiosSysInfo)) ); } #[test] fn entrypoint_checksum() { let mem = GuestMemory::new(&[(GuestAddress(SMBIOS_START), 4096)]).unwrap(); setup_smbios(&mem, None).unwrap(); let smbios_ep: Smbios30Entrypoint = mem.read_obj_from_addr(GuestAddress(SMBIOS_START)).unwrap(); assert_eq!(compute_checksum(&smbios_ep), 0); } }
r) .map_err(|_| Error::WriteData)?; curptr = curptr .checked_add(mem::size_of::<T>() as u64) .ok_or(Error::NotEnoughMemory)?; Ok(curptr) }
function_block-function_prefixed
[ { "content": "/// Write a protective MBR for a disk of the given total size (in bytes).\n\n///\n\n/// This should be written at the start of the disk, before the GPT header. It is one `SECTOR_SIZE`\n\n/// long.\n\npub fn write_protective_mbr(file: &mut impl Write, disk_size: u64) -> Result<(), Error> {\n\n /...
Rust
sulis_core/src/util.rs
ThyWoof/sulis
e89eda94a1a72228224e1926d307aa4c9228bdcb
mod point; pub use self::point::{Offset, Point, Rect, Scale}; pub mod size; pub use self::size::Size; use std::cmp::Ordering; use std::f32; use std::fmt; use std::fs; use std::io::{Error, ErrorKind}; use std::ops::*; use std::panic; use std::path::PathBuf; use std::time::Duration; use backtrace::Backtrace; use log::LevelFilter; use flexi_logger::{opt_format, Duplicate, FileSpec, Logger, LogSpecBuilder, LoggerHandle}; use rand::{self, distributions::uniform::{SampleUniform}, seq::SliceRandom, Rng}; use rand_pcg::Pcg64Mcg; use crate::config::{self, Config}; use crate::resource::write_to_file; const MAX_ULPS: i32 = 100; const MAX_DIFF: f32 = 2.0 * std::f32::EPSILON; pub fn approx_eq_slice(a: &[f32], b: &[f32]) -> bool { if a.len() != b.len() { return false ; } for (a, b) in a.iter().zip(b.iter()) { if !approx_eq(*a, *b) { return false; } } true } pub fn approx_eq(a: f32, b: f32) -> bool { if (a - b).abs() <= MAX_DIFF { return true; } if a.signum() != b.signum() { return false; } let a_int = a.to_bits() as i32; let b_int = b.to_bits() as i32; i32::abs(a_int - b_int) <= MAX_ULPS } #[derive(Clone)] pub struct ReproducibleRandom { seed: u128, gen: Pcg64Mcg, } impl ReproducibleRandom { pub fn new(seed: Option<u128>) -> ReproducibleRandom { let seed = match seed { Some(s) => s, None => rand::thread_rng().gen::<u64>() as u128, }; ReproducibleRandom { seed, gen: Pcg64Mcg::new(seed), } } pub fn gen<T: SampleUniform + PartialOrd>(&mut self, min: T, max: T) -> T { self.gen.gen_range(min..max) } pub fn shuffle<T>(&mut self, values: &mut [T]) { values.shuffle(&mut self.gen); } pub fn seed(&self) -> u128 { self.seed } } impl std::fmt::Debug for ReproducibleRandom { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let state = serde_json::to_string(&self.gen).map_err(|_| std::fmt::Error)?; write!(f, "Random: {}", state) } } pub fn shuffle<T>(values: &mut [T]) { values.shuffle(&mut rand::thread_rng()); } pub fn gen_rand<T: SampleUniform + PartialOrd>(min: T, max: T) -> 
T { rand::thread_rng().gen_range(min..max) } fn active_resources_file_path() -> PathBuf { let mut path = config::USER_DIR.clone(); path.push("active_resources.yml"); path } #[derive(Deserialize, Serialize, Debug, Clone)] pub struct ActiveResources { pub campaign: Option<String>, pub mods: Vec<String>, } impl ActiveResources { pub fn read() -> ActiveResources { let path = active_resources_file_path(); let data = match fs::read_to_string(path) { Ok(data) => data, Err(_) => { info!("active_resources file not found"); return ActiveResources::default(); } }; let active_resources: ActiveResources = match serde_yaml::from_str(&data) { Ok(val) => val, Err(e) => { warn!("Error reading active resources file"); warn!("{}", e); return ActiveResources::default(); } }; active_resources } pub fn write(&self) { let file = active_resources_file_path(); match write_to_file(file, self) { Ok(()) => (), Err(e) => { warn!("Error writing active resources file"); warn!("{}", e); } } } pub fn directories(&self) -> Vec<String> { let mut dirs = vec![Config::resources_config().directory]; if let Some(ref dir) = self.campaign { dirs.push(dir.to_string()); } for mod_dir in self.mods.iter() { dirs.push(mod_dir.to_string()); } dirs } } impl Default for ActiveResources { fn default() -> Self { ActiveResources { campaign: None, mods: Vec::new(), } } } #[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] #[serde(deny_unknown_fields, untagged)] pub enum ExtInt { Int(u32), Infinity, } impl Ord for ExtInt { fn cmp(&self, other: &ExtInt) -> Ordering { match self { ExtInt::Int(val) => match other { ExtInt::Int(other) => val.cmp(other), ExtInt::Infinity => Ordering::Less, }, ExtInt::Infinity => match other { ExtInt::Int(_) => Ordering::Greater, ExtInt::Infinity => Ordering::Equal, }, } } } impl PartialOrd for ExtInt { fn partial_cmp(&self, other: &ExtInt) -> Option<Ordering> { Some(self.cmp(other)) } } impl ExtInt { pub fn max(a: ExtInt, b: ExtInt) -> ExtInt { if a > b { a } else { b } } 
pub fn min(a: ExtInt, b: ExtInt) -> ExtInt { if a > b { b } else { a } } pub fn divide(self, other: ExtInt) -> f32 { match self { ExtInt::Int(amount) => match other { ExtInt::Int(other_amount) => amount as f32 / other_amount as f32, ExtInt::Infinity => 0.0, }, ExtInt::Infinity => match other { ExtInt::Int(_) => 0.0, ExtInt::Infinity => 1.0, }, } } pub fn is_zero(self) -> bool { match self { ExtInt::Int(amount) => amount == 0, ExtInt::Infinity => false, } } pub fn is_infinite(self) -> bool { match self { ExtInt::Int(_) => false, ExtInt::Infinity => true, } } pub fn to_f32(self) -> f32 { match self { ExtInt::Int(amount) => amount as f32, ExtInt::Infinity => 1e12, } } pub fn less_than(self, other: u32) -> bool { match self { ExtInt::Int(amount) => amount < other, ExtInt::Infinity => false, } } pub fn greater_than(self, other: u32) -> bool { match self { ExtInt::Int(amount) => amount > other, ExtInt::Infinity => true, } } } impl fmt::Display for ExtInt { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ExtInt::Int(amount) => write!(f, "{}", amount), ExtInt::Infinity => write!(f, "infinity"), } } } impl Mul<u32> for ExtInt { type Output = ExtInt; fn mul(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => ExtInt::Int(amount * other), ExtInt::Infinity => ExtInt::Infinity, } } } impl Add<ExtInt> for ExtInt { type Output = ExtInt; fn add(self, other: ExtInt) -> ExtInt { match self { ExtInt::Int(amount) => match other { ExtInt::Int(other_amount) => ExtInt::Int(amount + other_amount), ExtInt::Infinity => ExtInt::Infinity, }, ExtInt::Infinity => ExtInt::Infinity, } } } impl Add<u32> for ExtInt { type Output = ExtInt; fn add(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => ExtInt::Int(amount + other), ExtInt::Infinity => ExtInt::Infinity, } } } impl Sub<u32> for ExtInt { type Output = ExtInt; fn sub(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => { if other > amount { ExtInt::Int(0) } else { ExtInt::Int(amount 
- other) } } ExtInt::Infinity => ExtInt::Infinity, } } } pub fn invalid_data_error<T>(str: &str) -> Result<T, Error> { Err(Error::new(ErrorKind::InvalidData, str)) } pub fn unable_to_create_error<T>(kind: &str, id: &str) -> Result<T, Error> { Err(Error::new( ErrorKind::InvalidData, format!("Unable to create {} '{}'", kind, id), )) } pub fn get_elapsed_millis(elapsed: Duration) -> u32 { (elapsed.as_secs() as u32) * 1_000 + elapsed.subsec_millis() } pub fn format_elapsed_secs(elapsed: Duration) -> String { let secs = elapsed.as_secs() as f64 + elapsed.subsec_nanos() as f64 * 1e-9; format!("{:.6}", secs) } pub fn error_and_exit(error: &str) { error!("{}", error); error!("Exiting..."); ::std::process::exit(1) } #[must_use] pub fn setup_logger() -> LoggerHandle { let mut path = config::USER_DIR.clone(); path.push("log"); let log_dir = path; let log_config = Config::logging_config(); let mut log_builder = LogSpecBuilder::new(); log_builder.default(log_config.log_level); let dup = match log_config.stderr_log_level { LevelFilter::Error => Duplicate::Error, LevelFilter::Warn => Duplicate::Warn, LevelFilter::Info => Duplicate::Info, LevelFilter::Debug => Duplicate::Debug, LevelFilter::Trace => Duplicate::Trace, LevelFilter::Off => Duplicate::None, }; let logger = Logger::with(log_builder.finalize()) .log_to_file( FileSpec::default() .directory(log_dir) .use_timestamp(log_config.use_timestamps) ) .print_message() .duplicate_to_stderr(dup) .o_append(log_config.append) .format(opt_format); let handle = logger.start().unwrap_or_else(|e| { eprintln!("{}", e); eprintln!("There was a fatal error initializing logging to 'log/'"); eprintln!("Exiting..."); ::std::process::exit(1); }); panic::set_hook(Box::new(|p| { if let Some(s) = p.payload().downcast_ref::<String>() { error!("Thread main panic with: '{}'", s); } else if let Some(s) = p.payload().downcast_ref::<&str>() { error!("Thread main panic with: '{}'", s); } else { error!("Thread main panic"); } warn!("at {:?}", p.location()); 
let bt = Backtrace::new(); warn!("{:?}", bt); })); create_user_dirs(); handle } fn create_user_dirs() { let res = Config::resources_config(); let mut campaign_dir = config::USER_DIR.clone(); campaign_dir.push(&res.campaigns_directory); config::create_dir_and_warn(&campaign_dir); let mut mods_dir = config::USER_DIR.clone(); mods_dir.push(&res.mods_directory); config::create_dir_and_warn(&mods_dir); }
mod point; pub use self::point::{Offset, Point, Rect, Scale}; pub mod size; pub use self::size::Size; use std::cmp::Ordering; use std::f32; use std::fmt; use std::fs; use std::io::{Error, ErrorKind}; use std::ops::*; use std::panic; use std::path::PathBuf; use std::time::Duration; use backtrace::Backtrace; use log::LevelFilter; use flexi_logger::{opt_format, Duplicate, FileSpec, Logger, LogSpecBuilder, LoggerHandle}; use rand::{self, distributions::uniform::{SampleUniform}, seq::SliceRandom, Rng}; use rand_pcg::Pcg64Mcg; use crate::config::{self, Config}; use crate::resource::write_to_file; const MAX_ULPS: i32 = 100; const MAX_DIFF: f32 = 2.0 * std::f32::EPSILON; pub fn approx_eq_slice(a: &[f32], b: &[f32]) -> bool { if a.len() != b.len() { return false ; } for (a, b) in a.iter().zip(b.iter()) { if !approx_eq(*a, *b) { return false; } } true } pub fn approx_eq(a: f32, b: f32) -> bool { if (a - b).abs() <= MAX_DIFF { return true; } if a.signum() != b.signum() { return false; } let a_int = a.to_bits() as i32; let b_int = b.to_bits() as i32; i32::abs(a_int - b_int) <= MAX_ULPS } #[derive(Clone)] pub struct ReproducibleRandom { seed: u128, gen: Pcg64Mcg, } impl ReproducibleRandom { pub fn new(seed: Option<u128>) -> ReproducibleRandom { let seed = match seed { Some(s) => s, None => rand::thread_rng().gen::<u64>() as u128, }; ReproducibleRandom { seed, gen: Pcg64Mcg::new(seed), } } pub fn gen<T: SampleUniform + PartialOrd>(&mut self, min: T, max: T) -> T { self.gen.gen_range(min..max) } pub fn shuffle<T>(&mut self, values: &mut [T]) { values.shuffle(&mut self.gen); } pub fn seed(&self) -> u128 { self.seed } } impl std::fmt::Debug for ReproducibleRandom { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let state = serde_json::to_string(&self.gen).map_err(|_| std::fmt::Error)?; write!(f, "Random: {}", state) } } pub fn shuffle<T>(values: &mut [T]) { values.shuffle(&mut rand::thread_rng()); } pub fn gen_rand<T: SampleUniform + PartialOrd>(min: T, max: T) -> 
T { rand::thread_rng().gen_range(min..max) } fn active_resources_file_path() -> PathBuf { let mut path = config::USER_DIR.clone(); path.push("active_resources.yml"); path } #[derive(Deserialize, Serialize, Debug, Clone)] pub struct ActiveResources { pub campaign: Option<String>, pub mods: Vec<String>, } impl ActiveResources { pub fn read() -> ActiveResources { let path = active_resources_file_path(); let data = match fs::read_to_string(path) { Ok(data) => data, Err(_) => { info!("active_resources file not found"); return ActiveResources::default(); } }; let active_resources: ActiveResources = match serde_yaml::from_str(&data) { Ok(val) => val, Err(e) => { warn!("Error reading active resources file"); warn!("{}", e); return ActiveResources::default(); } }; active_resources } pub fn write(&self) { let file = active_resources_file_path(); match write_to_file(file, self) { Ok(()) => (), Err(e) => { warn!("Error writing active resources file"); warn!("{}", e); } } } pub fn directories(&self) -> Vec<String> { let mut dirs = vec![Config::resources_config().directory]; if let Some(ref dir) = self.campaign { dirs.push(dir.to_string()); } for mod_dir in self.mods.iter() { dirs.push(mod_dir.to_string()); } dirs } } impl Default for ActiveResources { fn default() -> Self { ActiveResources { campaign: None, mods: Vec::new(), } } } #[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] #[serde(deny_unknown_fields, untagged)] pub enum ExtInt { Int(u32), Infinity, } impl Ord for ExtInt { fn cmp(&self, other: &ExtInt) -> Ordering { match self { ExtInt::Int(val) => match other { ExtInt::Int(other) => val.cmp(other), ExtInt::Infinity => Ordering::Less, }, ExtInt::Infinity => match other { ExtInt::Int(_) => Ordering::Greater, ExtInt::Infinity => Ordering::Equal, }, } } } impl PartialOrd for ExtInt { fn partial_cmp(&self, other: &ExtInt) -> Option<Ordering> { Some(self.cmp(other)) } } impl ExtInt { pub fn max(a: ExtInt, b: ExtInt) -> ExtInt { if a > b { a } else { b } } 
pub fn min(a: ExtInt, b: ExtInt) -> ExtInt { if a > b { b } else { a } } pub fn divide(self, other: ExtInt) -> f32 { match self { ExtInt::Int(amoun
pub fn is_zero(self) -> bool { match self { ExtInt::Int(amount) => amount == 0, ExtInt::Infinity => false, } } pub fn is_infinite(self) -> bool { match self { ExtInt::Int(_) => false, ExtInt::Infinity => true, } } pub fn to_f32(self) -> f32 { match self { ExtInt::Int(amount) => amount as f32, ExtInt::Infinity => 1e12, } } pub fn less_than(self, other: u32) -> bool { match self { ExtInt::Int(amount) => amount < other, ExtInt::Infinity => false, } } pub fn greater_than(self, other: u32) -> bool { match self { ExtInt::Int(amount) => amount > other, ExtInt::Infinity => true, } } } impl fmt::Display for ExtInt { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ExtInt::Int(amount) => write!(f, "{}", amount), ExtInt::Infinity => write!(f, "infinity"), } } } impl Mul<u32> for ExtInt { type Output = ExtInt; fn mul(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => ExtInt::Int(amount * other), ExtInt::Infinity => ExtInt::Infinity, } } } impl Add<ExtInt> for ExtInt { type Output = ExtInt; fn add(self, other: ExtInt) -> ExtInt { match self { ExtInt::Int(amount) => match other { ExtInt::Int(other_amount) => ExtInt::Int(amount + other_amount), ExtInt::Infinity => ExtInt::Infinity, }, ExtInt::Infinity => ExtInt::Infinity, } } } impl Add<u32> for ExtInt { type Output = ExtInt; fn add(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => ExtInt::Int(amount + other), ExtInt::Infinity => ExtInt::Infinity, } } } impl Sub<u32> for ExtInt { type Output = ExtInt; fn sub(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => { if other > amount { ExtInt::Int(0) } else { ExtInt::Int(amount - other) } } ExtInt::Infinity => ExtInt::Infinity, } } } pub fn invalid_data_error<T>(str: &str) -> Result<T, Error> { Err(Error::new(ErrorKind::InvalidData, str)) } pub fn unable_to_create_error<T>(kind: &str, id: &str) -> Result<T, Error> { Err(Error::new( ErrorKind::InvalidData, format!("Unable to create {} '{}'", kind, id), )) } pub fn 
get_elapsed_millis(elapsed: Duration) -> u32 { (elapsed.as_secs() as u32) * 1_000 + elapsed.subsec_millis() } pub fn format_elapsed_secs(elapsed: Duration) -> String { let secs = elapsed.as_secs() as f64 + elapsed.subsec_nanos() as f64 * 1e-9; format!("{:.6}", secs) } pub fn error_and_exit(error: &str) { error!("{}", error); error!("Exiting..."); ::std::process::exit(1) } #[must_use] pub fn setup_logger() -> LoggerHandle { let mut path = config::USER_DIR.clone(); path.push("log"); let log_dir = path; let log_config = Config::logging_config(); let mut log_builder = LogSpecBuilder::new(); log_builder.default(log_config.log_level); let dup = match log_config.stderr_log_level { LevelFilter::Error => Duplicate::Error, LevelFilter::Warn => Duplicate::Warn, LevelFilter::Info => Duplicate::Info, LevelFilter::Debug => Duplicate::Debug, LevelFilter::Trace => Duplicate::Trace, LevelFilter::Off => Duplicate::None, }; let logger = Logger::with(log_builder.finalize()) .log_to_file( FileSpec::default() .directory(log_dir) .use_timestamp(log_config.use_timestamps) ) .print_message() .duplicate_to_stderr(dup) .o_append(log_config.append) .format(opt_format); let handle = logger.start().unwrap_or_else(|e| { eprintln!("{}", e); eprintln!("There was a fatal error initializing logging to 'log/'"); eprintln!("Exiting..."); ::std::process::exit(1); }); panic::set_hook(Box::new(|p| { if let Some(s) = p.payload().downcast_ref::<String>() { error!("Thread main panic with: '{}'", s); } else if let Some(s) = p.payload().downcast_ref::<&str>() { error!("Thread main panic with: '{}'", s); } else { error!("Thread main panic"); } warn!("at {:?}", p.location()); let bt = Backtrace::new(); warn!("{:?}", bt); })); create_user_dirs(); handle } fn create_user_dirs() { let res = Config::resources_config(); let mut campaign_dir = config::USER_DIR.clone(); campaign_dir.push(&res.campaigns_directory); config::create_dir_and_warn(&campaign_dir); let mut mods_dir = config::USER_DIR.clone(); 
mods_dir.push(&res.mods_directory); config::create_dir_and_warn(&mods_dir); }
t) => match other { ExtInt::Int(other_amount) => amount as f32 / other_amount as f32, ExtInt::Infinity => 0.0, }, ExtInt::Infinity => match other { ExtInt::Int(_) => 0.0, ExtInt::Infinity => 1.0, }, } }
function_block-function_prefixed
[]
Rust
utils/global-state-update-gen/src/auction_utils.rs
rafal-ch/casper-node
10ed44340c42dbfd861eefa921144ef6d759410b
use std::collections::{BTreeMap, BTreeSet}; use casper_engine_test_support::internal::LmdbWasmTestBuilder; use casper_execution_engine::shared::stored_value::StoredValue; use casper_types::{ system::auction::{ Bid, SeigniorageRecipient, SeigniorageRecipientsSnapshot, SEIGNIORAGE_RECIPIENTS_SNAPSHOT_KEY, }, AsymmetricType, EraId, Key, ProtocolVersion, PublicKey, U512, }; use crate::utils::ValidatorsDiff; pub fn read_snapshot(builder: &LmdbWasmTestBuilder) -> (Key, SeigniorageRecipientsSnapshot) { let protocol_data = builder .get_engine_state() .get_protocol_data(ProtocolVersion::from_parts(1, 0, 0)) .unwrap() .expect("should have protocol data"); let auction_contract_hash = protocol_data.auction(); let validators_key = builder .get_contract(auction_contract_hash) .expect("auction should exist") .named_keys()[SEIGNIORAGE_RECIPIENTS_SNAPSHOT_KEY]; let stored_value = builder .query(None, validators_key, &[]) .expect("should query"); let cl_value = stored_value .as_cl_value() .cloned() .expect("should be cl value"); (validators_key, cl_value.into_t().expect("should convert")) } pub fn gen_snapshot( validators: Vec<(String, String)>, starting_era_id: EraId, count: u64, ) -> SeigniorageRecipientsSnapshot { let mut new_snapshot = BTreeMap::new(); let mut era_validators = BTreeMap::new(); for (pub_key_str, bonded_amount_str) in &validators { let validator_pub_key = PublicKey::from_hex(pub_key_str.as_bytes()).unwrap(); let bonded_amount = U512::from_dec_str(bonded_amount_str).unwrap(); let seigniorage_recipient = SeigniorageRecipient::new(bonded_amount, Default::default(), Default::default()); let _ = era_validators.insert(validator_pub_key, seigniorage_recipient); } for era_id in starting_era_id.iter(count) { let _ = new_snapshot.insert(era_id, era_validators.clone()); } new_snapshot } pub fn find_large_bids( builder: &mut LmdbWasmTestBuilder, new_snapshot: &SeigniorageRecipientsSnapshot, ) -> BTreeSet<PublicKey> { let min_bid = new_snapshot .values() .next() .unwrap() 
.values() .map(SeigniorageRecipient::stake) .min() .unwrap(); builder .get_bids() .into_iter() .filter(|(_pkey, bid)| bid.staked_amount() >= min_bid) .map(|(pkey, _bid)| pkey) .collect() } pub fn generate_entries_removing_bids( builder: &mut LmdbWasmTestBuilder, validators_diff: &ValidatorsDiff, new_snapshot: &SeigniorageRecipientsSnapshot, ) -> BTreeMap<Key, StoredValue> { let large_bids = find_large_bids(builder, new_snapshot); let to_unbid = validators_diff.removed.union(&large_bids); validators_diff .added .iter() .map(|pkey| { let amount = *new_snapshot .values() .next() .unwrap() .get(pkey) .unwrap() .stake(); let account_hash = pkey.to_account_hash(); let account = builder.get_account(account_hash).unwrap(); ( Key::Bid(account_hash), Bid::unlocked( pkey.clone(), account.main_purse(), amount, Default::default(), ) .into(), ) }) .chain(to_unbid.into_iter().map(|pkey| { let account_hash = pkey.to_account_hash(); let account = builder.get_account(account_hash).unwrap(); ( Key::Bid(account_hash), Bid::empty(pkey.clone(), account.main_purse()).into(), ) })) .collect() } pub fn generate_entries_removing_withdraws( builder: &mut LmdbWasmTestBuilder, validators_diff: &ValidatorsDiff, ) -> BTreeMap<Key, StoredValue> { let withdraws = builder.get_withdraws(); let withdraw_keys: BTreeSet<_> = withdraws.keys().collect(); validators_diff .removed .iter() .map(PublicKey::to_account_hash) .filter(|acc| withdraw_keys.contains(&acc)) .map(|acc| (Key::Withdraw(acc), StoredValue::Withdraw(vec![]))) .collect() }
use std::collections::{BTreeMap, BTreeSet}; use casper_engine_test_support::internal::LmdbWasmTestBuilder; use casper_execution_engine::shared::stored_value::StoredValue; use casper_types::{ system::auction::{ Bid, SeigniorageRecipient, SeigniorageRecipientsSnapshot, SEIGNIORAGE_RECIPIENTS_SNAPSHOT_KEY, }, AsymmetricType, EraId, Key, ProtocolVersion, PublicKey, U512, }; use crate::utils::ValidatorsDiff; pub fn read_snapshot(builder: &LmdbWasmTestBuilder) -> (Key, SeigniorageRecipientsSnapshot) { let protocol_data = builder .get_engine_state() .get_protocol_data(ProtocolVersion::from_parts(1, 0, 0)) .unwrap() .expect("should have protocol data"); let auction_contract
pkey| { let amount = *new_snapshot .values() .next() .unwrap() .get(pkey) .unwrap() .stake(); let account_hash = pkey.to_account_hash(); let account = builder.get_account(account_hash).unwrap(); ( Key::Bid(account_hash), Bid::unlocked( pkey.clone(), account.main_purse(), amount, Default::default(), ) .into(), ) }) .chain(to_unbid.into_iter().map(|pkey| { let account_hash = pkey.to_account_hash(); let account = builder.get_account(account_hash).unwrap(); ( Key::Bid(account_hash), Bid::empty(pkey.clone(), account.main_purse()).into(), ) })) .collect() } pub fn generate_entries_removing_withdraws( builder: &mut LmdbWasmTestBuilder, validators_diff: &ValidatorsDiff, ) -> BTreeMap<Key, StoredValue> { let withdraws = builder.get_withdraws(); let withdraw_keys: BTreeSet<_> = withdraws.keys().collect(); validators_diff .removed .iter() .map(PublicKey::to_account_hash) .filter(|acc| withdraw_keys.contains(&acc)) .map(|acc| (Key::Withdraw(acc), StoredValue::Withdraw(vec![]))) .collect() }
_hash = protocol_data.auction(); let validators_key = builder .get_contract(auction_contract_hash) .expect("auction should exist") .named_keys()[SEIGNIORAGE_RECIPIENTS_SNAPSHOT_KEY]; let stored_value = builder .query(None, validators_key, &[]) .expect("should query"); let cl_value = stored_value .as_cl_value() .cloned() .expect("should be cl value"); (validators_key, cl_value.into_t().expect("should convert")) } pub fn gen_snapshot( validators: Vec<(String, String)>, starting_era_id: EraId, count: u64, ) -> SeigniorageRecipientsSnapshot { let mut new_snapshot = BTreeMap::new(); let mut era_validators = BTreeMap::new(); for (pub_key_str, bonded_amount_str) in &validators { let validator_pub_key = PublicKey::from_hex(pub_key_str.as_bytes()).unwrap(); let bonded_amount = U512::from_dec_str(bonded_amount_str).unwrap(); let seigniorage_recipient = SeigniorageRecipient::new(bonded_amount, Default::default(), Default::default()); let _ = era_validators.insert(validator_pub_key, seigniorage_recipient); } for era_id in starting_era_id.iter(count) { let _ = new_snapshot.insert(era_id, era_validators.clone()); } new_snapshot } pub fn find_large_bids( builder: &mut LmdbWasmTestBuilder, new_snapshot: &SeigniorageRecipientsSnapshot, ) -> BTreeSet<PublicKey> { let min_bid = new_snapshot .values() .next() .unwrap() .values() .map(SeigniorageRecipient::stake) .min() .unwrap(); builder .get_bids() .into_iter() .filter(|(_pkey, bid)| bid.staked_amount() >= min_bid) .map(|(pkey, _bid)| pkey) .collect() } pub fn generate_entries_removing_bids( builder: &mut LmdbWasmTestBuilder, validators_diff: &ValidatorsDiff, new_snapshot: &SeigniorageRecipientsSnapshot, ) -> BTreeMap<Key, StoredValue> { let large_bids = find_large_bids(builder, new_snapshot); let to_unbid = validators_diff.removed.union(&large_bids); validators_diff .added .iter() .map(|
random
[ { "content": "fn withdraw_bid(public_key: PublicKey, unbond_amount: U512) -> U512 {\n\n let contract_hash = system::get_auction();\n\n let args = runtime_args! {\n\n auction::ARG_AMOUNT => unbond_amount,\n\n auction::ARG_PUBLIC_KEY => public_key,\n\n };\n\n runtime::call_contract(contr...
Rust
tests/do_nestest.rs
Lokathor/terbium
382ce827aeda58e0a66449a1cfafc7da48f9e65b
#![allow(unused)] use std::{fs::File, io::prelude::*, path::Path}; use terbium::*; #[test] fn run_through_nestest() { main(); } fn main() { let rom_path = Path::new("roms").join("nestest.nes"); let mut file = File::open(rom_path).expect("couldn't open ROM file"); let mut contents = Vec::new(); file .read_to_end(&mut contents) .expect("couldn't read file content"); drop(file); let log_path = Path::new("roms").join("nestest.log"); let mut file = File::open(log_path).expect("couldn't open log file."); let mut official_bytes = Vec::new(); file .read_to_end(&mut official_bytes) .expect("couldn't read log contents"); drop(file); let official_log = String::from_utf8_lossy(&official_bytes); let mut system = Terbium::default(); system.opt_cart = Cartridge::from_ines_bytes(&contents) .map_err(|e| println!("Cart Load Error: {}", e)) .ok(); let mut cpu_cycles = 0_usize; let mut display = Bitmap::new(256, 240); system.cpu_reset(); system.cpu.pc = 0xC000; system.cpu.p = ProgramStatus { flags: 0x24 }; system.cpu.s = 0xFD; system.cpu.cycle_deficit = 0; system.ppu.x_pos = 0; system.ppu.y_pos = 0; cpu_cycles = 6; for (n, line) in official_log.lines().enumerate() { let n = n + 1; let log_pc = u16::from_str_radix(&line[0..4], 16).unwrap(); let log_instruction = &line[6..14]; let log_operation = &line[16..19]; let log_a = u8::from_str_radix(&line[50..52], 16).unwrap(); let log_x = u8::from_str_radix(&line[55..57], 16).unwrap(); let log_y = u8::from_str_radix(&line[60..62], 16).unwrap(); let log_p = u8::from_str_radix(&line[65..67], 16).unwrap(); let log_s = u8::from_str_radix(&line[71..73], 16).unwrap(); let log_ppu_x = u16::from_str_radix(&line[78..81].trim(), 10).unwrap(); let log_ppu_y = u16::from_str_radix(&line[82..85].trim(), 10).unwrap(); let log_cyc = usize::from_str_radix(&line[90..].trim(), 10).unwrap(); if log_pc != system.cpu.pc { println!( "PC error, line {} expected state:\n{}\n> expected PC:{:04X}, have {:04X}", n, &line[48..], log_pc, system.cpu.pc ); println!("previous 
operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; let instruction = match Opcode(system.debug_cpu_read(system.cpu.pc)) .addressing_mode() .extra_bytes_needed() { 0 => format!("{:02X} ", system.debug_cpu_read(system.cpu.pc)), 1 => format!( "{:02X} {:02X} ", system.debug_cpu_read(system.cpu.pc), system.debug_cpu_read(system.cpu.pc + 1) ), 2 => format!( "{:02X} {:02X} {:02X}", system.debug_cpu_read(system.cpu.pc), system.debug_cpu_read(system.cpu.pc + 1), system.debug_cpu_read(system.cpu.pc + 2) ), _ => unreachable!(), }; if log_instruction != instruction { panic!( "Instruction error, line {}, expected '{}', got '{}'", n, log_instruction, instruction ) }; if log_a != system.cpu.a { println!( "A error, line {} expected state:\n{}\n> expected A:{:02X}, have {:02X}", n, &line[48..], log_a, system.cpu.a ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_x != system.cpu.x { println!( "X error, line {} expected state:\n{}\n> expected X:{:02X}, have {:02X}", n, &line[48..], log_x, system.cpu.x ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_y != system.cpu.y { panic!( "A error, line {}, expected {:02X}, got {:02X}", n, log_y, system.cpu.y ) }; if log_p != system.cpu.p.flags { println!( "P error, line {} expected state:\n{}\n> expected P:{:?}, have {:?}", n, &line[48..], ProgramStatus { flags: log_p }, system.cpu.p ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_s != system.cpu.s { panic!( "S error, line {}, expected {:02X}, got {:02X}", n, log_s, system.cpu.s ) }; while system.cpu.cycle_deficit != 0 { cpu_cycles += usize::from(system.clock_system(&mut display)); } if log_ppu_x != system.ppu.x_pos { println!( "ppu.x_pos error, line {} expected 
state:\n{}\n> expected {}, have {}", n, &line[48..], log_ppu_x, system.ppu.x_pos ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_ppu_y != system.ppu.y_pos { println!( "ppu.y_pos error, line {} expected state:\n{}\n> expected {}, have {}", n, &line[48..], log_ppu_y, system.ppu.y_pos ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; while system.cpu.cycle_deficit == 0 { cpu_cycles += usize::from(system.clock_system(&mut display)); } if log_cyc != cpu_cycles { println!( "CYC error, line {} expected state:\n{}\n> expected {}, have {}", n, &line[48..], log_cyc, cpu_cycles ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; } println!("success!"); }
#![allow(unused)] use std::{fs::File, io::prelude::*, path::Path}; use terbium::*; #[test] fn run_through_nestest() { main(); } fn main() { let rom_path = Path::new("roms").join("nestest.nes"); let mut file = File::open(rom_path).expect("couldn't open ROM file"); let mut contents = Vec::new(); file .read_to_end(&mut contents) .expect("couldn't read file content"); drop(file); let log_path = Path::new("roms").join("nestest.log"); let mut file = File::open(log_path).expect("couldn't open log file."); let mut official_bytes = Vec::new(); file .read_to_end(&mut official_bytes) .expect("couldn't read log contents"); drop(file); let official_log = String::from_utf8_lossy(&official_bytes); let mut system = Terbium::default(); system.opt_cart = Cartridge::from_ines_bytes(&contents) .map_err(|e| println!("Cart Load Error: {}", e)) .ok(); let mut cpu_cycles = 0_usize; let mut display = Bitmap::new(256, 240); system.cpu_reset(); system.cpu.pc = 0xC000; system.cpu.p = ProgramStatus { flags: 0x24 }; system.cpu.s = 0xFD; system.cpu.cycle_deficit = 0; system.ppu.x_pos = 0; system.ppu.y_pos = 0; cpu_cycles = 6; for (n, line) in official_log.lines().enumerate() { let n = n + 1; let log_pc = u16::from_str_radix(&line[0..4], 16).unwrap(); let log_instruction = &line[6..14]; let log_operation = &line[16..19]; let log_a = u8::from_str_radix(&line[50..52], 16).unwrap(); let log_x = u8::from_str_radix(&line[55..57], 16).unwrap(); let log_y = u8::from_str_radix(&line[60..62], 16).unwrap(); let log_p = u8::from_str_radix(&line[65..67], 16).unwrap(); let log_s = u8::from_str_radix(&line[71..73], 16).unwrap(); let log_ppu_x = u16::from_str_radix(&line[78..81].trim(), 10).unwrap(); let log_ppu_y = u16::from_str_radix(&line[82..85].trim(), 10).unwrap(); let log_cyc = usize::from_str_radix(&line[90..].trim(), 10).unwrap(); if log_pc != system.cpu.pc { println!( "PC error, line {} expected state:\n{}\n> expected PC:{:04X}, have {:04X}", n, &line[48..], log_pc, system.cpu.pc ); println!("previous 
operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; let instruction = match Opcode(system.debug_cpu_read(system.cpu.pc)) .addressing_mode() .extra_bytes_needed() { 0 => format!("{:02X} ", system.debug_cpu_read(system.cpu.pc)), 1 => format!( "{:02X} {:02X} ", system.debug_cpu_read(system.cpu.pc), system.debug_cpu_read(system.cpu.pc + 1) ), 2 => format!( "{:02X} {:02X} {:02X}", system.debug_cpu_read(system.cpu.pc), system.debug_cpu_read(system.cpu.pc + 1), system.debug_cpu_read(system.cpu.pc + 2) ), _ => unreac
n!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_s != system.cpu.s { panic!( "S error, line {}, expected {:02X}, got {:02X}", n, log_s, system.cpu.s ) }; while system.cpu.cycle_deficit != 0 { cpu_cycles += usize::from(system.clock_system(&mut display)); } if log_ppu_x != system.ppu.x_pos { println!( "ppu.x_pos error, line {} expected state:\n{}\n> expected {}, have {}", n, &line[48..], log_ppu_x, system.ppu.x_pos ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_ppu_y != system.ppu.y_pos { println!( "ppu.y_pos error, line {} expected state:\n{}\n> expected {}, have {}", n, &line[48..], log_ppu_y, system.ppu.y_pos ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; while system.cpu.cycle_deficit == 0 { cpu_cycles += usize::from(system.clock_system(&mut display)); } if log_cyc != cpu_cycles { println!( "CYC error, line {} expected state:\n{}\n> expected {}, have {}", n, &line[48..], log_cyc, cpu_cycles ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; } println!("success!"); }
hable!(), }; if log_instruction != instruction { panic!( "Instruction error, line {}, expected '{}', got '{}'", n, log_instruction, instruction ) }; if log_a != system.cpu.a { println!( "A error, line {} expected state:\n{}\n> expected A:{:02X}, have {:02X}", n, &line[48..], log_a, system.cpu.a ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_x != system.cpu.x { println!( "X error, line {} expected state:\n{}\n> expected X:{:02X}, have {:02X}", n, &line[48..], log_x, system.cpu.x ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_y != system.cpu.y { panic!( "A error, line {}, expected {:02X}, got {:02X}", n, log_y, system.cpu.y ) }; if log_p != system.cpu.p.flags { println!( "P error, line {} expected state:\n{}\n> expected P:{:?}, have {:?}", n, &line[48..], ProgramStatus { flags: log_p }, system.cpu.p ); printl
random
[]
Rust
digest/src/mac.rs
tesaguri/RustCrypto-traits
e5b99207535ed36964ddfc454d1ea46fb1425a07
use crate::{FixedOutput, FixedOutputReset, Update}; use crypto_common::{InvalidLength, Key, KeyInit, KeySizeUser, Output, OutputSizeUser, Reset}; #[cfg(feature = "rand_core")] use crate::rand_core::{CryptoRng, RngCore}; use core::fmt; use generic_array::typenum::Unsigned; use subtle::{Choice, ConstantTimeEq}; #[cfg_attr(docsrs, doc(cfg(feature = "mac")))] pub trait MacMarker {} #[cfg_attr(docsrs, doc(cfg(feature = "mac")))] pub trait Mac: KeySizeUser + OutputSizeUser + Sized { fn new(key: &Key<Self>) -> Self; #[cfg(feature = "rand_core")] #[cfg_attr(docsrs, doc(cfg(feature = "rand_core")))] fn generate_key(rng: impl CryptoRng + RngCore) -> Key<Self>; fn new_from_slice(key: &[u8]) -> Result<Self, InvalidLength>; fn update(&mut self, data: &[u8]); fn finalize(self) -> CtOutput<Self>; fn finalize_reset(&mut self) -> CtOutput<Self> where Self: FixedOutputReset; fn reset(&mut self) where Self: Reset; fn verify(self, tag: &Output<Self>) -> Result<(), MacError>; fn verify_slice(self, tag: &[u8]) -> Result<(), MacError>; fn verify_truncated_left(self, tag: &[u8]) -> Result<(), MacError>; fn verify_truncated_right(self, tag: &[u8]) -> Result<(), MacError>; } impl<T: KeyInit + Update + FixedOutput + MacMarker> Mac for T { #[inline(always)] fn new(key: &Key<Self>) -> Self { KeyInit::new(key) } #[inline(always)] fn new_from_slice(key: &[u8]) -> Result<Self, InvalidLength> { KeyInit::new_from_slice(key) } #[inline] fn update(&mut self, data: &[u8]) { Update::update(self, data); } #[inline] fn finalize(self) -> CtOutput<Self> { CtOutput::new(self.finalize_fixed()) } #[inline(always)] fn finalize_reset(&mut self) -> CtOutput<Self> where Self: FixedOutputReset, { CtOutput::new(self.finalize_fixed_reset()) } #[inline] fn reset(&mut self) where Self: Reset, { Reset::reset(self) } #[inline] fn verify(self, tag: &Output<Self>) -> Result<(), MacError> { if self.finalize() == tag.into() { Ok(()) } else { Err(MacError) } } #[inline] fn verify_slice(self, tag: &[u8]) -> Result<(), 
MacError> { let n = tag.len(); if n != Self::OutputSize::USIZE { return Err(MacError); } let choice = self.finalize_fixed().ct_eq(tag); if choice.unwrap_u8() == 1 { Ok(()) } else { Err(MacError) } } fn verify_truncated_left(self, tag: &[u8]) -> Result<(), MacError> { let n = tag.len(); if n == 0 || n > Self::OutputSize::USIZE { return Err(MacError); } let choice = self.finalize_fixed()[..n].ct_eq(tag); if choice.unwrap_u8() == 1 { Ok(()) } else { Err(MacError) } } fn verify_truncated_right(self, tag: &[u8]) -> Result<(), MacError> { let n = tag.len(); if n == 0 || n > Self::OutputSize::USIZE { return Err(MacError); } let m = Self::OutputSize::USIZE - n; let choice = self.finalize_fixed()[m..].ct_eq(tag); if choice.unwrap_u8() == 1 { Ok(()) } else { Err(MacError) } } #[cfg(feature = "rand_core")] #[cfg_attr(docsrs, doc(cfg(feature = "rand_core")))] #[inline] fn generate_key(rng: impl CryptoRng + RngCore) -> Key<Self> { <T as KeyInit>::generate_key(rng) } } #[derive(Clone)] #[cfg_attr(docsrs, doc(cfg(feature = "mac")))] pub struct CtOutput<T: OutputSizeUser> { bytes: Output<T>, } impl<T: OutputSizeUser> CtOutput<T> { #[inline(always)] pub fn new(bytes: Output<T>) -> Self { Self { bytes } } #[inline(always)] pub fn into_bytes(self) -> Output<T> { self.bytes } } impl<T: OutputSizeUser> From<Output<T>> for CtOutput<T> { #[inline(always)] fn from(bytes: Output<T>) -> Self { Self { bytes } } } impl<'a, T: OutputSizeUser> From<&'a Output<T>> for CtOutput<T> { #[inline(always)] fn from(bytes: &'a Output<T>) -> Self { bytes.clone().into() } } impl<T: OutputSizeUser> ConstantTimeEq for CtOutput<T> { #[inline(always)] fn ct_eq(&self, other: &Self) -> Choice { self.bytes.ct_eq(&other.bytes) } } impl<T: OutputSizeUser> PartialEq for CtOutput<T> { #[inline(always)] fn eq(&self, x: &CtOutput<T>) -> bool { self.ct_eq(x).unwrap_u8() == 1 } } impl<T: OutputSizeUser> Eq for CtOutput<T> {} #[derive(Default, Debug, Copy, Clone, Eq, PartialEq)] #[cfg_attr(docsrs, doc(cfg(feature = 
"mac")))] pub struct MacError; impl fmt::Display for MacError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("MAC tag mismatch") } } #[cfg(feature = "std")] impl std::error::Error for MacError {}
use crate::{FixedOutput, FixedOutputReset, Update}; use crypto_common::{InvalidLength, Key, KeyInit, KeySizeUser, Output, OutputSizeUser, Reset}; #[cfg(feature = "rand_core")] use crate::rand_core::{CryptoRng, RngCore}; use core::fmt; use generic_array::typenum::Unsigned; use subtle::{Choice, ConstantTimeEq}; #[cfg_attr(docsrs, doc(cfg(feature = "mac")))] pub trait MacMarker {} #[cfg_attr(docsrs, doc(cfg(feature = "mac")))] pub trait Mac: KeySizeUser + OutputSizeUser + Sized { fn new(key: &Key<Self>) -> Self; #[cfg(feature = "rand_core")] #[cfg_attr(docsrs, doc(cfg(feature = "rand_core")))] fn generate_key(rng: impl CryptoRng + RngCore) -> Key<Self>; fn new_from_slice(key: &[u8]) -> Result<Self, InvalidLength>; fn update(&mut self, data: &[u8]); fn finalize(self) -> CtOutput<Self>; fn finalize_reset(&mut self) -> CtOutput<Self> where Self: FixedOutputReset; fn reset(&mut self) where Self: Reset; fn verify(self, tag: &Output<Self>) -> Result<(), MacError>; fn verify_slice(self, tag: &[u8]) -> Result<(), MacError>; fn verify_truncated_left(self, tag: &[u8]) -> Result<(), MacError>; fn verify_truncated_right(self, tag: &[u8]) -> Result<(), MacError>; } impl<T: KeyInit + Update + FixedOutput + MacMarker> Mac for T { #[inline(always)] fn new(key: &Key<Self>) -> Self { KeyInit::new(key) } #[inline(always)] fn new_from_slice(key: &[u8]) -> Result<Self, InvalidLength> { KeyInit::new_from_slice(key) } #[inline] fn update(&mut self, data: &[u8]) { Update::update(self, data); } #[inline] fn finalize(self) -> CtOutput<Self> { CtOutput::new(self.finalize_fixed()) } #[inline(always)]
#[inline] fn reset(&mut self) where Self: Reset, { Reset::reset(self) } #[inline] fn verify(self, tag: &Output<Self>) -> Result<(), MacError> { if self.finalize() == tag.into() { Ok(()) } else { Err(MacError) } } #[inline] fn verify_slice(self, tag: &[u8]) -> Result<(), MacError> { let n = tag.len(); if n != Self::OutputSize::USIZE { return Err(MacError); } let choice = self.finalize_fixed().ct_eq(tag); if choice.unwrap_u8() == 1 { Ok(()) } else { Err(MacError) } } fn verify_truncated_left(self, tag: &[u8]) -> Result<(), MacError> { let n = tag.len(); if n == 0 || n > Self::OutputSize::USIZE { return Err(MacError); } let choice = self.finalize_fixed()[..n].ct_eq(tag); if choice.unwrap_u8() == 1 { Ok(()) } else { Err(MacError) } } fn verify_truncated_right(self, tag: &[u8]) -> Result<(), MacError> { let n = tag.len(); if n == 0 || n > Self::OutputSize::USIZE { return Err(MacError); } let m = Self::OutputSize::USIZE - n; let choice = self.finalize_fixed()[m..].ct_eq(tag); if choice.unwrap_u8() == 1 { Ok(()) } else { Err(MacError) } } #[cfg(feature = "rand_core")] #[cfg_attr(docsrs, doc(cfg(feature = "rand_core")))] #[inline] fn generate_key(rng: impl CryptoRng + RngCore) -> Key<Self> { <T as KeyInit>::generate_key(rng) } } #[derive(Clone)] #[cfg_attr(docsrs, doc(cfg(feature = "mac")))] pub struct CtOutput<T: OutputSizeUser> { bytes: Output<T>, } impl<T: OutputSizeUser> CtOutput<T> { #[inline(always)] pub fn new(bytes: Output<T>) -> Self { Self { bytes } } #[inline(always)] pub fn into_bytes(self) -> Output<T> { self.bytes } } impl<T: OutputSizeUser> From<Output<T>> for CtOutput<T> { #[inline(always)] fn from(bytes: Output<T>) -> Self { Self { bytes } } } impl<'a, T: OutputSizeUser> From<&'a Output<T>> for CtOutput<T> { #[inline(always)] fn from(bytes: &'a Output<T>) -> Self { bytes.clone().into() } } impl<T: OutputSizeUser> ConstantTimeEq for CtOutput<T> { #[inline(always)] fn ct_eq(&self, other: &Self) -> Choice { self.bytes.ct_eq(&other.bytes) } } impl<T: 
OutputSizeUser> PartialEq for CtOutput<T> { #[inline(always)] fn eq(&self, x: &CtOutput<T>) -> bool { self.ct_eq(x).unwrap_u8() == 1 } } impl<T: OutputSizeUser> Eq for CtOutput<T> {} #[derive(Default, Debug, Copy, Clone, Eq, PartialEq)] #[cfg_attr(docsrs, doc(cfg(feature = "mac")))] pub struct MacError; impl fmt::Display for MacError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("MAC tag mismatch") } } #[cfg(feature = "std")] impl std::error::Error for MacError {}
fn finalize_reset(&mut self) -> CtOutput<Self> where Self: FixedOutputReset, { CtOutput::new(self.finalize_fixed_reset()) }
function_block-full_function
[ { "content": "/// Trait for hash functions with fixed-size output able to reset themselves.\n\npub trait FixedOutputReset: FixedOutput + Reset {\n\n /// Write result into provided array and reset the hasher state.\n\n fn finalize_into_reset(&mut self, out: &mut Output<Self>);\n\n\n\n /// Retrieve resul...
Rust
rafx-api/src/queue.rs
DavidVonDerau/rafx
5d42caed4bd7fcb5d32e3e26021669cf60071abd
#[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] use crate::empty::RafxQueueEmpty; #[cfg(feature = "rafx-metal")] use crate::metal::RafxQueueMetal; #[cfg(feature = "rafx-vulkan")] use crate::vulkan::RafxQueueVulkan; use crate::{ RafxCommandBuffer, RafxCommandPool, RafxCommandPoolDef, RafxDeviceContext, RafxFence, RafxPresentSuccessResult, RafxQueueType, RafxResult, RafxSemaphore, RafxSwapchain, }; #[derive(Clone, Debug)] pub enum RafxQueue { #[cfg(feature = "rafx-vulkan")] Vk(RafxQueueVulkan), #[cfg(feature = "rafx-metal")] Metal(RafxQueueMetal), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] Empty(RafxQueueEmpty), } impl RafxQueue { pub fn device_context(&self) -> RafxDeviceContext { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => RafxDeviceContext::Vk(inner.device_context().clone()), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => RafxDeviceContext::Metal(inner.device_context().clone()), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => RafxDeviceContext::Empty(inner.device_context().clone()), } } pub fn queue_id(&self) -> u32 { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => inner.queue_id(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.queue_id(), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.queue_id(), } } pub fn queue_type(&self) -> RafxQueueType { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => inner.queue_type(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.queue_type(), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.queue_type(), } } pub fn create_command_pool( &self, command_pool_def: &RafxCommandPoolDef, ) -> 
RafxResult<RafxCommandPool> { Ok(match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { RafxCommandPool::Vk(inner.create_command_pool(command_pool_def)?) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { RafxCommandPool::Metal(inner.create_command_pool(command_pool_def)?) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => { RafxCommandPool::Empty(inner.create_command_pool(command_pool_def)?) } }) } pub fn submit( &self, command_buffers: &[&RafxCommandBuffer], wait_semaphores: &[&RafxSemaphore], signal_semaphores: &[&RafxSemaphore], signal_fence: Option<&RafxFence>, ) -> RafxResult<()> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.vk_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.vk_fence().unwrap()), ) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.metal_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.metal_fence().unwrap()), ) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.empty_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() 
.map(|x| x.empty_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.empty_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.empty_fence().unwrap()), ) } } } pub fn present( &self, swapchain: &RafxSwapchain, wait_semaphores: &[&RafxSemaphore], image_index: u32, ) -> RafxResult<RafxPresentSuccessResult> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); inner.present( swapchain.vk_swapchain().unwrap(), &wait_semaphores, image_index, ) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); inner.present( swapchain.metal_swapchain().unwrap(), &wait_semaphores, image_index, ) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.empty_semaphore().unwrap()) .collect(); inner.present( swapchain.empty_swapchain().unwrap(), &wait_semaphores, image_index, ) } } } pub fn wait_for_queue_idle(&self) -> RafxResult<()> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => inner.wait_for_queue_idle(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.wait_for_queue_idle(), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.wait_for_queue_idle(), } } #[cfg(feature = "rafx-vulkan")] pub fn vk_queue(&self) -> Option<&RafxQueueVulkan> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => Some(inner), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(_inner) => None, #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] 
RafxQueue::Empty(_inner) => None, } } #[cfg(feature = "rafx-metal")] pub fn metal_queue(&self) -> Option<&RafxQueueMetal> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(_inner) => None, #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => Some(inner), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => None, } } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] pub fn empty_queue(&self) -> Option<&RafxQueueEmpty> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(_inner) => None, #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => None, #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => Some(inner), } } }
#[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] use crate::empty::RafxQueueEmpty; #[cfg(feature = "rafx-metal")] use crate::metal::RafxQueueMetal; #[cfg(feature = "rafx-vulkan")] use crate::vulkan::RafxQueueVulkan; use crate::{ RafxCommandBuffer, RafxCommandPool, RafxCommandPoolDef, RafxDeviceContext, RafxFence, RafxPresentSuccessResult, RafxQueueType, RafxResult, RafxSemaphore, RafxSwapchain, }; #[derive(Clone, Debug)] pub enum RafxQueue { #[cfg(feature = "rafx-vulkan")] Vk(RafxQueueVulkan), #[cfg(feature = "rafx-metal")] Metal(RafxQueueMetal), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] Empty(RafxQueueEmpty), } impl RafxQueue { pub fn device_context(&self) -> RafxDeviceContext { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => RafxDeviceContext::Vk(inner.device_context().clone()), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => RafxDeviceContext::Metal(inner.device_context().clone()), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => RafxDeviceContext::Empty(inner.device_context().clone()), } } pub fn queue_id(&self) -> u32 { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => inner.queue_id(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.queue_id(), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.queue_id(), } } pub fn queue_type(
afx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.queue_type(), } } pub fn create_command_pool( &self, command_pool_def: &RafxCommandPoolDef, ) -> RafxResult<RafxCommandPool> { Ok(match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { RafxCommandPool::Vk(inner.create_command_pool(command_pool_def)?) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { RafxCommandPool::Metal(inner.create_command_pool(command_pool_def)?) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => { RafxCommandPool::Empty(inner.create_command_pool(command_pool_def)?) } }) } pub fn submit( &self, command_buffers: &[&RafxCommandBuffer], wait_semaphores: &[&RafxSemaphore], signal_semaphores: &[&RafxSemaphore], signal_fence: Option<&RafxFence>, ) -> RafxResult<()> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.vk_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.vk_fence().unwrap()), ) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.metal_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.metal_fence().unwrap()), ) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = 
"rafx-vulkan")) ))] RafxQueue::Empty(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.empty_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.empty_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.empty_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.empty_fence().unwrap()), ) } } } pub fn present( &self, swapchain: &RafxSwapchain, wait_semaphores: &[&RafxSemaphore], image_index: u32, ) -> RafxResult<RafxPresentSuccessResult> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); inner.present( swapchain.vk_swapchain().unwrap(), &wait_semaphores, image_index, ) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); inner.present( swapchain.metal_swapchain().unwrap(), &wait_semaphores, image_index, ) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.empty_semaphore().unwrap()) .collect(); inner.present( swapchain.empty_swapchain().unwrap(), &wait_semaphores, image_index, ) } } } pub fn wait_for_queue_idle(&self) -> RafxResult<()> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => inner.wait_for_queue_idle(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.wait_for_queue_idle(), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.wait_for_queue_idle(), } } #[cfg(feature = "rafx-vulkan")] pub fn vk_queue(&self) -> Option<&RafxQueueVulkan> { match self { #[cfg(feature = 
"rafx-vulkan")] RafxQueue::Vk(inner) => Some(inner), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(_inner) => None, #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(_inner) => None, } } #[cfg(feature = "rafx-metal")] pub fn metal_queue(&self) -> Option<&RafxQueueMetal> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(_inner) => None, #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => Some(inner), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => None, } } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] pub fn empty_queue(&self) -> Option<&RafxQueueEmpty> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(_inner) => None, #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => None, #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => Some(inner), } } }
&self) -> RafxQueueType { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => inner.queue_type(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.queue_type(), #[cfg(any( feature = "r
function_block-random_span
[ { "content": "pub fn draw_skybox(\n\n resource_context: &ResourceContext,\n\n skybox_material: &ResourceArc<MaterialPassResource>,\n\n skybox_texture: &ResourceArc<ImageViewResource>,\n\n main_view: &RenderView,\n\n render_target_meta: &GraphicsPipelineRenderTargetMeta,\n\n command_buffer: &Ra...
Rust
elasticsearch/examples/index_questions_answers/main.rs
yaanhyy/elasticsearch-rs
740c3ebd41b391f954e9cf008209b39d89a75231
/* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch B.V. licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ #[macro_use] extern crate serde_json; use clap::{App, Arg}; #[cfg(any(feature = "native-tls", feature = "rustls-tls"))] use elasticsearch::cert::CertificateValidation; use elasticsearch::{ auth::Credentials, http::transport::{SingleNodeConnectionPool, TransportBuilder}, indices::{ IndicesCreateParts, IndicesDeleteParts, IndicesExistsParts, IndicesPutSettingsParts, }, BulkOperation, BulkParts, Elasticsearch, Error, DEFAULT_ADDRESS, }; use serde_json::Value; use sysinfo::SystemExt; use url::Url; mod stack_overflow; use http::StatusCode; use stack_overflow::*; use std::time::Instant; static POSTS_INDEX: &'static str = "posts"; #[tokio::main] pub async fn main() -> Result<(), Box<dyn std::error::Error>> { let matches = App::new("index_questions_answers") .about( "indexes Stack Overflow questions and answers into Elasticsearch with the Rust client", ) .arg( Arg::with_name("path") .short("p") .long("path") .value_name("PATH") .help("The path to the Posts.xml file containing questions and answers. 
Can be obtained from https://archive.org/download/stackexchange/stackoverflow.com-Posts.7z (large file)") .required(true) .takes_value(true), ) .arg( Arg::with_name("limit") .short("l") .long("limit") .value_name("LIMIT") .help("The number of questions and answers from Posts.xml to index") .required(false) .takes_value(true), ) .arg( Arg::with_name("size") .short("s") .long("size") .value_name("SIZE") .help("The number of documents in each bulk request") .required(false) .takes_value(true), ) .arg( Arg::with_name("delete") .short("d") .long("delete") .help("Whether to delete the index before indexing") .required(false) .takes_value(false), ) .get_matches(); let path = matches.value_of("path").expect("missing 'path' argument"); let limit = match matches.value_of("limit") { Some(l) => Some(l.parse::<usize>()?), _ => None, }; let size = match matches.value_of("size") { Some(l) => l.parse::<usize>()?, _ => 1000, }; let delete = matches.is_present("delete"); let client = create_client()?; create_index_if_not_exists(&client, delete).await?; set_refresh_interval(&client, json!("-1")).await?; let mut posts_iter = PostsIter::new(path); let mut total = 0; let mut posts = Vec::with_capacity(size); let now = Instant::now(); while let Some(post) = posts_iter.next() { total += 1; posts.push(post); if total % size == 0 { index_posts(&client, &posts).await?; let duration = now.elapsed(); let secs = duration.as_secs_f64(); let taken = if secs >= 60f64 { format!("{}m", secs / 60f64) } else { format!("{:?}", duration) }; println!("Indexed total {} posts in {}", total, taken); posts.clear(); } if let Some(l) = limit { if total >= l { break; } } } if !posts.is_empty() { index_posts(&client, &posts).await?; posts.clear(); } set_refresh_interval(&client, json!(null)).await?; Ok(()) } async fn set_refresh_interval(client: &Elasticsearch, interval: Value) -> Result<(), Error> { let response = client .indices() .put_settings(IndicesPutSettingsParts::Index(&[POSTS_INDEX])) .body(json!({ 
"index" : { "refresh_interval" : interval } })) .send() .await?; if !response.status_code().is_success() { println!("Failed to update refresh interval"); } Ok(()) } async fn index_posts(client: &Elasticsearch, posts: &[Post]) -> Result<(), Error> { let body: Vec<BulkOperation<_>> = posts .iter() .map(|p| { let id = p.id().to_string(); BulkOperation::index(p).id(&id).routing(&id).into() }) .collect(); let response = client .bulk(BulkParts::Index(POSTS_INDEX)) .body(body) .send() .await?; let json: Value = response.json().await?; if json["errors"].as_bool().unwrap() { let failed: Vec<&Value> = json["items"] .as_array() .unwrap() .iter() .filter(|v| !v["error"].is_null()) .collect(); println!("Errors whilst indexing. Failures: {}", failed.len()); } Ok(()) } async fn create_index_if_not_exists(client: &Elasticsearch, delete: bool) -> Result<(), Error> { let exists = client .indices() .exists(IndicesExistsParts::Index(&[POSTS_INDEX])) .send() .await?; if exists.status_code().is_success() && delete { let delete = client .indices() .delete(IndicesDeleteParts::Index(&[POSTS_INDEX])) .send() .await?; if !delete.status_code().is_success() { println!("Problem deleting index: {}", delete.text().await?); } } if exists.status_code() == StatusCode::NOT_FOUND || delete { let response = client .indices() .create(IndicesCreateParts::Index(POSTS_INDEX)) .body(json!( { "mappings": { "properties": { "type": { "type": "keyword" }, "id": { "type": "integer" }, "parent_id": { "relations": { "question": "answer" }, "type": "join" }, "creation_date": { "type": "date" }, "score": { "type": "integer" }, "body": { "analyzer": "html", "search_analyzer": "expand", "type": "text" }, "owner_user_id": { "type": "integer" }, "owner_display_name": { "type": "keyword" }, "last_editor_user_id": { "type": "integer" }, "last_edit_date": { "type": "date" }, "last_activity_date": { "type": "date" }, "comment_count": { "type": "integer" }, "title": { "analyzer": "expand", "norms": false, "fields": { "raw": 
{ "type": "keyword" } }, "type": "text" }, "title_suggest": { "type": "completion" }, "accepted_answer_id": { "type": "integer" }, "view_count": { "type": "integer" }, "last_editor_display_name": { "type": "keyword" }, "tags": { "type": "keyword" }, "answer_count": { "type": "integer" }, "favorite_count": { "type": "integer" }, "community_owned_date": { "type": "date" } }, "_routing": { "required": true }, "_source": { "excludes": ["title_suggest"] } }, "settings": { "index.number_of_shards": 3, "index.number_of_replicas": 0, "analysis": { "analyzer": { "html": { "char_filter": ["html_strip", "programming_language"], "filter": ["lowercase", "stop"], "tokenizer": "standard", "type": "custom" }, "expand": { "char_filter": ["programming_language"], "filter": ["lowercase", "stop"], "tokenizer": "standard", "type": "custom" } }, "char_filter": { "programming_language": { "mappings": [ "c# => csharp", "C# => csharp", "f# => fsharp", "F# => fsharp", "m# => msharp", "M# => msharp", "j# => jsharp", "J# => jsharp", "s# => ssharp", "S# => ssharp", "a# => asharp", "A# => asharp", "k# => ksharp", "K# => ksharp", "t# => tsharp", "T# => tsharp", "g++ => gplusplus", "G++ => gplusplus", "m++ => mplusplus", "M++ => mplusplus", "c++ => cplusplus", "C++ => cplusplus", "s++ => splusplus", "S++ => splusplus", "a++ => aplusplus", "A++ => aplusplus", "d++ => dplusplus", "D++ => dplusplus" ], "type": "mapping" } } } } } )) .send() .await?; if !response.status_code().is_success() { println!("Error while creating index"); } } Ok(()) } fn create_client() -> Result<Elasticsearch, Error> { fn cluster_addr() -> String { match std::env::var("ELASTICSEARCH_URL") { Ok(server) => server, Err(_) => DEFAULT_ADDRESS.into(), } } fn running_proxy() -> bool { let system = sysinfo::System::new(); !system.get_process_by_name("Fiddler").is_empty() } let mut url = Url::parse(cluster_addr().as_ref()).unwrap(); let credentials = if url.scheme() == "https" { let username = if !url.username().is_empty() { let u = 
url.username().to_string(); url.set_username("").unwrap(); u } else { std::env::var("ES_USERNAME").unwrap_or_else(|_| "elastic".into()) }; let password = match url.password() { Some(p) => { let pass = p.to_string(); url.set_password(None).unwrap(); pass } None => std::env::var("ES_PASSWORD").unwrap_or_else(|_| "changeme".into()), }; Some(Credentials::Basic(username, password)) } else { None }; let conn_pool = SingleNodeConnectionPool::new(url); let mut builder = TransportBuilder::new(conn_pool); builder = match credentials { Some(c) => { builder = builder.auth(c); #[cfg(any(feature = "native-tls", feature = "rustls-tls"))] { builder = builder.cert_validation(CertificateValidation::None); } builder } None => builder, }; if running_proxy() { let proxy_url = Url::parse("http://localhost:8888").unwrap(); builder = builder.proxy(proxy_url, None, None); } let transport = builder.build()?; Ok(Elasticsearch::new(transport)) }
/* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch B.V. licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ #[macro_use] extern crate serde_json; use clap::{App, Arg}; #[cfg(any(feature = "native-tls", feature = "rustls-tls"))] use elasticsearch::cert::CertificateValidation; use elasticsearch::{ auth::Credentials, http::transport::{SingleNodeConnectionPool, TransportBuilder}, indices::{ IndicesCreateParts, IndicesDeleteParts, IndicesExistsParts, IndicesPutSettingsParts, }, BulkOperation, BulkParts, Elasticsearch, Error, DEFAULT_ADDRESS, }; use serde_json::Value; use sysinfo::SystemExt; use url::Url; mod stack_overflow; use http::StatusCode; use stack_overflow::*; use std::time::Instant; static POSTS_INDEX: &'static str = "posts"; #[tokio::main] pub async fn main() -> Result<(), Box<dyn std::error::Error>> { let matches = App::new("index_questions_answers") .about( "indexes Stack Overflow questions and answers into Elasticsearch with the Rust client", ) .arg( Arg::with_name("path") .short("p") .long("path") .value_name("PATH") .help("The path to the Posts.xml file containing questions and answers. 
Can be obtained from https://archive.org/download/stackexchange/stackoverflow.com-Posts.7z (large file)") .required(true) .takes_value(true), ) .arg( Arg::with_name("limit") .short("l") .long("limit") .value_name("LIMIT") .help("The number of questions and answers from Posts.xml to index") .required(false) .takes_value(true), ) .arg( Arg::with_name("size") .short("s") .long("size") .value_name("SIZE") .help("The number of documents in each bulk request") .required(false) .takes_value(true), ) .arg( Arg::with_name("delete") .short("d") .long("delete") .help("Whether to delete the index before indexing") .required(false) .takes_value(false), ) .get_matches(); let path = matches.value_of("path").expect("missing 'path' argument"); let limit = match matches.value_of("limit") { Some(l) => Some(l.parse::<usize>()?), _ => None, }; let size = match matches.value_of("size") { Some(l) => l.parse::<usize>()?, _ => 1000, }; let delete = matches.is_present("delete"); let client = create_client()?; create_index_if_not_exists(&client, delete).await?; set_refresh_interval(&client, json!("-1")).await?; let mut posts_iter = PostsIter::new(path); let mut total = 0; let mut posts = Vec::with_capacity(size); let now = Instant::now(); while let Some(post) = posts_iter.next() { total += 1; posts.push(post); if total % size == 0 { index_posts(&client, &posts).await?; let duration = now.elapsed(); let secs = duration.as_secs_f64(); let taken = if secs >= 60f64 { format!("{}m", secs / 60f64) } else { format!("{:?}", duration) }; println!("Indexed total {} posts in {}", total, taken); posts.clear(); } if let Some(l) = limit { if total >= l { break; } } } if !posts.is_empty() { index_posts(&client, &posts).await?; posts.clear(); } set_refresh_interval(&client, json!(null)).await?; Ok(()) } async fn set_refresh_interval(client: &Elasticsearch, interval: Value) -> Result<(), Error> { let response = client .indices() .put_settings(IndicesPutSettingsParts::Index(&[POSTS_INDEX])) .body(json!({ 
"index" : { "refresh_interval" : interval } })) .send() .await?; if !response.status_code().is_success() { println!("Failed to update refresh interval"); } Ok(()) } async fn index_posts(client: &Elasticsearch, posts: &[Post]) -> Result<(), Error> { let body: Vec<BulkOperation<_>> = posts .iter() .map(|p| { let id = p.id().to_string(); BulkOperation::index(p).id(&id).routing(&id).into() }) .collect(); let response = client .bulk(BulkParts::Index(POSTS_INDEX)) .body(body) .send() .await?; let json: Value = response.json().await?; if json["errors"].as_bool().unwrap() { let failed: Vec<&Value> = json["items"] .as_array() .unwrap() .iter() .filter(|v| !v["error"].is_null()) .collect(); println!("Errors whilst indexing. Failures: {}", failed.len()); } Ok(()) } async fn create_index_if_not_exists(client: &Elasticsearch, delete: bool) -> Result<(), Error> { let exists = client .indices() .exists(IndicesExistsParts::Index(&[POSTS_INDEX])) .send() .await?; if exists.status_code().is_success() && delete { let delete = client .indices() .delete(IndicesDeleteParts::Index(&[POSTS_INDEX])) .send() .await?; if !delete.status_code().is_success() { println!("Problem deleting index: {}", delete.text().await?); } } if exists.status_code() == StatusCode::NOT_FOUND || delete { let response = client .indices() .create(IndicesCreateParts::Index(POSTS_INDEX)) .body(json!( { "mappings": { "properties": { "type": { "type": "keyword" }, "id": { "type": "integer" }, "parent_id": { "relations": { "question": "answer" }, "type": "join" }, "creation_date": { "type": "date" }, "score": { "type": "integer" }, "body": { "analyzer": "html", "search_analyzer": "expand", "type": "text" }, "owner_user_id": { "type": "integer" }, "owner_display_name": { "type": "keyword" }, "last_editor_user_id": { "type": "integer" }, "last_edit_date": { "type": "date" }, "last_activity_date": { "type": "date" }, "comment_count": { "type": "integer" }, "title": { "analyzer": "expand", "norms": false, "fields": { "raw": 
{ "type": "keyword" } }, "type": "text" }, "title_suggest": { "type": "completion" }, "accepted_answer_id": { "type": "integer" }, "view_count": { "type": "integer" }, "last_editor_display_name": { "type": "keyword" }, "tags": { "type": "keyword" }, "answer_count": { "type": "integer" }, "favorite_count": { "type": "integer" }, "community_owned_date": { "type": "date" } }, "_routing": { "required": true }, "_source": { "excludes": ["title_suggest"] } }, "settings": { "index.number_of_shards": 3, "index.number_of_replicas": 0, "analysis": { "analyzer": { "html": { "char_filter": ["html_strip", "programming_language"], "filter": ["lowercase", "stop"], "tokenizer": "standard", "type": "custom" }, "expand": { "char_filter": ["programming_language"], "filter": ["lowercase", "stop"], "tokenizer": "standard", "type": "custom" } }, "char_filter": { "programming_language": { "mappings": [ "c# => csharp", "C# => csharp", "f# => fsharp", "F# => fsharp", "m# => msharp", "M# => msharp", "j# => jsharp", "J# => jsharp", "s# => ssharp", "S# => ssharp", "a# => asharp", "A# => asharp", "k# => ksharp", "K# => ksharp", "t# => tsharp", "T# => tsharp", "g++ => gplusplus", "G++ => gplusplus", "m++ => mplusplus", "M++ => mplusplus", "c++ => cplusplus", "C++ => cplusplus", "s++ => splusplus", "S++ => splusplus", "a++ => aplusplus", "A++ => aplusplus", "d++ => dplusplus", "D++ => dplusplus" ], "type": "mapping" } } } } } )) .send() .await?; if !response.status_code().is_success() { println!("Error while creating index"); } } Ok(()) } fn create_client() -> Result<Elasticsearch, Error> { fn cluster_addr() -> String { match s
fn running_proxy() -> bool { let system = sysinfo::System::new(); !system.get_process_by_name("Fiddler").is_empty() } let mut url = Url::parse(cluster_addr().as_ref()).unwrap(); let credentials = if url.scheme() == "https" { let username = if !url.username().is_empty() { let u = url.username().to_string(); url.set_username("").unwrap(); u } else { std::env::var("ES_USERNAME").unwrap_or_else(|_| "elastic".into()) }; let password = match url.password() { Some(p) => { let pass = p.to_string(); url.set_password(None).unwrap(); pass } None => std::env::var("ES_PASSWORD").unwrap_or_else(|_| "changeme".into()), }; Some(Credentials::Basic(username, password)) } else { None }; let conn_pool = SingleNodeConnectionPool::new(url); let mut builder = TransportBuilder::new(conn_pool); builder = match credentials { Some(c) => { builder = builder.auth(c); #[cfg(any(feature = "native-tls", feature = "rustls-tls"))] { builder = builder.cert_validation(CertificateValidation::None); } builder } None => builder, }; if running_proxy() { let proxy_url = Url::parse("http://localhost:8888").unwrap(); builder = builder.proxy(proxy_url, None, None); } let transport = builder.build()?; Ok(Elasticsearch::new(transport)) }
td::env::var("ELASTICSEARCH_URL") { Ok(server) => server, Err(_) => DEFAULT_ADDRESS.into(), } }
function_block-function_prefixed
[ { "content": "fn create_client() -> Result<Elasticsearch, Error> {\n\n fn cluster_addr() -> String {\n\n match std::env::var(\"ELASTICSEARCH_URL\") {\n\n Ok(server) => server,\n\n Err(_) => DEFAULT_ADDRESS.into(),\n\n }\n\n }\n\n\n\n /// Determines if Fiddler.exe pro...
Rust
src/lib.rs
Lantern-chat/mime_db
a197b8e9e608872b8a042a254d4e86fa6cf21ce0
use unicase::UniCase; #[derive(Debug, Clone, Copy)] pub struct MimeEntry { compressible: bool, extensions: &'static [&'static str], } #[derive(Debug, Clone, Copy)] pub struct ExtEntry { types: &'static [&'static str], } include!(concat!(env!("OUT_DIR"), "/mime_db.rs")); pub fn lookup_ext(ext: &str) -> Option<&ExtEntry> { EXT_TO_MIME.get(&UniCase::new(ext)) } pub fn lookup_mime(mime: &str) -> Option<&MimeEntry> { MIME_TO_EXT.get(&UniCase::new(mime)) } #[inline] pub fn lookup_mime_from_ext(ext: &str) -> Option<&MimeEntry> { let entry = lookup_ext(ext)?; if entry.types.is_empty() { return None; } lookup_mime(entry.types[0]) } pub fn from_prefix(bytes: &[u8]) -> Option<(&str, Option<&MimeEntry>)> { static MAGIC_BYTES: &[(usize, &[u8], &str)] = &[ (0, b"\x89PNG\r\n\x1a\n", "image/png"), (0, &[0xff, 0xd8, 0xff], "image/jpeg"), (0, &[0xCF, 0x84, 0x01], "image/jpeg"), (0, b"GIF89a", "image/gif"), (0, b"GIF87a", "image/gif"), (0, b"MM\x00*", "image/tiff"), (0, b"II*\x00", "image/tiff"), (0, b"DDS ", "image/vnd.ms-dds"), (0, b"BM", "image/bmp"), (0, &[0, 0, 1, 0], "image/x-icon"), (0, b"#?RADIANCE", "image/vnd.radiance"), (0, b"P1", "image/x-portable-anymap"), (0, b"P2", "image/x-portable-anymap"), (0, b"P3", "image/x-portable-anymap"), (0, b"P4", "image/x-portable-anymap"), (0, b"P5", "image/x-portable-anymap"), (0, b"P6", "image/x-portable-anymap"), (0, b"P7", "image/x-portable-anymap"), (0, b"farbfeld", "image/x-farbfeld"), (0, b"\0\0\0 ftypavif", "image/avif"), (0, &[0x76, 0x2f, 0x31, 0x01], "image/aces"), (0, &[0x38, 0x42, 0x50, 0x53], "image/vnd.adobe.photoshop"), (0, &[0x25, 0x50, 0x44, 0x46, 0x2D], "application/pdf"), (0, &[0x4F, 0x67, 0x67, 0x53], "audio/ogg"), (0, &[0xFF, 0xFB], "audio/mp3"), (0, &[0xFF, 0xF3], "audio/mp3"), (0, &[0xFF, 0xF2], "audio/mp3"), (0, &[0x49, 0x44, 0x33], "audio/mp3"), (0, &[0x66, 0x4C, 0x61, 0x43], "audio/x-flac"), ( 0, &[ 0x00, 0x00, 0x00, 0x0C, 0x4A, 0x58, 0x4C, 0x20, 0x0D, 0x0A, 0x87, 0x0A, ], "image/jxl", ), (0, &[0x4D, 0x54, 0x68, 
0x64], "audio/midi"), ( 0, &[0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1], "application/msword", ), (0, &[0x1F, 0x8B], "application/gzip"), ( 257, &[0x75, 0x73, 0x74, 0x61, 0x72, 0x00, 0x30, 0x30], "application/tar", ), ( 257, &[0x75, 0x73, 0x74, 0x61, 0x72, 0x20, 0x20, 0x00], "application/tar", ), ( 0, &[0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C], "application/x-7z-compressed", ), (0, &[0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00], "application/x-xz"), (0, &[0x46, 0x4C, 0x49, 0x46], "image/flif"), (0, &[0x1A, 0x45, 0xDF, 0xA3], "video/x-matroska"), (0, &[0x47], "video/mpeg"), (4, &[0x66, 0x74, 0x79, 0x70, 0x69, 0x73, 0x6F, 0x6D], "video/mp4"), (0, &[0x78, 0x01], "application/z-lib"), (0, &[0x78, 0x5E], "application/z-lib"), (0, &[0x78, 0x9C], "application/z-lib"), (0, &[0x78, 0xDA], "application/z-lib"), (0, &[0x78, 0x20], "application/z-lib"), (0, &[0x78, 0x7D], "application/z-lib"), (0, &[0x78, 0xBB], "application/z-lib"), (0, &[0x78, 0xF9], "application/z-lib"), ( 0, &[0x42, 0x4C, 0x45, 0x4E, 0x44, 0x45, 0x52], "application/x-blend", ), (0, &[0x46, 0x4C, 0x56], "video/x-flv"), (0, &[0x4D, 0x53, 0x43, 0x46], "application/vnd.ms-cab-compressed"), ( 0, &[ 0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C, ], "video/x-ms-wmv", ), ( 0, &[ 0x53, 0x49, 0x4D, 0x50, 0x4C, 0x45, 0x20, 0x20, 0x3D, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x54, ], "image/fits", ), ]; const RIFFS: &[(&[u8], &str)] = &[ (&[0x57, 0x45, 0x42, 0x50], "image/webp"), (&[0x57, 0x41, 0x56, 0x45], "audio/wav"), (&[0x41, 0x56, 0x49, 0x20], "video/x-msvideo"), (&[0x43, 0x44, 0x44, 0x41], "audio/cda"), ]; for (offset, prefix, mime) in MAGIC_BYTES { if bytes.len() > *offset && bytes[*offset..].starts_with(prefix) { return Some((*mime, lookup_mime(mime))); } if bytes.starts_with(b"RIFF") && bytes.len() >= 12 { let bytes = &bytes[4..]; for (prefix, mime) in RIFFS { if bytes.starts_with(prefix) { 
return Some((*mime, lookup_mime(mime))); } } } } None }
use unicase::UniCase; #[derive(Debug, Clone, Copy)] pub struct MimeEntry { compressible: bool, extensions: &'static [&'static str], } #[derive(Debug, Clone, Copy)] pub struct ExtEntry { types: &'static [&'static str], } include!(concat!(env!("OUT_DIR"), "/mime_db.rs")); pub fn lookup_ext(ext: &str) -> Option<&ExtEntry> { EXT_TO_MIME.get(&UniCase::new(ext)) } pub fn lookup_mime(mime: &str) -> Option<&MimeEntry> { MIME_TO_EXT.get(&UniCase::new(mime)) } #[inline] pub fn lookup_mime_from_ext(ext: &str) -> Option<&MimeEntry> { let entry = lookup_ext(ext)?; if entry.types.is_empty() { return None; } lookup_mime(entry.types[0]) } pub fn from_prefix(bytes: &[u8]) -> Option<(&str, Option<&MimeEntry>)> { static MAGIC_BYTES: &[(usize, &[u8], &str)] = &[ (0, b"\x89PNG\r\n\x1a\n", "image/png"), (0, &[0xff, 0xd8, 0xff], "image/jpeg"), (0, &[0xCF, 0x84, 0x01], "image/jpeg"), (0, b"GIF89a", "image/gif"), (0, b"GIF87a", "image/gif"), (0, b"MM\x00*", "image/tiff"), (0, b"II*\x00", "image/tiff"), (0, b"DDS ", "image/vnd.ms-dds"), (0, b"BM", "image/bmp"), (0, &[0, 0, 1, 0], "image/x-icon"), (0, b"#?RADIANCE", "image/vnd.radiance"), (0, b"P1", "image/x-portable-anymap"), (0, b"P2", "image/x-portable-anymap"), (0, b"P3", "image/x-portable-anymap"), (0, b"P4", "image/x-portable-anymap"), (0, b"P5", "image/x-portable-anymap"), (0, b"P6", "image/x-portable-anymap"), (0, b"P7", "image/x-portable-anymap"), (0, b"farbfeld", "image/x-farbfeld"), (0, b"\0\0\0 ftypavif", "image/avif"), (0, &[0x76, 0x2f, 0x31, 0x01], "image/aces"), (0, &[0x38, 0x42, 0x50, 0x53], "image/vnd.adobe.photoshop"), (0, &[0x25, 0x50, 0x44, 0x46, 0x2D], "application/pdf"), (0, &[0x4F, 0x67, 0x67, 0x53], "audio/ogg"), (0, &[0xFF, 0xFB], "audio/mp3"), (0, &[0xFF, 0xF3], "audio/mp3"), (0, &[0xFF, 0xF2], "audio/mp3"), (0, &[0x49, 0x44, 0x33], "audio/mp3"), (0, &[0x66, 0x4C, 0x61, 0x43], "audio/x-flac"), ( 0, &[ 0x00, 0x00, 0x00, 0x0C, 0x4A, 0x58, 0x4C, 0x20, 0x0D, 0x0A, 0x87, 0x0A, ], "image/jxl", ), (0, &[0x4D, 0x54, 0x68, 
0x64], "audio/midi"), ( 0, &[0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1], "application/msword", ), (0, &[0x1F, 0x8B], "application/gzip"), ( 257, &[0x75, 0x73, 0x74, 0x61, 0x72, 0x00, 0x30, 0x30], "application/tar", ), ( 257, &[0x75, 0x73, 0x74, 0x61, 0x72, 0x20, 0x20, 0x00], "application/tar", ), ( 0, &[0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C], "application/x-7z-compressed", ), (0, &[0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00], "application/x-xz"), (0, &[0x46, 0x4C, 0x49, 0x46], "image/flif"), (0, &[0x1A, 0x45, 0xDF, 0xA3], "video/x-matroska"), (0, &[0x47], "video/mpeg"), (4, &[0x66, 0x74, 0x79, 0x70, 0x69, 0x73, 0x6F, 0x6D], "video/mp4"), (0, &[0x78, 0x01], "application/z-lib"), (0, &[0x78, 0x5E], "application/z-lib"), (
0, &[ 0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C, ], "video/x-ms-wmv", ), ( 0, &[ 0x53, 0x49, 0x4D, 0x50, 0x4C, 0x45, 0x20, 0x20, 0x3D, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x54, ], "image/fits", ), ]; const RIFFS: &[(&[u8], &str)] = &[ (&[0x57, 0x45, 0x42, 0x50], "image/webp"), (&[0x57, 0x41, 0x56, 0x45], "audio/wav"), (&[0x41, 0x56, 0x49, 0x20], "video/x-msvideo"), (&[0x43, 0x44, 0x44, 0x41], "audio/cda"), ]; for (offset, prefix, mime) in MAGIC_BYTES { if bytes.len() > *offset && bytes[*offset..].starts_with(prefix) { return Some((*mime, lookup_mime(mime))); } if bytes.starts_with(b"RIFF") && bytes.len() >= 12 { let bytes = &bytes[4..]; for (prefix, mime) in RIFFS { if bytes.starts_with(prefix) { return Some((*mime, lookup_mime(mime))); } } } } None }
0, &[0x78, 0x9C], "application/z-lib"), (0, &[0x78, 0xDA], "application/z-lib"), (0, &[0x78, 0x20], "application/z-lib"), (0, &[0x78, 0x7D], "application/z-lib"), (0, &[0x78, 0xBB], "application/z-lib"), (0, &[0x78, 0xF9], "application/z-lib"), ( 0, &[0x42, 0x4C, 0x45, 0x4E, 0x44, 0x45, 0x52], "application/x-blend", ), (0, &[0x46, 0x4C, 0x56], "video/x-flv"), (0, &[0x4D, 0x53, 0x43, 0x46], "application/vnd.ms-cab-compressed"), (
random
[ { "content": "#[derive(Debug, serde::Deserialize)]\n\nstruct MimeEntry {\n\n #[serde(default)]\n\n pub compressible: bool,\n\n\n\n #[serde(default)]\n\n pub extensions: Vec<String>,\n\n\n\n #[serde(default)]\n\n pub source: Source,\n\n}\n\n\n", "file_path": "build.rs", "rank": 4, "...
Rust
src/connectivity/overnet/overnetstack/src/main.rs
mehulagg/fuchsia
3f56175ee594da6b287d5fb19f2f0eccea2897f0
#![deny(missing_docs)] mod mdns; use failure::{Error, ResultExt}; use fidl_fuchsia_overnet::{ MeshControllerRequest, MeshControllerRequestStream, OvernetListPeersResponder, OvernetRequest, OvernetRequestStream, ServiceConsumerListPeersResponder, ServiceConsumerRequest, ServiceConsumerRequestStream, ServicePublisherRequest, ServicePublisherRequestStream, }; use fuchsia_async as fasync; use fuchsia_component::server::ServiceFs; use fuchsia_zircon as zx; use futures::future::{abortable, AbortHandle}; use futures::prelude::*; use overnet_core::{LinkId, Node, NodeId, NodeOptions, NodeRuntime, RouterTime, SendHandle}; use std::cell::RefCell; use std::collections::HashMap; use std::net::{SocketAddr, SocketAddrV6}; use std::ops::Deref; use std::rc::Rc; use zx::AsHandleRef; #[derive(Clone, Copy, Debug)] enum AppLinkId { Udp(SocketAddrV6), } #[derive(PartialEq, PartialOrd, Eq, Ord, Clone, Copy, Debug)] struct Time(fasync::Time); impl RouterTime for Time { type Duration = zx::Duration; fn now() -> Self { Time(fasync::Time::now()) } fn after(time: Self, duration: zx::Duration) -> Self { Self(time.0 + duration) } } struct AppRuntime; impl NodeRuntime for AppRuntime { type Time = Time; type LinkId = AppLinkId; const IMPLEMENTATION: fidl_fuchsia_overnet_protocol::Implementation = fidl_fuchsia_overnet_protocol::Implementation::OvernetStack; fn handle_type(handle: &zx::Handle) -> Result<SendHandle, Error> { match handle.basic_info()?.object_type { zx::ObjectType::CHANNEL => Ok(SendHandle::Channel), _ => failure::bail!("Handle type not proxyable {:?}", handle.basic_info()?.object_type), } } fn spawn_local<F>(&mut self, future: F) where F: Future<Output = ()> + 'static, { fasync::spawn_local(future) } fn at(&mut self, t: Self::Time, f: impl FnOnce() + 'static) { fasync::spawn_local(at(t.0, f)) } fn router_link_id(&self, id: AppLinkId) -> LinkId<overnet_core::PhysLinkId<AppLinkId>> { with_app_mut(|app| match id { AppLinkId::Udp(addr) => { 
app.udp_link_ids.get(&addr).copied().unwrap_or(LinkId::invalid()) } }) } fn send_on_link(&mut self, id: Self::LinkId, packet: &mut [u8]) -> Result<(), Error> { match id { AppLinkId::Udp(addr) => { println!("UDP_SEND to:{} len:{}", addr, packet.len()); let sock = with_app_mut(|app| -> Result<_, Error> { Ok(app .udp_socket .as_ref() .ok_or_else(|| failure::format_err!("no udp socket"))? .sock .clone()) })?; let sock = sock.deref().as_ref(); if let Err(e) = sock.send_to(packet, addr) { if e.kind() == std::io::ErrorKind::BrokenPipe { log::warn!("BrokenPipe on UDP socket: let's make a new one"); with_app_mut(|app| { app.udp_socket.take(); app.udp_socket = Some(UdpSocketHolder::new(app.node_id)?); Ok(()) }) } else { Err(e.into()) } } else { Ok(()) } } } } } struct UdpSocketHolder { sock: Rc<fasync::net::UdpSocket>, abort_publisher: AbortHandle, } impl UdpSocketHolder { fn new(node_id: NodeId) -> Result<Self, Error> { let sock = std::net::UdpSocket::bind("[::]:0").context("Creating UDP socket")?; let publisher = mdns::publish(node_id, sock.local_addr().context("Getting UDP local address")?.port()); let sock = Rc::new(fasync::net::UdpSocket::from_socket(sock)?); let (publisher, abort_publisher) = abortable(publisher); fasync::spawn_local(async move { let _ = publisher.await; }); Ok(Self { sock, abort_publisher }) } } impl Drop for UdpSocketHolder { fn drop(&mut self) { self.abort_publisher.abort(); } } struct App { node_id: NodeId, node: Node<AppRuntime>, udp_link_ids: HashMap<SocketAddrV6, LinkId<overnet_core::PhysLinkId<AppLinkId>>>, udp_socket: Option<UdpSocketHolder>, } thread_local! 
{ static APP: RefCell<App> = RefCell::new(App::new()); } fn with_app_mut<R>(f: impl FnOnce(&mut App) -> R) -> R { APP.with(|rcapp| f(&mut rcapp.borrow_mut())) } async fn at(when: fasync::Time, f: impl FnOnce()) { fasync::Timer::new(when).await; f(); } impl App { fn new() -> App { let node = Node::new( AppRuntime, NodeOptions::new() .set_quic_server_key_file(Box::new("/pkg/data/cert.key".to_string())) .set_quic_server_cert_file(Box::new("/pkg/data/cert.crt".to_string())), ) .unwrap(); App { node_id: node.id(), node, udp_link_ids: HashMap::new(), udp_socket: None } } } fn normalize_addr(addr: SocketAddr) -> SocketAddrV6 { match addr { SocketAddr::V6(a) => a, SocketAddr::V4(a) => SocketAddrV6::new(a.ip().to_ipv6_mapped(), a.port(), 0, 0), } } async fn read_udp_inner() -> Result<(), Error> { let mut buf: [u8; 1500] = [0; 1500]; loop { let sock = with_app_mut(|app| -> Result<_, Error> { Ok(app .udp_socket .as_ref() .ok_or_else(|| failure::format_err!("No udp socket to read from"))? .sock .clone()) })?; let (length, sender) = sock.recv_from(&mut buf).await?; println!("UDP_RECV from:{} len:{}", sender, length); let sender = normalize_addr(sender); with_app_mut(|app| -> Result<(), Error> { if let Some(link_id) = app.udp_link_ids.get(&sender) { app.node.queue_recv(*link_id, &mut buf[..length]); } else { log::warn!("No link for received packet {:?}", sender); } Ok(()) })?; } } async fn read_udp() { if let Err(e) = read_udp_inner().await { log::warn!("UDP read loop failed: {:?}", e); } } fn register_udp(addr: SocketAddr, node_id: NodeId) -> Result<(), Error> { with_app_mut(|app| { app.node.mention_node(node_id); let addr = normalize_addr(addr); if app.udp_link_ids.get(&addr).is_none() { let rtr_id = app.node.new_link(node_id, AppLinkId::Udp(addr))?; println!("register peer: {} node_id={:?} rtr_id={:?}", addr, node_id, rtr_id); app.udp_link_ids.insert(addr, rtr_id); } Ok(()) }) } trait ListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut 
fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error>; } impl ListPeersResponder for ServiceConsumerListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error> { self.send(peers) } } impl ListPeersResponder for OvernetListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error> { self.send(peers) } } async fn run_list_peers_inner(responder: impl ListPeersResponder) -> Result<(), Error> { let mut peers = with_app_mut(|app| app.node.clone().list_peers()).await?; responder.respond(&mut peers.iter_mut())?; Ok(()) } async fn run_list_peers(responder: impl ListPeersResponder) { if let Err(e) = run_list_peers_inner(responder).await { log::warn!("List peers gets error: {:?}", e); } } async fn run_service_publisher_server( mut stream: ServicePublisherRequestStream, ) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { ServicePublisherRequest::PublishService { service_name, provider, .. } => { app.node.register_service(service_name, provider) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e) } } Ok(()) } async fn run_service_consumer_server( mut stream: ServiceConsumerRequestStream, ) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { ServiceConsumerRequest::ListPeers { responder, .. } => { fasync::spawn_local(run_list_peers(responder)); Ok(()) } ServiceConsumerRequest::ConnectToService { node, service_name, chan, .. 
} => { app.node.connect_to_service(node.id.into(), &service_name, chan) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } async fn run_mesh_controller_server(mut stream: MeshControllerRequestStream) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { MeshControllerRequest::AttachSocketLink { socket, options, .. } => { app.node.attach_socket_link(socket, options) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } async fn run_legacy_overnet_server(mut stream: OvernetRequestStream) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { OvernetRequest::PublishService { service_name, provider, .. } => { app.node.register_service(service_name, provider) } OvernetRequest::ListPeers { responder, .. } => { fasync::spawn_local(run_list_peers(responder)); Ok(()) } OvernetRequest::ConnectToService { node, service_name, chan, .. 
} => { app.node.connect_to_service(node.id.into(), &service_name, chan) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } enum IncomingService { ServiceConsumer(ServiceConsumerRequestStream), ServicePublisher(ServicePublisherRequestStream), MeshController(MeshControllerRequestStream), LegacyOvernet(OvernetRequestStream), } #[fasync::run_singlethreaded] async fn main() -> Result<(), Error> { fuchsia_syslog::init_with_tags(&["overnet"]).context("initialize logging")?; let mut fs = ServiceFs::new_local(); let mut svc_dir = fs.dir("svc"); svc_dir.add_fidl_service(IncomingService::ServiceConsumer); svc_dir.add_fidl_service(IncomingService::ServicePublisher); svc_dir.add_fidl_service(IncomingService::MeshController); svc_dir.add_fidl_service(IncomingService::LegacyOvernet); fs.take_and_serve_directory_handle()?; with_app_mut(|app| -> Result<(), Error> { app.udp_socket = Some(UdpSocketHolder::new(app.node.id())?); fasync::spawn_local(mdns::subscribe()); fasync::spawn_local(read_udp()); Ok(()) }) .context("Initializing UDP & MDNS")?; const MAX_CONCURRENT: usize = 10_000; fs.for_each_concurrent(MAX_CONCURRENT, |svcreq| match svcreq { IncomingService::MeshController(stream) => { run_mesh_controller_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::ServicePublisher(stream) => { run_service_publisher_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::ServiceConsumer(stream) => { run_service_consumer_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::LegacyOvernet(stream) => { run_legacy_overnet_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } }) .await; Ok(()) }
#![deny(missing_docs)] mod mdns; use failure::{Error, ResultExt}; use fidl_fuchsia_overnet::{ MeshControllerRequest, MeshControllerRequestStream, OvernetListPeersResponder, OvernetRequest, OvernetRequestStream, ServiceConsumerListPeersResponder, ServiceConsumerRequest, ServiceConsumerRequestStream, ServicePublisherRequest, ServicePublisherRequestStream, }; use fuchsia_async as fasync; use fuchsia_component::server::ServiceFs; use fuchsia_zircon as zx; use futures::future::{abortable, AbortHandle}; use futures::prelude::*; use overnet_core::{LinkId, Node, NodeId, NodeOptions, NodeRuntime, RouterTime, SendHandle}; use std::cell::RefCell; use std::collections::HashMap; use std::net::{SocketAddr, SocketAddrV6}; use std::ops::Deref; use std::rc::Rc; use zx::AsHandleRef; #[derive(Clone, Copy, Debug)] enum AppLinkId { Udp(SocketAddrV6), } #[derive(PartialEq, PartialOrd, Eq, Ord, Clone, Copy, Debug)] struct Time(fasync::Time); impl RouterTime for Time { type Duration = zx::Duration; fn now() -> Self { Time(fasync::Time::now()) } fn after(time: Self, duration: zx::Duration) -> Self { Self(time.0 + duration) } } struct AppRuntime; impl NodeRuntime for AppRuntime { type Time = Time; type LinkId = AppLinkId; const IMPLEMENTATION: fidl_fuchsia_overnet_protocol::Implementation = fidl_fuchsia_overnet_protocol::Implementation::OvernetStack; fn handle_type(handle: &zx::Handle) -> Result<SendHandle, Error> { match handle.basic_info()?.object_type { zx::ObjectType::CHANNEL => Ok(SendHandle::Channel), _ => failure::bail!("Handle type not proxyable {:?}", handle.basic_info()?.object_type), } } fn spawn_local<F>(&mut self, future: F) where F: Future<Output = ()> + 'static, { fasync::spawn_local(future) } fn at(&mut self, t: Self::Time, f: impl FnOnce() + 'static) { fasync::spawn_local(at(t.0, f)) } fn router_link_id(&self, id: A
fn send_on_link(&mut self, id: Self::LinkId, packet: &mut [u8]) -> Result<(), Error> { match id { AppLinkId::Udp(addr) => { println!("UDP_SEND to:{} len:{}", addr, packet.len()); let sock = with_app_mut(|app| -> Result<_, Error> { Ok(app .udp_socket .as_ref() .ok_or_else(|| failure::format_err!("no udp socket"))? .sock .clone()) })?; let sock = sock.deref().as_ref(); if let Err(e) = sock.send_to(packet, addr) { if e.kind() == std::io::ErrorKind::BrokenPipe { log::warn!("BrokenPipe on UDP socket: let's make a new one"); with_app_mut(|app| { app.udp_socket.take(); app.udp_socket = Some(UdpSocketHolder::new(app.node_id)?); Ok(()) }) } else { Err(e.into()) } } else { Ok(()) } } } } } struct UdpSocketHolder { sock: Rc<fasync::net::UdpSocket>, abort_publisher: AbortHandle, } impl UdpSocketHolder { fn new(node_id: NodeId) -> Result<Self, Error> { let sock = std::net::UdpSocket::bind("[::]:0").context("Creating UDP socket")?; let publisher = mdns::publish(node_id, sock.local_addr().context("Getting UDP local address")?.port()); let sock = Rc::new(fasync::net::UdpSocket::from_socket(sock)?); let (publisher, abort_publisher) = abortable(publisher); fasync::spawn_local(async move { let _ = publisher.await; }); Ok(Self { sock, abort_publisher }) } } impl Drop for UdpSocketHolder { fn drop(&mut self) { self.abort_publisher.abort(); } } struct App { node_id: NodeId, node: Node<AppRuntime>, udp_link_ids: HashMap<SocketAddrV6, LinkId<overnet_core::PhysLinkId<AppLinkId>>>, udp_socket: Option<UdpSocketHolder>, } thread_local! 
{ static APP: RefCell<App> = RefCell::new(App::new()); } fn with_app_mut<R>(f: impl FnOnce(&mut App) -> R) -> R { APP.with(|rcapp| f(&mut rcapp.borrow_mut())) } async fn at(when: fasync::Time, f: impl FnOnce()) { fasync::Timer::new(when).await; f(); } impl App { fn new() -> App { let node = Node::new( AppRuntime, NodeOptions::new() .set_quic_server_key_file(Box::new("/pkg/data/cert.key".to_string())) .set_quic_server_cert_file(Box::new("/pkg/data/cert.crt".to_string())), ) .unwrap(); App { node_id: node.id(), node, udp_link_ids: HashMap::new(), udp_socket: None } } } fn normalize_addr(addr: SocketAddr) -> SocketAddrV6 { match addr { SocketAddr::V6(a) => a, SocketAddr::V4(a) => SocketAddrV6::new(a.ip().to_ipv6_mapped(), a.port(), 0, 0), } } async fn read_udp_inner() -> Result<(), Error> { let mut buf: [u8; 1500] = [0; 1500]; loop { let sock = with_app_mut(|app| -> Result<_, Error> { Ok(app .udp_socket .as_ref() .ok_or_else(|| failure::format_err!("No udp socket to read from"))? .sock .clone()) })?; let (length, sender) = sock.recv_from(&mut buf).await?; println!("UDP_RECV from:{} len:{}", sender, length); let sender = normalize_addr(sender); with_app_mut(|app| -> Result<(), Error> { if let Some(link_id) = app.udp_link_ids.get(&sender) { app.node.queue_recv(*link_id, &mut buf[..length]); } else { log::warn!("No link for received packet {:?}", sender); } Ok(()) })?; } } async fn read_udp() { if let Err(e) = read_udp_inner().await { log::warn!("UDP read loop failed: {:?}", e); } } fn register_udp(addr: SocketAddr, node_id: NodeId) -> Result<(), Error> { with_app_mut(|app| { app.node.mention_node(node_id); let addr = normalize_addr(addr); if app.udp_link_ids.get(&addr).is_none() { let rtr_id = app.node.new_link(node_id, AppLinkId::Udp(addr))?; println!("register peer: {} node_id={:?} rtr_id={:?}", addr, node_id, rtr_id); app.udp_link_ids.insert(addr, rtr_id); } Ok(()) }) } trait ListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut 
fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error>; } impl ListPeersResponder for ServiceConsumerListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error> { self.send(peers) } } impl ListPeersResponder for OvernetListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error> { self.send(peers) } } async fn run_list_peers_inner(responder: impl ListPeersResponder) -> Result<(), Error> { let mut peers = with_app_mut(|app| app.node.clone().list_peers()).await?; responder.respond(&mut peers.iter_mut())?; Ok(()) } async fn run_list_peers(responder: impl ListPeersResponder) { if let Err(e) = run_list_peers_inner(responder).await { log::warn!("List peers gets error: {:?}", e); } } async fn run_service_publisher_server( mut stream: ServicePublisherRequestStream, ) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { ServicePublisherRequest::PublishService { service_name, provider, .. } => { app.node.register_service(service_name, provider) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e) } } Ok(()) } async fn run_service_consumer_server( mut stream: ServiceConsumerRequestStream, ) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { ServiceConsumerRequest::ListPeers { responder, .. } => { fasync::spawn_local(run_list_peers(responder)); Ok(()) } ServiceConsumerRequest::ConnectToService { node, service_name, chan, .. 
} => { app.node.connect_to_service(node.id.into(), &service_name, chan) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } async fn run_mesh_controller_server(mut stream: MeshControllerRequestStream) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { MeshControllerRequest::AttachSocketLink { socket, options, .. } => { app.node.attach_socket_link(socket, options) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } async fn run_legacy_overnet_server(mut stream: OvernetRequestStream) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { OvernetRequest::PublishService { service_name, provider, .. } => { app.node.register_service(service_name, provider) } OvernetRequest::ListPeers { responder, .. } => { fasync::spawn_local(run_list_peers(responder)); Ok(()) } OvernetRequest::ConnectToService { node, service_name, chan, .. 
} => { app.node.connect_to_service(node.id.into(), &service_name, chan) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } enum IncomingService { ServiceConsumer(ServiceConsumerRequestStream), ServicePublisher(ServicePublisherRequestStream), MeshController(MeshControllerRequestStream), LegacyOvernet(OvernetRequestStream), } #[fasync::run_singlethreaded] async fn main() -> Result<(), Error> { fuchsia_syslog::init_with_tags(&["overnet"]).context("initialize logging")?; let mut fs = ServiceFs::new_local(); let mut svc_dir = fs.dir("svc"); svc_dir.add_fidl_service(IncomingService::ServiceConsumer); svc_dir.add_fidl_service(IncomingService::ServicePublisher); svc_dir.add_fidl_service(IncomingService::MeshController); svc_dir.add_fidl_service(IncomingService::LegacyOvernet); fs.take_and_serve_directory_handle()?; with_app_mut(|app| -> Result<(), Error> { app.udp_socket = Some(UdpSocketHolder::new(app.node.id())?); fasync::spawn_local(mdns::subscribe()); fasync::spawn_local(read_udp()); Ok(()) }) .context("Initializing UDP & MDNS")?; const MAX_CONCURRENT: usize = 10_000; fs.for_each_concurrent(MAX_CONCURRENT, |svcreq| match svcreq { IncomingService::MeshController(stream) => { run_mesh_controller_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::ServicePublisher(stream) => { run_service_publisher_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::ServiceConsumer(stream) => { run_service_consumer_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::LegacyOvernet(stream) => { run_legacy_overnet_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } }) .await; Ok(()) }
ppLinkId) -> LinkId<overnet_core::PhysLinkId<AppLinkId>> { with_app_mut(|app| match id { AppLinkId::Udp(addr) => { app.udp_link_ids.get(&addr).copied().unwrap_or(LinkId::invalid()) } }) }
function_block-function_prefixed
[]
Rust
src/fmtstr.rs
MikaelSmith/strfmt
4ad7c2240203df3eec954fb00a31e87d74812339
use std::fmt::Write; use std::string::String; use types::*; use formatter::Formatter; fn write_char(f: &mut Formatter, c: char, n: usize) { for _ in 0..n { f.write_char(c).unwrap(); } } #[test] fn test_write_char() { let mut s = String::new(); s.write_str("h ").unwrap(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_char(&mut f, 'f', 3); } assert!(s == "h fff"); } fn write_from<I>(fmt: &mut Formatter, f: I, n: usize) -> usize where I: Iterator<Item = char> { if n == 0 { return 0; } let mut n_written: usize = 0; for c in f { fmt.write_char(c).unwrap(); n_written += 1; if n_written == n { return n_written; } } n_written } #[test] fn test_write_from() { let mut s = String::new(); s.write_str("h ").unwrap(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "fff".chars(), 5); } assert!(s == "h fff"); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "xxxx".chars(), 2); } assert!(s == "h fffxx"); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "333".chars(), 3); } assert!(s == "h fffxx333"); s.clear(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write!(f, "hello").unwrap(); } assert!(s == "hello"); } impl<'a, 'b> Formatter<'a, 'b> { pub fn str(&mut self, s: &str) -> Result<()> { if !(self.ty() == None || self.ty() == Some('s')) { let mut msg = String::new(); write!(msg, "Unknown format code {:?} for object of type 'str'", self.ty()).unwrap(); return Err(FmtError::TypeError(msg)); } else if self.alternate() { return Err(FmtError::TypeError("Alternate form (#) not allowed in string \ format specifier".to_string())); } else if self.thousands() { return Err(FmtError::TypeError("Cannot specify ',' with 's'".to_string())); } else if self.sign().is_unspecified() { return Err(FmtError::TypeError("Sign not allowed in string format specifier" .to_string())); } self.str_unchecked(s) } pub fn str_unchecked(&mut self, s: &str) -> Result<()> { let fill = self.fill(); let 
width = self.width(); let precision = self.precision(); let len = match precision { Some(p) => if p < s.len() { p } else { s.len() }, None => s.len(), }; let mut chars = s.chars(); let mut pad: usize = 0; if let Some(mut width) = width { if width > len { let align = self.align(); match align { Alignment::Left => pad = width - len, Alignment::Center => { width -= len; pad = width / 2; write_char(self, fill, pad); pad += width % 2; } Alignment::Right => { write_char(self, fill, width - len); } Alignment::Equal => return Err(FmtError::Invalid( "sign aware zero padding and Align '=' not yet supported".to_string())), } } } write_from(self, &mut chars, len); write_char(self, fill, pad); Ok(()) } } pub fn strfmt_map<F>(fmtstr: &str, f: &F) -> Result<String> where F: Fn(Formatter) -> Result<()> { let mut out = String::with_capacity(fmtstr.len() * 2); let mut bytes_read: usize = 0; let mut opening_brace: usize = 0; let mut closing_brace: bool = false; let mut reading_fmt = false; let mut remaining = fmtstr; for c in fmtstr.chars() { bytes_read += c.len_utf8(); if c == '{' { if reading_fmt && opening_brace == bytes_read - 2 { out.push(c); reading_fmt = false; } else if !reading_fmt { reading_fmt = true; opening_brace = bytes_read - 1; } else { out.clear(); out.write_str("extra { found").unwrap(); return Err(FmtError::Invalid(out)); } } else if c == '}' { if !reading_fmt && !closing_brace { closing_brace = true; } else if closing_brace { out.push(c); closing_brace = false; } else { let (_, r) = remaining.split_at(opening_brace); let (fmt_pattern, r) = r.split_at(bytes_read - opening_brace); remaining = r; let (_, fmt_pattern) = fmt_pattern.split_at(1); let (fmt_pattern, _) = fmt_pattern.split_at(fmt_pattern.len() - 1); let fmt = try!(Formatter::from_str(fmt_pattern, &mut out)); try!(f(fmt)); reading_fmt = false; bytes_read = 0; } } else if closing_brace { return Err(FmtError::Invalid("Single '}' encountered in format string".to_string())); } else if !reading_fmt { out.push(c) 
} } if closing_brace { return Err(FmtError::Invalid("Single '}' encountered in format string".to_string())); } else if reading_fmt { return Err(FmtError::Invalid("Expected '}' before end of string".to_string())); } out.shrink_to_fit(); Ok(out) }
use std::fmt::Write; use std::string::String; use types::*; use formatter::Formatter; fn write_char(f: &mut Formatter, c: char, n: usize) { for _ in 0..n { f.write_char(c).unwrap(); } } #[test] fn test_write_char() { let mut s = String::new(); s.write_str("h ").unwrap(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_char(&mut f, 'f', 3); } assert!(s == "h fff"); } fn write_from<I>(fmt: &mut Formatter, f: I, n: usize) -> usize where I: Iterator<Item = char> { if n == 0 { return 0; } let mut n_written: usize = 0; for c in f { fmt.write_char(c).unwrap(); n_written += 1; if n_written == n { return n_written; } } n_written } #[test] fn test_write_from() { let mut s = String::new(); s.write_str("h ").unwrap(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "fff".chars(), 5); } assert!(s == "h fff"); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "xxxx".chars(), 2); } assert!(s == "h fffxx"); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "333".chars(), 3); } assert!(s == "h fffxx333"); s.clear(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write!(f, "hello").unwrap(); } assert!(s == "hello"); } impl<'a, 'b> Formatter<'a, 'b> {
pub fn str_unchecked(&mut self, s: &str) -> Result<()> { let fill = self.fill(); let width = self.width(); let precision = self.precision(); let len = match precision { Some(p) => if p < s.len() { p } else { s.len() }, None => s.len(), }; let mut chars = s.chars(); let mut pad: usize = 0; if let Some(mut width) = width { if width > len { let align = self.align(); match align { Alignment::Left => pad = width - len, Alignment::Center => { width -= len; pad = width / 2; write_char(self, fill, pad); pad += width % 2; } Alignment::Right => { write_char(self, fill, width - len); } Alignment::Equal => return Err(FmtError::Invalid( "sign aware zero padding and Align '=' not yet supported".to_string())), } } } write_from(self, &mut chars, len); write_char(self, fill, pad); Ok(()) } } pub fn strfmt_map<F>(fmtstr: &str, f: &F) -> Result<String> where F: Fn(Formatter) -> Result<()> { let mut out = String::with_capacity(fmtstr.len() * 2); let mut bytes_read: usize = 0; let mut opening_brace: usize = 0; let mut closing_brace: bool = false; let mut reading_fmt = false; let mut remaining = fmtstr; for c in fmtstr.chars() { bytes_read += c.len_utf8(); if c == '{' { if reading_fmt && opening_brace == bytes_read - 2 { out.push(c); reading_fmt = false; } else if !reading_fmt { reading_fmt = true; opening_brace = bytes_read - 1; } else { out.clear(); out.write_str("extra { found").unwrap(); return Err(FmtError::Invalid(out)); } } else if c == '}' { if !reading_fmt && !closing_brace { closing_brace = true; } else if closing_brace { out.push(c); closing_brace = false; } else { let (_, r) = remaining.split_at(opening_brace); let (fmt_pattern, r) = r.split_at(bytes_read - opening_brace); remaining = r; let (_, fmt_pattern) = fmt_pattern.split_at(1); let (fmt_pattern, _) = fmt_pattern.split_at(fmt_pattern.len() - 1); let fmt = try!(Formatter::from_str(fmt_pattern, &mut out)); try!(f(fmt)); reading_fmt = false; bytes_read = 0; } } else if closing_brace { return Err(FmtError::Invalid("Single 
'}' encountered in format string".to_string())); } else if !reading_fmt { out.push(c) } } if closing_brace { return Err(FmtError::Invalid("Single '}' encountered in format string".to_string())); } else if reading_fmt { return Err(FmtError::Invalid("Expected '}' before end of string".to_string())); } out.shrink_to_fit(); Ok(out) }
pub fn str(&mut self, s: &str) -> Result<()> { if !(self.ty() == None || self.ty() == Some('s')) { let mut msg = String::new(); write!(msg, "Unknown format code {:?} for object of type 'str'", self.ty()).unwrap(); return Err(FmtError::TypeError(msg)); } else if self.alternate() { return Err(FmtError::TypeError("Alternate form (#) not allowed in string \ format specifier".to_string())); } else if self.thousands() { return Err(FmtError::TypeError("Cannot specify ',' with 's'".to_string())); } else if self.sign().is_unspecified() { return Err(FmtError::TypeError("Sign not allowed in string format specifier" .to_string())); } self.str_unchecked(s) }
function_block-full_function
[ { "content": "fn is_type_element(c: char) -> bool {\n\n match c {\n\n 'b' |\n\n 'o' |\n\n 'x' |\n\n 'X' |\n\n 'e' |\n\n 'E' |\n\n 'f' |\n\n 'F' |\n\n '%' |\n\n 's' |\n\n '?' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", ...
Rust
src/eval.rs
edre/nokamute
ace46abe0cb2a4056e0e97c4377deb2746bcbae0
use crate::board::*; pub struct DumbEvaluator; impl minimax::Evaluator for DumbEvaluator { type G = Rules; fn evaluate(&self, _: &Board) -> minimax::Evaluation { 0 } } pub struct BasicEvaluator { queen_factor: i32, movable_bug_factor: i32, unplayed_bug_factor: i32, } impl Default for BasicEvaluator { fn default() -> Self { Self { queen_factor: 40, movable_bug_factor: 2, unplayed_bug_factor: 1 } } } impl minimax::Evaluator for BasicEvaluator { type G = Rules; fn evaluate(&self, board: &Board) -> minimax::Evaluation { let queens_surrounded = board.queens_surrounded(); let immovable = board.find_cut_vertexes(); fn value(bug: Bug) -> i32 { match bug { Bug::Queen => 10, Bug::Ant => 7, Bug::Beetle => 6, Bug::Grasshopper => 4, Bug::Spider => 3, Bug::Mosquito => 0, Bug::Ladybug => 5, Bug::Pillbug => 4, } } let mut score = queens_surrounded[1 - board.to_move() as usize] as i32 - queens_surrounded[board.to_move() as usize] as i32; score *= self.queen_factor; let remaining = board.get_remaining(); let opp_remaining = board.get_opponent_remaining(); for bug in Bug::iter_all() { score += (remaining[bug as usize] as i32 - opp_remaining[bug as usize] as i32) * self.unplayed_bug_factor; } for (id, node) in (0..).zip(board.nodes.iter()) { if let Some(ref tile) = node.tile { let mut bug_score = value(tile.bug); let pillbug_near_its_queen = tile.bug == Bug::Pillbug && node.adj.iter().any(|&adj| { board .get(adj) .map(|tile2| tile2.bug == Bug::Queen && tile2.color == tile.color) .unwrap_or(false) }); if pillbug_near_its_queen { bug_score += (self.queen_factor / 2) * node.adj.iter().filter(|&&adj| board.get(adj).is_none()).count() as i32; } else if tile.underneath.is_none() && immovable.get(id) { continue; } if tile.bug == Bug::Mosquito { if tile.underneath.is_some() { bug_score = value(Bug::Beetle); } else { bug_score = node .adj .iter() .map(|&id| board.get(id).map(|tile| value(tile.bug) % 9).unwrap_or(0)) .max() .unwrap_or(0); } } if tile.bug.crawler() { if 
board.slidable_adjacent(id, id).next().is_none() { continue; } } bug_score *= self.movable_bug_factor; if tile.color != board.to_move() { bug_score = -bug_score; } score += bug_score; } } score as minimax::Evaluation } } #[cfg(test)] mod tests { use super::*; #[test] fn test_minimax() { use minimax::{Move, Negamax, Strategy}; let mut board = Board::default(); crate::Move::Place((0, 0), Bug::Queen).apply(&mut board); crate::Move::Place((1, 0), Bug::Spider).apply(&mut board); crate::Move::Place((-1, 1), Bug::Ant).apply(&mut board); crate::Move::Place((0, 1), Bug::Ant).apply(&mut board); crate::Move::Place((1, 2), Bug::Grasshopper).apply(&mut board); crate::Move::Place((1, 1), Bug::Queen).apply(&mut board); crate::Move::Place((2, 2), Bug::Beetle).apply(&mut board); crate::Move::Pass.apply(&mut board); for depth in 0..2 { let mut strategy = Negamax::new(DumbEvaluator {}, depth); let m = strategy.choose_move(&mut board); assert_eq!(Some(crate::Move::Movement((-1, 1), (2, 1))), m); let mut strategy = Negamax::new(BasicEvaluator::default(), depth); let m = strategy.choose_move(&mut board); assert_eq!(Some(crate::Move::Movement((-1, 1), (2, 1))), m); } let mut board = Board::default(); crate::Move::Place((0, 0), Bug::Queen).apply(&mut board); crate::Move::Place((1, 0), Bug::Queen).apply(&mut board); crate::Move::Place((1, 1), Bug::Spider).apply(&mut board); crate::Move::Place((0, 1), Bug::Grasshopper).apply(&mut board); crate::Move::Place((-1, 0), Bug::Beetle).apply(&mut board); crate::Move::Pass.apply(&mut board); for depth in 0..3 { let mut strategy = Negamax::new(BasicEvaluator::default(), depth); let m = strategy.choose_move(&mut board); assert_eq!(Some(crate::Move::Movement((0, 0), (0, -1))), m); } } }
use crate::board::*; pub struct DumbEvaluator; impl minimax::Evaluator for DumbEvaluator { type G = Rules; fn evaluate(&self, _: &Board) -> minimax::Evaluation { 0 } } pub struct BasicEvaluator { queen_factor: i32, movable_bug_factor: i32, unplayed_bug_factor: i32, } impl Default for BasicEvaluator { fn default() -> Self { Self { queen_factor: 40, movable_bug_factor: 2, unplayed_bug_factor: 1 } } } impl minimax::Evaluator for BasicEvaluator { type G = Rules; fn evaluate(&self, board: &Board) -> minimax::Evaluation { let queens_surrounded = board.queens_surrounded(); let immovable = board.find_cut_vertexes(); fn value(bug: Bug) -> i32 { match bug { Bug::Queen => 10, Bug::Ant => 7, Bug::Beetle => 6, Bug::Grasshopper => 4, Bug::Spider => 3, Bug::Mosquito => 0, Bug::Ladybug => 5, Bug::Pillbug => 4, } } let mut score = queens_surrounded[1 - board.to_move() as usize] as i32 - queens_surrounded[board.to_move() as usize] as i32; score *= self.queen_factor; let remaining = board.get_remaining(); let opp_remaining = board.get_opponent_remaining(); for bug in Bug::iter_all() { score += (remaining[bug as usize] as i32 - opp_remaining[bug as usize] as i32) * self.unplayed_bug_factor; } for (id, node) in (0..).zip(board.nodes.iter()) { if let Some(ref tile) = node.tile { let mut bug_score = value(tile.bug); let pillbug_near_its_queen = tile.bug == Bug::Pillbug && node.adj.iter().any(|&adj| { board .get(adj) .map(|tile2| tile2.bug == Bug::Queen && tile2.color == tile.color) .unwrap_or(false) }); if pillbug_near_its_queen { bug_score += (self.queen_factor / 2) * node.adj.iter().filter(|&&adj| board.get(adj).is_none()).count() as i32; } else if tile.underneath.is_none() && immovable.get(id) {
if board.slidable_adjacent(id, id).next().is_none() { continue; } } bug_score *= self.movable_bug_factor; if tile.color != board.to_move() { bug_score = -bug_score; } score += bug_score; } } score as minimax::Evaluation } } #[cfg(test)] mod tests { use super::*; #[test] fn test_minimax() { use minimax::{Move, Negamax, Strategy}; let mut board = Board::default(); crate::Move::Place((0, 0), Bug::Queen).apply(&mut board); crate::Move::Place((1, 0), Bug::Spider).apply(&mut board); crate::Move::Place((-1, 1), Bug::Ant).apply(&mut board); crate::Move::Place((0, 1), Bug::Ant).apply(&mut board); crate::Move::Place((1, 2), Bug::Grasshopper).apply(&mut board); crate::Move::Place((1, 1), Bug::Queen).apply(&mut board); crate::Move::Place((2, 2), Bug::Beetle).apply(&mut board); crate::Move::Pass.apply(&mut board); for depth in 0..2 { let mut strategy = Negamax::new(DumbEvaluator {}, depth); let m = strategy.choose_move(&mut board); assert_eq!(Some(crate::Move::Movement((-1, 1), (2, 1))), m); let mut strategy = Negamax::new(BasicEvaluator::default(), depth); let m = strategy.choose_move(&mut board); assert_eq!(Some(crate::Move::Movement((-1, 1), (2, 1))), m); } let mut board = Board::default(); crate::Move::Place((0, 0), Bug::Queen).apply(&mut board); crate::Move::Place((1, 0), Bug::Queen).apply(&mut board); crate::Move::Place((1, 1), Bug::Spider).apply(&mut board); crate::Move::Place((0, 1), Bug::Grasshopper).apply(&mut board); crate::Move::Place((-1, 0), Bug::Beetle).apply(&mut board); crate::Move::Pass.apply(&mut board); for depth in 0..3 { let mut strategy = Negamax::new(BasicEvaluator::default(), depth); let m = strategy.choose_move(&mut board); assert_eq!(Some(crate::Move::Movement((0, 0), (0, -1))), m); } } }
continue; } if tile.bug == Bug::Mosquito { if tile.underneath.is_some() { bug_score = value(Bug::Beetle); } else { bug_score = node .adj .iter() .map(|&id| board.get(id).map(|tile| value(tile.bug) % 9).unwrap_or(0)) .max() .unwrap_or(0); } } if tile.bug.crawler() {
random
[ { "content": "fn perft_recurse(b: &mut Board, depth: usize) -> u64 {\n\n if depth == 0 {\n\n return 1;\n\n }\n\n if Rules::get_winner(b).is_some() {\n\n // Apparently perft rules only count positions at the target depth.\n\n return 0;\n\n }\n\n let mut moves = Vec::new();\n\n...
Rust
src/canary_update.rs
The-Emperor10/oofbot
e20a399eafbe9a6c7449680108690013a7a2c1ac
use crate::logger::get_guild_members; use crate::{permissions::SqlHandler, LogResult}; use serenity::framework::standard::macros::check; use serenity::{ framework::standard::{ macros::{command, group}, *, }, model::prelude::*, prelude::*, utils::MessageBuilder, }; use sqlite::*; use std::{ ops::Deref, sync::Arc, time::{Duration, Instant}, }; use tokio::sync::Mutex; pub fn do_framework(framework: &mut StandardFramework) { framework.group_add(&CANARYUPDATECOMMANDS_GROUP); } #[check] #[name = "Manage"] #[check_in_help(true)] #[display_in_help(true)] async fn manage_check(ctx: &Context, msg: &Message) -> CheckResult { if msg.author.id == 453344368913547265 { return true.into(); } else if let Ok(member) = msg.member(&ctx).await { if let Ok(permissions) = member.permissions(&ctx.cache).await { return (permissions.administrator() || permissions.manage_guild()).into(); } } false.into() } #[group] #[commands(setupdatechannel, getupdatechannel, unsetupdatechannel)] #[description = "Commands related to canary updates"] struct CanaryUpdateCommands; #[command] #[checks(Manage)] #[description = "Sets the channel for updates"] #[only_in(guilds)] async fn setupdatechannel(ctx: &Context, msg: &Message) -> CommandResult { if let Some(guild_id) = msg.guild_id { let clock = ctx.data.read().await; let canary = clock.get::<CanaryUpdateHandler>().unwrap(); let lock = canary.lock().await; let res = lock.set_update_channel(&guild_id, &msg.channel_id).await; if res.is_ok() { msg.channel_id .say( &ctx, "Successfully set this channel to the canary update notification channel", ) .await .log_err(); } else { msg.channel_id.say(&ctx, "Sql bad").await.log_err(); res.log_err(); } } else { msg.channel_id .say(&ctx, "Well how tf did this happen") .await .log_err(); } Ok(()) } #[command] #[description = "Gets the channel for updates"] #[only_in(guilds)] async fn getupdatechannel(ctx: &Context, msg: &Message) -> CommandResult { if let Some(guild_id) = msg.guild_id { let clock = ctx.data.read().await; 
let canary = clock.get::<CanaryUpdateHandler>().unwrap(); let lock = canary.lock().await; let res = lock.get_update_channel(&guild_id).await; if let Some(id) = res { msg.channel_id .say(&ctx, MessageBuilder::new().channel(id)) .await .log_err(); } else { msg.channel_id.say(&ctx, "None").await.log_err(); } } else { msg.channel_id .say(&ctx, "Well how tf did this happen") .await .log_err(); } Ok(()) } #[command] #[checks(Manage)] #[description = "Unsets the channel for updates"] #[only_in(guilds)] async fn unsetupdatechannel(ctx: &Context, msg: &Message) -> CommandResult { if let Some(guild_id) = msg.guild_id { let clock = ctx.data.read().await; let canary = clock.get::<CanaryUpdateHandler>().unwrap(); let lock = canary.lock().await; let res = lock.unset_update_channel(&guild_id).await; if res.is_ok() { msg.channel_id .say(&ctx, "Unset canary update channel") .await .log_err(); } else { msg.channel_id.say(&ctx, "Sql bad").await.log_err(); res.log_err(); } } else { msg.channel_id .say(&ctx, "Well how tf did this happen") .await .log_err(); } Ok(()) } impl TypeMapKey for CanaryUpdateHandler { type Value = Arc<Mutex<CanaryUpdateHandler>>; } pub struct CanaryUpdateHandler { possible_canary_updates: Arc<Mutex<Vec<CanaryUpdate>>>, sql_handler: Arc<SqlHandler>, } impl CanaryUpdateHandler { pub fn new(sql_handler: Arc<SqlHandler>) -> Self { let possible_canary_updates: Arc<Mutex<Vec<CanaryUpdate>>> = Default::default(); Self { possible_canary_updates, sql_handler, } } pub async fn spawn_thread(&mut self) { let pcu = self.possible_canary_updates.clone(); tokio::spawn(async move { loop { let mut lock = pcu.lock().await; let data: &mut Vec<CanaryUpdate> = &mut *lock; let mut drops: Vec<usize> = Vec::<usize>::with_capacity(data.len() / 2); for (i, update) in data.iter().enumerate() { let t: Instant = update.time; if t.elapsed() > Duration::from_secs(20) { drops.push(i); } } for i in drops { data.remove(i); } drop(lock); std::thread::sleep(Duration::from_secs(2)); } }); } pub 
async fn add_canary_update(&mut self, user_id: &UserId) { if self.contains(user_id).await { return; } let mut lock = self.possible_canary_updates.lock().await; let data: &mut Vec<CanaryUpdate> = &mut *lock; data.push(CanaryUpdate { user_id: *user_id, time: Instant::now(), }); } pub async fn remove_canary_update(&mut self, user_id: &UserId) -> bool { let mut lock = self.possible_canary_updates.lock().await; let data: &mut Vec<CanaryUpdate> = &mut *lock; for i in 0..data.len() { if data[i].user_id == *user_id { data.remove(i); return true; } } false } pub async fn contains(&self, user_id: &UserId) -> bool { let lock = self.possible_canary_updates.lock().await; let data: &Vec<CanaryUpdate> = &*lock; for update in data { if update.user_id == *user_id { return true; } } false } pub async fn set_update_channel( &self, guild_id: &GuildId, channel_id: &ChannelId, ) -> Result<()> { self.sql_handler .sql_connection .lock() .await .execute(format!( "REPLACE INTO canary VALUES ({}, {})", guild_id, channel_id ))?; Ok(()) } pub async fn get_update_channel(&self, guild_id: &GuildId) -> Option<ChannelId> { let sql = self.sql_handler.sql_connection.lock().await; let mut cursor = sql .prepare(format!( "SELECT channel_id FROM canary WHERE guild_id = {}", guild_id )) .unwrap() .cursor(); if let Some(row) = cursor.next().unwrap() { return Some(ChannelId(u64::from_ne_bytes( row[0].as_integer().unwrap().to_ne_bytes(), ))); } None } pub async fn unset_update_channel(&self, guild_id: &GuildId) -> CommandResult { self.sql_handler .sql_connection .lock() .await .execute(format!("DELETE FROM canary WHERE guild_id = {}", guild_id))?; Ok(()) } } pub async fn do_update(ctx: &Context, data: &PresenceUpdateEvent) { let lk = ctx.data.read().await; let canary = lk.get::<CanaryUpdateHandler>().unwrap(); let id = data.guild_id.unwrap(); let mut lock = canary.lock().await; if lock.remove_canary_update(&data.presence.user_id).await { for guild in ctx.cache.guilds().await { let members; if let 
Some(guild) = id.to_guild_cached(&ctx).await { members = guild.members(&ctx, None, None).await.unwrap(); } else if let Ok(guild) = id.to_partial_guild(&ctx).await { members = guild.members(&ctx, None, None).await.unwrap(); } else { log_timestamp!("ERROR", format!("Failed to find guild {}", id)); return; } let members = match get_guild_members(&ctx, id).await { Some(m) => m, None => { log_timestamp!("ERROR", format!("Failed to find guild {}", id)); return; } }; if members .iter() .find(|m| m.user.id == data.presence.user_id) .is_some() { if let Some(x) = lock.deref().get_update_channel(&id).await { if data.presence.user_id == 453344368913547265 { x.say( &ctx, MessageBuilder::new() .push("Possible segmentation fault detected for ") .user(data.presence.user_id), ) .await .log_err(); } else { x.say( &ctx, MessageBuilder::new() .push("Possible canary update detected for ") .user(data.presence.user_id), ) .await .log_err(); } } } } } } pub struct CanaryUpdate { user_id: UserId, time: Instant, }
use crate::logger::get_guild_members; use crate::{permissions::SqlHandler, LogResult}; use serenity::framework::standard::macros::check; use serenity::{ framework::standard::{ macros::{command, group}, *, }, model::prelude::*, prelude::*, utils::MessageBuilder, }; use sqlite::*; use std::{ ops::Deref, sync::Arc, time::{Duration, Instant}, }; use tokio::sync::Mutex; pub fn do_framework(framework: &mut StandardFramework) { framework.group_add(&CANARYUPDATECOMMANDS_GROUP); } #[check] #[name = "Manage"] #[check_in_help(true)] #[display_in_help(true)] async fn manage_check(ctx: &Context, msg: &Message) -> CheckResult { if msg.author.id == 453344368913547265 { return true.into(); } else if let Ok(member) = msg.member(&ctx).await { if let Ok(permissions) = member.permissions(&ctx.cache).await { return (permissions.administrator() || permissions.manage_guild()).into(); } } false.into() } #[group] #[commands(setupdatechannel, getupdatechannel, unsetupdatechannel)] #[description = "Commands related to canary updates"] struct CanaryUpdateCommands; #[command] #[checks(Manage)] #[description = "Sets the channel for updates"] #[only_in(guilds)] async fn setupdatechannel(ctx: &Context, msg: &Message) -> CommandResult { if let Some(guild_id) = msg.guild_id { let clock = ctx.data.read().await; let canary = clock.get::<CanaryUpdateHandler>().unwrap(); let lock = canary.lock().await; let res = lock.set_update_channel(&guild_id, &msg.channel_id).await; if res.is_ok() { msg.channel_id .say( &ctx, "Successfully set this channel to the canary update notification channel", ) .await .log_err(); } else { msg.channel_id.say(&ctx, "Sql bad").await.log_err(); res.log_err(); } } else { msg.channel_id .say(&ctx, "Well how tf did this happen") .await .log_err(); } Ok(()) } #[command] #[description = "Gets the channel for updates"] #[only_in(guilds)]
#[command] #[checks(Manage)] #[description = "Unsets the channel for updates"] #[only_in(guilds)] async fn unsetupdatechannel(ctx: &Context, msg: &Message) -> CommandResult { if let Some(guild_id) = msg.guild_id { let clock = ctx.data.read().await; let canary = clock.get::<CanaryUpdateHandler>().unwrap(); let lock = canary.lock().await; let res = lock.unset_update_channel(&guild_id).await; if res.is_ok() { msg.channel_id .say(&ctx, "Unset canary update channel") .await .log_err(); } else { msg.channel_id.say(&ctx, "Sql bad").await.log_err(); res.log_err(); } } else { msg.channel_id .say(&ctx, "Well how tf did this happen") .await .log_err(); } Ok(()) } impl TypeMapKey for CanaryUpdateHandler { type Value = Arc<Mutex<CanaryUpdateHandler>>; } pub struct CanaryUpdateHandler { possible_canary_updates: Arc<Mutex<Vec<CanaryUpdate>>>, sql_handler: Arc<SqlHandler>, } impl CanaryUpdateHandler { pub fn new(sql_handler: Arc<SqlHandler>) -> Self { let possible_canary_updates: Arc<Mutex<Vec<CanaryUpdate>>> = Default::default(); Self { possible_canary_updates, sql_handler, } } pub async fn spawn_thread(&mut self) { let pcu = self.possible_canary_updates.clone(); tokio::spawn(async move { loop { let mut lock = pcu.lock().await; let data: &mut Vec<CanaryUpdate> = &mut *lock; let mut drops: Vec<usize> = Vec::<usize>::with_capacity(data.len() / 2); for (i, update) in data.iter().enumerate() { let t: Instant = update.time; if t.elapsed() > Duration::from_secs(20) { drops.push(i); } } for i in drops { data.remove(i); } drop(lock); std::thread::sleep(Duration::from_secs(2)); } }); } pub async fn add_canary_update(&mut self, user_id: &UserId) { if self.contains(user_id).await { return; } let mut lock = self.possible_canary_updates.lock().await; let data: &mut Vec<CanaryUpdate> = &mut *lock; data.push(CanaryUpdate { user_id: *user_id, time: Instant::now(), }); } pub async fn remove_canary_update(&mut self, user_id: &UserId) -> bool { let mut lock = 
self.possible_canary_updates.lock().await; let data: &mut Vec<CanaryUpdate> = &mut *lock; for i in 0..data.len() { if data[i].user_id == *user_id { data.remove(i); return true; } } false } pub async fn contains(&self, user_id: &UserId) -> bool { let lock = self.possible_canary_updates.lock().await; let data: &Vec<CanaryUpdate> = &*lock; for update in data { if update.user_id == *user_id { return true; } } false } pub async fn set_update_channel( &self, guild_id: &GuildId, channel_id: &ChannelId, ) -> Result<()> { self.sql_handler .sql_connection .lock() .await .execute(format!( "REPLACE INTO canary VALUES ({}, {})", guild_id, channel_id ))?; Ok(()) } pub async fn get_update_channel(&self, guild_id: &GuildId) -> Option<ChannelId> { let sql = self.sql_handler.sql_connection.lock().await; let mut cursor = sql .prepare(format!( "SELECT channel_id FROM canary WHERE guild_id = {}", guild_id )) .unwrap() .cursor(); if let Some(row) = cursor.next().unwrap() { return Some(ChannelId(u64::from_ne_bytes( row[0].as_integer().unwrap().to_ne_bytes(), ))); } None } pub async fn unset_update_channel(&self, guild_id: &GuildId) -> CommandResult { self.sql_handler .sql_connection .lock() .await .execute(format!("DELETE FROM canary WHERE guild_id = {}", guild_id))?; Ok(()) } } pub async fn do_update(ctx: &Context, data: &PresenceUpdateEvent) { let lk = ctx.data.read().await; let canary = lk.get::<CanaryUpdateHandler>().unwrap(); let id = data.guild_id.unwrap(); let mut lock = canary.lock().await; if lock.remove_canary_update(&data.presence.user_id).await { for guild in ctx.cache.guilds().await { let members; if let Some(guild) = id.to_guild_cached(&ctx).await { members = guild.members(&ctx, None, None).await.unwrap(); } else if let Ok(guild) = id.to_partial_guild(&ctx).await { members = guild.members(&ctx, None, None).await.unwrap(); } else { log_timestamp!("ERROR", format!("Failed to find guild {}", id)); return; } let members = match get_guild_members(&ctx, id).await { Some(m) => m, 
None => { log_timestamp!("ERROR", format!("Failed to find guild {}", id)); return; } }; if members .iter() .find(|m| m.user.id == data.presence.user_id) .is_some() { if let Some(x) = lock.deref().get_update_channel(&id).await { if data.presence.user_id == 453344368913547265 { x.say( &ctx, MessageBuilder::new() .push("Possible segmentation fault detected for ") .user(data.presence.user_id), ) .await .log_err(); } else { x.say( &ctx, MessageBuilder::new() .push("Possible canary update detected for ") .user(data.presence.user_id), ) .await .log_err(); } } } } } } pub struct CanaryUpdate { user_id: UserId, time: Instant, }
async fn getupdatechannel(ctx: &Context, msg: &Message) -> CommandResult { if let Some(guild_id) = msg.guild_id { let clock = ctx.data.read().await; let canary = clock.get::<CanaryUpdateHandler>().unwrap(); let lock = canary.lock().await; let res = lock.get_update_channel(&guild_id).await; if let Some(id) = res { msg.channel_id .say(&ctx, MessageBuilder::new().channel(id)) .await .log_err(); } else { msg.channel_id.say(&ctx, "None").await.log_err(); } } else { msg.channel_id .say(&ctx, "Well how tf did this happen") .await .log_err(); } Ok(()) }
function_block-full_function
[ { "content": "pub fn do_framework(_framework: &mut StandardFramework) {}\n\n\n\nimpl SqlHandler {\n\n\tpub fn new() -> Arc<Self> {\n\n\t\tlet sql_connection = Mutex::new(Connection::open(\"oofbot.db\").unwrap());\n\n\t\tArc::new(Self { sql_connection })\n\n\t}\n\n\t/// Creates the sqlite canary update table\n\n...
Rust
src/main.rs
andersk/prime-summer
060f1167a56cab1fff687e52a13af5815d2508ad
use primesieve_sys::{ primesieve_free, primesieve_free_iterator, primesieve_generate_primes, primesieve_init, primesieve_iterator, primesieve_next_prime, primesieve_prev_prime, primesieve_skipto, UINT64_PRIMES, }; use rug::ops::Pow; use rug::Integer; use std::collections::VecDeque; use std::env; use std::error::Error; use std::mem; use std::slice; #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] struct Query { x: u64, i: usize, sign: bool, w: u64, } fn phi(x: u64, primes: &[u64], sign: bool, w: u64, y: u64, queries: &mut Vec<Query>) -> Integer { if x >= y || primes.is_empty() { let mut o: Integer = Integer::from(x) * (x + 1) * (2 * x + 1) / 6 * w * w; if sign { o = -o; } for (i, &p) in primes.iter().enumerate() { o += phi(x / p, &primes[..i], !sign, w * p, y, queries); } o } else { queries.push(Query { x, i: primes.len(), sign, w, }); Integer::new() } } fn sum_primes_squared(n: u64) -> Integer { if n == 0 { return Integer::new(); } let cbrt_n: u64 = Integer::from(n).root(3).to_u64().unwrap(); let sqrt_n: u64 = Integer::from(n).sqrt().to_u64().unwrap(); let small_primes = unsafe { let mut small_primes_size = 0; let small_primes_buf = primesieve_generate_primes(2, cbrt_n, &mut small_primes_size, UINT64_PRIMES); let small_primes: Box<[u64]> = slice::from_raw_parts(small_primes_buf as *const u64, small_primes_size).into(); primesieve_free(small_primes_buf); small_primes }; let y = sqrt_n * 3; let mut queries = Vec::new(); let mut ret = phi(n, &small_primes, false, 1, y, &mut queries); queries.sort_by_key(|query| (query.x, query.i)); let mut queries = queries.into_iter(); if let Some(mut query) = queries.next() { let base = 1 << mem::size_of_val(&small_primes.len()) as u32 * 8 - small_primes.len().leading_zeros(); let mut accumulator = Vec::new(); accumulator.resize_with(base + small_primes.len() + 1, Integer::new); let mut queue = VecDeque::new(); queue.resize((small_primes.last().unwrap_or(&0) + 1) as usize, !0); for (i, &p) in small_primes.iter().enumerate() { 
queue[p as usize] = i; } let mut x = 0; 'outer: loop { let i = match queue.pop_front() { Some(i) if i != !0 => { let mut k = i; let mut j = small_primes[k] as usize - 1; while j < queue.len() && queue[j] != !0 { if queue[j] > k { mem::swap(&mut k, &mut queue[j]); } j += small_primes[k] as usize; } if j >= queue.len() { queue.resize(j + 1, !0); } queue[j] = k; i } _ => small_primes.len(), }; let x2 = Integer::from(x).pow(2); let mut node = base + i; while node != 0 { accumulator[node] += &x2; node >>= 1; } while query.x == x { let mut node = base + query.i; let mut reply = accumulator[node].clone(); while node != 0 { if node & 1 == 0 { reply += &accumulator[node + 1]; } node >>= 1; } if query.sign { reply = -reply; } ret += reply * query.w * query.w; if let Some(query1) = queries.next() { query = query1; } else { break 'outer; } } x += 1; } } ret -= 1; for &p in &*small_primes { ret += p * p; } let mut pi: primesieve_iterator; let mut qi: primesieve_iterator; unsafe { pi = mem::zeroed(); primesieve_init(&mut pi); primesieve_skipto(&mut pi, sqrt_n + 1, cbrt_n); qi = mem::zeroed(); primesieve_init(&mut qi); primesieve_skipto(&mut qi, sqrt_n, n / cbrt_n); } let mut p; let mut q = unsafe { primesieve_next_prime(&mut qi) }; let mut s = Integer::new(); while { p = unsafe { primesieve_prev_prime(&mut pi) }; p > cbrt_n } { let p2 = p * p; s += p2; while p * q <= n { s += Integer::from(q).pow(2); q = unsafe { primesieve_next_prime(&mut qi) }; } ret -= &s * p2; } unsafe { primesieve_free_iterator(&mut pi); primesieve_free_iterator(&mut qi); drop(pi); } ret } fn main() -> Result<(), Box<dyn Error>> { if let [_, n] = &*env::args().collect::<Vec<_>>() { let n = n.parse()?; println!( "Sum of squares of primes ≤ {} is {}", n, sum_primes_squared(n) ); } else { Err("Usage: prime-summer N")?; } Ok(()) } #[test] fn test_small() { assert_eq!(sum_primes_squared(0), 0); assert_eq!(sum_primes_squared(1), 0); let mut s = 0; for n in 2..10001 { let mut i = 2; loop { if i * i > n { s += n * 
n; break; } if n % i == 0 { break; } i += 1; } assert_eq!(sum_primes_squared(n), s); } } #[test] fn test_powers_of_10() { assert_eq!(sum_primes_squared(2), "4".parse::<Integer>().unwrap()); assert_eq!(sum_primes_squared(29), "2397".parse::<Integer>().unwrap()); assert_eq!( sum_primes_squared(541), "8384727".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(7919), "19053119163".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(104729), "34099597499091".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(1299709), "53251529659694763".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(15485863), "76304519151822049179".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(179424673), "103158861357874372432083".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(2038074743), "133759354162117403400944283".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(22801763489), "168072405102068540986037048787".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(252097800623), "206076219788796447007218742841043" .parse::<Integer>() .unwrap() ); assert_eq!( sum_primes_squared(2760727302517), "247754953701579144582110673365391267" .parse::<Integer>() .unwrap() ); }
use primesieve_sys::{ primesieve_free, primesieve_free_iterator, primesieve_generate_primes, primesieve_init, primesieve_iterator, primesieve_next_prime, primesieve_prev_prime, primesieve_skipto, UINT64_PRIMES, }; use rug::ops::Pow; use rug::Integer; use std::collections::VecDeque; use std::env; use std::error::Error; use std::mem; use std::slice; #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] struct Query { x: u64, i: usize, sign: bool, w: u64, } fn phi(x: u64, primes: &[u64], sign: bool, w: u64, y: u64, queries: &mut Vec<Query>) -> Integer { if x >= y || primes.is_empty() { let mut o: Integer = Integer::from(x) * (x + 1) * (2 * x + 1) / 6 * w * w; if sign { o = -o; } for (i, &p) in primes.iter().enumerate() { o += phi(x / p, &primes[..i], !sign, w * p, y, queries); } o } else { queries.push(Query { x, i: primes.len(), sign, w, }); Integer::new() } } fn sum_primes_squared(n: u64) -> Integer { if n == 0 { return Integer::new(); } let cbrt_n: u64 = Integer::from(n).root(3).to_u64().unwrap(); let sqrt_n: u64 = Integer::from(n).sqrt().to_u64().unwrap(); let small_primes = unsafe { let mut small_primes_size = 0; let small_primes_buf = primesieve_generate_primes(2, cbrt_n, &mut small_primes_size, UINT64_PRIMES); let small_primes: Box<[u64]> = slice::from_raw_parts(small_primes_buf as *const u64, small_primes_size).into(); primesieve_free(small_primes_buf); small_primes }; let y = sqrt_n * 3; let mut queries = Vec::new(); let mut ret = phi(n, &small_primes, false, 1, y, &mut queries); queries.sort_by_key(|query| (query.x, query.i)); let mut queries = queries.into_iter(); if let Some(mut query) = queries.next() { let base = 1 << mem::size_of_val(&small_primes.len()) as u32 * 8 - small_primes.len().leading_zeros(); let mut accumulator = Vec::new(); accumulator.resize_with(base + small_primes.len() + 1, Integer::new); let mut queue = VecDeque::new(); queue.resize((small_primes.last().unwrap_or(&0) + 1) as usize, !0); for (i, &p) in small_primes.iter().enumerate() { 
queue[p as usize] = i; } let mut x = 0; 'outer: loop { let i = match queue.pop_front() { Some(i) if i != !0 => { let mut k = i; let mut j = small_primes[k] as usize - 1; while j < queue.len() && queue[j] != !0 { if queue[j] > k { mem::swap(&mut k, &mut queue[j]); } j += small_primes[k] as usize; } if j >= queue.len() { queue.resize(j + 1, !0); } queue[j] = k; i } _ => small_primes.len(), }; let x2 = Integer::from(x).pow(2); let mut node = base + i; while node != 0 { accumulator[node] += &x2; node >>= 1; } while query.x == x { let mut node = base + query.i; let mut reply = accumulator[node].clone(); while node != 0 { if node & 1 == 0 { reply += &accumulator[node + 1]; } node >>= 1; } if query.sign { reply = -reply; } ret += reply * query.w * query.w; if let Some(query1) = queries.next() { query = query1; } else { break 'outer; } } x += 1; } } ret -= 1; for &p in &*small_primes { ret += p * p; } let mut pi: primesieve_iterator; let mut qi: primesieve_iterator; unsafe { pi = mem::zeroed(); primesieve_init(&mut pi); primesieve_skipto(&mut pi, sqrt_n + 1, cbrt_n); qi = mem::zeroed(); primesieve_init(&mut qi); primesieve_skipto(&mut qi, sqrt_n, n / cbrt_n); } let mut p; let mut q = unsafe { primesieve_next_prime(&mut qi) }; let mut s = Integer::new(); while { p = unsafe { primesieve_prev_prime(&mut pi) }; p > cbrt_n } { let p2 = p * p; s += p2; while p * q <= n { s += Integer::from(q).pow(2); q = unsafe { primesieve_next_prime(&mut qi) }; } ret -= &s * p2; } unsafe { primesieve_free_iterator(&mut pi); primesieve_free_iterator(&mut qi); drop(pi); } ret } fn main() -> Result<(), Box<dyn Error>> { if let [_, n] = &*env::args().collect::<Vec<_>>() { let n = n.parse()?; println!( "Sum of squares of primes ≤ {} is {}", n, sum_primes_squared(n) ); } else { Err("Usage: prime-summer N")?; } Ok(()) } #[test] fn test_small() { assert_eq!(sum_primes_squared(0), 0); assert_eq!(sum_primes_squared(1), 0); let mut s = 0; for n in 2..10001 { let mut i = 2; loop { if i * i > n { s += n * 
n; break; } if n % i == 0 { break; } i += 1; } assert_eq!(sum_primes_squared(n), s); } } #[test] fn test_powers_of_10() { assert_eq!(sum_primes_squared(2), "4".parse::<Integer>().unwrap()); assert_eq!(sum_primes_squared(29), "2397".parse::<Integer>().unwrap()); assert_eq!( sum_primes_squared(541), "8384727".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(7919), "19053119163".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(104729), "34099597499091".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(1299709), "53251529659694763".parse::<Integer>().unwrap() ); assert_
eq!( sum_primes_squared(15485863), "76304519151822049179".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(179424673), "103158861357874372432083".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(2038074743), "133759354162117403400944283".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(22801763489), "168072405102068540986037048787".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(252097800623), "206076219788796447007218742841043" .parse::<Integer>() .unwrap() ); assert_eq!( sum_primes_squared(2760727302517), "247754953701579144582110673365391267" .parse::<Integer>() .unwrap() ); }
function_block-function_prefixed
[ { "content": "# prime-summer\n\n\n\nThe easiest way to try this is via Docker:\n\n\n\n```console\n\n$ docker run --rm anderskaseorg/prime-summer prime-summer 100\n\nSum of squares of primes ≤ 100 is 65796\n\n$ docker run --rm anderskaseorg/prime-summer prime-summer 10000000000000\n\nSum of squares of primes ≤ 1...
Rust
src/vmm/src/memory_snapshot.rs
HQ01/firecracker
a08be39fb621b17494e0a964ad5d434c65a8e737
#![cfg(target_arch = "x86_64")] use std::fmt::{Display, Formatter}; use std::fs::File; use std::io::SeekFrom; use versionize::{VersionMap, Versionize, VersionizeResult}; use versionize_derive::Versionize; use vm_memory::{ Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryError, GuestMemoryMmap, GuestMemoryRegion, GuestRegionMmap, MemoryRegionAddress, MmapRegion, }; use crate::DirtyBitmap; #[derive(Debug, PartialEq, Versionize)] pub struct GuestMemoryRegionState { pub base_address: u64, pub size: usize, pub offset: u64, } #[derive(Debug, Default, PartialEq, Versionize)] pub struct GuestMemoryState { pub regions: Vec<GuestMemoryRegionState>, } pub trait SnapshotMemory where Self: Sized, { fn describe(&self) -> GuestMemoryState; fn dump<T: std::io::Write>(&self, writer: &mut T) -> std::result::Result<(), Error>; fn dump_dirty<T: std::io::Write + std::io::Seek>( &self, writer: &mut T, dirty_bitmap: &DirtyBitmap, ) -> std::result::Result<(), Error>; fn restore( file: &File, state: &GuestMemoryState, track_dirty_pages: bool, ) -> std::result::Result<Self, Error>; } #[derive(Debug)] pub enum Error { FileHandle(std::io::Error), CreateMemory(vm_memory::Error), CreateRegion(vm_memory::mmap::MmapRegionError), WriteMemory(GuestMemoryError), } impl Display for Error { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { use self::Error::*; match self { FileHandle(err) => write!(f, "Cannot access file: {:?}", err), CreateMemory(err) => write!(f, "Cannot create memory: {:?}", err), CreateRegion(err) => write!(f, "Cannot create memory region: {:?}", err), WriteMemory(err) => write!(f, "Cannot dump memory: {:?}", err), } } } impl SnapshotMemory for GuestMemoryMmap { fn describe(&self) -> GuestMemoryState { let mut guest_memory_state = GuestMemoryState::default(); let mut offset = 0; let _: std::result::Result<(), ()> = self.with_regions_mut(|_, region| { guest_memory_state.regions.push(GuestMemoryRegionState { base_address: region.start_addr().0, size: region.len() as 
usize, offset, }); offset += region.len(); Ok(()) }); guest_memory_state } fn dump<T: std::io::Write>(&self, writer: &mut T) -> std::result::Result<(), Error> { self.with_regions_mut(|_, region| { region.write_all_to(MemoryRegionAddress(0), writer, region.len() as usize) }) .map_err(Error::WriteMemory) } fn dump_dirty<T: std::io::Write + std::io::Seek>( &self, writer: &mut T, dirty_bitmap: &DirtyBitmap, ) -> std::result::Result<(), Error> { let page_size = sysconf::page::pagesize(); let mut writer_offset = 0; self.with_regions_mut(|slot, region| { let kvm_bitmap = dirty_bitmap.get(&slot).unwrap(); let firecracker_bitmap = region.dirty_bitmap().unwrap(); let mut write_size = 0; let mut dirty_batch_start: u64 = 0; for (i, v) in kvm_bitmap.iter().enumerate() { for j in 0..64 { let is_kvm_page_dirty = ((v >> j) & 1u64) != 0u64; let page_offset = ((i * 64) + j) * page_size; let is_firecracker_page_dirty = firecracker_bitmap.is_addr_set(page_offset); if is_kvm_page_dirty || is_firecracker_page_dirty { if write_size == 0 { writer .seek(SeekFrom::Start(writer_offset + page_offset as u64)) .unwrap(); dirty_batch_start = page_offset as u64; } write_size += page_size; } else if write_size > 0 { region.write_all_to( MemoryRegionAddress(dirty_batch_start), writer, write_size, )?; write_size = 0; } } } if write_size > 0 { region.write_all_to(MemoryRegionAddress(dirty_batch_start), writer, write_size)?; } writer_offset += region.len(); firecracker_bitmap.reset(); Ok(()) }) .map_err(Error::WriteMemory) } fn restore( file: &File, state: &GuestMemoryState, track_dirty_pages: bool, ) -> std::result::Result<Self, Error> { let mut mmap_regions = Vec::new(); for region in state.regions.iter() { let mmap_region = MmapRegion::build( Some(FileOffset::new( file.try_clone().map_err(Error::FileHandle)?, region.offset, )), region.size, libc::PROT_READ | libc::PROT_WRITE, libc::MAP_NORESERVE | libc::MAP_PRIVATE, ) .map(|r| { let mut region = GuestRegionMmap::new(r, 
GuestAddress(region.base_address))?; if track_dirty_pages { region.enable_dirty_page_tracking(); } Ok(region) }) .map_err(Error::CreateRegion)? .map_err(Error::CreateMemory)?; mmap_regions.push(mmap_region); } Ok(Self::from_regions(mmap_regions).map_err(Error::CreateMemory)?) } } #[cfg(test)] mod tests { use std::collections::HashMap; use super::*; use std::io::{Read, Seek}; use utils::tempfile::TempFile; use vm_memory::GuestAddress; #[test] fn test_describe_state() { let page_size: usize = sysconf::page::pagesize(); let mem_regions = [ (GuestAddress(0), page_size), (GuestAddress(page_size as u64 * 2), page_size), ]; let guest_memory = GuestMemoryMmap::from_ranges(&mem_regions[..]).unwrap(); let expected_memory_state = GuestMemoryState { regions: vec![ GuestMemoryRegionState { base_address: 0, size: page_size, offset: 0, }, GuestMemoryRegionState { base_address: page_size as u64 * 2, size: page_size, offset: page_size as u64, }, ], }; let actual_memory_state = guest_memory.describe(); assert_eq!(expected_memory_state, actual_memory_state); let mem_regions = [ (GuestAddress(0), page_size * 3), (GuestAddress(page_size as u64 * 4), page_size * 3), ]; let guest_memory = GuestMemoryMmap::from_ranges(&mem_regions[..]).unwrap(); let expected_memory_state = GuestMemoryState { regions: vec![ GuestMemoryRegionState { base_address: 0, size: page_size * 3, offset: 0, }, GuestMemoryRegionState { base_address: page_size as u64 * 4, size: page_size * 3, offset: page_size as u64 * 3, }, ], }; let actual_memory_state = guest_memory.describe(); assert_eq!(expected_memory_state, actual_memory_state); } #[test] fn test_restore_memory() { let page_size: usize = sysconf::page::pagesize(); let mem_regions = [ (GuestAddress(0), page_size * 2), (GuestAddress(page_size as u64 * 3), page_size * 2), ]; let guest_memory = GuestMemoryMmap::from_ranges_with_tracking(&mem_regions[..]).unwrap(); let _res: std::result::Result<(), Error> = guest_memory.with_regions(|_, r| { 
assert!(!r.dirty_bitmap().unwrap().is_bit_set(0)); assert!(!r.dirty_bitmap().unwrap().is_bit_set(1)); Ok(()) }); let first_region = vec![1u8; page_size * 2]; guest_memory .write(&first_region[..], GuestAddress(0)) .unwrap(); let second_region = vec![2u8; page_size * 2]; guest_memory .write(&second_region[..], GuestAddress(page_size as u64 * 3)) .unwrap(); let memory_state = guest_memory.describe(); { let memory_file = TempFile::new().unwrap(); guest_memory.dump(&mut memory_file.as_file()).unwrap(); let restored_guest_memory = GuestMemoryMmap::restore(&memory_file.as_file(), &memory_state, false).unwrap(); let mut actual_region = vec![0u8; page_size * 2]; restored_guest_memory .read(&mut actual_region.as_mut_slice(), GuestAddress(0)) .unwrap(); assert_eq!(first_region, actual_region); restored_guest_memory .read( &mut actual_region.as_mut_slice(), GuestAddress(page_size as u64 * 3), ) .unwrap(); assert_eq!(second_region, actual_region); } { let mut dirty_bitmap: DirtyBitmap = HashMap::new(); dirty_bitmap.insert(0, vec![0b01; 1]); dirty_bitmap.insert(1, vec![0b10; 1]); let file = TempFile::new().unwrap(); guest_memory .dump_dirty(&mut file.as_file(), &dirty_bitmap) .unwrap(); let restored_guest_memory = GuestMemoryMmap::restore(&file.as_file(), &memory_state, false).unwrap(); let mut actual_region = vec![0u8; page_size * 2]; restored_guest_memory .read(&mut actual_region.as_mut_slice(), GuestAddress(0)) .unwrap(); assert_eq!(first_region, actual_region); restored_guest_memory .read( &mut actual_region.as_mut_slice(), GuestAddress(page_size as u64 * 3), ) .unwrap(); assert_eq!(second_region, actual_region); let file = TempFile::new().unwrap(); let mut reader = file.as_file(); let zeros = vec![0u8; page_size]; let ones = vec![1u8; page_size]; let twos = vec![2u8; page_size]; guest_memory .write(&twos[..], GuestAddress(page_size as u64)) .unwrap(); guest_memory.dump_dirty(&mut reader, &dirty_bitmap).unwrap(); let mut diff_file_content = Vec::new(); let 
expected_first_region = [ ones.as_slice(), twos.as_slice(), zeros.as_slice(), twos.as_slice(), ] .concat(); reader.seek(SeekFrom::Start(0)).unwrap(); reader.read_to_end(&mut diff_file_content).unwrap(); assert_eq!(expected_first_region, diff_file_content); } } }
#![cfg(target_arch = "x86_64")] use std::fmt::{Display, Formatter}; use std::fs::File; use std::io::SeekFrom; use versionize::{VersionMap, Versionize, VersionizeResult}; use versionize_derive::Versionize; use vm_memory::{ Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryError, GuestMemoryMmap, GuestMemoryRegion, GuestRegionMmap, MemoryRegionAddress, MmapRegion, }; use crate::DirtyBitmap; #[derive(Debug, PartialEq, Versionize)] pub struct GuestMemoryRegionState { pub base_address: u64, pub size: usize, pub offset: u64, } #[derive(Debug, Default, PartialEq, Versionize)] pub struct GuestMemoryState { pub regions: Vec<GuestMemoryRegionState>, } pub trait SnapshotMemory where Self: Sized, { fn describe(&self) -> GuestMemoryState; fn dump<T: std::io::Write>(&self, writer: &mut T) -> std::result::Result<(), Error>; fn dump_dirty<T: std::io::Write + std::io::Seek>( &self, writer: &mut T, dirty_bitmap: &DirtyBitmap, ) -> std::result::Result<(), Error>; fn restore( file: &File, state: &GuestMemoryState, track_dirty_pages: bool, ) -> std::result::Result<Self, Error>; } #[derive(Debug)] pub enum Error { FileHandle(std::io::Error), CreateMemory(vm_memory::Error), CreateRegion(vm_memory::mmap::MmapRegionError), WriteMemory(GuestMemoryError), } impl Display for Error { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { use self::Error::*; match self { FileHandle(err) => write!(f, "Cannot access file: {:?}", err), CreateMemory(err) => write!(f, "Cannot create memory: {:?}", err), CreateRegion(err) => write!(f, "Cannot create memory region: {:?}", err), WriteMemory(err) => write!(f, "Cannot dump memory: {:?}", err), } } } impl SnapshotMemory for GuestMemoryMmap { fn describe(&self) -> GuestMemoryState { let mut guest_memory_state = GuestMemoryState::default(); let mut offset = 0; let _: std::result::Result<(), ()> = self.with_regions_mut(|_, region| { guest_memory_state.regions.push(GuestMemoryRegionState { base_address: region.start_addr().0, size: region.len() as 
usize, offset, }); offset += region.len(); Ok(()) }); guest_memory_state } fn dump<T: std::io::Write>(&self, writer: &mut T) -> std::result::Result<(), Error> { self.with_regions_mut(|_, region| { region.write_all_to(MemoryRegionAddress(0), writer, region.len() as usize) }) .map_err(Error::WriteMemory) } fn dump_dirty<T: std::io::Write + std::io::Seek>( &self, writer: &mut T, dirty_bitmap: &DirtyBitmap, ) -> std::result::Result<(), Error> { let page_size = sysconf::page::pagesize(); let mut writer_offset = 0; self.with_regions_mut(|slot, region| { let kvm_bitmap = dirty_bitmap.get(&slot).unwrap(); let firecracker_bitmap = region.dirty_bitmap().unwrap(); let mut write_size = 0; let mut dirty_batch_start: u64 = 0; for (i, v) in kvm_bitmap.iter().enumerate() { for j in 0..64 { let is_kvm_page_dirty = ((v >> j) & 1u64) != 0u64; let page_offset = ((i * 64) + j) * page_size; let is_firecracker_page_dirty = firecracker_bitmap.is_addr_set(page_offset); if is_kvm_page_dirty || is_firecracker_page_dirty { if write_size == 0 { writer .seek(SeekFrom::Start(writer_offset + page_offset as u64)) .unwrap(); dirty_batch_start = page_offset as u64; } write_size += page_size; } else if write_size > 0 { region.write_all_to( MemoryRegionAddress(dirty_batch_start), writer, write_size, )?; write_size = 0; } } } if write_size > 0 { region.write_all_to(MemoryRegionAddress(dirty_batch_start), writer, write_size)?; } writer_offset += region.len(); firecracker_bitmap.reset(); Ok(()) }) .map_err(Error::WriteMemory) } fn restore( file: &File, state: &GuestMemoryState, track_dirty_pages: bool, ) -> std::result::Result<Self, Error> { let mut mmap_regions = Vec::new(); for region in state.regions.iter() { let mmap_region = MmapRegion::build( Some(FileOffset::new( file.try_clone().map_err(Error::FileHandle)?, region.offset, )), region.size, libc::PROT_READ | libc::PROT_WRITE, libc::MAP_NORESERVE | libc::MAP_PRIVATE, ) .map(|r| { let mut region = GuestRegionMmap::new(r, 
GuestAddress(region.base_address))?; if track_dirty_pages { region.enable_dirty_page_tracking(); } Ok(region) }) .map_err(Error::CreateRegion)? .map_err(Error::CreateMemory)?; mmap_regions.push(mmap_region); } Ok(Self::from_regions(mmap_regions).map_err(Error::CreateMemory)?) } } #[cfg(test)] mod tests { use std::collections::HashMap; use super::*; use std::io::{Read, Seek}; use utils::tempfile::TempFile; use vm_memory::GuestAddress; #[test] fn test_describe_state() { let page_size: usize = sysconf::page::pagesize(); let mem_regions = [ (GuestAddress(0), page_size), (GuestAddress(page_size as u64 * 2), page_size), ]; let guest_memory = GuestMemoryMmap::from_ranges(&mem_regions[..]).unwrap(); let expected_memory_state = GuestMemoryState { region
#[test] fn test_restore_memory() { let page_size: usize = sysconf::page::pagesize(); let mem_regions = [ (GuestAddress(0), page_size * 2), (GuestAddress(page_size as u64 * 3), page_size * 2), ]; let guest_memory = GuestMemoryMmap::from_ranges_with_tracking(&mem_regions[..]).unwrap(); let _res: std::result::Result<(), Error> = guest_memory.with_regions(|_, r| { assert!(!r.dirty_bitmap().unwrap().is_bit_set(0)); assert!(!r.dirty_bitmap().unwrap().is_bit_set(1)); Ok(()) }); let first_region = vec![1u8; page_size * 2]; guest_memory .write(&first_region[..], GuestAddress(0)) .unwrap(); let second_region = vec![2u8; page_size * 2]; guest_memory .write(&second_region[..], GuestAddress(page_size as u64 * 3)) .unwrap(); let memory_state = guest_memory.describe(); { let memory_file = TempFile::new().unwrap(); guest_memory.dump(&mut memory_file.as_file()).unwrap(); let restored_guest_memory = GuestMemoryMmap::restore(&memory_file.as_file(), &memory_state, false).unwrap(); let mut actual_region = vec![0u8; page_size * 2]; restored_guest_memory .read(&mut actual_region.as_mut_slice(), GuestAddress(0)) .unwrap(); assert_eq!(first_region, actual_region); restored_guest_memory .read( &mut actual_region.as_mut_slice(), GuestAddress(page_size as u64 * 3), ) .unwrap(); assert_eq!(second_region, actual_region); } { let mut dirty_bitmap: DirtyBitmap = HashMap::new(); dirty_bitmap.insert(0, vec![0b01; 1]); dirty_bitmap.insert(1, vec![0b10; 1]); let file = TempFile::new().unwrap(); guest_memory .dump_dirty(&mut file.as_file(), &dirty_bitmap) .unwrap(); let restored_guest_memory = GuestMemoryMmap::restore(&file.as_file(), &memory_state, false).unwrap(); let mut actual_region = vec![0u8; page_size * 2]; restored_guest_memory .read(&mut actual_region.as_mut_slice(), GuestAddress(0)) .unwrap(); assert_eq!(first_region, actual_region); restored_guest_memory .read( &mut actual_region.as_mut_slice(), GuestAddress(page_size as u64 * 3), ) .unwrap(); assert_eq!(second_region, actual_region); let 
file = TempFile::new().unwrap(); let mut reader = file.as_file(); let zeros = vec![0u8; page_size]; let ones = vec![1u8; page_size]; let twos = vec![2u8; page_size]; guest_memory .write(&twos[..], GuestAddress(page_size as u64)) .unwrap(); guest_memory.dump_dirty(&mut reader, &dirty_bitmap).unwrap(); let mut diff_file_content = Vec::new(); let expected_first_region = [ ones.as_slice(), twos.as_slice(), zeros.as_slice(), twos.as_slice(), ] .concat(); reader.seek(SeekFrom::Start(0)).unwrap(); reader.read_to_end(&mut diff_file_content).unwrap(); assert_eq!(expected_first_region, diff_file_content); } } }
s: vec![ GuestMemoryRegionState { base_address: 0, size: page_size, offset: 0, }, GuestMemoryRegionState { base_address: page_size as u64 * 2, size: page_size, offset: page_size as u64, }, ], }; let actual_memory_state = guest_memory.describe(); assert_eq!(expected_memory_state, actual_memory_state); let mem_regions = [ (GuestAddress(0), page_size * 3), (GuestAddress(page_size as u64 * 4), page_size * 3), ]; let guest_memory = GuestMemoryMmap::from_ranges(&mem_regions[..]).unwrap(); let expected_memory_state = GuestMemoryState { regions: vec![ GuestMemoryRegionState { base_address: 0, size: page_size * 3, offset: 0, }, GuestMemoryRegionState { base_address: page_size as u64 * 4, size: page_size * 3, offset: page_size as u64 * 3, }, ], }; let actual_memory_state = guest_memory.describe(); assert_eq!(expected_memory_state, actual_memory_state); }
function_block-function_prefixed
[ { "content": "/// Returns a Vec of the valid memory addresses.\n\n/// These should be used to configure the GuestMemoryMmap structure for the platform.\n\n/// For x86_64 all addresses are valid from the start of the kernel except a\n\n/// carve out at the end of 32bit address space.\n\npub fn arch_memory_region...
Rust
liblumen_alloc/src/erts/term/arch/repr.rs
bitwalker/lumen
7d286b93d1a839aa7de5fed7020bafc1bc39f300
use core::fmt::{self, Debug, Display}; use core::hash::Hash; use alloc::sync::Arc; use std::backtrace::Backtrace; use crate::erts::term::prelude::*; use super::Tag; pub trait Repr: Sized + Copy + Debug + Display + PartialEq<Self> + Eq + PartialOrd<Self> + Ord + Hash + Send { type Word: Clone + Copy + PartialEq + Eq + Debug + fmt::Binary; fn as_usize(&self) -> usize; fn word_to_usize(word: Self::Word) -> usize; fn value(&self) -> Self::Word; fn type_of(&self) -> Tag<Self::Word>; fn encode_immediate(value: Self::Word, tag: Self::Word) -> Self; fn encode_header(value: Self::Word, tag: Self::Word) -> Self; fn encode_list(value: *const Cons) -> Self; fn encode_box<U>(value: *const U) -> Self where U: ?Sized; fn encode_literal<U>(value: *const U) -> Self where U: ?Sized; unsafe fn decode_box(self) -> *mut Self; unsafe fn decode_list(self) -> Boxed<Cons>; unsafe fn decode_smallint(self) -> SmallInteger; unsafe fn decode_immediate(self) -> Self::Word; unsafe fn decode_atom(self) -> Atom; unsafe fn decode_pid(self) -> Pid; unsafe fn decode_port(self) -> Port; unsafe fn decode_header_value(&self) -> Self::Word; fn decode_header( &self, tag: Tag<Self::Word>, literal: Option<bool>, ) -> Result<TypedTerm, TermDecodingError> where Self: Encoded, { let ptr = Boxed::new(self as *const _ as *mut u64).ok_or_else(|| { TermDecodingError::NoneValue { backtrace: Arc::new(Backtrace::capture()), } })?; match tag { Tag::Tuple => { let tuple = unsafe { Tuple::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::Tuple(tuple)) } Tag::Closure => { let closure = unsafe { Closure::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::Closure(closure)) } Tag::HeapBinary => { let bin = unsafe { HeapBin::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::HeapBinary(bin)) } #[cfg(not(target_arch = "x86_64"))] Tag::Float => Ok(TypedTerm::Float(ptr.cast::<Float>())), Tag::BigInteger => Ok(TypedTerm::BigInteger(ptr.cast::<BigInteger>())), Tag::Reference => 
Ok(TypedTerm::Reference(ptr.cast::<Reference>())), Tag::ResourceReference => Ok(TypedTerm::ResourceReference(ptr.cast::<Resource>())), Tag::ProcBin => match literal { Some(false) => Ok(TypedTerm::ProcBin(ptr.cast::<ProcBin>())), Some(true) => Ok(TypedTerm::BinaryLiteral(ptr.cast::<BinaryLiteral>())), None => { let offset = BinaryLiteral::flags_offset(); debug_assert_eq!(offset, ProcBin::inner_offset()); let flags_ptr = unsafe { (self as *const _ as *const u8).offset(offset as isize) as *const BinaryFlags }; let flags = unsafe { *flags_ptr }; if flags.is_literal() { Ok(TypedTerm::BinaryLiteral(ptr.cast::<BinaryLiteral>())) } else { Ok(TypedTerm::ProcBin(ptr.cast::<ProcBin>())) } } }, Tag::SubBinary => Ok(TypedTerm::SubBinary(ptr.cast::<SubBinary>())), Tag::MatchContext => Ok(TypedTerm::MatchContext(ptr.cast::<MatchContext>())), Tag::ExternalPid => Ok(TypedTerm::ExternalPid(ptr.cast::<ExternalPid>())), Tag::ExternalPort => Ok(TypedTerm::ExternalPort(ptr.cast::<ExternalPort>())), Tag::ExternalReference => Ok(TypedTerm::ExternalReference( ptr.cast::<ExternalReference>(), )), Tag::Map => Ok(TypedTerm::Map(ptr.cast::<Map>())), Tag::None => Err(TermDecodingError::NoneValue { backtrace: Arc::new(Backtrace::capture()), }), _ => Err(TermDecodingError::InvalidTag { backtrace: Arc::new(Backtrace::capture()), }), } } #[inline] unsafe fn decode_header_unchecked( &self, tag: Tag<Self::Word>, literal: Option<bool>, ) -> TypedTerm where Self: Encoded, { match self.decode_header(tag.clone(), literal) { Ok(term) => term, Err(_) => panic!("invalid type tag: {:?}", tag), } } }
use core::fmt::{self, Debug, Display}; use core::hash::Hash; use alloc::sync::Arc; use std::backtrace::Backtrace; use crate::erts::term::prelude::*; use super::Tag; pub trait Repr: Sized + Copy + Debug + Display + PartialEq<Self> + Eq + PartialOrd<Self> + Ord + Hash + Send { type Word: Clone + Copy + PartialEq + Eq + Debug + fmt::Binary; fn as_usize(&self) -> usize; fn word_to_usize(word: Self::Word) -> usize; fn value(&self) -> Self::Word; fn type_of(&self) -> Tag<Self::Word>; fn encode_immediate(value: Self::Word, tag: Self::Word) -> Self; fn encode_header(value: Self::Word, tag: Self::Word) -> Self; fn encode_list(value: *const Cons) -> Self; fn encode_box<U>(value: *const U) -> Self where U: ?Sized; fn encode_literal<U>(value: *const U) -> Self where U: ?Sized; unsafe fn decode_box(self) -> *mut Self; unsafe fn decode_list(self) -> Boxed<Cons>; unsafe fn decode_smallint(self) -> SmallInteger; unsafe fn decode_immediate(self) -> Self::Word; unsafe fn decode_atom(self) -> Atom; unsafe fn decode_pid(self) -> Pid; unsafe fn decode_port(self) -> Port; unsafe fn decode_header_value(&self) -> Self::Word; fn decode_header( &self, tag: Tag<Self::Word>, literal: Option<bool>, ) -> Result<TypedTerm, TermDecodingError> where Self: Encoded, { let ptr = Boxed::new(self as *const _ as *mut u64).ok_or_else(|| { TermDecodingError::NoneValue { backtrace: Arc::new(Backtrace::capture()), } })?; match tag { Tag::Tuple => { let tuple = unsafe { Tuple::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::Tuple(tuple)) } Tag::Closure => { let closure = unsafe { Closure::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::Closure(closure)) } Tag::HeapBinary => { let bin = unsafe { HeapBin::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::HeapBinary(bin)) } #[cfg(not(target_arch = "x86_64"))] Tag::Float => Ok(TypedTerm::Float(ptr.cast::<Float>())), Tag::BigInteger => Ok(TypedTerm::BigInteger(ptr.cast::<BigInteger>())), Tag::Reference => 
Ok(TypedTerm::Reference(ptr.cast::<Reference>())), Tag::ResourceReference => Ok(TypedTerm::ResourceReference(ptr.cast::<Resource>())), Tag::ProcBin => match literal { Some(false)
se { Ok(TypedTerm::ProcBin(ptr.cast::<ProcBin>())) } } }, Tag::SubBinary => Ok(TypedTerm::SubBinary(ptr.cast::<SubBinary>())), Tag::MatchContext => Ok(TypedTerm::MatchContext(ptr.cast::<MatchContext>())), Tag::ExternalPid => Ok(TypedTerm::ExternalPid(ptr.cast::<ExternalPid>())), Tag::ExternalPort => Ok(TypedTerm::ExternalPort(ptr.cast::<ExternalPort>())), Tag::ExternalReference => Ok(TypedTerm::ExternalReference( ptr.cast::<ExternalReference>(), )), Tag::Map => Ok(TypedTerm::Map(ptr.cast::<Map>())), Tag::None => Err(TermDecodingError::NoneValue { backtrace: Arc::new(Backtrace::capture()), }), _ => Err(TermDecodingError::InvalidTag { backtrace: Arc::new(Backtrace::capture()), }), } } #[inline] unsafe fn decode_header_unchecked( &self, tag: Tag<Self::Word>, literal: Option<bool>, ) -> TypedTerm where Self: Encoded, { match self.decode_header(tag.clone(), literal) { Ok(term) => term, Err(_) => panic!("invalid type tag: {:?}", tag), } } }
=> Ok(TypedTerm::ProcBin(ptr.cast::<ProcBin>())), Some(true) => Ok(TypedTerm::BinaryLiteral(ptr.cast::<BinaryLiteral>())), None => { let offset = BinaryLiteral::flags_offset(); debug_assert_eq!(offset, ProcBin::inner_offset()); let flags_ptr = unsafe { (self as *const _ as *const u8).offset(offset as isize) as *const BinaryFlags }; let flags = unsafe { *flags_ptr }; if flags.is_literal() { Ok(TypedTerm::BinaryLiteral(ptr.cast::<BinaryLiteral>())) } el
random
[ { "content": "#[inline]\n\npub fn in_area<T, U>(ptr: *const T, start: *const U, end: *const U) -> bool\n\nwhere\n\n T: ?Sized,\n\n U: ?Sized,\n\n{\n\n // If any pointers are null, the only sensible answer is false\n\n if ptr.is_null() || start.is_null() || end.is_null() {\n\n false\n\n } e...
Rust
wayland-commons/src/map.rs
atouchet/wayland-rs
de9eac07cb9d295333a33ee45dae4342341bc26e
use crate::{Interface, MessageGroup, NoMessage}; use std::cmp::Ordering; pub const SERVER_ID_LIMIT: u32 = 0xFF00_0000; pub trait ObjectMetadata: Clone { fn child(&self) -> Self; } impl ObjectMetadata for () { fn child(&self) {} } #[derive(Clone)] pub struct Object<Meta: ObjectMetadata> { pub interface: &'static str, pub version: u32, pub requests: &'static [crate::wire::MessageDesc], pub events: &'static [crate::wire::MessageDesc], pub meta: Meta, pub childs_from_events: fn(u16, u32, &Meta) -> Option<Object<Meta>>, pub childs_from_requests: fn(u16, u32, &Meta) -> Option<Object<Meta>>, } impl<Meta: ObjectMetadata + std::fmt::Debug> std::fmt::Debug for Object<Meta> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Object") .field("interface", &self.interface) .field("version", &self.version) .field("requests", &self.requests) .field("events", &self.events) .field("meta", &self.meta) .finish() } } impl<Meta: ObjectMetadata> Object<Meta> { pub fn from_interface<I: Interface>(version: u32, meta: Meta) -> Object<Meta> { Object { interface: I::NAME, version, requests: I::Request::MESSAGES, events: I::Event::MESSAGES, meta, childs_from_events: childs_from::<I::Event, Meta>, childs_from_requests: childs_from::<I::Request, Meta>, } } pub fn event_child(&self, opcode: u16) -> Option<Object<Meta>> { (self.childs_from_events)(opcode, self.version, &self.meta) } pub fn request_child(&self, opcode: u16) -> Option<Object<Meta>> { (self.childs_from_requests)(opcode, self.version, &self.meta) } pub fn is_interface<I: Interface>(&self) -> bool { self.interface == I::NAME } pub fn placeholder(meta: Meta) -> Object<Meta> { Object { interface: "", version: 0, requests: &[], events: &[], meta, childs_from_events: childs_from::<NoMessage, Meta>, childs_from_requests: childs_from::<NoMessage, Meta>, } } } fn childs_from<M: MessageGroup, Meta: ObjectMetadata>( opcode: u16, version: u32, meta: &Meta, ) -> Option<Object<Meta>> { M::child(opcode, version, 
meta) } #[derive(Default, Debug)] pub struct ObjectMap<Meta: ObjectMetadata> { client_objects: Vec<Option<Object<Meta>>>, server_objects: Vec<Option<Object<Meta>>>, } impl<Meta: ObjectMetadata> ObjectMap<Meta> { pub fn new() -> ObjectMap<Meta> { ObjectMap { client_objects: Vec::new(), server_objects: Vec::new() } } pub fn find(&self, id: u32) -> Option<Object<Meta>> { if id == 0 { None } else if id >= SERVER_ID_LIMIT { self.server_objects.get((id - SERVER_ID_LIMIT) as usize).and_then(Clone::clone) } else { self.client_objects.get((id - 1) as usize).and_then(Clone::clone) } } pub fn remove(&mut self, id: u32) { if id == 0 { } else if id >= SERVER_ID_LIMIT { if let Some(place) = self.server_objects.get_mut((id - SERVER_ID_LIMIT) as usize) { *place = None; } } else if let Some(place) = self.client_objects.get_mut((id - 1) as usize) { *place = None; } } #[allow(clippy::result_unit_err)] pub fn insert_at(&mut self, id: u32, object: Object<Meta>) -> Result<(), ()> { if id == 0 { Err(()) } else if id >= SERVER_ID_LIMIT { insert_in_at(&mut self.server_objects, (id - SERVER_ID_LIMIT) as usize, object) } else { insert_in_at(&mut self.client_objects, (id - 1) as usize, object) } } pub fn client_insert_new(&mut self, object: Object<Meta>) -> u32 { insert_in(&mut self.client_objects, object) + 1 } pub fn server_insert_new(&mut self, object: Object<Meta>) -> u32 { insert_in(&mut self.server_objects, object) + SERVER_ID_LIMIT } #[allow(clippy::result_unit_err)] pub fn with<T, F: FnOnce(&mut Object<Meta>) -> T>(&mut self, id: u32, f: F) -> Result<T, ()> { if id == 0 { Err(()) } else if id >= SERVER_ID_LIMIT { if let Some(&mut Some(ref mut obj)) = self.server_objects.get_mut((id - SERVER_ID_LIMIT) as usize) { Ok(f(obj)) } else { Err(()) } } else if let Some(&mut Some(ref mut obj)) = self.client_objects.get_mut((id - 1) as usize) { Ok(f(obj)) } else { Err(()) } } pub fn with_all<F: FnMut(u32, &mut Object<Meta>)>(&mut self, mut f: F) { for (id, place) in 
self.client_objects.iter_mut().enumerate() { if let Some(ref mut obj) = *place { f(id as u32 + 1, obj); } } for (id, place) in self.server_objects.iter_mut().enumerate() { if let Some(ref mut obj) = *place { f(id as u32 + SERVER_ID_LIMIT, obj); } } } } fn insert_in<Meta: ObjectMetadata>( store: &mut Vec<Option<Object<Meta>>>, object: Object<Meta>, ) -> u32 { match store.iter().position(Option::is_none) { Some(id) => { store[id] = Some(object); id as u32 } None => { store.push(Some(object)); (store.len() - 1) as u32 } } } fn insert_in_at<Meta: ObjectMetadata>( store: &mut Vec<Option<Object<Meta>>>, id: usize, object: Object<Meta>, ) -> Result<(), ()> { match id.cmp(&store.len()) { Ordering::Greater => Err(()), Ordering::Equal => { store.push(Some(object)); Ok(()) } Ordering::Less => { let previous = &mut store[id]; if !previous.is_none() { return Err(()); } *previous = Some(object); Ok(()) } } }
use crate::{Interface, MessageGroup, NoMessage}; use std::cmp::Ordering; pub const SERVER_ID_LIMIT: u32 = 0xFF00_0000; pub trait ObjectMetadata: Clone { fn child(&self) -> Self; } impl ObjectMetadata for () { fn child(&self) {} } #[derive(Clone)] pub struct Object<Meta: ObjectMetadata> { pub interface: &'static str, pub version: u32, pub requests: &'static [crate::wire::MessageDesc], pub events: &'static [crate::wire::MessageDesc], pub meta: Meta, pub childs_from_events: fn(u16, u32, &Meta) -> Option<Object<Meta>>, pub childs_from_requests: fn(u16, u32, &Meta) -> Option<Object<Meta>>, } impl<Meta: ObjectMetadata + std::fmt::Debug> std::fmt::Debug for Object<Meta> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Object") .field("interface", &self.interface) .field("version", &self.version) .field("requests", &self.requests) .field("events", &self.events) .field("meta", &self.meta) .finish() } } impl<Meta: ObjectMetadata> Object<Meta> { pub fn from_interface<I: Interface>(version: u32, meta: Meta) -> Object<Meta> { Object { interface: I::NAME, version, requests: I::Request::MESSAGES, events: I::Event::MESSAGES, meta, childs_from_events: childs_from::<I::Event, Meta>, childs_from_requests: childs_from::<I::Request, Meta>, } } pub fn event_child(&self, opcode: u16) -> Option<Object<Meta>> { (self.childs_from_events)(opcode, self.version, &self.meta) } pub fn request_child(&self, opcode: u16) -> Option<Object<Meta>> { (self.childs_from_requests)(opcode, self.version, &self.meta) } pub fn is_interface<I: Interface>(&self) -> bool { self.interface == I::NAME } pub fn placeholder(meta: Meta) -> Object<Meta> { Object { interface: "", version: 0, requests: &[], events: &[], meta, childs_from_events: childs_from::<NoMessage, Meta>, childs_from_requests: childs_from::<NoMessage, Meta>, } } } fn childs_from<M: MessageGroup, Meta: ObjectMetadata>( opcode: u16, version: u32, meta: &Meta, ) -> Option<Object<Meta>> { M::child(opcode, version, 
meta) } #[derive(Default, Debug)] pub struct ObjectMap<Meta: ObjectMetadata> { client_objects: Vec<Option<Object<Meta>>>, server_objects: Vec<Option<Object<Meta>>>, } impl<Meta: ObjectMetadata> ObjectMap<Meta> { pub fn new() -> ObjectMap<Meta> { ObjectMap { client_objects: Vec::new(), server_objects: Vec::new() } } pub fn find(&self, id: u32) -> Option<Object<Meta>> { if id == 0 { None } else if id >= SERVER_ID_LIMIT { self.server_objects.get((id - SERVER_ID_LIMIT) as usize).and_then(Clone::clone) } else { self.client_objects.get((id - 1) as usize).and_then(Clone::clone) } } pub fn remove(&mut self, id: u32) { if id == 0 { } else if id >= SERVER_ID_LIMIT { if let Some(place) = self.server_objects.get_mut((id - SERVER_ID_LIMIT) as usize) { *
#[allow(clippy::result_unit_err)] pub fn insert_at(&mut self, id: u32, object: Object<Meta>) -> Result<(), ()> { if id == 0 { Err(()) } else if id >= SERVER_ID_LIMIT { insert_in_at(&mut self.server_objects, (id - SERVER_ID_LIMIT) as usize, object) } else { insert_in_at(&mut self.client_objects, (id - 1) as usize, object) } } pub fn client_insert_new(&mut self, object: Object<Meta>) -> u32 { insert_in(&mut self.client_objects, object) + 1 } pub fn server_insert_new(&mut self, object: Object<Meta>) -> u32 { insert_in(&mut self.server_objects, object) + SERVER_ID_LIMIT } #[allow(clippy::result_unit_err)] pub fn with<T, F: FnOnce(&mut Object<Meta>) -> T>(&mut self, id: u32, f: F) -> Result<T, ()> { if id == 0 { Err(()) } else if id >= SERVER_ID_LIMIT { if let Some(&mut Some(ref mut obj)) = self.server_objects.get_mut((id - SERVER_ID_LIMIT) as usize) { Ok(f(obj)) } else { Err(()) } } else if let Some(&mut Some(ref mut obj)) = self.client_objects.get_mut((id - 1) as usize) { Ok(f(obj)) } else { Err(()) } } pub fn with_all<F: FnMut(u32, &mut Object<Meta>)>(&mut self, mut f: F) { for (id, place) in self.client_objects.iter_mut().enumerate() { if let Some(ref mut obj) = *place { f(id as u32 + 1, obj); } } for (id, place) in self.server_objects.iter_mut().enumerate() { if let Some(ref mut obj) = *place { f(id as u32 + SERVER_ID_LIMIT, obj); } } } } fn insert_in<Meta: ObjectMetadata>( store: &mut Vec<Option<Object<Meta>>>, object: Object<Meta>, ) -> u32 { match store.iter().position(Option::is_none) { Some(id) => { store[id] = Some(object); id as u32 } None => { store.push(Some(object)); (store.len() - 1) as u32 } } } fn insert_in_at<Meta: ObjectMetadata>( store: &mut Vec<Option<Object<Meta>>>, id: usize, object: Object<Meta>, ) -> Result<(), ()> { match id.cmp(&store.len()) { Ordering::Greater => Err(()), Ordering::Equal => { store.push(Some(object)); Ok(()) } Ordering::Less => { let previous = &mut store[id]; if !previous.is_none() { return Err(()); } *previous = 
Some(object); Ok(()) } } }
place = None; } } else if let Some(place) = self.client_objects.get_mut((id - 1) as usize) { *place = None; } }
function_block-function_prefix_line
[ { "content": "fn display_req_child(opcode: u16, _: u32, meta: &ObjectMeta) -> Option<Object<ObjectMeta>> {\n\n match opcode {\n\n // sync\n\n 0 => Some(Object::from_interface::<crate::protocol::wl_callback::WlCallback>(\n\n 1,\n\n meta.child(),\n\n )),\n\n //...
Rust
mem6/src/fetch_mod.rs
bestia-dev/mem6_game
9bd5ccab9f66fc884dd7ed2ea774f35520e124eb
use crate::*; use unwrap::unwrap; use wasm_bindgen_futures::spawn_local; use dodrio::VdomWeak; pub fn async_fetch_game_config_and_update(rrc: &mut RootRenderingComponent, vdom: VdomWeak) { let url_config = format!( "{}/content/{}/game_config.json", rrc.web_data.href, rrc.game_data.game_name ); spawn_local({ let vdom_on_next_tick = vdom.clone(); async move { let respbody = websysmod::fetch_response(url_config).await; let json = unwrap!(serde_json::from_str(respbody.as_str())); unwrap!( vdom_on_next_tick .with_component({ move |root| { let rrc = root.unwrap_mut::<RootRenderingComponent>(); rrc.game_data.game_config = json; } }) .await ); } }); } pub fn fetch_games_metadata_and_update(href: &str, vdom: VdomWeak) { let url_config = format!("{}/content/gamesmetadata.json", href); spawn_local({ let vdom_on_next_tick = vdom.clone(); async move { let respbody = websysmod::fetch_response(url_config).await; let v: game_data_mod::GamesMetadata = unwrap!(serde_json::from_str(&respbody)); unwrap!( vdom_on_next_tick .with_component({ move |root| { let rrc = root.unwrap_mut::<RootRenderingComponent>(); rrc.game_data.content_folders.clear(); for x in &v.vec_game_metadata { rrc.game_data.content_folders.push(x.folder.clone()); } rrc.game_data.games_metadata = Some(v); } }) .await ); } }); } pub fn fetch_videos_and_update(href: &str, vdom: VdomWeak) { let url = format!("{}/content/videos.json", href); spawn_local({ let vdom_on_next_tick = vdom.clone(); async move { let respbody = websysmod::fetch_response(url).await; let vid_json: game_data_mod::Videos = unwrap!(serde_json::from_str(&respbody)); unwrap!( vdom_on_next_tick .with_component({ move |root| { let rrc = root.unwrap_mut::<RootRenderingComponent>(); rrc.game_data.videos = vid_json.videos; } }) .await ); } }); } pub fn fetch_audio_and_update(href: &str, vdom: VdomWeak) { let url = format!("{}/content/audio.json", href); spawn_local({ let vdom_on_next_tick = vdom.clone(); async move { let respbody = 
websysmod::fetch_response(url).await; let aud_json: game_data_mod::Audio = unwrap!(serde_json::from_str(&respbody)); unwrap!( vdom_on_next_tick .with_component({ move |root| { let rrc = root.unwrap_mut::<RootRenderingComponent>(); rrc.game_data.audio = aud_json.audio; } }) .await ); } }); } #[allow(clippy::needless_pass_by_value)] pub fn fetch_all_img_for_cache_request(rrc: &mut RootRenderingComponent) { let (start_index, end_index) = rrc.game_data.grid_start_end_index(); for i in start_index..end_index { #[allow(clippy::indexing_slicing)] let x = &rrc.game_data.card_grid_data[i]; let url_img = format!( "content/{}/img/{}", rrc.game_data.game_name, unwrap!(unwrap!(rrc.game_data.game_config.as_ref()) .img_filename .get(x.card_number)) ); spawn_local(websysmod::fetch_only(url_img)); } }
use crate::*; use unwrap::unwrap; use wasm_bindgen_futures::spawn_local; use dodrio::VdomWeak; pub fn async_fetch_game_config_and_update(rrc: &mut RootRenderingComponent, vdom: VdomWeak) { let url_config = format!( "{}/content/{}/game_config.json", rrc.web_data.href, rrc.game_data.game_name ); spawn_local({ let vdom_on_next_tick = vdom.clone(); async move { let respbody = websysmod::fetch_response(url_config).await; let json = unwrap!(serde_json::from_str(respbody.as_str())); unwrap!( vdom_on_next_tick .with_component({ move |root| { let rrc = root.unwrap_mut::<RootRenderingComponent>(); rrc.game_data.game_config = json; } }) .await ); } }); }
pub fn fetch_videos_and_update(href: &str, vdom: VdomWeak) { let url = format!("{}/content/videos.json", href); spawn_local({ let vdom_on_next_tick = vdom.clone(); async move { let respbody = websysmod::fetch_response(url).await; let vid_json: game_data_mod::Videos = unwrap!(serde_json::from_str(&respbody)); unwrap!( vdom_on_next_tick .with_component({ move |root| { let rrc = root.unwrap_mut::<RootRenderingComponent>(); rrc.game_data.videos = vid_json.videos; } }) .await ); } }); } pub fn fetch_audio_and_update(href: &str, vdom: VdomWeak) { let url = format!("{}/content/audio.json", href); spawn_local({ let vdom_on_next_tick = vdom.clone(); async move { let respbody = websysmod::fetch_response(url).await; let aud_json: game_data_mod::Audio = unwrap!(serde_json::from_str(&respbody)); unwrap!( vdom_on_next_tick .with_component({ move |root| { let rrc = root.unwrap_mut::<RootRenderingComponent>(); rrc.game_data.audio = aud_json.audio; } }) .await ); } }); } #[allow(clippy::needless_pass_by_value)] pub fn fetch_all_img_for_cache_request(rrc: &mut RootRenderingComponent) { let (start_index, end_index) = rrc.game_data.grid_start_end_index(); for i in start_index..end_index { #[allow(clippy::indexing_slicing)] let x = &rrc.game_data.card_grid_data[i]; let url_img = format!( "content/{}/img/{}", rrc.game_data.game_name, unwrap!(unwrap!(rrc.game_data.game_config.as_ref()) .img_filename .get(x.card_number)) ); spawn_local(websysmod::fetch_only(url_img)); } }
pub fn fetch_games_metadata_and_update(href: &str, vdom: VdomWeak) { let url_config = format!("{}/content/gamesmetadata.json", href); spawn_local({ let vdom_on_next_tick = vdom.clone(); async move { let respbody = websysmod::fetch_response(url_config).await; let v: game_data_mod::GamesMetadata = unwrap!(serde_json::from_str(&respbody)); unwrap!( vdom_on_next_tick .with_component({ move |root| { let rrc = root.unwrap_mut::<RootRenderingComponent>(); rrc.game_data.content_folders.clear(); for x in &v.vec_game_metadata { rrc.game_data.content_folders.push(x.folder.clone()); } rrc.game_data.games_metadata = Some(v); } }) .await ); } }); }
function_block-full_function
[ { "content": "/// on click\n\npub fn on_click_take_turn(rrc: &mut RootRenderingComponent, vdom: &VdomWeak) {\n\n // websysmod::debug_write(&format!(\"on_click_take_turn {}\", \"\"));\n\n\n\n let msg_id = ackmsgmod::prepare_for_ack_msg_waiting(rrc, vdom);\n\n\n\n let msg = websocketmod::WsMessageForRece...
Rust
src/network/tests.rs
ambaxter/expert-rs
5d5070f4c8842a0b4f53c6ebc277ea0444fedb58
use std::hash::{Hash, Hasher}; use std::fmt; use std::fmt::Debug; use traits::Fact; use ordered_float::NotNaN; use runtime::memory::SymbolId; use num::Float; #[derive(Clone, Hash, Eq, PartialEq)] pub enum CLimits<T: Hash + Eq + Ord + Clone> { S(T), D(T, T) } #[derive(Clone)] pub enum OrdData<T: Fact>{ I8(fn(&T) -> &i8, CLimits<i8>), I16(fn(&T) -> &i16, CLimits<i16>), I32(fn(&T) -> &i32, CLimits<i32>), I64(fn(&T) -> &i64, CLimits<i64>), U8(fn(&T) -> &u8, CLimits<u8>), U16(fn(&T) -> &u16, CLimits<u16>), U32(fn(&T) -> &u32, CLimits<u32>), U64(fn(&T) -> &u64, CLimits<u64>), ISIZE(fn(&T) -> &isize, CLimits<isize>), USIZE(fn(&T) -> &usize, CLimits<usize>), } impl<T: Fact> OrdData<T> { fn hash_self<H: Hasher, L: Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) { ord.hash(state); accessor.hash(state); limits.hash(state); } } impl<T: Fact> Hash for OrdData<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::OrdData::*; match self { &I8(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); }, &I16(accessor, ref limits) => { Self::hash_self(1, accessor as usize, limits, state); }, &I32(accessor, ref limits) => { Self::hash_self(2, accessor as usize, limits, state); }, &I64(accessor, ref limits) => { Self::hash_self(3, accessor as usize, limits, state); }, &U8(accessor, ref limits) => { Self::hash_self(4, accessor as usize, limits, state); }, &U16(accessor, ref limits) => { Self::hash_self(5, accessor as usize, limits, state); }, &U32(accessor, ref limits) => { Self::hash_self(6, accessor as usize, limits, state); }, &U64(accessor, ref limits) => { Self::hash_self(7, accessor as usize, limits, state); }, &ISIZE(accessor, ref limits) => { Self::hash_self(8, accessor as usize, limits, state); }, &USIZE(accessor, ref limits) => { Self::hash_self(9, accessor as usize, limits, state); } } } } impl<T: Fact> PartialEq for OrdData<T> { fn eq(&self, other: &Self) -> bool { use self::OrdData::*; match (self, other) { (&I8(accessor1, ref 
limits1), &I8(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&I16(accessor1, ref limits1), &I16(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&I32(accessor1, ref limits1), &I32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&I64(accessor1, ref limits1), &I64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U8(accessor1, ref limits1), &U8(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U16(accessor1, ref limits1), &U16(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U32(accessor1, ref limits1), &U32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U64(accessor1, ref limits1), &U64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&ISIZE(accessor1, ref limits1), &ISIZE(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&USIZE(accessor1, ref limits1), &USIZE(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, _ => false } } } impl<T: Fact> Eq for OrdData<T> {} #[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)] pub enum OrdTest { Ne, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe } #[derive(Copy, Clone, Eq, PartialEq)] pub enum FLimits<T: Float> { S(NotNaN<T>), D(NotNaN<T>, NotNaN<T>) } impl<T: Float> Hash for FLimits<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::FLimits::*; match self { &S(ref to) => to.hash(state), &D(ref from, ref to) => { from.hash(state); to.hash(state); }, } } } #[derive(Clone)] pub enum FlData<T: Fact>{ F32(fn(&T) -> &f32, FLimits<f32>), F64(fn(&T) -> &f64, FLimits<f64>), } impl<T: Fact> FlData<T> { fn hash_self<H: Hasher, L: 
Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) { ord.hash(state); accessor.hash(state); limits.hash(state); } } impl<T: Fact> Hash for FlData<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::FlData::*; match self { &F32(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); }, &F64(accessor, ref limits) => { Self::hash_self(1, accessor as usize, limits, state); }, } } } impl<T: Fact> PartialEq for FlData<T> { fn eq(&self, other: &Self) -> bool { use self::FlData::*; match (self, other) { (&F32(accessor1, ref limits1), &F32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&F64(accessor1, ref limits1), &F64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, _ => false } } } impl<T: Fact> Eq for FlData<T> {} #[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)] pub enum FlTest { ApproxEq, ApproxNe, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe } #[derive(Clone)] pub enum StrData<T: Fact> { REF(fn(&T) -> &str, CLimits<SymbolId>), } impl<T: Fact> StrData<T> { fn hash_self<H: Hasher, L: Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) { ord.hash(state); accessor.hash(state); limits.hash(state); } } impl<T: Fact> Hash for StrData<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::StrData::*; match self { &REF(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); }, } } } impl<T: Fact> PartialEq for StrData<T> { fn eq(&self, other: &Self) -> bool { use self::StrData::*; match (self, other) { (&REF(accessor1, ref limits1), &REF(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, _ => false } } } impl<T: Fact> Eq for StrData<T> {} #[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)] pub enum StrTest { Ne, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe, Contains, StartsWith, EndsWith } #[derive(Hash, Eq, PartialEq)] pub enum AlphaTest<T: Fact> { HashEq, 
Ord(OrdData<T>, OrdTest), Fl(FlData<T>, FlTest), Str(StrData<T>, StrTest), } impl<T: Fact> AlphaTest<T> { pub fn is_hash_eq(&self) -> bool { use self::AlphaTest::*; match self { &HashEq => true, _ => false } } } impl<T: Fact> Debug for AlphaTest<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use self::AlphaTest::*; write!(f, "Test{{")?; match self { &HashEq => { write!(f, "HashEq")? }, &Ord(ref data, ref test) => { write!(f, "Ord")? }, &Fl(ref data, ref test) => { write!(f, "Fl")? }, &Str(ref data, ref test) => { write!(f, "Str")? } } write!(f, "}}") } }
use std::hash::{Hash, Hasher}; use std::fmt; use std::fmt::Debug; use traits::Fact; use ordered_float::NotNaN; use runtime::memory::SymbolId; use num::Float; #[derive(Clone, Hash, Eq, PartialEq)] pub enum CLimits<T: Hash + Eq + Ord + Clone> { S(T), D(T, T) } #[derive(Clone)] pub enum OrdData<T: Fact>{ I8(fn(&T) -> &i8, CLimits<i8>), I16(fn(&T) -> &i16, CLimits<i16>), I32(fn(&T) -> &i32, CLimits<i32>), I64(fn(&T) -> &i64, CLimits<i64>), U8(fn(&T) -> &u8, CLimits<u8>), U16(fn(&T) -> &u16, CLimits<u16>), U32(fn(&T) -> &u32, CLimits<u32>), U64(fn(&T) -> &u64, CLimits<u64>), ISIZE(fn(&T) -> &isize, CLimits<isize>), USIZE(fn(&T) -> &usize, CLimits<usize>), } impl<T: Fact> OrdData<T> { fn hash_self<H: Hasher, L: Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) { ord.hash(state); accessor.hash(state); limits.hash(state); } } impl<T: Fact> Hash for OrdData<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::OrdData::*; match self { &I8(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); }, &I16(accessor, ref limits) => { Self::hash_self(1, accessor as usize,
&F64(accessor, ref limits) => { Self::hash_self(1, accessor as usize, limits, state); }, } } } impl<T: Fact> PartialEq for FlData<T> { fn eq(&self, other: &Self) -> bool { use self::FlData::*; match (self, other) { (&F32(accessor1, ref limits1), &F32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&F64(accessor1, ref limits1), &F64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, _ => false } } } impl<T: Fact> Eq for FlData<T> {} #[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)] pub enum FlTest { ApproxEq, ApproxNe, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe } #[derive(Clone)] pub enum StrData<T: Fact> { REF(fn(&T) -> &str, CLimits<SymbolId>), } impl<T: Fact> StrData<T> { fn hash_self<H: Hasher, L: Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) { ord.hash(state); accessor.hash(state); limits.hash(state); } } impl<T: Fact> Hash for StrData<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::StrData::*; match self { &REF(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); }, } } } impl<T: Fact> PartialEq for StrData<T> { fn eq(&self, other: &Self) -> bool { use self::StrData::*; match (self, other) { (&REF(accessor1, ref limits1), &REF(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, _ => false } } } impl<T: Fact> Eq for StrData<T> {} #[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)] pub enum StrTest { Ne, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe, Contains, StartsWith, EndsWith } #[derive(Hash, Eq, PartialEq)] pub enum AlphaTest<T: Fact> { HashEq, Ord(OrdData<T>, OrdTest), Fl(FlData<T>, FlTest), Str(StrData<T>, StrTest), } impl<T: Fact> AlphaTest<T> { pub fn is_hash_eq(&self) -> bool { use self::AlphaTest::*; match self { &HashEq => true, _ => false } } } impl<T: Fact> Debug for AlphaTest<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use 
self::AlphaTest::*; write!(f, "Test{{")?; match self { &HashEq => { write!(f, "HashEq")? }, &Ord(ref data, ref test) => { write!(f, "Ord")? }, &Fl(ref data, ref test) => { write!(f, "Fl")? }, &Str(ref data, ref test) => { write!(f, "Str")? } } write!(f, "}}") } }
limits, state); }, &I32(accessor, ref limits) => { Self::hash_self(2, accessor as usize, limits, state); }, &I64(accessor, ref limits) => { Self::hash_self(3, accessor as usize, limits, state); }, &U8(accessor, ref limits) => { Self::hash_self(4, accessor as usize, limits, state); }, &U16(accessor, ref limits) => { Self::hash_self(5, accessor as usize, limits, state); }, &U32(accessor, ref limits) => { Self::hash_self(6, accessor as usize, limits, state); }, &U64(accessor, ref limits) => { Self::hash_self(7, accessor as usize, limits, state); }, &ISIZE(accessor, ref limits) => { Self::hash_self(8, accessor as usize, limits, state); }, &USIZE(accessor, ref limits) => { Self::hash_self(9, accessor as usize, limits, state); } } } } impl<T: Fact> PartialEq for OrdData<T> { fn eq(&self, other: &Self) -> bool { use self::OrdData::*; match (self, other) { (&I8(accessor1, ref limits1), &I8(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&I16(accessor1, ref limits1), &I16(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&I32(accessor1, ref limits1), &I32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&I64(accessor1, ref limits1), &I64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U8(accessor1, ref limits1), &U8(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U16(accessor1, ref limits1), &U16(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U32(accessor1, ref limits1), &U32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U64(accessor1, ref limits1), &U64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&ISIZE(accessor1, ref limits1), &ISIZE(accessor2, ref 
limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&USIZE(accessor1, ref limits1), &USIZE(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, _ => false } } } impl<T: Fact> Eq for OrdData<T> {} #[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)] pub enum OrdTest { Ne, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe } #[derive(Copy, Clone, Eq, PartialEq)] pub enum FLimits<T: Float> { S(NotNaN<T>), D(NotNaN<T>, NotNaN<T>) } impl<T: Float> Hash for FLimits<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::FLimits::*; match self { &S(ref to) => to.hash(state), &D(ref from, ref to) => { from.hash(state); to.hash(state); }, } } } #[derive(Clone)] pub enum FlData<T: Fact>{ F32(fn(&T) -> &f32, FLimits<f32>), F64(fn(&T) -> &f64, FLimits<f64>), } impl<T: Fact> FlData<T> { fn hash_self<H: Hasher, L: Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) { ord.hash(state); accessor.hash(state); limits.hash(state); } } impl<T: Fact> Hash for FlData<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::FlData::*; match self { &F32(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); },
random
[ { "content": "pub trait Fact: Introspect + Eq + Hash\n\n where Self: std::marker::Sized {\n\n type HashEq: Hash + Eq + Clone + Debug;\n\n fn create_hash_eq(conditions: &Vec<StatementConditions>, cache: &StringCache) -> Self::HashEq;\n\n fn new_from_fields(fields: &[FieldValue], cache: &StringCache) ...
Rust
src/can1/mir1_arb.rs
crawford/efm32gg11b820
390142de0a68b55a142bb16d31634cebf2289209
#[doc = "Reader of register MIR1_ARB"] pub type R = crate::R<u32, super::MIR1_ARB>; #[doc = "Writer for register MIR1_ARB"] pub type W = crate::W<u32, super::MIR1_ARB>; #[doc = "Register MIR1_ARB `reset()`'s with value 0"] impl crate::ResetValue for super::MIR1_ARB { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `ID`"] pub type ID_R = crate::R<u32, u32>; #[doc = "Write proxy for field `ID`"] pub struct ID_W<'a> { w: &'a mut W, } impl<'a> ID_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !0x1fff_ffff) | ((value as u32) & 0x1fff_ffff); self.w } } #[doc = "Reader of field `DIR`"] pub type DIR_R = crate::R<bool, bool>; #[doc = "Write proxy for field `DIR`"] pub struct DIR_W<'a> { w: &'a mut W, } impl<'a> DIR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "Reader of field `XTD`"] pub type XTD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `XTD`"] pub struct XTD_W<'a> { w: &'a mut W, } impl<'a> XTD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "Reader of field `MSGVAL`"] pub type MSGVAL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `MSGVAL`"] pub struct MSGVAL_W<'a> { 
w: &'a mut W, } impl<'a> MSGVAL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:28 - Message Identifier"] #[inline(always)] pub fn id(&self) -> ID_R { ID_R::new((self.bits & 0x1fff_ffff) as u32) } #[doc = "Bit 29 - Message Direction"] #[inline(always)] pub fn dir(&self) -> DIR_R { DIR_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - Extended Identifier"] #[inline(always)] pub fn xtd(&self) -> XTD_R { XTD_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - Message Valid"] #[inline(always)] pub fn msgval(&self) -> MSGVAL_R { MSGVAL_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:28 - Message Identifier"] #[inline(always)] pub fn id(&mut self) -> ID_W { ID_W { w: self } } #[doc = "Bit 29 - Message Direction"] #[inline(always)] pub fn dir(&mut self) -> DIR_W { DIR_W { w: self } } #[doc = "Bit 30 - Extended Identifier"] #[inline(always)] pub fn xtd(&mut self) -> XTD_W { XTD_W { w: self } } #[doc = "Bit 31 - Message Valid"] #[inline(always)] pub fn msgval(&mut self) -> MSGVAL_W { MSGVAL_W { w: self } } }
#[doc = "Reader of register MIR1_ARB"] pub type R = crate::R<u32, super::MIR1_ARB>; #[doc = "Writer for register MIR1_ARB"] pub type W = crate::W<u32, super::MIR1_ARB>; #[doc = "Register MIR1_ARB `reset()`'s with value 0"] impl crate::ResetValue for super::MIR1_ARB { type Type = u32; #[inline(always)] fn rese
self.w } } #[doc = "Reader of field `MSGVAL`"] pub type MSGVAL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `MSGVAL`"] pub struct MSGVAL_W<'a> { w: &'a mut W, } impl<'a> MSGVAL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:28 - Message Identifier"] #[inline(always)] pub fn id(&self) -> ID_R { ID_R::new((self.bits & 0x1fff_ffff) as u32) } #[doc = "Bit 29 - Message Direction"] #[inline(always)] pub fn dir(&self) -> DIR_R { DIR_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - Extended Identifier"] #[inline(always)] pub fn xtd(&self) -> XTD_R { XTD_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - Message Valid"] #[inline(always)] pub fn msgval(&self) -> MSGVAL_R { MSGVAL_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:28 - Message Identifier"] #[inline(always)] pub fn id(&mut self) -> ID_W { ID_W { w: self } } #[doc = "Bit 29 - Message Direction"] #[inline(always)] pub fn dir(&mut self) -> DIR_W { DIR_W { w: self } } #[doc = "Bit 30 - Extended Identifier"] #[inline(always)] pub fn xtd(&mut self) -> XTD_W { XTD_W { w: self } } #[doc = "Bit 31 - Message Valid"] #[inline(always)] pub fn msgval(&mut self) -> MSGVAL_W { MSGVAL_W { w: self } } }
t_value() -> Self::Type { 0 } } #[doc = "Reader of field `ID`"] pub type ID_R = crate::R<u32, u32>; #[doc = "Write proxy for field `ID`"] pub struct ID_W<'a> { w: &'a mut W, } impl<'a> ID_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !0x1fff_ffff) | ((value as u32) & 0x1fff_ffff); self.w } } #[doc = "Reader of field `DIR`"] pub type DIR_R = crate::R<bool, bool>; #[doc = "Write proxy for field `DIR`"] pub struct DIR_W<'a> { w: &'a mut W, } impl<'a> DIR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "Reader of field `XTD`"] pub type XTD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `XTD`"] pub struct XTD_W<'a> { w: &'a mut W, } impl<'a> XTD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Res...
Rust
crates/core/plugin_sm/tests/plugin_manager.rs
PradeepKiruvale/localworkflow
b5f3c97c835cb36ae87f14b8697bedcca5d22619
#[cfg(test)] mod tests { use plugin_sm::plugin_manager::{ExternalPlugins, Plugins}; use std::{fs::File, path::PathBuf, str::FromStr}; use tempfile::NamedTempFile; #[test] fn plugin_manager_load_plugins_empty() { let temp_dir = tempfile::tempdir().unwrap(); let plugin_dir = temp_dir.path().to_owned(); let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap(); let _ = plugins.load(); assert!(plugins.empty()); } #[test] fn plugin_manager_load_plugins_some_non_executables() { let temp_dir = tempfile::tempdir().unwrap(); let _file = create_some_plugin_in(&temp_dir); let plugin_dir = temp_dir.path().to_owned(); let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap(); let _ = plugins.load(); assert!(plugins.empty()); } #[test] fn plugin_manager_load_plugins_some_by_plugins_none() { let temp_dir = tempfile::tempdir().unwrap(); let _file = create_some_plugin_in(&temp_dir); let _file = create_some_plugin_in(&temp_dir); let plugin_dir = temp_dir.path().to_owned(); let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap(); let _ = plugins.load(); assert!(plugins.by_software_type("test").is_none()); assert!(plugins.by_file_extension("test").is_none()); assert!(plugins.default().is_none()); } #[test] fn plugin_manager_load_plugins_some_by_plugins_some() { let temp_dir = tempfile::tempdir().unwrap(); let plugin1 = create_some_plugin_in(&temp_dir); let plugin2 = create_some_plugin_in(&temp_dir); let plugin_name1 = plugin1 .path() .file_name() .unwrap() .to_str() .unwrap() .to_owned(); let plugin_name2 = plugin2 .path() .file_name() .unwrap() .to_str() .unwrap() .to_owned(); let _res = std::fs::copy(get_dummy_plugin_path(), plugin1.path()); let _res = std::fs::copy(get_dummy_plugin_path(), plugin2.path()); let (_, _path) = plugin1.keep().unwrap(); let (_, _path) = plugin2.keep().unwrap(); let plugin_dir = temp_dir.path().to_owned(); dbg!(&plugin_dir); let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap(); let _ = 
plugins.load(); assert!(plugins.by_software_type(&plugin_name1).is_some()); assert!(plugins.by_software_type(&plugin_name2).is_some()); assert!(plugins.by_file_extension(&plugin_name1).is_none()); assert!(plugins.default().is_none()); } #[test] fn explicit_default_plugin() { let plugin_dir = tempfile::tempdir().unwrap(); let plugin1 = create_some_plugin_in(&plugin_dir); let _res = std::fs::copy(get_dummy_plugin_path(), plugin1.path()); let (_, _path) = plugin1.keep().unwrap(); let plugin2 = create_some_plugin_in(&plugin_dir); let _res = std::fs::copy(get_dummy_plugin_path(), plugin2.path()); let plugin_name2 = plugin2 .path() .file_name() .unwrap() .to_str() .unwrap() .to_owned(); let (_, _path) = plugin2.keep().unwrap(); let plugin3 = create_some_plugin_in(&plugin_dir); let _res = std::fs::copy(get_dummy_plugin_path(), plugin3.path()); let (_, _path) = plugin3.keep().unwrap(); let mut plugins = ExternalPlugins::open(plugin_dir.into_path(), Some(plugin_name2.clone()), None) .unwrap(); plugins.load().unwrap(); assert_eq!( plugins.by_software_type("default").unwrap().name, plugin_name2 ); assert_eq!(plugins.default().unwrap().name, plugin_name2); } #[test] fn implicit_default_plugin_with_only_one_plugin() { let plugin_dir = tempfile::tempdir().unwrap(); let plugin = create_some_plugin_in(&plugin_dir); let _res = std::fs::copy(get_dummy_plugin_path(), plugin.path()); let plugin_name = plugin .path() .file_name() .unwrap() .to_str() .unwrap() .to_owned(); let (_, _path) = plugin.keep().unwrap(); let mut plugins = ExternalPlugins::open(plugin_dir.into_path(), None, None).unwrap(); plugins.load().unwrap(); assert_eq!( plugins.by_software_type("default").unwrap().name, plugin_name ); assert_eq!(plugins.default().unwrap().name, plugin_name); } #[test] fn invalid_default_plugin_pass_through() -> anyhow::Result<()> { let plugin_dir = tempfile::tempdir().unwrap(); let plugin_file_path = plugin_dir.path().join("apt"); let _ = File::create(plugin_file_path).unwrap(); let result 
= ExternalPlugins::open(plugin_dir.into_path(), Some("dummy".into()), None)?; assert!(result.empty()); assert!(result.default().is_none()); Ok(()) } fn create_some_plugin_in(dir: &tempfile::TempDir) -> NamedTempFile { tempfile::Builder::new() .suffix(".0") .tempfile_in(dir) .unwrap() } fn get_dummy_plugin_path() -> PathBuf { let package_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); let dummy_plugin_path = PathBuf::from_str(package_dir.as_str()) .unwrap() .parent() .unwrap() .parent() .unwrap() .parent() .unwrap() .join("target/debug/tedge_dummy_plugin"); dummy_plugin_path } }
#[cfg(test)] mod tests { use plugin_sm::plugin_manager::{ExternalPlugins, Plugins}; use std::{fs::File, path::PathBuf, str::FromStr}; use tempfile::NamedTempFile; #[test] fn plugin_manager_load_plugins_empty() { let temp_dir = tempfile::tempdir().unwrap(); let plugin_dir = temp_dir.path().to_owned(); let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap(); let _ = plugins.load(); assert!(plugins.empty()); } #[test] fn plugin_manager_load_plugins_some_non_executables() { let temp_dir = tempfile::tempdir().unwrap(); let _file = create_some_plugin_in(&temp_dir); let plugin_dir = temp_dir.path().to_owned(); let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap(); let _ = plugins.load(); assert!(plugins.empty()); } #[test] fn plugin_manager_load_plugins_some_by_plugins_none() { let temp_dir = tempfile::tempdir().unwrap(); let _file = create_some_plugin_in(&temp_dir); let _file = create_some_plugin_in(&temp_dir); let plugin_dir = temp_dir.path().to_owned(); let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap(); let _ = plugins.load(); assert!(plugins.by_software_type("test").is_none()); assert!(plugins.by_file_extension("test").is_none()); assert!(plugins.default().is_none()); } #[test] fn plugin_manager_load_plugins_some_by_plugins_some() { let temp_dir = tempfile::tempdir().unwrap(); let plugin1 = create_some_plugin_in(&temp_dir); let plugin2 = create_some_plugin_in(&temp_dir); let plugin_name1 = plugin1 .path() .file_name() .unwrap() .to_str() .unwrap() .to_owned(); let plugin_name2 = plugin2 .path() .file_name() .unwrap() .to_str() .unwrap() .to_owned(); let _res = std::fs::copy(get_dummy_plugin_path(), plugin1.path()); let _res = std::fs::copy(get_dummy_plugin_path(), plugin2.path()); let (_, _path) = plugin1.keep().unwrap(); let (_, _path) = plugin2.keep().unwrap(); let plugin_dir = temp_dir.path().to_owned(); dbg!(&plugin_dir); let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap(); let _ = 
plugins.load(); assert!(plugins.by_software_type(&plugin_name1).is_some()); assert!(plugins.by_software_type(&plugin_name2).is_some()); assert!(plugins.by_file_extension(&plugin_name1).is_none()); assert!(plugins.default().is_none()); } #[test] fn explicit_default_plugin() { let plugin_dir = tempfile::tempdir().unwrap(); let plugin1 = create_some_plugin_in(&plugin_dir); let _res = std::fs::copy(get_dummy_plugin_path(), plugin1.path()); let (_, _path) = plugin1.keep().unwrap(); let plugin2 = create_some_plugin_in(&plugin_dir); let _res = std::fs::copy(get_dummy_plugin_path(), plugin2.path()); let plugin_name2 = plugin2 .path() .file_name() .unwrap() .to_str() .unwrap() .to_owned(); let (_, _path) = plugin2.keep().unwrap(); let plugin3 = create_some_plugin_in(&plugin_dir); let _res = std::fs::copy(get_dummy_plugin_path(), plugin3.path()); let (_, _path) = plugin3.keep().unwrap(); let mut plugins = ExternalPlugins::open(plugin_dir.into_path(), Some(plugin_name2.clone()), None) .unwrap(); plugins.load().unwrap(); assert_eq!( plugins.by_software_type("default").unwrap().name, plugin_name2 ); assert_eq!(plugins.default().unwrap().name, plugin_name2); } #[test] fn implicit_default_plugin_with_only_one_plugin() { let plugin_dir = tempfile::tempdir().unwrap(); let plugin = create_some_plugin_in(&plugin_dir); let _res = std::fs::copy(get_dummy_plugin_path(), plugin.path()); let plugin_name = plugin .path() .file_name() .unwrap() .to_str() .unwrap() .to_owned(); let (_, _path) = plugin.keep().unwrap(); let mut plugins = ExternalPlugins::open(plugin_dir.into_path(), None, None).unwrap(); plugins.load().unwrap(); assert_eq!( plugins.by_software_type("default").unwrap().name, plugin_name ); assert_eq!(plugins.default().unwrap().name, plugin_name); } #[test] fn invalid_default_plugin_pass_through() -> anyhow::Result<()> { let plugin_dir = tempfile::tempdir().unwrap(); let plugin_file_path = plugin_dir.path().join("apt"); let _ = File::create(plugin_file_path).unwra
fn create_some_plugin_in(dir: &tempfile::TempDir) -> NamedTempFile { tempfile::Builder::new() .suffix(".0") .tempfile_in(dir) .unwrap() } fn get_dummy_plugin_path() -> PathBuf { let package_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); let dummy_plugin_path = PathBuf::from_str(package_dir.as_str()) .unwrap() .parent() .unwrap() .parent() .unwrap() .parent() .unwrap() .join("target/debug/tedge_dummy_plugin"); dummy_plugin_path } }
p(); let result = ExternalPlugins::open(plugin_dir.into_path(), Some("dummy".into()), None)?; assert!(result.empty()); assert!(result.default().is_none()); Ok(()) }
function_block-function_prefixed
[ { "content": "fn get_project_name(tedge_apama_project_path: &Path) -> String {\n\n let tedge_apama_project_descriptor_path = tedge_apama_project_path.join(\".project\");\n\n if tedge_apama_project_descriptor_path.exists() {\n\n if let Ok(xml_content) = fs::read_to_string(tedge_apama_project_descrip...
Rust
xtask/src/main.rs
YruamaLairba/rust-lv2-more-examples
0d19fd3e120ec3563ad7e7cd471e1396cbf8e512
#![allow(clippy::try_err)] extern crate getopts; use getopts::Options; use std::env; use std::fs; use std::fs::File; use std::io::BufRead; use std::io::BufReader; use std::io::BufWriter; use std::io::Write; use std::iter::Iterator; use std::path::{Path, PathBuf}; use std::process::Command; type DynError = Box<dyn std::error::Error>; #[derive(Clone, Copy)] struct PackageConf<'a> { name: &'a str, post_build: fn(conf: &Config) -> Result<(), DynError>, } const PACKAGES_CONF: &[PackageConf] = &[ PackageConf { name: "eg-worker-rs", post_build: |conf| { let lib_file_name = [&conf.lib_prefix(), "eg_worker_rs", &conf.lib_suffix()].concat(); let subs: &[(&str, &str)] = &[("@LIB_FILE_NAME@", &lib_file_name)]; let src_dir = workspace_root().join("eg-worker-rs"); let out_dir = conf.build_dir().join("lv2").join("eg-worker-rs"); fs::create_dir_all(&out_dir).unwrap(); subst( src_dir.join("manifest.ttl"), out_dir.join("manifest.ttl"), subs, ) .unwrap(); for e in &["worker.ttl"] { fs::copy(src_dir.join(e), out_dir.join(e)).unwrap(); } fs::copy( conf.build_dir().join(&lib_file_name), out_dir.join(&lib_file_name), ) .unwrap(); Ok(()) }, }, PackageConf { name: "eg-preset-rs", post_build: |conf| { let lib_file_name = [&conf.lib_prefix(), "eg_preset_rs", &conf.lib_suffix()].concat(); let subs: &[(&str, &str)] = &[("@LIB_FILE_NAME@", &lib_file_name)]; let src_dir = workspace_root().join("eg-preset-rs"); let out_dir = conf.build_dir().join("lv2").join("eg-preset-rs"); fs::create_dir_all(&out_dir).unwrap(); subst( src_dir.join("manifest.ttl"), out_dir.join("manifest.ttl"), subs, ) .unwrap(); for e in &["eg-preset-rs.ttl", "presets.ttl"] { fs::copy(src_dir.join(e), out_dir.join(e)).unwrap(); } fs::copy( conf.build_dir().join(&lib_file_name), out_dir.join(&lib_file_name), ) .unwrap(); Ok(()) }, }, ]; struct Config<'a> { subcommand: String, target: String, target_dir: String, release: bool, packages_conf: Vec<PackageConf<'a>>, opts: Options, } impl<'a> Config<'a> { fn from_env() -> 
Result<Self, DynError> { let mut args = env::args(); let subcommand = if let Some(arg) = args.nth(1) { arg } else { String::from("") }; let mut opts_args = Vec::<String>::new(); for e in args { if e == "--" { break; } opts_args.push(e); } let mut opts = Options::new(); opts.optmulti("p", "project", "project to build", "NAME"); opts.optflag("", "all", "build all projects"); opts.optflag("", "release", "build in release mode, with optimization"); opts.optopt("", "target", "build for the target triple", "TRIPLE"); opts.optopt( "", "target-dir", "directory for all generated artifacts", "DIRECTORY", ); opts.optflag("h", "help", "print this help menu"); let matches = opts.parse(&opts_args)?; let target = if let Some(s) = matches.opt_str("target") { s } else if let Some(var) = env::var_os("CARGO_BUILD_TARGET") { var.into_string().unwrap() } else { String::from("") }; let target_dir = if let Some(s) = matches.opt_str("target-dir") { s } else if let Some(var) = env::var_os("CARGO_TARGET_DIR") { var.into_string().unwrap() } else if let Some(var) = env::var_os("CARGO_BUILD_TARGET_DIR") { var.into_string().unwrap() } else { String::from("target") }; let release = matches.opt_present("release"); let packages_conf = if matches.opt_present("all") || !matches.opt_present("project") { PACKAGES_CONF.iter().copied().collect::<Vec<PackageConf>>() } else { let mut tmp = Vec::<PackageConf>::new(); let project = matches.opt_strs("p"); 'proj_loop: for proj in project { for pkg_conf in PACKAGES_CONF { if proj == pkg_conf.name { tmp.push(*pkg_conf); continue 'proj_loop; } } return Err(format!("No project named `{}`", proj).into()); } tmp }; Ok(Self { subcommand, target, target_dir, release, packages_conf, opts, }) } fn print_help(&self) { let brief = "Usage: cargo xtask SUBCOMMAND [options]"; let mut usage = self.opts.usage(&brief); let more_help= " Subcomands are: build build lv2 project(s) Handled environnement variable: CARGO_BUILD_TARGET CARGO_TARGET_DIR CARGO_BUILD_TARGET_DIR "; 
usage.push_str(&more_help); print!("{}", usage); } fn build_dir(&self) -> PathBuf { let profile_dir = if self.release { "release" } else { "debug" }; workspace_root() .join(&self.target_dir) .join(&self.target) .join(profile_dir) } fn packages_conf(&self) -> Vec<PackageConf> { self.packages_conf.clone() } fn lib_prefix(&self) -> String { let prefix = if self.target.contains("apple") { "lib" } else if self.target.contains("windows") { "" } else if cfg!(target_vendor = "apple") { "lib" } else if cfg!(target_os = "windows") { "" } else { "lib" }; String::from(prefix) } fn lib_suffix(&self) -> String { let suffix = if self.target.contains("apple") { ".dylib" } else if self.target.contains("windows") { ".dll" } else if cfg!(target_vendor = "apple") { ".dylib" } else if cfg!(target_os = "windows") { ".dll" } else { ".so" }; String::from(suffix) } } fn main() { if let Err(e) = try_main() { eprintln!("{}", e); std::process::exit(-1); } } fn try_main() -> Result<(), DynError> { let mut conf = Config::from_env()?; match conf.subcommand.as_ref() { "build" => build(&mut conf)?, "debug" => debug(&mut conf)?, _ => conf.print_help(), } Ok(()) } fn build(conf: &mut Config) -> Result<(), DynError> { let mut cargo_args = Vec::<String>::new(); if conf.release { cargo_args.push(String::from("--release")); } if conf.target != "" { cargo_args.push(String::from("--target")); cargo_args.push(conf.target.clone()); } cargo_args.push(String::from("--target-dir")); cargo_args.push(conf.target_dir.clone()); for p in conf.packages_conf() { cargo_args.push(String::from("-p")); cargo_args.push(String::from(p.name)); } println!("Building binarie(s)"); cargo("build", &cargo_args)?; println!("Post build step(s)"); for p in conf.packages_conf() { (p.post_build)(conf)?; } println!("Finished"); println!(); Ok(()) } fn subst<P: AsRef<Path>, Q: AsRef<Path>>( in_path: P, out_path: Q, subs: &[(&str, &str)], ) -> Result<(), DynError> { let mut in_file = BufReader::new(File::open(in_path)?); let mut out_file 
= BufWriter::new(File::create(out_path)?); let mut buf = String::new(); while in_file.read_line(&mut buf).unwrap() != 0 { for (token, value) in subs { buf = buf.replace(token, value); } write!(out_file, "{}", buf)?; buf.clear(); } Ok(()) } macro_rules! print_env { ( $x:expr) => {{ println!( stringify!($x {}), env::var(stringify!($x)).unwrap_or_else(|e| format!("{}", e)) ); }}; } fn debug(_conf: &mut Config) -> Result<(), DynError> { print_env!(CARGO); print_env!(CARGO_MANIFEST_DIR); print_env!(CARGO_PKG_VERSION); print_env!(CARGO_PKG_VERSION_MAJOR); print_env!(CARGO_PKG_VERSION_MINOR); print_env!(CARGO_PKG_VERSION_PATCH); print_env!(CARGO_PKG_VERSION_PRE); print_env!(CARGO_PKG_AUTHORS); print_env!(CARGO_PKG_NAME); print_env!(CARGO_PKG_DESCRIPTION); print_env!(CARGO_PKG_HOMEPAGE); print_env!(CARGO_PKG_REPOSITORY); print_env!(OUT_DIR); print_env!(TARGET); print_env!(CARGO_CFG_TARGET_OS); Ok(()) } fn cargo(cmd: &str, args: &[String]) -> Result<(), DynError> { let cargo = env::var("CARGO").unwrap_or_else(|_| "cargo".to_string()); let status = Command::new(cargo) .current_dir(workspace_root()) .arg(cmd) .args(args) .status()?; if !status.success() { Err(format!("cargo {} failed", cmd))?; } Ok(()) } fn workspace_root() -> PathBuf { Path::new(&env!("CARGO_MANIFEST_DIR")) .ancestors() .nth(1) .unwrap() .to_path_buf() }
#![allow(clippy::try_err)] extern crate getopts; use getopts::Options; use std::env; use std::fs; use std::fs::File; use std::io::BufRead; use std::io::BufReader; use std::io::BufWriter; use std::io::Write; use std::iter::Iterator; use std::path::{Path, PathBuf}; use std::process::Command; type DynError = Box<dyn std::error::Error>; #[derive(Clone, Copy)] struct PackageConf<'a> { name: &'a str, post_build: fn(conf: &Config) -> Result<(), DynError>, } const PACKAGES_CONF: &[PackageConf] = &[ PackageConf { name: "eg-worker-rs", post_build: |conf| { let lib_file_name = [&conf.lib_prefix(), "eg_worker_rs", &conf.lib_suffix()].concat(); let subs: &[(&str, &str)] = &[("@LIB_FILE_NAME@", &lib_file_name)]; let src_dir = workspace_root().join("eg-worker-rs"); let out_dir = conf.build_dir().join("lv2").join("eg-worker-rs"); fs::create_dir_all(&out_dir).unwrap(); subst( src_dir.join("manifest.ttl"), out_dir.join("manifest.ttl"), subs, ) .unwrap(); for e in &["worker.ttl"] { fs::copy(src_dir.join(e), out_dir.join(e)).unwrap(); } fs::copy( conf.build_dir().join(&lib_file_name), out_dir.join(&lib_file_name), ) .unwrap(); Ok(()) }, }, PackageConf { name: "eg-preset-rs", post_build: |conf| { let lib_file_name = [&conf.lib_prefix(), "eg_preset_rs", &conf.lib_suffix()].concat(); let subs: &[(&str, &str)] = &[("@LIB_FILE_NAME@", &lib_file_name)]; let src_dir = workspace_root().join("eg-preset-rs"); let out_dir = conf.build_dir().join("lv2").join("eg-preset-rs"); fs::create_dir_all(&out_dir).unwrap(); subst( src_dir.join("manifest.ttl"), out_dir.join("manifest.ttl"), subs, ) .unwrap(); for e in &["eg-preset-rs.ttl", "presets.ttl"] { fs::copy(src_dir.join(e), out_dir.join(e)).unwrap(); } fs::copy( conf.build_dir().join(&lib_file_name), out_dir.join(&lib_file_name), ) .unwrap(); Ok(()) }, }, ]; struct Config<'a> { subcommand: String, target: String, target_dir: String, release: bool, packages_conf: Vec<PackageConf<'a>>, opts: Options, } impl<'a> Config<'a> { fn from_env() -> 
Result<Self, DynError> { let mut args = env::args(); let subcommand = if let Some(arg) = args.nth(1) { arg } else { String::from("") }; let mut opts_args = Vec::<String>::new(); for e in args { if e == "--" { break; } opts_args.push(e); } let mut opts = Options::new(); opts.optmulti("p", "project", "project to build", "NAME"); opts.optflag("", "all", "build all projects"); opts.optflag("", "release", "build in release mode, with optimization"); opts.optopt("", "target", "build for the target triple", "TRIPLE"); opts.optopt( "", "target-dir", "directory for all generated artifacts", "DIRECTORY", ); opts.optflag("h", "help", "print this help menu"); let matches = opts.parse(&opts_args)?; let target = if let Some(s) = matches.opt_str("target") { s } else if let Some(var) = env::var_os("CARGO_BUILD_TARGET") { var.into_string().unwrap() } else { String::from("") }; let target_dir = if let Some(s) = matches.opt_str("target-dir") { s } else if let Some(var) = env::var_os("CARGO_TARGET_DIR") { var.into_string().unwrap() } else if let Some(var) = env::var_os("CARGO_BUILD_TARGET_DIR") { var.into_string().unwrap() } else { String::from("target") }; let release = matches.opt_present("release"); let packages_conf = if matches.opt_present("all") || !matches.opt_present("project") { PACKAGES_CONF.iter().copied().collect::<Vec<PackageConf>>() } else { let mut tmp = Vec::<PackageConf>::new(); let project = matches.opt_strs("p"); 'proj_loop: for proj in project { for pkg_conf in PACKAGES_CONF { if proj == pkg_conf.name { tmp.push(*pkg_conf); continue 'proj_loop; } } return Err(format!("No project named `{}`", proj).into()); } tmp }; Ok(Self { subcommand, target, target_dir, release, packages_conf, opts, }) } fn print_help(&self) { let brief = "Usage: cargo xtask SUBCOMMAND [options]"; let mut usage = self.opts.usage(&brief); let more_help= " Subcomands are: build build lv2 project(s) Handled environnement variable: CARGO_BUILD_TARGET CARGO_TARGET_DIR CARGO_BUILD_TARGET_DIR "; 
usage.push_str(&more_help); print!("{}", usage); } fn build_dir(&self) -> PathBuf { let profile_dir = if self.release { "release" } else { "debug" }; workspace_root() .join(&self.target_dir) .join(&self.target) .join(profile_dir) } fn packages_conf(&self) -> Vec<PackageConf> { self.packages_conf.clone() } fn lib_prefix(&self) -> String { let prefix = if self.target.contains("apple") { "lib" } else
; String::from(prefix) } fn lib_suffix(&self) -> String { let suffix = if self.target.contains("apple") { ".dylib" } else if self.target.contains("windows") { ".dll" } else if cfg!(target_vendor = "apple") { ".dylib" } else if cfg!(target_os = "windows") { ".dll" } else { ".so" }; String::from(suffix) } } fn main() { if let Err(e) = try_main() { eprintln!("{}", e); std::process::exit(-1); } } fn try_main() -> Result<(), DynError> { let mut conf = Config::from_env()?; match conf.subcommand.as_ref() { "build" => build(&mut conf)?, "debug" => debug(&mut conf)?, _ => conf.print_help(), } Ok(()) } fn build(conf: &mut Config) -> Result<(), DynError> { let mut cargo_args = Vec::<String>::new(); if conf.release { cargo_args.push(String::from("--release")); } if conf.target != "" { cargo_args.push(String::from("--target")); cargo_args.push(conf.target.clone()); } cargo_args.push(String::from("--target-dir")); cargo_args.push(conf.target_dir.clone()); for p in conf.packages_conf() { cargo_args.push(String::from("-p")); cargo_args.push(String::from(p.name)); } println!("Building binarie(s)"); cargo("build", &cargo_args)?; println!("Post build step(s)"); for p in conf.packages_conf() { (p.post_build)(conf)?; } println!("Finished"); println!(); Ok(()) } fn subst<P: AsRef<Path>, Q: AsRef<Path>>( in_path: P, out_path: Q, subs: &[(&str, &str)], ) -> Result<(), DynError> { let mut in_file = BufReader::new(File::open(in_path)?); let mut out_file = BufWriter::new(File::create(out_path)?); let mut buf = String::new(); while in_file.read_line(&mut buf).unwrap() != 0 { for (token, value) in subs { buf = buf.replace(token, value); } write!(out_file, "{}", buf)?; buf.clear(); } Ok(()) } macro_rules! 
print_env { ( $x:expr) => {{ println!( stringify!($x {}), env::var(stringify!($x)).unwrap_or_else(|e| format!("{}", e)) ); }}; } fn debug(_conf: &mut Config) -> Result<(), DynError> { print_env!(CARGO); print_env!(CARGO_MANIFEST_DIR); print_env!(CARGO_PKG_VERSION); print_env!(CARGO_PKG_VERSION_MAJOR); print_env!(CARGO_PKG_VERSION_MINOR); print_env!(CARGO_PKG_VERSION_PATCH); print_env!(CARGO_PKG_VERSION_PRE); print_env!(CARGO_PKG_AUTHORS); print_env!(CARGO_PKG_NAME); print_env!(CARGO_PKG_DESCRIPTION); print_env!(CARGO_PKG_HOMEPAGE); print_env!(CARGO_PKG_REPOSITORY); print_env!(OUT_DIR); print_env!(TARGET); print_env!(CARGO_CFG_TARGET_OS); Ok(()) } fn cargo(cmd: &str, args: &[String]) -> Result<(), DynError> { let cargo = env::var("CARGO").unwrap_or_else(|_| "cargo".to_string()); let status = Command::new(cargo) .current_dir(workspace_root()) .arg(cmd) .args(args) .status()?; if !status.success() { Err(format!("cargo {} failed", cmd))?; } Ok(()) } fn workspace_root() -> PathBuf { Path::new(&env!("CARGO_MANIFEST_DIR")) .ancestors() .nth(1) .unwrap() .to_path_buf() }
if self.target.contains("windows") { "" } else if cfg!(target_vendor = "apple") { "lib" } else if cfg!(target_os = "windows") { "" } else { "lib" }
if_condition
[ { "content": "#[derive(PortCollection)]\n\nstruct Ports {\n\n trigger_task: InputPort<Control>,\n\n}\n\n\n\n/// Requested features\n", "file_path": "eg-worker-rs/src/lib.rs", "rank": 9, "score": 48153.91537814452 }, { "content": "#[derive(PortCollection)]\n\nstruct Ports {\n\n _param1:...
Rust
src/view/mod.rs
matthias-t/Smith
e89ded89a4ce2147ca3c8be6ed065a8f1a808fff
mod screen; use self::screen::Screen; use data::{Editable, Named, Selectable}; use std::{cmp, iter}; use termion::{color, style, terminal_size}; pub struct View { message: Option<String>, is_prompt: bool, line_offset: usize, screen: Screen, } const TAB_LENGTH: usize = 4; impl View { pub fn new() -> Self { View { message: None, is_prompt: false, line_offset: 0, screen: Screen::new(), } } pub fn message(&mut self, message: &str) { self.is_prompt = false; self.message = Some(String::from(message)); } pub fn prompt(&mut self, prompt: &str, message: &str) { self.is_prompt = true; let msg = String::from(prompt) + message; self.message = Some(msg); } pub fn quiet(&mut self) { self.is_prompt = false; self.message = None; } pub fn center_view(&mut self, line: usize) { self.line_offset = line .checked_sub(self.lines_height() as usize / 2) .unwrap_or(0); } pub fn adjust_view(&mut self, line: usize) { if line < self.line_offset { self.line_offset = line; } else if line + 1 >= self.line_offset + self.lines_height() { self.line_offset = 1 + line - self.lines_height(); } } pub fn scroll_view<T: Editable>(&mut self, offset: isize, content: &T) { self.line_offset = cmp::min( cmp::max((self.line_offset as isize) + offset, 0), (content.line_count() as isize) - 1, ) as usize; } pub fn render<T>(&mut self, content: &T) where T: Editable + Named + Selectable, { self.screen.clear(&color::Reset); self.paint_lines(content); self.paint_status(content); self.paint_message(); self.paint_cursor(content); self.screen.present(); } pub fn translate_coordinates<T>(&self, content: &T, x: u16, y: u16) -> (usize, usize) where T: Editable, { let line = cmp::min( (y as isize + self.line_offset as isize - 1) as usize, content.line_count() - 1, ); let visual_col = (cmp::max( 0, x as isize - self.line_number_width(content.line_count()) as isize - 2, )) as usize; let col = content .iter_line(line) .scan(0, |state, x| { *state += if x == '\t' { TAB_LENGTH } else { 1 }; Some(*state) }).take_while(|&x| x <= 
visual_col) .count(); (line, col) } fn paint_message(&self) { if let Some(ref message) = self.message { let y = self.lines_height() + 1; self.screen.draw(0, y, message); } } fn paint_cursor<T>(&mut self, content: &T) where T: Editable + Selectable, { if (content.line()) < self.line_offset || content.line() >= self.line_offset + self.lines_height() || content.col() >= self.lines_width(content.line_count()) || content.sel().is_some() { self.screen.hide_cursor(); return; } let (x, y) = if self.is_prompt { ( self.message.clone().unwrap().chars().count(), self.lines_height() + 1, ) } else { let (a, b) = self.cursor_pos(content); (a, b) }; self.screen.move_cursor(x, y); self.screen.show_cursor(); } fn paint_status<T>(&self, content: &T) where T: Editable + Named, { let line = content.line(); let column = content.col(); let line_count = content.line_count(); let advance = ((line + 1) as f64 / line_count as f64 * 100.0).floor(); let (screen_width, _) = terminal_size().unwrap(); let empty_line = (0..screen_width).map(|_| ' ').collect::<String>(); let y = self.lines_height(); let style = format!("{}{}", color::Fg(color::White), style::Invert); self.screen.draw_with_style(0, y, &empty_line, &style); self.screen.draw_with_style(0, y, content.name(), &style); let position_info = format!("{}% {}/{}: {}", advance, line + 1, line_count, column); let x = screen_width as usize - position_info.len(); self.screen.draw_with_style(x, y, &position_info, &style); } fn paint_lines<T>(&self, content: &T) where T: Editable + Selectable, { let line_offset = self.line_offset as usize; let lines_height = self.lines_height() as usize; let lines_width = self.lines_width(content.line_count()) as usize; let line_count = content.line_count(); let line_start = self.line_number_width(line_count) as usize + 1; for (y, line) in content .lines() .skip(line_offset) .take(cmp::min(lines_height, line_count)) .enumerate() { let line_index = line_offset + y; self.screen.draw_with_style( 0, y, &format!("{}", 1 
+ line_index), &format!("{}", color::Fg(color::White)), ); if line.len_chars() > 1 { let line_start_char_index = content.line_index_to_char_index(line_index); for (x, c) in line .chars() .flat_map(|c| { if c == '\t' { iter::repeat(' ').take(TAB_LENGTH) } else { iter::repeat(c).take(1) } }).enumerate() { let char_index = line_start_char_index + x; if x < lines_width { if content.in_sel(char_index) { self.screen.draw_with_style( x + line_start, y, &format!("{}", c), &format!("{}", style::Invert), ); } else { self.screen.draw(x + line_start, y, &format!("{}", c)); } } } } else if content.line_in_sel(line_offset + y) { self.screen .draw_with_style(line_start, y, " ", &format!("{}", style::Invert)); } } } fn cursor_pos<T: Editable>(&self, content: &T) -> (usize, usize) { let line = content.line(); let first_line = self.line_offset; let y = line - first_line as usize; let visual_col = content.col(); let column: usize = content .iter_line(line) .map(|x| if x == '\t' { TAB_LENGTH } else { 1 }) .take(visual_col) .sum(); ( (self.line_number_width(content.line_count()) as usize + 1 + column), y, ) } fn line_number_width(&self, line_count: usize) -> u16 { line_count.to_string().len() as u16 } fn status_height(&self) -> u16 { 2 } pub fn lines_height(&self) -> usize { let (_, screen_height) = terminal_size().unwrap(); let incompressible = self.status_height() as usize; cmp::max(screen_height as usize, incompressible) - incompressible } pub fn lines_width(&self, line_count: usize) -> usize { let (screen_width, _) = terminal_size().unwrap(); let incompressible = self.line_number_width(line_count) as usize + 1; cmp::max(screen_width as usize, incompressible) - incompressible } }
mod screen; use self::screen::Screen; use data::{Editable, Named, Selectable}; use std::{cmp, iter}; use termion::{color, style, terminal_size}; pub struct View { message: Option<String>, is_prompt: bool, line_offset: usize, screen: Screen, } const TAB_LENGTH: usize = 4; impl View { pub fn new() -> Self { View { message: None, is_prompt: false, line_offset: 0, screen: Screen::new(), } } pub fn message(&mut self, message: &str) { self.is_prompt = false; self.message = Some(String::from(message)); } pub fn prompt(&mut self, prompt: &str, message: &str) { self.is_prompt = true; let msg = String::from(prompt) + message; self.message = Some(msg); } pub fn quiet(&mut self) { self.is_prompt = false; self.message = None; } pub fn center_view(&mut self, line: usize) { self.line_offset = line .checked_sub(self.lines_height() as usize / 2) .unwrap_or(0); } pub fn adjust_view(&mut self, line: usize) { if line < self.line_offset { self.line_offset = line; } else if line + 1 >= self.line_offset + self.lines_height() { self.line_offset = 1 + line - self.lines_height(); } } pub fn scroll_view<T: Editable>(&mut self, offset: isize, content: &T) { self.line_offset = cmp::min( cmp::max((self.line_offset as isize) + offset, 0), (content.line_count() as isize) - 1, ) as usize; } pub fn render<T>(&mut self, content: &T) where T: Editable + Named + Selectable, { self.screen.clear(&color::Reset); self.paint_lines(content); self.paint_status(content); self.paint_message(); self.paint_cursor(content); self.screen.present(); } pub fn translate_coordinates<T>(&self, content: &T, x: u16, y: u16) -> (usize, usize) where T: Editable, { let line = cmp::min( (y as isize + self.line_offset as isize - 1) as usize, content.line_count() - 1, ); let visual_col = (cmp::max( 0, x as isize - self.line_number_width(content.line_count()) as isize - 2, )) as usize; let col = content .iter_line(line) .scan(0, |state, x| { *state += if x == '\t' { TAB_LENGTH } else { 1 }; Some(*state) }).take_while(|&x| x <= 
visual_col) .count(); (line, col) } fn paint_message(&self) { if let Some(ref message) = self.message { let y = self.lines_height() + 1; self.screen.draw(0, y, message); } } fn paint_cursor<T>(&mut self, content: &T) where T: Editable + Selectable, { if (content.line()) < self.line_offset || content.line() >= self.line_offset + self.lines_height() || content.col() >= self.lines_width(content.line_count()) || content.sel().is_some() { self.screen.hide_cursor(); return; } let (x, y) = if self.is_prompt { ( self.message.clone().unwrap().chars().count(), self.lines_height() + 1, ) } else { let (a, b) = self.cursor_pos(content); (a, b) }; self.screen.move_cursor(x, y); self.screen.show_cursor(); } fn paint_status<T>(&self, content: &T) where T: Editable + Named, { let line = content.line(); let column = content.col(); let line_count = content.line_count(); let advance = ((line + 1) as f64 / line_count as f64 * 100.0).floor(); let (screen_width, _) = terminal_size().unwrap(); let empty_line = (0..screen_width).map(|_| ' ').collect::<String>(); let y = self.lines_height(); let style = format!("{}{}", color::Fg(color::White), style::Invert); self.screen.draw_with_style(0, y, &empty_line, &style); self.screen.draw_with_style(0, y, content.name(), &style); let position_info = format!("{}% {}/{}: {}", advance, line + 1, line_count, column); let x = screen_width as usize - position_info.len(); self.screen.draw_with_style(x, y, &position_info, &style); }
fn cursor_pos<T: Editable>(&self, content: &T) -> (usize, usize) { let line = content.line(); let first_line = self.line_offset; let y = line - first_line as usize; let visual_col = content.col(); let column: usize = content .iter_line(line) .map(|x| if x == '\t' { TAB_LENGTH } else { 1 }) .take(visual_col) .sum(); ( (self.line_number_width(content.line_count()) as usize + 1 + column), y, ) } fn line_number_width(&self, line_count: usize) -> u16 { line_count.to_string().len() as u16 } fn status_height(&self) -> u16 { 2 } pub fn lines_height(&self) -> usize { let (_, screen_height) = terminal_size().unwrap(); let incompressible = self.status_height() as usize; cmp::max(screen_height as usize, incompressible) - incompressible } pub fn lines_width(&self, line_count: usize) -> usize { let (screen_width, _) = terminal_size().unwrap(); let incompressible = self.line_number_width(line_count) as usize + 1; cmp::max(screen_width as usize, incompressible) - incompressible } }
fn paint_lines<T>(&self, content: &T) where T: Editable + Selectable, { let line_offset = self.line_offset as usize; let lines_height = self.lines_height() as usize; let lines_width = self.lines_width(content.line_count()) as usize; let line_count = content.line_count(); let line_start = self.line_number_width(line_count) as usize + 1; for (y, line) in content .lines() .skip(line_offset) .take(cmp::min(lines_height, line_count)) .enumerate() { let line_index = line_offset + y; self.screen.draw_with_style( 0, y, &format!("{}", 1 + line_index), &format!("{}", color::Fg(color::White)), ); if line.len_chars() > 1 { let line_start_char_index = content.line_index_to_char_index(line_index); for (x, c) in line .chars() .flat_map(|c| { if c == '\t' { iter::repeat(' ').take(TAB_LENGTH) } else { iter::repeat(c).take(1) } }).enumerate() { let char_index = line_start_char_index + x; if x < lines_width { if content.in_sel(char_index) { self.screen.draw_with_style( x + line_start, y, &format!("{}", c), &format!("{}", style::Invert), ); } else { self.screen.draw(x + line_start, y, &format!("{}", c)); } } } } else if content.line_in_sel(line_offset + y) { self.screen .draw_with_style(line_start, y, " ", &format!("{}", style::Invert)); } } }
function_block-full_function
[]
Rust
liblz4stego/src/compressor.rs
m4tx/lz4stego
5e53272900a74c1c88b993f92087ae7e588130b8
use std::cmp::min; use log::debug; use xxhash_rust::xxh32::{xxh32, Xxh32}; use crate::constants::{ END_LITERAL_NUM, LZ4_MAGIC_NUMBER, MATCH_LENGTH_OFFSET, MAX_BLOCK_SIZE, MIN_COMPRESS_LENGTH, TOKEN_MAX_VAL, }; use crate::descriptors::{BdByte, BlockSize, FlgByte, Token}; use crate::numeral_coding; use crate::occurrence_map::OccurrenceMap; use byteorder::{WriteBytesExt, LE}; use std::collections::VecDeque; use std::io::Write; pub struct Compressor<'a, W: Write> { output_write: W, buffer: VecDeque<u8>, hash: Xxh32, hidden_data_encoder: numeral_coding::Decoder<'a>, prefer_hidden: bool, } impl<'a, W: Write> Compressor<'a, W> { pub fn new_with_hidden_data( writer: W, hidden_data: &'a [u8], prefer_hidden: bool, ) -> Result<Self, std::io::Error> { let mut compressor = Self { output_write: writer, buffer: VecDeque::new(), hash: Xxh32::new(0), hidden_data_encoder: numeral_coding::Decoder::new(hidden_data), prefer_hidden, }; compressor.init()?; Ok(compressor) } pub fn new(writer: W) -> Result<Self, std::io::Error> { let mut compressor = Self { output_write: writer, buffer: VecDeque::new(), hash: Xxh32::new(0), hidden_data_encoder: numeral_coding::Decoder::new(b""), prefer_hidden: false, }; compressor.init()?; Ok(compressor) } fn init(&mut self) -> Result<(), std::io::Error> { self.write_header()?; Ok(()) } fn get_available_bytes(&self) -> usize { self.hidden_data_encoder.get_available_bytes() } pub fn finish(mut self) -> Result<usize, std::io::Error> { if !self.buffer.is_empty() { self.output_block(true)?; } self.write_footer()?; self.output_write.flush()?; Ok(self.get_available_bytes()) } fn write_header(&mut self) -> Result<(), std::io::Error> { self.output_write.write_u32::<LE>(LZ4_MAGIC_NUMBER)?; let frame_descriptor = self.build_frame_descriptor(); self.output_write.write(&frame_descriptor)?; Ok(()) } fn build_frame_descriptor(&self) -> Vec<u8> { let mut output = Vec::new(); let mut flag = FlgByte(0); flag.set_version(1); flag.set_block_independent(true); 
flag.set_content_checksum_added(true); output.write_u8(flag.0).unwrap(); let mut bd = BdByte(0); bd.set_block_max_size(7); output.write_u8(bd.0).unwrap(); let hc = ((xxh32(&output, 0) >> 8) & 0xFF) as u8; output.write_u8(hc).unwrap(); output } fn write_footer(&mut self) -> Result<(), std::io::Error> { self.output_write.write_u32::<LE>(0)?; self.output_write.write_u32::<LE>(self.hash.digest())?; Ok(()) } fn output_block(&mut self, force_write: bool) -> Result<(), std::io::Error> { let mut data = self.buffer.make_contiguous(); let mut to_shrink = 0; while !data.is_empty() && (data.len() >= MAX_BLOCK_SIZE || force_write) { if data.len() < MIN_COMPRESS_LENGTH { output_uncompressed_block(&mut self.output_write, data)?; to_shrink += data.len(); break; } let block_size = min(data.len(), MAX_BLOCK_SIZE); output_compressed_block( &mut self.output_write, &data[..block_size], &mut self.hidden_data_encoder, self.prefer_hidden, )?; to_shrink += block_size; data = &mut data[block_size..]; } self.buffer.drain(..to_shrink); Ok(()) } } impl<'a, W: Write> Write for Compressor<'a, W> { fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> { self.buffer.extend(buf); self.hash.update(buf); if self.buffer.len() >= MAX_BLOCK_SIZE { self.output_block(false)?; } Ok(buf.len()) } fn flush(&mut self) -> Result<(), std::io::Error> { if !self.buffer.is_empty() { self.output_block(true)?; self.output_write.flush()?; } Ok(()) } } fn output_uncompressed_block<W: Write>( mut output_write: W, data: &[u8], ) -> Result<(), std::io::Error> { debug!("Outputting uncompressed block with length: {}", data.len()); let mut block_size = BlockSize(0); block_size.set_block_uncompressed(true); block_size.set_block_size(data.len() as u32); output_write.write_u32::<LE>(block_size.0)?; output_write.write(data)?; Ok(()) } fn output_compressed_block<W: Write>( mut output_write: W, data: &[u8], hidden_data_encoder: &mut numeral_coding::Decoder, prefer_hidden: bool, ) -> Result<(), std::io::Error> { let mut 
output = Vec::new(); output.write_u32::<LE>(0).unwrap(); let mut occur = OccurrenceMap::new(data, prefer_hidden); let mut literals = Vec::new(); let mut i = 0; while i < data.len() - END_LITERAL_NUM { let occurrences = occur.get_occurrences(i); if occurrences.len() > 0 { let chosen_index = hidden_data_encoder.decode_value(occurrences.len() as u16); let (index, match_length) = occurrences.choose_occurrence(chosen_index as usize); if match_length < 4 { literals.push(data[i]); i += 1; continue; } let offset = (i - index) as u16; output_sequence(&literals, offset, match_length as u32, &mut output); literals.clear(); occur.add_occurrences(i, match_length); i += match_length; } else { literals.push(data[i]); occur.add_occurrences(i, 1); i += 1; } } literals.extend_from_slice(&data[data.len() - END_LITERAL_NUM..]); output_sequence(&literals, 0, MATCH_LENGTH_OFFSET, &mut output); let mut block_size = BlockSize(0); block_size.set_block_uncompressed(false); let block_size_num = output.len() - 4; block_size.set_block_size(block_size_num as u32); output.splice(0..4, block_size.0.to_le_bytes()); debug!("Block size: {}, data size: {}", block_size_num, data.len()); if block_size_num <= MAX_BLOCK_SIZE { output_write.write(&output)?; } else { output_uncompressed_block(output_write, data)?; } Ok(()) } fn output_sequence(literals: &Vec<u8>, offset: u16, match_length: u32, output: &mut Vec<u8>) { debug!( "Outputting sequence: literals {:?}, offset={}, match_length={}", literals, offset, match_length ); let literals_len = literals.len() as u32; let match_length_saved = match_length - MATCH_LENGTH_OFFSET; let mut token = Token(0); token.set_literals_length(min(literals_len, TOKEN_MAX_VAL as u32) as u8); token.set_match_length(min(match_length_saved, TOKEN_MAX_VAL as u32) as u8); output.write_u8(token.0).unwrap(); output_lsic_int(literals_len, TOKEN_MAX_VAL, output); output.extend_from_slice(literals.as_slice()); if offset != 0 { output.write_u16::<LE>(offset).unwrap(); 
output_lsic_int(match_length_saved, TOKEN_MAX_VAL, output); } } fn output_lsic_int(val: u32, max_val: u8, output: &mut Vec<u8>) { if val < max_val as u32 { return; } let mut new_val = val - max_val as u32; while new_val > 255 { output.write_u8(255).unwrap(); new_val -= 255; } output.write_u8(new_val as u8).unwrap(); }
use std::cmp::min; use log::debug; use xxhash_rust::xxh32::{xxh32, Xxh32}; use crate::constants::{ END_LITERAL_NUM, LZ4_MAGIC_NUMBER, MATCH_LENGTH_OFFSET, MAX_BLOCK_SIZE, MIN_COMPRESS_LENGTH, TOKEN_MAX_VAL, }; use crate::descriptors::{BdByte, BlockSize, FlgByte, Token}; use crate::numeral_coding; use crate::occurrence_map::OccurrenceMap; use byteorder::{WriteBytesExt, LE}; use std::collections::VecDeque; use std::io::Write; pub struct Compressor<'a, W: Write> { output_write: W, buffer: VecDeque<u8>, hash: Xxh32, hidden_data_encoder: numeral_coding::Decoder<'a>, prefer_hidden: bool, } impl<'a, W: Write> Compressor<'a, W> { pub fn new_with_hidden_data( writer: W, hidden_data: &'a [u8], prefer_hidden: bool, ) ->
pub fn new(writer: W) -> Result<Self, std::io::Error> { let mut compressor = Self { output_write: writer, buffer: VecDeque::new(), hash: Xxh32::new(0), hidden_data_encoder: numeral_coding::Decoder::new(b""), prefer_hidden: false, }; compressor.init()?; Ok(compressor) } fn init(&mut self) -> Result<(), std::io::Error> { self.write_header()?; Ok(()) } fn get_available_bytes(&self) -> usize { self.hidden_data_encoder.get_available_bytes() } pub fn finish(mut self) -> Result<usize, std::io::Error> { if !self.buffer.is_empty() { self.output_block(true)?; } self.write_footer()?; self.output_write.flush()?; Ok(self.get_available_bytes()) } fn write_header(&mut self) -> Result<(), std::io::Error> { self.output_write.write_u32::<LE>(LZ4_MAGIC_NUMBER)?; let frame_descriptor = self.build_frame_descriptor(); self.output_write.write(&frame_descriptor)?; Ok(()) } fn build_frame_descriptor(&self) -> Vec<u8> { let mut output = Vec::new(); let mut flag = FlgByte(0); flag.set_version(1); flag.set_block_independent(true); flag.set_content_checksum_added(true); output.write_u8(flag.0).unwrap(); let mut bd = BdByte(0); bd.set_block_max_size(7); output.write_u8(bd.0).unwrap(); let hc = ((xxh32(&output, 0) >> 8) & 0xFF) as u8; output.write_u8(hc).unwrap(); output } fn write_footer(&mut self) -> Result<(), std::io::Error> { self.output_write.write_u32::<LE>(0)?; self.output_write.write_u32::<LE>(self.hash.digest())?; Ok(()) } fn output_block(&mut self, force_write: bool) -> Result<(), std::io::Error> { let mut data = self.buffer.make_contiguous(); let mut to_shrink = 0; while !data.is_empty() && (data.len() >= MAX_BLOCK_SIZE || force_write) { if data.len() < MIN_COMPRESS_LENGTH { output_uncompressed_block(&mut self.output_write, data)?; to_shrink += data.len(); break; } let block_size = min(data.len(), MAX_BLOCK_SIZE); output_compressed_block( &mut self.output_write, &data[..block_size], &mut self.hidden_data_encoder, self.prefer_hidden, )?; to_shrink += block_size; data = &mut 
data[block_size..]; } self.buffer.drain(..to_shrink); Ok(()) } } impl<'a, W: Write> Write for Compressor<'a, W> { fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> { self.buffer.extend(buf); self.hash.update(buf); if self.buffer.len() >= MAX_BLOCK_SIZE { self.output_block(false)?; } Ok(buf.len()) } fn flush(&mut self) -> Result<(), std::io::Error> { if !self.buffer.is_empty() { self.output_block(true)?; self.output_write.flush()?; } Ok(()) } } fn output_uncompressed_block<W: Write>( mut output_write: W, data: &[u8], ) -> Result<(), std::io::Error> { debug!("Outputting uncompressed block with length: {}", data.len()); let mut block_size = BlockSize(0); block_size.set_block_uncompressed(true); block_size.set_block_size(data.len() as u32); output_write.write_u32::<LE>(block_size.0)?; output_write.write(data)?; Ok(()) } fn output_compressed_block<W: Write>( mut output_write: W, data: &[u8], hidden_data_encoder: &mut numeral_coding::Decoder, prefer_hidden: bool, ) -> Result<(), std::io::Error> { let mut output = Vec::new(); output.write_u32::<LE>(0).unwrap(); let mut occur = OccurrenceMap::new(data, prefer_hidden); let mut literals = Vec::new(); let mut i = 0; while i < data.len() - END_LITERAL_NUM { let occurrences = occur.get_occurrences(i); if occurrences.len() > 0 { let chosen_index = hidden_data_encoder.decode_value(occurrences.len() as u16); let (index, match_length) = occurrences.choose_occurrence(chosen_index as usize); if match_length < 4 { literals.push(data[i]); i += 1; continue; } let offset = (i - index) as u16; output_sequence(&literals, offset, match_length as u32, &mut output); literals.clear(); occur.add_occurrences(i, match_length); i += match_length; } else { literals.push(data[i]); occur.add_occurrences(i, 1); i += 1; } } literals.extend_from_slice(&data[data.len() - END_LITERAL_NUM..]); output_sequence(&literals, 0, MATCH_LENGTH_OFFSET, &mut output); let mut block_size = BlockSize(0); block_size.set_block_uncompressed(false); let 
block_size_num = output.len() - 4; block_size.set_block_size(block_size_num as u32); output.splice(0..4, block_size.0.to_le_bytes()); debug!("Block size: {}, data size: {}", block_size_num, data.len()); if block_size_num <= MAX_BLOCK_SIZE { output_write.write(&output)?; } else { output_uncompressed_block(output_write, data)?; } Ok(()) } fn output_sequence(literals: &Vec<u8>, offset: u16, match_length: u32, output: &mut Vec<u8>) { debug!( "Outputting sequence: literals {:?}, offset={}, match_length={}", literals, offset, match_length ); let literals_len = literals.len() as u32; let match_length_saved = match_length - MATCH_LENGTH_OFFSET; let mut token = Token(0); token.set_literals_length(min(literals_len, TOKEN_MAX_VAL as u32) as u8); token.set_match_length(min(match_length_saved, TOKEN_MAX_VAL as u32) as u8); output.write_u8(token.0).unwrap(); output_lsic_int(literals_len, TOKEN_MAX_VAL, output); output.extend_from_slice(literals.as_slice()); if offset != 0 { output.write_u16::<LE>(offset).unwrap(); output_lsic_int(match_length_saved, TOKEN_MAX_VAL, output); } } fn output_lsic_int(val: u32, max_val: u8, output: &mut Vec<u8>) { if val < max_val as u32 { return; } let mut new_val = val - max_val as u32; while new_val > 255 { output.write_u8(255).unwrap(); new_val -= 255; } output.write_u8(new_val as u8).unwrap(); }
Result<Self, std::io::Error> { let mut compressor = Self { output_write: writer, buffer: VecDeque::new(), hash: Xxh32::new(0), hidden_data_encoder: numeral_coding::Decoder::new(hidden_data), prefer_hidden, }; compressor.init()?; Ok(compressor) }
function_block-function_prefixed
[ { "content": "fn compress(data: &[u8]) -> Vec<u8> {\n\n let mut output = Vec::new();\n\n let mut compressor = Compressor::new(&mut output).unwrap();\n\n compressor.write(data).unwrap();\n\n compressor.finish().unwrap();\n\n\n\n output\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/compresso...
Rust
qlib/buddyallocator.rs
CentaurusInfra/Quark
1079b36efa7e537f8fec39f037ee5ccc71977e7d
use super::mutex::*; use alloc::slice; use alloc::vec::Vec; use core::ops::Deref; use super::addr::*; use super::common::*; pub fn ZeroPage(pageStart: u64) { unsafe { let arr = slice::from_raw_parts_mut(pageStart as *mut u64, 512); for i in 0..512 { arr[i] = 0 } } } #[derive(PartialEq, Copy, Clone, Default)] pub struct MemAllocatorInternal { ba: BuddyAllocator, baseAddr: u64, } impl MemAllocatorInternal { pub fn New() -> Self { return Self { ba: BuddyAllocator::New(0, 0), baseAddr: 0, }; } pub fn Init(baseAddr: u64, ord: u64) -> Self { let mut ba = BuddyAllocator::New(ord, baseAddr); let baSize = 1 << (ord + 1); let mut baPages = baSize >> PAGE_SHIFT; if (baSize & PAGE_MASK) != 0 { baPages += 1; } let addr = ba.allocate(baPages) as u64; assert_eq!(addr, 0); return Self { ba, baseAddr }; } pub fn Load(&mut self, baseAddr: u64, ord: u64) { self.ba.Load(ord, baseAddr); self.baseAddr = baseAddr; } pub fn Alloc(&mut self, pages: u64) -> Result<u64> { let pageOff = self.ba.allocate(pages); if pageOff == -1 { info!("buddyalloc ..."); Err(Error::NoEnoughMemory) } else { Ok(self.baseAddr + ((pageOff as u64) << PAGE_SHIFT)) } } pub fn Free(&mut self, addr: u64, pages: u64) -> Result<()> { let pageOff = (addr - self.baseAddr) as u64 >> PAGE_SHIFT; let ret = self.ba.free(pageOff, pages); if ret { Ok(()) } else { Err(Error::InvalidInput) } } } pub struct MemAllocator(QMutex<MemAllocatorInternal>); impl Deref for MemAllocator { type Target = QMutex<MemAllocatorInternal>; fn deref(&self) -> &QMutex<MemAllocatorInternal> { &self.0 } } impl RefMgr for MemAllocator { fn Ref(&self, _addr: u64) -> Result<u64> { return Ok(1); } fn Deref(&self, addr: u64) -> Result<u64> { self.FreePage(addr).unwrap(); Ok(0) } fn GetRef(&self, _addr: u64) -> Result<u64> { Ok(1) } } impl Allocator for MemAllocator { fn AllocPage(&self, _incrRef: bool) -> Result<u64> { let res = self.lock().Alloc(1); return res; } fn FreePage(&self, addr: u64) -> Result<()> { ZeroPage(addr); return self.lock().Free(addr, 
1); } } impl MemAllocator { pub fn New() -> Self { return Self(QMutex::new(MemAllocatorInternal::New())); } pub fn Init(baseAddr: u64, ord: u64) -> Self { return Self(QMutex::new(MemAllocatorInternal::Init(baseAddr, ord))); } pub fn Load(&self, baseAddr: u64, ord: u64) { self.lock().Load(baseAddr, ord); } pub fn Alloc(&self, pages: u64) -> Result<u64> { return self.lock().Alloc(pages); } pub fn Free(&self, addr: u64, pages: u64) -> Result<()> { return self.lock().Free(addr, pages); } } #[repr(u8)] #[derive(PartialEq, Copy, Clone, Debug)] enum Node { Unused = 0, Used, Split, Full, } #[derive(PartialEq, Copy, Clone, Default)] pub struct BuddyAllocator { levels: u64, size: u64, root: u64, } impl BuddyAllocator { pub fn New(levels: u64, addr: u64) -> BuddyAllocator { let size: u64 = (1 << (levels + 1)) - 1; return BuddyAllocator { levels: levels, size: size, root: addr, }; } pub fn Load(&mut self, levels: u64, addr: u64) { let size: u64 = (1 << (levels + 1)) - 1; self.levels = levels; self.size = size; self.root = addr; } fn tree(&self) -> &mut [Node] { unsafe { slice::from_raw_parts_mut(self.root as *mut Node, self.size as usize) } } fn alloc(&mut self, idx: u64, t_level: u64, c_level: u64) -> isize { if c_level == t_level { if self.tree()[idx as usize] == Node::Unused { self.tree()[idx as usize] = Node::Used; let current_level_offset = (1 << self.levels - c_level) - 1; return (idx - current_level_offset) as isize * (1 << c_level); } else { return -1; } } let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; match self.tree()[idx as usize] { Node::Used | Node::Full => return -1, Node::Unused => { self.tree()[idx as usize] = Node::Split; return self.alloc(left_child, t_level, c_level - 1); } Node::Split => { let mut res = self.alloc(left_child, t_level, c_level - 1); if res == -1 { res = self.alloc(right_child, t_level, c_level - 1); } self.CheckParentFull(idx); return res; } } } fn alloc1(&mut self, t_level: u64) -> isize { let mut stack: Vec<(u32, u32)> = 
Vec::with_capacity(self.levels as usize + 1); stack.push((0, self.levels as u32)); while stack.len() > 0 { let (idx, c_level) = stack.pop().unwrap(); if c_level as u64 == t_level { if self.tree()[idx as usize] == Node::Unused { self.tree()[idx as usize] = Node::Used; let current_level_offset = (1 << self.levels - c_level as u64) - 1; if idx != 0 { let mut parent = (idx + 1) / 2 - 1; 'inner: loop { self.CheckParentFull(parent as u64); if parent == 0 { break 'inner; } parent = (parent + 1) / 2 - 1; } } return (idx - current_level_offset) as isize * (1 << c_level); } else { continue; } } let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; match self.tree()[idx as usize] { Node::Used | Node::Full => continue, Node::Unused => { self.tree()[idx as usize] = Node::Split; stack.push((left_child, c_level - 1)); } Node::Split => { stack.push((right_child, c_level - 1)); stack.push((left_child, c_level - 1)); } } } return -1; } const STACK_LEN: usize = 28; fn alloc2(&mut self, t_level: u64) -> isize { let mut stack: [(u32, u32); Self::STACK_LEN] = [(0, 0); Self::STACK_LEN]; let mut top = 0; stack[top] = (0, self.levels as u32); top += 1; while top > 0 { let (idx, c_level) = stack[top - 1]; top -= 1; if c_level as u64 == t_level { if self.tree()[idx as usize] == Node::Unused { self.tree()[idx as usize] = Node::Used; let current_level_offset = (1 << self.levels - c_level as u64) - 1; if idx != 0 { let mut parent = (idx + 1) / 2 - 1; 'inner: loop { self.CheckParentFull(parent as u64); if parent == 0 { break 'inner; } parent = (parent + 1) / 2 - 1; } } return (idx - current_level_offset) as isize * (1 << c_level); } else { continue; } } let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; match self.tree()[idx as usize] { Node::Used | Node::Full => continue, Node::Unused => { self.tree()[idx as usize] = Node::Split; stack[top] = (left_child, c_level - 1); top += 1; } Node::Split => { stack[top] = (right_child, c_level - 1); top += 1; stack[top] = (left_child, 
c_level - 1); top += 1; } } } return -1; } pub fn CheckParentFull(&mut self, idx: u64) { let mut idx = idx; while idx != 0 { let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; let left_child_used_or_full = self.tree()[left_child as usize] == Node::Full || self.tree()[left_child as usize] == Node::Used; let right_child_used_or_full = self.tree()[right_child as usize] == Node::Full || self.tree()[right_child as usize] == Node::Used; if left_child_used_or_full && right_child_used_or_full { self.tree()[idx as usize] = Node::Full; } idx = (idx + 1) / 2 - 1; } } pub fn allocate(&mut self, num_pages: u64) -> isize { let requested_level = self.get_level_from_num_pages(num_pages); if requested_level > self.levels { return -1; } return self.alloc2(requested_level); } pub fn free(&mut self, page_offset: u64, num_pages: u64) -> bool { if self.root <= page_offset && page_offset <= self.root + self.size { return false; } let requested_level = self.get_level_from_num_pages(num_pages); let level_offset = page_offset / (1 << requested_level); let current_level_offset = (1 << self.levels - requested_level) - 1; let mut idx = current_level_offset + level_offset; if idx as usize > self.tree().len() - 1 { panic!("offset {} is > length of tree() {}", idx, self.tree().len()); } if self.tree()[idx as usize] != Node::Used { return false; } self.tree()[idx as usize] = Node::Unused; while idx != 0 { let parent = (idx + 1) / 2 - 1; let left_child = parent * 2 + 1; let right_child = parent * 2 + 2; if self.tree()[left_child as usize] == Node::Unused && self.tree()[right_child as usize] == Node::Unused { self.tree()[parent as usize] = Node::Unused; } else { self.tree()[parent as usize] = Node::Split; } idx = parent } return true; } fn get_level_from_num_pages(&self, num_pages: u64) -> u64 { let requested_pages; if num_pages == 0 { requested_pages = 1; } else { requested_pages = num_pages.next_power_of_two(); } let requested_level = self.log_base_2(requested_pages); requested_level } 
fn log_base_2(&self, requested_pages: u64) -> u64 { let mut exp = 0; let mut find_msb_bit = requested_pages; find_msb_bit >>= 1; while find_msb_bit > 0 { find_msb_bit >>= 1; exp += 1; } return exp; } /*pub fn dump(&self) -> String { let mut out = "".to_string(); let mut row = "".to_string(); let mut level = 0; let mut index = 0; loop { if index == self.tree().len() { break } match self.tree()[index] { Node::Used => row += "U", Node::Unused => row += "O", Node::Split => row += "S", Node::Full => row += "F", } if row.len() == 1 << level { out += &(row + "\n"); row = "".to_string(); level += 1; } index += 1; } return out; }*/ } #[cfg(test)] mod tests { use super::*; #[test] fn test_alloc() { let mem = [0 as u8; 15]; let mut alloc = buddyallocator::New(3, &mem[0] as *const _ as u64); assert_eq!(alloc.allocate(9), -1); let offset1 = alloc.allocate(1); assert_eq!(offset1, 0); let offset2 = alloc.allocate(3); assert_eq!(offset2, 4); alloc.free(offset2 as u64, 3); alloc.free(offset1 as u64, 1); let offset3 = alloc.allocate(8); assert_eq!(offset3, 0); alloc.free(offset3 as u64, 8); let offset4 = alloc.allocate(9); assert_eq!(offset4, -1); } #[test] fn test_alloc1() { let mut alloc = buddyallocator::New(0, 0); assert_eq!(alloc.allocate(9), -1); } }
use super::mutex::*; use alloc::slice; use alloc::vec::Vec; use core::ops::Deref; use super::addr::*; use super::common::*; pub fn ZeroPage(pageStart: u64) { unsafe { let arr = slice::from_raw_parts_mut(pageStart as *mut u64, 512); for i in 0..512 { arr[i] = 0 } } } #[derive(PartialEq, Copy, Clone, Default)] pub struct MemAllocatorInternal { ba: BuddyAllocator, baseAddr: u64, } impl MemAllocatorInternal { pub fn New() -> Self { return Self { ba: BuddyAllocator::New(0, 0), baseAddr: 0, }; } pub fn Init(baseAddr: u64, ord: u64) -> Self { let mut ba = BuddyAllocator::New(ord, baseAddr); let baSize = 1 << (ord + 1); let mut baPages = baSize >> PAGE_SHIFT; if (baSize & PAGE_MASK) != 0 { baPages += 1; } let addr = ba.allocate(baPages) as u64; assert_eq!(addr, 0); return Self { ba, baseAddr }; } pub fn Load(&mut self, baseAddr: u64, ord: u64) { self.ba.Load(ord, baseAddr); self.baseAddr = baseAddr; } pub fn Alloc(&mut self, pages: u64) -> Result<u64> { let pageOff = self.ba.allocate(pages); if pageOff == -1 { info!("buddyalloc ..."); Err(Error::NoEnoughMemory) } else { Ok(self.baseAddr + ((pageOff as u64) << PAGE_SHIFT)) } }
} pub struct MemAllocator(QMutex<MemAllocatorInternal>); impl Deref for MemAllocator { type Target = QMutex<MemAllocatorInternal>; fn deref(&self) -> &QMutex<MemAllocatorInternal> { &self.0 } } impl RefMgr for MemAllocator { fn Ref(&self, _addr: u64) -> Result<u64> { return Ok(1); } fn Deref(&self, addr: u64) -> Result<u64> { self.FreePage(addr).unwrap(); Ok(0) } fn GetRef(&self, _addr: u64) -> Result<u64> { Ok(1) } } impl Allocator for MemAllocator { fn AllocPage(&self, _incrRef: bool) -> Result<u64> { let res = self.lock().Alloc(1); return res; } fn FreePage(&self, addr: u64) -> Result<()> { ZeroPage(addr); return self.lock().Free(addr, 1); } } impl MemAllocator { pub fn New() -> Self { return Self(QMutex::new(MemAllocatorInternal::New())); } pub fn Init(baseAddr: u64, ord: u64) -> Self { return Self(QMutex::new(MemAllocatorInternal::Init(baseAddr, ord))); } pub fn Load(&self, baseAddr: u64, ord: u64) { self.lock().Load(baseAddr, ord); } pub fn Alloc(&self, pages: u64) -> Result<u64> { return self.lock().Alloc(pages); } pub fn Free(&self, addr: u64, pages: u64) -> Result<()> { return self.lock().Free(addr, pages); } } #[repr(u8)] #[derive(PartialEq, Copy, Clone, Debug)] enum Node { Unused = 0, Used, Split, Full, } #[derive(PartialEq, Copy, Clone, Default)] pub struct BuddyAllocator { levels: u64, size: u64, root: u64, } impl BuddyAllocator { pub fn New(levels: u64, addr: u64) -> BuddyAllocator { let size: u64 = (1 << (levels + 1)) - 1; return BuddyAllocator { levels: levels, size: size, root: addr, }; } pub fn Load(&mut self, levels: u64, addr: u64) { let size: u64 = (1 << (levels + 1)) - 1; self.levels = levels; self.size = size; self.root = addr; } fn tree(&self) -> &mut [Node] { unsafe { slice::from_raw_parts_mut(self.root as *mut Node, self.size as usize) } } fn alloc(&mut self, idx: u64, t_level: u64, c_level: u64) -> isize { if c_level == t_level { if self.tree()[idx as usize] == Node::Unused { self.tree()[idx as usize] = Node::Used; let 
current_level_offset = (1 << self.levels - c_level) - 1; return (idx - current_level_offset) as isize * (1 << c_level); } else { return -1; } } let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; match self.tree()[idx as usize] { Node::Used | Node::Full => return -1, Node::Unused => { self.tree()[idx as usize] = Node::Split; return self.alloc(left_child, t_level, c_level - 1); } Node::Split => { let mut res = self.alloc(left_child, t_level, c_level - 1); if res == -1 { res = self.alloc(right_child, t_level, c_level - 1); } self.CheckParentFull(idx); return res; } } } fn alloc1(&mut self, t_level: u64) -> isize { let mut stack: Vec<(u32, u32)> = Vec::with_capacity(self.levels as usize + 1); stack.push((0, self.levels as u32)); while stack.len() > 0 { let (idx, c_level) = stack.pop().unwrap(); if c_level as u64 == t_level { if self.tree()[idx as usize] == Node::Unused { self.tree()[idx as usize] = Node::Used; let current_level_offset = (1 << self.levels - c_level as u64) - 1; if idx != 0 { let mut parent = (idx + 1) / 2 - 1; 'inner: loop { self.CheckParentFull(parent as u64); if parent == 0 { break 'inner; } parent = (parent + 1) / 2 - 1; } } return (idx - current_level_offset) as isize * (1 << c_level); } else { continue; } } let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; match self.tree()[idx as usize] { Node::Used | Node::Full => continue, Node::Unused => { self.tree()[idx as usize] = Node::Split; stack.push((left_child, c_level - 1)); } Node::Split => { stack.push((right_child, c_level - 1)); stack.push((left_child, c_level - 1)); } } } return -1; } const STACK_LEN: usize = 28; fn alloc2(&mut self, t_level: u64) -> isize { let mut stack: [(u32, u32); Self::STACK_LEN] = [(0, 0); Self::STACK_LEN]; let mut top = 0; stack[top] = (0, self.levels as u32); top += 1; while top > 0 { let (idx, c_level) = stack[top - 1]; top -= 1; if c_level as u64 == t_level { if self.tree()[idx as usize] == Node::Unused { self.tree()[idx as usize] = Node::Used; let 
current_level_offset = (1 << self.levels - c_level as u64) - 1; if idx != 0 { let mut parent = (idx + 1) / 2 - 1; 'inner: loop { self.CheckParentFull(parent as u64); if parent == 0 { break 'inner; } parent = (parent + 1) / 2 - 1; } } return (idx - current_level_offset) as isize * (1 << c_level); } else { continue; } } let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; match self.tree()[idx as usize] { Node::Used | Node::Full => continue, Node::Unused => { self.tree()[idx as usize] = Node::Split; stack[top] = (left_child, c_level - 1); top += 1; } Node::Split => { stack[top] = (right_child, c_level - 1); top += 1; stack[top] = (left_child, c_level - 1); top += 1; } } } return -1; } pub fn CheckParentFull(&mut self, idx: u64) { let mut idx = idx; while idx != 0 { let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; let left_child_used_or_full = self.tree()[left_child as usize] == Node::Full || self.tree()[left_child as usize] == Node::Used; let right_child_used_or_full = self.tree()[right_child as usize] == Node::Full || self.tree()[right_child as usize] == Node::Used; if left_child_used_or_full && right_child_used_or_full { self.tree()[idx as usize] = Node::Full; } idx = (idx + 1) / 2 - 1; } } pub fn allocate(&mut self, num_pages: u64) -> isize { let requested_level = self.get_level_from_num_pages(num_pages); if requested_level > self.levels { return -1; } return self.alloc2(requested_level); } pub fn free(&mut self, page_offset: u64, num_pages: u64) -> bool { if self.root <= page_offset && page_offset <= self.root + self.size { return false; } let requested_level = self.get_level_from_num_pages(num_pages); let level_offset = page_offset / (1 << requested_level); let current_level_offset = (1 << self.levels - requested_level) - 1; let mut idx = current_level_offset + level_offset; if idx as usize > self.tree().len() - 1 { panic!("offset {} is > length of tree() {}", idx, self.tree().len()); } if self.tree()[idx as usize] != Node::Used { return 
false; } self.tree()[idx as usize] = Node::Unused; while idx != 0 { let parent = (idx + 1) / 2 - 1; let left_child = parent * 2 + 1; let right_child = parent * 2 + 2; if self.tree()[left_child as usize] == Node::Unused && self.tree()[right_child as usize] == Node::Unused { self.tree()[parent as usize] = Node::Unused; } else { self.tree()[parent as usize] = Node::Split; } idx = parent } return true; } fn get_level_from_num_pages(&self, num_pages: u64) -> u64 { let requested_pages; if num_pages == 0 { requested_pages = 1; } else { requested_pages = num_pages.next_power_of_two(); } let requested_level = self.log_base_2(requested_pages); requested_level } fn log_base_2(&self, requested_pages: u64) -> u64 { let mut exp = 0; let mut find_msb_bit = requested_pages; find_msb_bit >>= 1; while find_msb_bit > 0 { find_msb_bit >>= 1; exp += 1; } return exp; } /*pub fn dump(&self) -> String { let mut out = "".to_string(); let mut row = "".to_string(); let mut level = 0; let mut index = 0; loop { if index == self.tree().len() { break } match self.tree()[index] { Node::Used => row += "U", Node::Unused => row += "O", Node::Split => row += "S", Node::Full => row += "F", } if row.len() == 1 << level { out += &(row + "\n"); row = "".to_string(); level += 1; } index += 1; } return out; }*/ } #[cfg(test)] mod tests { use super::*; #[test] fn test_alloc() { let mem = [0 as u8; 15]; let mut alloc = buddyallocator::New(3, &mem[0] as *const _ as u64); assert_eq!(alloc.allocate(9), -1); let offset1 = alloc.allocate(1); assert_eq!(offset1, 0); let offset2 = alloc.allocate(3); assert_eq!(offset2, 4); alloc.free(offset2 as u64, 3); alloc.free(offset1 as u64, 1); let offset3 = alloc.allocate(8); assert_eq!(offset3, 0); alloc.free(offset3 as u64, 8); let offset4 = alloc.allocate(9); assert_eq!(offset4, -1); } #[test] fn test_alloc1() { let mut alloc = buddyallocator::New(0, 0); assert_eq!(alloc.allocate(9), -1); } }
pub fn Free(&mut self, addr: u64, pages: u64) -> Result<()> { let pageOff = (addr - self.baseAddr) as u64 >> PAGE_SHIFT; let ret = self.ba.free(pageOff, pages); if ret { Ok(()) } else { Err(Error::InvalidInput) } }
function_block-full_function
[ { "content": "#[inline(always)]\n\npub fn CmpExchg(addr: u64, old: u64, new: u64) -> u64 {\n\n let mut ret: u64;\n\n unsafe {\n\n llvm_asm!(\"\n\n lock cmpxchgq $2, ($3)\n\n \"\n\n : \"={rax}\"(ret)\n\n : \"{rax}\"(old), \"{rdx}\"(new), \"{rcx}\"(addr)\...
Rust
crates/plugins/physics-rapier/src/lib.rs
Hihaheho-Studios/desk
7f8ad48a3b9a5439e566d07aecab6185c2d95012
use core::DeskSystem; use bevy::prelude::*; use bevy_rapier2d::prelude::*; use physics::{shape::Shape, widget::WidgetId, DragState, Velocity}; pub struct PhysicsPlugin; const LINEAR_DAMPING: f32 = 8.0; impl Plugin for PhysicsPlugin { fn build(&self, app: &mut bevy::app::AppBuilder) { app.add_plugin(RapierPhysicsPlugin::<NoUserData>::default()) .add_plugin(RapierRenderPlugin) .add_startup_system(walls.system()) .insert_resource(RapierConfiguration { scale: 100.0, gravity: Vec2::ZERO.into(), ..Default::default() }) .add_system( add_physics_components .system() .after(DeskSystem::Shell) .before(DeskSystem::PrePhysics), ) .add_system_set( SystemSet::new() .label(DeskSystem::PrePhysics) .with_system(update_shape.system()) .with_system(update_velocity.system()) .with_system(update_drag_state.system()), ); } } fn walls(mut commands: Commands) { let mut camera = OrthographicCameraBundle::new_2d(); camera.transform.translation.x = 630.0; camera.transform.translation.y = 350.0; commands.spawn_bundle(LightBundle { light: Light { intensity: 100_000.0, range: 6000.0, ..Default::default() }, ..Default::default() }); commands.spawn_bundle(camera); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 0.0).into(), shape: ColliderShape::cuboid(0.1, 9.0), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(10.0, 0.0).into(), shape: ColliderShape::cuboid(0.1, 9.0), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 0.0).into(), shape: ColliderShape::cuboid(12.0, 0.1), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 7.0).into(), shape: ColliderShape::cuboid(12.0, 0.1), ..Default::default() }) 
.insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); } fn add_physics_components( rapier: Res<RapierConfiguration>, mut commands: Commands, query: Query<(Entity, &GlobalTransform), Added<WidgetId>>, ) { for (card, transform) in query.iter() { commands .entity(card) .insert_bundle(RigidBodyBundle { position: (transform.translation / rapier.scale).into(), mass_properties: RigidBodyMassPropsFlags::ROTATION_LOCKED.into(), damping: RigidBodyDamping { linear_damping: LINEAR_DAMPING, ..Default::default() }, ..Default::default() }) .insert(RigidBodyPositionSync::Discrete) .with_children(|build| { build.spawn_bundle(ColliderBundle { shape: ColliderShape::cuboid(0.1, 0.1), ..Default::default() }); }); } } fn update_shape( rapier: Res<RapierConfiguration>, shape: Query<(&Shape, Entity, &Children)>, mut collider: Query<(&mut ColliderShape, &mut ColliderParent)>, ) { for (shape, entity, children) in shape.iter() { std::iter::once(&entity) .to_owned() .chain(children.iter()) .for_each(|&entity| { if let Ok((mut collider_shape, mut parent)) = collider.get_mut(entity) { use Shape::*; match shape { Rect { width, height } => { let width = *width / rapier.scale / 2.0; let height = *height / rapier.scale / 2.0; *collider_shape = ColliderShape::cuboid(width, height); parent.pos_wrt_parent.translation = Vec2::new(width, -height).into(); } Blank => {} _ => todo!(), }; } }); } } fn update_velocity( rapier: Res<RapierConfiguration>, mut query: Query<(&mut RigidBodyVelocity, &Velocity), Changed<Velocity>>, ) { for (mut rapier_velocity, velocity) in query.iter_mut() { rapier_velocity.linvel.x = velocity.0.x / rapier.scale; rapier_velocity.linvel.y = velocity.0.y / rapier.scale; } } fn update_drag_state(mut query: Query<(&mut RigidBodyDamping, &DragState), Changed<DragState>>) { for (mut damping, drag_state) in query.iter_mut() { use DragState::*; match drag_state { Dragging => { damping.linear_damping = 0.0; } NotDragging => { damping.linear_damping = 
LINEAR_DAMPING; } } } }
use core::DeskSystem; use bevy::prelude::*; use bevy_rapier2d::prelude::*; use physics::{shape::Shape, widget::WidgetId, DragState, Velocity}; pub struct PhysicsPlugin; const LINEAR_DAMPING: f32 = 8.0; impl Plugin for PhysicsPlugin { fn build(&self, app: &mut bevy::app::AppBuilder) { app.add_plugin(RapierPhysicsPlugin::<NoUserData>::default()) .add_plugin(RapierRenderPlugin) .add_startup_system(walls.system()) .insert_resource(RapierConfiguration { scale: 100.0, gravity: Vec2::ZERO.into(), ..Default::default() }) .add_system( add_physics_components .system() .after(DeskSystem::Shell) .before(DeskSystem::PrePhysics), ) .add_system_set( SystemSet::new() .label(DeskSystem::PrePhysics) .with_system(update_shape.system()) .with_system(update_velocity.system()) .with_system(update_drag_state.system()), ); } } fn walls(mut commands: Commands) { let mut camera = OrthographicCameraBundle::new_2d(); camera.transform.translation.x = 630.0; camera.transform.translation.y = 350.0; commands.spawn_bundle(LightBundle { light: Light { intensity: 100_000.0, range: 6000.0, ..Default::default() }, ..Default::default() }); commands.spawn_bundle(camera); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 0.0).into(), shape: ColliderShape::cuboid(0.1, 9.0), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(10.0, 0.0).into(), shape: ColliderShape::cuboid(0.1, 9.0), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 0.0).into(), shape: ColliderShape::cuboid(12.0, 0.1), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 7.0).into(), shape: ColliderShape::cuboid(12.0, 0.1), ..Default::default() }) 
.insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); } fn add_physics_components( rapier: Res<RapierConfiguration>, mut commands: Commands, query: Query<(Entity, &GlobalTransform), Added<WidgetId>>, ) { for (card, transform) in query.iter() { commands .entity(card) .insert_bundle(RigidBodyBundle { position: (transform.translation / rapier.scale).into(), mass_properties: RigidBodyMassPropsFlags::ROTATION_LOCKED.into(), damping: RigidBodyDamping { linear_damping: LINEAR_DAMPING, ..Default::default() }, ..Default::default() }) .insert(RigidBodyPositionSync::Discrete) .with_children(|build| { build.spawn_bundle(ColliderBundle { shape: ColliderShape::cuboid(0.1, 0.1), ..Default::default() }); }); } } fn update_shape( rapier: Res<RapierConfiguration>, shape: Query<(&Shape, Entity, &Children)>, mut collider: Query<(&mut ColliderShape, &mut ColliderParent)>, ) { for (shape, entity, children) in shape.iter() { std::iter::once(&entity) .to_owned() .chain(children.iter()) .for_each(|&entity| { if let Ok((mut collider_shape, mut parent)) = collider.get_mut(entity) { use Shape::*; match shape { Rect { width, height } => { let width = *width / rapier.scale / 2.0; let height = *height / rapier.scale / 2.0; *collider_shape = ColliderShape::cuboid(width, height); parent.pos_wrt_parent.translation = Vec2::new(width, -height).into(); } Blank => {} _ => todo!(), }; } }); } } fn update_velocity( rapier: Res<RapierConfiguration>, mut query: Query<(&mut RigidBodyVelocity, &Velocity), Changed<Velocity>>, ) { for (mut rapier_velocity, velocity) in query.iter_mut() { rapier_velocity.linvel.x = velocity.0.x / rapier.scale; rapier_velocity.linvel.y = velocity.0.y / rapier.scale; } } fn update_drag_state(mut query: Query<(&mut RigidBodyDam
ping, &DragState), Changed<DragState>>) { for (mut damping, drag_state) in query.iter_mut() { use DragState::*; match drag_state { Dragging => { damping.linear_damping = 0.0; } NotDragging => { damping.linear_damping = LINEAR_DAMPING; } } } }
function_block-function_prefixed
[ { "content": "fn reset_velocity(mut query: Query<&mut Velocity>) {\n\n for mut velocity in query.iter_mut() {\n\n velocity.0 = Vec2::ZERO;\n\n }\n\n}\n\n\n", "file_path": "crates/plugins/shell/src/lib.rs", "rank": 0, "score": 207939.65898868084 }, { "content": "fn translate_posi...
Rust
crypto-msg-parser/src/exchanges/binance/binance_all.rs
CPT-Jack-A-Castle/crypto-crawler-rs
e7b8a2d51989e69779c69e3e7755351fe5fcb3bb
use crypto_market_type::MarketType; use crate::{FundingRateMsg, MessageType, Order, OrderBookMsg, TradeMsg, TradeSide}; use super::super::utils::calc_quantity_and_volume; use serde::{Deserialize, Serialize}; use serde_json::{Result, Value}; use std::collections::HashMap; const EXCHANGE_NAME: &str = "binance"; #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct AggTradeMsg { e: String, E: i64, s: String, a: i64, p: String, q: String, f: i64, l: i64, T: i64, m: bool, #[serde(flatten)] extra: HashMap<String, Value>, } #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawTradeMsg { e: String, E: i64, s: String, t: i64, p: String, q: String, b: i64, a: i64, T: i64, m: bool, #[serde(flatten)] extra: HashMap<String, Value>, } pub type RawOrder = [String; 2]; #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawOrderbookMsg { e: String, E: i64, T: Option<i64>, s: String, U: i64, u: i64, b: Vec<RawOrder>, a: Vec<RawOrder>, #[serde(flatten)] extra: HashMap<String, Value>, } #[derive(Serialize, Deserialize)] struct WebsocketMsg<T: Sized> { stream: String, data: T, } pub(crate) fn parse_trade(market_type: MarketType, msg: &str) -> Result<Vec<TradeMsg>> { let obj = serde_json::from_str::<HashMap<String, Value>>(&msg)?; let data = obj.get("data").unwrap(); let event_type = data.get("e").unwrap().as_str().unwrap(); match event_type { "aggTrade" => { let agg_trade: AggTradeMsg = serde_json::from_value(data.clone()).unwrap(); let pair = crypto_pair::normalize_pair(&agg_trade.s, EXCHANGE_NAME).unwrap(); let price = agg_trade.p.parse::<f64>().unwrap(); let quantity = agg_trade.q.parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume(EXCHANGE_NAME, market_type, &pair, price, quantity); let trade = TradeMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: agg_trade.s.clone(), pair, msg_type: MessageType::Trade, timestamp: agg_trade.T, price, quantity_base, quantity_quote, 
quantity_contract, side: if agg_trade.m { TradeSide::Sell } else { TradeSide::Buy }, trade_id: agg_trade.a.to_string(), raw: serde_json::from_str(msg)?, }; Ok(vec![trade]) } "trade" => { let raw_trade: RawTradeMsg = serde_json::from_value(data.clone()).unwrap(); let pair = crypto_pair::normalize_pair(&raw_trade.s, EXCHANGE_NAME).unwrap(); let price = raw_trade.p.parse::<f64>().unwrap(); let quantity = raw_trade.q.parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume(EXCHANGE_NAME, market_type, &pair, price, quantity); let trade = TradeMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: raw_trade.s.clone(), pair, msg_type: MessageType::Trade, timestamp: raw_trade.T, price, quantity_base, quantity_quote, quantity_contract, side: if raw_trade.m { TradeSide::Sell } else { TradeSide::Buy }, trade_id: raw_trade.t.to_string(), raw: serde_json::from_str(msg)?, }; Ok(vec![trade]) } _ => panic!("Unsupported event type {}", event_type), } } pub(crate) fn parse_l2(market_type: MarketType, msg: &str) -> Result<Vec<OrderBookMsg>> { let ws_msg = serde_json::from_str::<WebsocketMsg<RawOrderbookMsg>>(&msg)?; let pair = crypto_pair::normalize_pair(&ws_msg.data.s, EXCHANGE_NAME).unwrap(); let parse_order = |raw_order: &RawOrder| -> Order { let price = raw_order[0].parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume( EXCHANGE_NAME, market_type, &pair, price, raw_order[1].parse::<f64>().unwrap(), ); Order { price, quantity_base, quantity_quote, quantity_contract, } }; let orderbook = OrderBookMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: ws_msg.data.s.clone(), pair: pair.clone(), msg_type: MessageType::L2Event, timestamp: if market_type == MarketType::Spot { ws_msg.data.E } else { ws_msg.data.T.unwrap() }, asks: ws_msg .data .a .iter() .map(|raw_order| parse_order(raw_order)) .collect::<Vec<Order>>(), bids: ws_msg .data .b .iter() .map(|raw_order| 
parse_order(raw_order)) .collect::<Vec<Order>>(), snapshot: false, raw: serde_json::from_str(msg)?, }; Ok(vec![orderbook]) } #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawFundingRateMsg { e: String, E: i64, s: String, p: String, i: Option<String>, P: String, r: String, T: i64, #[serde(flatten)] extra: HashMap<String, Value>, } pub(crate) fn parse_funding_rate( market_type: MarketType, msg: &str, ) -> Result<Vec<FundingRateMsg>> { let obj = serde_json::from_str::<HashMap<String, Value>>(&msg)?; let stream = obj.get("stream").unwrap().as_str().unwrap(); let data = if stream == "!markPrice@arr" { obj.get("data") .unwrap() .as_array() .unwrap() .iter() .map(|x| serde_json::from_value::<RawFundingRateMsg>(x.clone()).unwrap()) .collect() } else if stream.ends_with("@markPrice") { vec![serde_json::from_value::<RawFundingRateMsg>(obj.get("data").unwrap().clone()).unwrap()] } else { panic!("Unknown funding rate messaeg {}", msg); }; let funding_rates: Vec<FundingRateMsg> = data .into_iter() .filter(|x| !x.r.is_empty()) .map(|raw_msg| FundingRateMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: raw_msg.s.clone(), pair: crypto_pair::normalize_pair(&raw_msg.s, EXCHANGE_NAME).unwrap(), msg_type: MessageType::FundingRate, timestamp: raw_msg.E, funding_rate: raw_msg.r.parse::<f64>().unwrap(), funding_time: raw_msg.T, estimated_rate: None, raw: if stream == "!markPrice@arr" { serde_json::to_value(&raw_msg).unwrap() } else { serde_json::from_str(msg).unwrap() }, }) .collect(); Ok(funding_rates) }
use crypto_market_type::MarketType; use crate::{FundingRateMsg, MessageType, Order, OrderBookMsg, TradeMsg, TradeSide}; use super::super::utils::calc_quantity_and_volume; use serde::{Deserialize, Serialize}; use serde_json::{Result, Value}; use std::collections::HashMap; const EXCHANGE_NAME: &str = "binance"; #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct AggTradeMsg { e: String, E: i64, s: String, a: i64, p: String, q: String, f: i64, l: i64, T: i64, m: bool, #[serde(flatten)] extra: HashMap<String, Value>, } #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawTradeMsg { e: String, E: i64, s: String, t: i64, p: String, q: String, b: i64, a: i64, T: i64, m: bool, #[serde(flatten)] extra: HashMap<String, Value>, } pub type RawOrder = [String; 2]; #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawOrderbookMsg { e: String, E: i64, T: Option<i64>, s: String, U: i64, u: i64, b: Vec<RawOrder>, a: Vec<RawOrder>, #[serde(flatten)] extra: HashMap<String, Value>, } #[derive(Serialize, Deserialize)] struct WebsocketMsg<T: Sized> { stream: String, data: T, } pub(crate) fn parse_trade(market_type: MarketType, msg: &str) -> Result<Vec<TradeMsg>> { let obj = serde_json::from_str::<HashMap<String, Value>>(&msg)?; let data = obj.get("data").unwrap(); let event_type = data.get("e").unwrap().as_str().unwrap(); match event_type { "aggTrade" => { let agg_trade: AggTradeMsg = serde_json::from_value(data.clone()).unwrap(); let pair = crypto_pair::normalize_pair(&agg_trade.s, EXCHANGE_NAME).unwrap(); let price = agg_trade.p.parse::<f64>().unwrap(); let quantity = agg_trade.q.parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume(EXCHANGE_NAME, market_type, &pair, price, quantity); let trade = TradeMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: agg_trade.s.clone(), pair, msg_type: MessageType::Trade, timestamp: agg_trade.T, price, quantity_base, quantity_quote, 
quantity_contract, side: if agg_trade.m { TradeSide::Sell } else { TradeSide::Buy }, trade_id: agg_trade.a.to_string(), raw: serde_json::from_str(msg)?, }; Ok(vec![trade]) } "trade" => { let raw_trade: RawTradeMsg = serde_json::from_value(data.clone()).unwrap(); let pair = crypto_pair::normalize_pair(&raw_trade.s, EXCHANGE_NAME).unwrap(); let price = raw_trade.p.parse::<f64>().unwrap(); let quantity = raw_trade.q.parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume(EXCHANGE_NAME, market_type, &pair, price, quantity); let trade = TradeMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: raw_trade.s.clone(), pair, msg_type: MessageType::Trade, timestamp: raw_trade.T, price, quantity_base, quantity_quote, quantity_contract, side: if raw_trade.m { TradeSide::Sell } else { TradeSide::Buy }, trade_id: raw_trade.t.to_string(), raw: serde_json::from_str(msg)?, }; Ok(vec![trade]) } _ => panic!("Unsupported event type {}", event_type), } } pub(crate) fn parse_l2(market_type: MarketType, msg: &str) -> Result<Vec<OrderBookMsg>> { let ws_msg = serde_json::from_str::<WebsocketMsg<RawOrderbookMsg>>(&msg)?; let pair = crypto_pair::normalize_pair(&ws_msg.data.s, EXCHANGE_NAME).unwrap(); let parse_order = |raw_order: &RawOrder| -> Order { let price = raw_order[0].parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume( EXCHANGE_NAME, market_type, &pair, price, raw_order[1].parse::<f64>().unwrap(), ); Order { price, quantity_base, quantity_quote, quantity_contract, } }; let orderbook = OrderBookMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: ws_msg.data.s.clone(), pair: pair.clone(), msg_type: MessageType::L2Event, timestamp:
, asks: ws_msg .data .a .iter() .map(|raw_order| parse_order(raw_order)) .collect::<Vec<Order>>(), bids: ws_msg .data .b .iter() .map(|raw_order| parse_order(raw_order)) .collect::<Vec<Order>>(), snapshot: false, raw: serde_json::from_str(msg)?, }; Ok(vec![orderbook]) } #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawFundingRateMsg { e: String, E: i64, s: String, p: String, i: Option<String>, P: String, r: String, T: i64, #[serde(flatten)] extra: HashMap<String, Value>, } pub(crate) fn parse_funding_rate( market_type: MarketType, msg: &str, ) -> Result<Vec<FundingRateMsg>> { let obj = serde_json::from_str::<HashMap<String, Value>>(&msg)?; let stream = obj.get("stream").unwrap().as_str().unwrap(); let data = if stream == "!markPrice@arr" { obj.get("data") .unwrap() .as_array() .unwrap() .iter() .map(|x| serde_json::from_value::<RawFundingRateMsg>(x.clone()).unwrap()) .collect() } else if stream.ends_with("@markPrice") { vec![serde_json::from_value::<RawFundingRateMsg>(obj.get("data").unwrap().clone()).unwrap()] } else { panic!("Unknown funding rate messaeg {}", msg); }; let funding_rates: Vec<FundingRateMsg> = data .into_iter() .filter(|x| !x.r.is_empty()) .map(|raw_msg| FundingRateMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: raw_msg.s.clone(), pair: crypto_pair::normalize_pair(&raw_msg.s, EXCHANGE_NAME).unwrap(), msg_type: MessageType::FundingRate, timestamp: raw_msg.E, funding_rate: raw_msg.r.parse::<f64>().unwrap(), funding_time: raw_msg.T, estimated_rate: None, raw: if stream == "!markPrice@arr" { serde_json::to_value(&raw_msg).unwrap() } else { serde_json::from_str(msg).unwrap() }, }) .collect(); Ok(funding_rates) }
if market_type == MarketType::Spot { ws_msg.data.E } else { ws_msg.data.T.unwrap() }
if_condition
[ { "content": "pub fn check_trade_fields(exchange: &str, market_type: MarketType, pair: String, trade: &TradeMsg) {\n\n assert_eq!(trade.exchange, exchange);\n\n assert_eq!(trade.market_type, market_type);\n\n assert_eq!(trade.pair, pair);\n\n assert_eq!(trade.msg_type, MessageType::Trade);\n\n as...
Rust
lapce-data/src/rich_text.rs
mirchandani-mohnish/lapce
d20ddbee3bd39c03aae6d59e7bd1c61eb3c45e9f
use std::{ ops::{Range, RangeBounds}, sync::Arc, }; use druid::{ piet::TextStorage as PietTextStorage, piet::{PietTextLayoutBuilder, TextLayoutBuilder}, text::{Attribute, AttributeSpans, Link}, text::{EnvUpdateCtx, TextStorage}, ArcStr, Color, Command, Data, Env, FontDescriptor, FontFamily, FontStyle, FontWeight, KeyOrValue, }; #[derive(Clone, Debug, Data)] pub struct RichText { buffer: ArcStr, attrs: Arc<AttributeSpans>, line_height: f64, } impl RichText { pub fn new(buffer: ArcStr) -> Self { RichText::new_with_attributes(buffer, Default::default()) } pub fn new_with_attributes(buffer: ArcStr, attributes: AttributeSpans) -> Self { RichText { buffer, attrs: Arc::new(attributes), line_height: 0.0, } } pub fn with_attribute( mut self, range: impl RangeBounds<usize>, attr: Attribute, ) -> Self { self.add_attribute(range, attr); self } pub fn len(&self) -> usize { self.buffer.len() } pub fn is_empty(&self) -> bool { self.buffer.is_empty() } pub fn add_attribute( &mut self, range: impl RangeBounds<usize>, attr: Attribute, ) { let range = druid::piet::util::resolve_range(range, self.buffer.len()); Arc::make_mut(&mut self.attrs).add(range, attr); } } impl PietTextStorage for RichText { fn as_str(&self) -> &str { self.buffer.as_str() } } impl TextStorage for RichText { fn add_attributes( &self, mut builder: PietTextLayoutBuilder, env: &Env, ) -> PietTextLayoutBuilder { for (range, attr) in self.attrs.to_piet_attrs(env) { builder = builder.range_attribute(range, attr); } if self.line_height > 0.0 { builder = builder.set_line_height(self.line_height); } builder } fn env_update(&self, _ctx: &EnvUpdateCtx) -> bool { false } fn links(&self) -> &[Link] { &[] } } #[derive(Default)] pub struct RichTextBuilder { buffer: String, attrs: AttributeSpans, links: Vec<Link>, line_height: f64, } impl RichTextBuilder { pub fn new() -> Self { Self::default() } pub fn push(&mut self, string: &str) -> AttributesAdder { let range = self.buffer.len()..(self.buffer.len() + string.len()); 
self.buffer.push_str(string); self.add_attributes_for_range(range) } pub fn set_line_height(&mut self, line_height: f64) { self.line_height = line_height; } #[doc(hidden)] pub fn write_fmt(&mut self, fmt: std::fmt::Arguments<'_>) -> AttributesAdder { use std::fmt::Write; let start = self.buffer.len(); self.buffer .write_fmt(fmt) .expect("a formatting trait implementation returned an error"); self.add_attributes_for_range(start..self.buffer.len()) } pub fn add_attributes_for_range( &mut self, range: impl RangeBounds<usize>, ) -> AttributesAdder { let range = druid::piet::util::resolve_range(range, self.buffer.len()); AttributesAdder { rich_text_builder: self, range, } } pub fn build(self) -> RichText { RichText { buffer: self.buffer.into(), attrs: self.attrs.into(), line_height: self.line_height, } } } pub struct AttributesAdder<'a> { rich_text_builder: &'a mut RichTextBuilder, range: Range<usize>, } impl AttributesAdder<'_> { pub fn add_attr(&mut self, attr: Attribute) -> &mut Self { self.rich_text_builder.attrs.add(self.range.clone(), attr); self } pub fn size(&mut self, size: impl Into<KeyOrValue<f64>>) -> &mut Self { self.add_attr(Attribute::size(size)); self } pub fn text_color(&mut self, color: impl Into<KeyOrValue<Color>>) -> &mut Self { self.add_attr(Attribute::text_color(color)); self } pub fn font_family(&mut self, family: FontFamily) -> &mut Self { self.add_attr(Attribute::font_family(family)); self } pub fn weight(&mut self, weight: FontWeight) -> &mut Self { self.add_attr(Attribute::weight(weight)); self } pub fn style(&mut self, style: FontStyle) -> &mut Self { self.add_attr(Attribute::style(style)); self } pub fn underline(&mut self, underline: bool) -> &mut Self { self.add_attr(Attribute::underline(underline)); self } pub fn font_descriptor( &mut self, font: impl Into<KeyOrValue<FontDescriptor>>, ) -> &mut Self { self.add_attr(Attribute::font_descriptor(font)); self } pub fn link(&mut self, command: impl Into<Command>) -> &mut Self { 
self.rich_text_builder .links .push(Link::new(self.range.clone(), command.into())); self } }
use std::{ ops::{Range, RangeBounds}, sync::Arc, }; use druid::{ piet::TextStorage as PietTextStorage, piet::{PietTextLayoutBuilder, TextLayoutBuilder}, text::{Attribute, AttributeSpans, Link}, text::{EnvUpdateCtx, TextStorage}, ArcStr, Color, Command, Data, Env, FontDescriptor, FontFamily, FontStyle, FontWeight, KeyOrValue, }; #[derive(Clone, Debug, Data)] pub struct RichText { buffer: ArcStr, attrs: Arc<AttributeSpans>, line_height: f64, } impl RichText { pub fn new(buffer: ArcStr) -> Self { RichText::new_with_attributes(buffer, Default::default()) } pub fn new_with_attributes(buffer: ArcStr, attributes: AttributeSpans) -> Self { RichText { buffer, attrs: Arc::new(attributes), line_height: 0.0, } } pub fn with_attribute( mut self, range: impl RangeBounds<usize>, attr: Attribute, ) -> Self { self.add_attribute(range, attr); self } pub fn len(&self) -> usize { self.buffer.len() } pub fn is_empty(&self) -> bool { self.buffer.is_empty() } pub fn add_attribute( &mut self, range: impl RangeBounds<usize>, attr: Attribute, ) { let range = druid::piet::util::resolve_range(range, self.buffer.len()); Arc::make_mut(&mut self.attrs).add(range, attr); } } impl PietTextStorage for RichText { fn as_str(&self) -> &str { self.buffer.as_str() } } impl TextStorage for RichText { fn add_attributes( &self, mut builder: PietTextLayoutBuilder, env: &Env, ) -> PietTextLayoutBuilder { for (range, attr) in self.attrs.to_piet_attrs(env) { builder = builder.range_attribute(range, attr); } if self.line_height > 0.0 { builder = builder.set_line_height(self.line_height); } builder } fn env_update(&self, _ctx: &EnvUpdateCtx) -> bool { false } fn links(&self) -> &[Link] { &[] } } #[derive(Default)] pub struct RichTextBuilder { buffer: String, attrs: AttributeSpans, links: Vec<Link>, line_height: f64, } impl RichTextBuilder { pub fn
t_family(&mut self, family: FontFamily) -> &mut Self { self.add_attr(Attribute::font_family(family)); self } pub fn weight(&mut self, weight: FontWeight) -> &mut Self { self.add_attr(Attribute::weight(weight)); self } pub fn style(&mut self, style: FontStyle) -> &mut Self { self.add_attr(Attribute::style(style)); self } pub fn underline(&mut self, underline: bool) -> &mut Self { self.add_attr(Attribute::underline(underline)); self } pub fn font_descriptor( &mut self, font: impl Into<KeyOrValue<FontDescriptor>>, ) -> &mut Self { self.add_attr(Attribute::font_descriptor(font)); self } pub fn link(&mut self, command: impl Into<Command>) -> &mut Self { self.rich_text_builder .links .push(Link::new(self.range.clone(), command.into())); self } }
new() -> Self { Self::default() } pub fn push(&mut self, string: &str) -> AttributesAdder { let range = self.buffer.len()..(self.buffer.len() + string.len()); self.buffer.push_str(string); self.add_attributes_for_range(range) } pub fn set_line_height(&mut self, line_height: f64) { self.line_height = line_height; } #[doc(hidden)] pub fn write_fmt(&mut self, fmt: std::fmt::Arguments<'_>) -> AttributesAdder { use std::fmt::Write; let start = self.buffer.len(); self.buffer .write_fmt(fmt) .expect("a formatting trait implementation returned an error"); self.add_attributes_for_range(start..self.buffer.len()) } pub fn add_attributes_for_range( &mut self, range: impl RangeBounds<usize>, ) -> AttributesAdder { let range = druid::piet::util::resolve_range(range, self.buffer.len()); AttributesAdder { rich_text_builder: self, range, } } pub fn build(self) -> RichText { RichText { buffer: self.buffer.into(), attrs: self.attrs.into(), line_height: self.line_height, } } } pub struct AttributesAdder<'a> { rich_text_builder: &'a mut RichTextBuilder, range: Range<usize>, } impl AttributesAdder<'_> { pub fn add_attr(&mut self, attr: Attribute) -> &mut Self { self.rich_text_builder.attrs.add(self.range.clone(), attr); self } pub fn size(&mut self, size: impl Into<KeyOrValue<f64>>) -> &mut Self { self.add_attr(Attribute::size(size)); self } pub fn text_color(&mut self, color: impl Into<KeyOrValue<Color>>) -> &mut Self { self.add_attr(Attribute::text_color(color)); self } pub fn fon
random
[]
Rust
src/proc/bin/starnix/fs/fuchsia/remote.rs
dahliaOS/fuchsia-pi4
5b534fccefd918b5f03205393c1fe5fddf8031d0
use fidl_fuchsia_io as fio; use fidl_fuchsia_kernel as fkernel; use fuchsia_component::client::connect_channel_to_protocol; use fuchsia_zircon as zx; use lazy_static::lazy_static; use log::info; use crate::fd_impl_seekable; use crate::fs::*; use crate::task::*; use crate::types::*; lazy_static! { static ref VMEX_RESOURCE: zx::Resource = { let (client_end, server_end) = zx::Channel::create().unwrap(); connect_channel_to_protocol::<fkernel::VmexResourceMarker>(server_end) .expect("couldn't connect to fuchsia.kernel.VmexResource"); let service = fkernel::VmexResourceSynchronousProxy::new(client_end); service.get(zx::Time::INFINITE).expect("couldn't talk to fuchsia.kernel.VmexResource") }; } pub struct RemoteFile { node: RemoteNode, } enum RemoteNode { File(fio::FileSynchronousProxy), Directory(fio::DirectorySynchronousProxy), Other(fio::NodeSynchronousProxy), } impl RemoteNode { fn get_attr(&self) -> Result<(i32, fio::NodeAttributes), fidl::Error> { match self { RemoteNode::File(n) => n.get_attr(zx::Time::INFINITE), RemoteNode::Directory(n) => n.get_attr(zx::Time::INFINITE), RemoteNode::Other(n) => n.get_attr(zx::Time::INFINITE), } } } impl RemoteFile { pub fn from_description(description: syncio::DescribedNode) -> FileHandle { let node = match description.info { fio::NodeInfo::Directory(_) => RemoteNode::Directory( fio::DirectorySynchronousProxy::new(description.node.into_channel()), ), fio::NodeInfo::File(_) => { RemoteNode::File(fio::FileSynchronousProxy::new(description.node.into_channel())) } _ => RemoteNode::Other(description.node), }; FileObject::new(RemoteFile { node }) } } const BYTES_PER_BLOCK: i64 = 512; impl FileOps for RemoteFile { fd_impl_seekable!(); fn read_at( &self, _fd: &FileObject, task: &Task, offset: usize, buf: &[iovec_t], ) -> Result<usize, Errno> { let mut total = 0; for vec in buf { total += vec.iov_len; } let (status, data) = match self.node { RemoteNode::File(ref n) => { n.read_at(total as u64, offset as u64, 
zx::Time::INFINITE).map_err(fidl_error) } RemoteNode::Directory(_) => Err(EISDIR), RemoteNode::Other(_) => Err(EINVAL), }?; zx::Status::ok(status).map_err(fio_error)?; let mut offset = 0; for vec in buf { let end = std::cmp::min(offset + vec.iov_len, data.len()); task.mm.write_memory(vec.iov_base, &data[offset..end])?; offset = end; if offset == data.len() { break; } } Ok(data.len()) } fn write_at( &self, _fd: &FileObject, _task: &Task, _offset: usize, _data: &[iovec_t], ) -> Result<usize, Errno> { Err(ENOSYS) } fn get_vmo( &self, _fd: &FileObject, _task: &Task, mut prot: zx::VmarFlags, _flags: u32, ) -> Result<zx::Vmo, Errno> { let has_execute = prot.contains(zx::VmarFlags::PERM_EXECUTE); prot -= zx::VmarFlags::PERM_EXECUTE; let (status, buffer) = match self.node { RemoteNode::File(ref n) => { n.get_buffer(prot.bits(), zx::Time::INFINITE).map_err(fidl_error) } _ => Err(ENODEV), }?; zx::Status::ok(status).map_err(fio_error)?; let mut vmo = buffer.unwrap().vmo; if has_execute { vmo = vmo.replace_as_executable(&VMEX_RESOURCE).expect("replace_as_executable failed"); } Ok(vmo) } fn fstat(&self, _fd: &FileObject, task: &Task) -> Result<stat_t, Errno> { let (status, attrs) = self.node.get_attr().map_err(fidl_error)?; zx::Status::ok(status).map_err(fio_error)?; Ok(stat_t { st_mode: attrs.mode, st_ino: attrs.id, st_size: attrs.content_size as i64, st_blocks: attrs.storage_size as i64 / BYTES_PER_BLOCK, st_uid: task.creds.uid, st_gid: task.creds.gid, st_nlink: attrs.link_count, ..stat_t::default() }) } } fn fidl_error(err: fidl::Error) -> Errno { info!("fidl error: {}", err); EIO } fn fio_error(status: zx::Status) -> Errno { Errno::from_status_like_fdio(status) }
use fidl_fuchsia_io as fio; use fidl_fuchsia_kernel as fkernel; use fuchsia_component::client::connect_channel_to_protocol; use fuchsia_zircon as zx; use lazy_static::lazy_static; use log::info; use crate::fd_impl_seekable; use crate::fs::*; use crate::task::*; use crate::types::*; lazy_static! { static ref VMEX_RESOURCE: zx::Resource = { let (client_end, server_end) = zx::Channel::create().unwrap(); connect_channel_to_protocol::<fkernel::VmexResourceMarker>(server_end)
s, buffer) = match self.node { RemoteNode::File(ref n) => { n.get_buffer(prot.bits(), zx::Time::INFINITE).map_err(fidl_error) } _ => Err(ENODEV), }?; zx::Status::ok(status).map_err(fio_error)?; let mut vmo = buffer.unwrap().vmo; if has_execute { vmo = vmo.replace_as_executable(&VMEX_RESOURCE).expect("replace_as_executable failed"); } Ok(vmo) } fn fstat(&self, _fd: &FileObject, task: &Task) -> Result<stat_t, Errno> { let (status, attrs) = self.node.get_attr().map_err(fidl_error)?; zx::Status::ok(status).map_err(fio_error)?; Ok(stat_t { st_mode: attrs.mode, st_ino: attrs.id, st_size: attrs.content_size as i64, st_blocks: attrs.storage_size as i64 / BYTES_PER_BLOCK, st_uid: task.creds.uid, st_gid: task.creds.gid, st_nlink: attrs.link_count, ..stat_t::default() }) } } fn fidl_error(err: fidl::Error) -> Errno { info!("fidl error: {}", err); EIO } fn fio_error(status: zx::Status) -> Errno { Errno::from_status_like_fdio(status) }
.expect("couldn't connect to fuchsia.kernel.VmexResource"); let service = fkernel::VmexResourceSynchronousProxy::new(client_end); service.get(zx::Time::INFINITE).expect("couldn't talk to fuchsia.kernel.VmexResource") }; } pub struct RemoteFile { node: RemoteNode, } enum RemoteNode { File(fio::FileSynchronousProxy), Directory(fio::DirectorySynchronousProxy), Other(fio::NodeSynchronousProxy), } impl RemoteNode { fn get_attr(&self) -> Result<(i32, fio::NodeAttributes), fidl::Error> { match self { RemoteNode::File(n) => n.get_attr(zx::Time::INFINITE), RemoteNode::Directory(n) => n.get_attr(zx::Time::INFINITE), RemoteNode::Other(n) => n.get_attr(zx::Time::INFINITE), } } } impl RemoteFile { pub fn from_description(description: syncio::DescribedNode) -> FileHandle { let node = match description.info { fio::NodeInfo::Directory(_) => RemoteNode::Directory( fio::DirectorySynchronousProxy::new(description.node.into_channel()), ), fio::NodeInfo::File(_) => { RemoteNode::File(fio::FileSynchronousProxy::new(description.node.into_channel())) } _ => RemoteNode::Other(description.node), }; FileObject::new(RemoteFile { node }) } } const BYTES_PER_BLOCK: i64 = 512; impl FileOps for RemoteFile { fd_impl_seekable!(); fn read_at( &self, _fd: &FileObject, task: &Task, offset: usize, buf: &[iovec_t], ) -> Result<usize, Errno> { let mut total = 0; for vec in buf { total += vec.iov_len; } let (status, data) = match self.node { RemoteNode::File(ref n) => { n.read_at(total as u64, offset as u64, zx::Time::INFINITE).map_err(fidl_error) } RemoteNode::Directory(_) => Err(EISDIR), RemoteNode::Other(_) => Err(EINVAL), }?; zx::Status::ok(status).map_err(fio_error)?; let mut offset = 0; for vec in buf { let end = std::cmp::min(offset + vec.iov_len, data.len()); task.mm.write_memory(vec.iov_base, &data[offset..end])?; offset = end; if offset == data.len() { break; } } Ok(data.len()) } fn write_at( &self, _fd: &FileObject, _task: &Task, _offset: usize, _data: &[iovec_t], ) -> Result<usize, Errno> { 
Err(ENOSYS) } fn get_vmo( &self, _fd: &FileObject, _task: &Task, mut prot: zx::VmarFlags, _flags: u32, ) -> Result<zx::Vmo, Errno> { let has_execute = prot.contains(zx::VmarFlags::PERM_EXECUTE); prot -= zx::VmarFlags::PERM_EXECUTE; let (statu
random
[]
Rust
policy-test/tests/e2e_authorization_policy.rs
giantswarm/linkerd2
9d868c097d6c01f63d371578b960ff5d844303cf
use linkerd_policy_controller_k8s_api::{ self as k8s, policy::{LocalTargetRef, NamespacedTargetRef}, }; use linkerd_policy_test::{create, create_ready_pod, curl, nginx, with_temp_ns, LinkerdInject}; #[tokio::test(flavor = "current_thread")] async fn meshtls() { with_temp_ns(|client, ns| async move { let (srv, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&all_mtls)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), uninjected.exit_code()); assert_eq!( injected_status, 0, "uninjected curl must fail to contact nginx" ); assert_ne!(uninjected_status, 0, "injected curl must contact nginx"); }) .await; } #[tokio::test(flavor = "current_thread")] async fn targets_namespace() { with_temp_ns(|client, ns| async move { let (_srv, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef { group: None, kind: "Namespace".to_string(), name: ns.clone(), }, Some(NamespacedTargetRef::from_resource(&all_mtls)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), 
uninjected.exit_code()); assert_eq!(injected_status, 0, "injected curl must contact nginx"); assert_ne!( uninjected_status, 0, "uninjected curl must fail to contact nginx" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn meshtls_namespace() { with_temp_ns(|client, ns| async move { let (srv, mtls_ns) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, ns_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&mtls_ns)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), uninjected.exit_code()); assert_eq!(injected_status, 0, "injected curl must contact nginx"); assert_ne!( uninjected_status, 0, "uninjected curl must fail to contact nginx" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn network() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let blessed = curl .run("curl-blessed", "http://nginx", LinkerdInject::Disabled) .await; let blessed_ip = blessed.ip().await; tracing::debug!(curl.blessed.ip = %blessed_ip); let (srv, allow_ips) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, allow_ips(&ns, Some(blessed_ip))) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&allow_ips)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); curl.delete_lock().await; let status = blessed.exit_code().await; assert_eq!(status, 0, "blessed curl pod must 
succeed"); let status = curl .run("curl-cursed", "http://nginx", LinkerdInject::Disabled) .await .exit_code() .await; assert_ne!(status, 0, "cursed curl pod must fail"); }) .await; } #[tokio::test(flavor = "current_thread")] async fn both() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let (blessed_injected, blessed_uninjected) = tokio::join!( curl.run( "curl-blessed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-blessed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (blessed_injected_ip, blessed_uninjected_ip) = tokio::join!(blessed_injected.ip(), blessed_uninjected.ip(),); tracing::debug!(curl.blessed.injected.ip = ?blessed_injected_ip); tracing::debug!(curl.blessed.uninjected.ip = ?blessed_uninjected_ip); let (srv, allow_ips, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create( &client, allow_ips(&ns, vec![blessed_injected_ip, blessed_uninjected_ip]), ), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), vec![ NamespacedTargetRef::from_resource(&allow_ips), NamespacedTargetRef::from_resource(&all_mtls), ], ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); curl.delete_lock().await; tracing::info!("unblocked curl"); let (blessed_injected_status, blessed_uninjected_status) = tokio::join!(blessed_injected.exit_code(), blessed_uninjected.exit_code()); assert_eq!( blessed_injected_status, 0, "blessed injected curl pod must succeed" ); assert_ne!( blessed_uninjected_status, 0, "blessed uninjected curl pod must NOT succeed" ); let (cursed_injected, cursed_uninjected) = tokio::join!( curl.run( "curl-cursed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-cursed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (cursed_injected_status, cursed_uninjected_status) = 
tokio::join!(cursed_injected.exit_code(), cursed_uninjected.exit_code(),); assert_ne!( cursed_injected_status, 0, "cursed injected curl pod must fail" ); assert_ne!( cursed_uninjected_status, 0, "cursed uninjected curl pod must fail" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn either() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let (blessed_injected, blessed_uninjected) = tokio::join!( curl.run( "curl-blessed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-blessed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (blessed_injected_ip, blessed_uninjected_ip) = tokio::join!(blessed_injected.ip(), blessed_uninjected.ip()); tracing::debug!(curl.blessed.injected.ip = ?blessed_injected_ip); tracing::debug!(curl.blessed.uninjected.ip = ?blessed_uninjected_ip); let (srv, allow_ips, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, allow_ips(&ns, vec![blessed_uninjected_ip])), create(&client, all_authenticated(&ns)) ); tokio::join!( create( &client, authz_policy( &ns, "nginx-from-ip", LocalTargetRef::from_resource(&srv), vec![NamespacedTargetRef::from_resource(&allow_ips)], ), ), create( &client, authz_policy( &ns, "nginx-from-id", LocalTargetRef::from_resource(&srv), vec![NamespacedTargetRef::from_resource(&all_mtls)], ), ) ); tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)), ); curl.delete_lock().await; tracing::info!("unblocking curl"); let (blessed_injected_status, blessed_uninjected_status) = tokio::join!(blessed_injected.exit_code(), blessed_uninjected.exit_code()); assert_eq!( blessed_injected_status, 0, "blessed injected curl pod must succeed" ); assert_eq!( blessed_uninjected_status, 0, "blessed uninjected curl pod must succeed" ); let (cursed_injected, cursed_uninjected) = tokio::join!( curl.run( "curl-cursed-injected", "http://nginx", LinkerdInject::Enabled, 
), curl.run( "curl-cursed-uninjected", "http://nginx", LinkerdInject::Disabled, ), ); let (cursed_injected_status, cursed_uninjected_status) = tokio::join!(cursed_injected.exit_code(), cursed_uninjected.exit_code()); assert_eq!( cursed_injected_status, 0, "cursed injected curl pod must succeed" ); assert_ne!( cursed_uninjected_status, 0, "cursed uninjected curl pod must fail" ); }) .await; } fn authz_policy( ns: &str, name: &str, target: LocalTargetRef, authns: impl IntoIterator<Item = NamespacedTargetRef>, ) -> k8s::policy::AuthorizationPolicy { k8s::policy::AuthorizationPolicy { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some(name.to_string()), ..Default::default() }, spec: k8s::policy::AuthorizationPolicySpec { target_ref: target, required_authentication_refs: authns.into_iter().collect(), }, } } fn all_authenticated(ns: &str) -> k8s::policy::MeshTLSAuthentication { k8s::policy::MeshTLSAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("all-authenticated".to_string()), ..Default::default() }, spec: k8s::policy::MeshTLSAuthenticationSpec { identity_refs: None, identities: Some(vec!["*".to_string()]), }, } } fn ns_authenticated(ns: &str) -> k8s::policy::MeshTLSAuthentication { k8s::policy::MeshTLSAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("all-authenticated".to_string()), ..Default::default() }, spec: k8s::policy::MeshTLSAuthenticationSpec { identity_refs: Some(vec![NamespacedTargetRef { group: None, kind: "Namespace".to_string(), name: ns.to_string(), namespace: None, }]), identities: None, }, } } fn allow_ips( ns: &str, ips: impl IntoIterator<Item = std::net::IpAddr>, ) -> k8s::policy::NetworkAuthentication { k8s::policy::NetworkAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("allow-pod".to_string()), ..Default::default() }, spec: k8s::policy::NetworkAuthenticationSpec { networks: ips .into_iter() .map(|ip| 
k8s::policy::Network { cidr: ip.into(), except: None, }) .collect(), }, } }
use linkerd_policy_controller_k8s_api::{ self as k8s, policy::{LocalTargetRef, NamespacedTargetRef}, }; use linkerd_policy_test::{create, create_ready_pod, curl, nginx, with_temp_ns, LinkerdInject}; #[tokio::test(flavor = "current_thread")] async fn meshtls() { with_temp_ns(|client, ns| async move { let (srv, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&all_mtls)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), uninjected.exit_code()); assert_eq!( injected_status, 0, "uninjected curl must fail to contact nginx" ); assert_ne!(uninjected_status, 0, "injected curl must contact nginx"); }) .await; } #[tokio::test(flavor = "current_thread")] async fn targets_namespace() { with_temp_ns(|client, ns| async move { let (_srv, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef { group: None, kind: "Namespace".to_string(), name: ns.clone(), }, Some(NamespacedTargetRef::from_resource(&all_mtls)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), 
uninjected.exit_code()); assert_eq!(injected_status, 0, "injected curl must contact nginx"); assert_ne!( uninjected_status, 0, "uninjected curl must fail to contact nginx" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn meshtls_namespace() { with_temp_ns(|client, ns| async move { let (srv, mtls_ns) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, ns_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&mtls_ns)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), uninjected.exit_code()); assert_eq!(injected_status, 0, "injected curl must contact nginx"); assert_ne!( uninjected_status, 0, "uninjected curl must fail to contact nginx" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn network() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let blessed = curl .run("curl-blessed", "http://nginx", LinkerdInject::Disabled) .await; let blessed_ip = blessed.ip().await; tracing::debug!(curl.blessed.ip = %blessed_ip); let (srv, allow_ips) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, allow_ips(&ns, Some(blessed_ip))) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&allow_ips)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); curl.delete_lock().await; let status = blessed.exit_code().await; assert_eq!(status, 0, "blessed curl pod must 
succeed"); let status = curl .run("curl-cursed", "http://nginx", LinkerdInject::Disabled) .await .exit_code() .await; assert_ne!(status, 0, "cursed curl pod must fail"); }) .await; } #[tokio::test(flavor = "current_thread")] async fn both() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let (blessed_injected, blessed_uninjected) = tokio::join!( curl.run( "curl-blessed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-blessed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (blessed_injected_ip, blessed_uninjected_ip) = tokio::join!(blessed_injected.ip(), blessed_uninjected.ip(),); tracing::debug!(curl.blessed.injected.ip = ?blessed_injected_ip); tracing::debug!(curl.blessed.uninjected.ip = ?blessed_uninjected_ip); let (srv, allow_ips, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create( &client, allow_ips(&ns, vec![blessed_injected_ip, blessed_uninjected_ip]), ), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), vec![ NamespacedTargetRef::from_resource(&allow_ips), NamespacedTargetRef::from_resource(&all_mtls), ], ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); curl.delete_lock().await; tracing::info!("unblocked curl"); let (blessed_injected_status, blessed_uninjected_status) = tokio::join!(blessed_injected.exit_code(), blessed_uninjected.exit_code()); assert_eq!( blessed_injected_status, 0, "blessed injected curl pod must succeed" ); assert_ne!( blessed_uninjected_status, 0, "blessed uninjected curl pod must NOT succeed" ); let (cursed_injected, cursed_uninjected) = tokio::join!( curl.run( "curl-cursed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-cursed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (cursed_injected_status, cursed_uninjected_status) = 
tokio::join!(cursed_injected.exit_code(), cursed_uninjected.exit_code(),); assert_ne!( cursed_injected_status, 0, "cursed injected curl pod must fail" ); assert_ne!( cursed_uninjected_status, 0, "cursed uninjected curl pod must fail" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn either() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let (blessed_injected, blessed_uninjected) = tokio::join!( curl.run( "curl-blessed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-blessed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (blessed_injected_ip, blessed_uninjected_ip) = tokio::join!(blessed_injected.ip(), blessed_uninjected.ip()); tracing::debug!(curl.blessed.injected.ip = ?blessed_injected_ip); tracing::debug!(curl.blessed.uninjected.ip = ?blessed_uninjected_ip); let (srv, allow_ips, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, allow_ips(&ns, vec![blessed_uninjected_ip])), create(&client, all_authenticated(&ns)) ); tokio::join!( create( &client, authz_policy( &ns, "nginx-from-ip", LocalTargetRef::from_resource(&srv), vec![NamespacedTargetRef::from_resource(&allow_ips)], ), ), create( &client, authz_policy( &ns, "nginx-from-id", LocalTargetRef::from_resource(&srv), vec![NamespacedTargetRef::from_resource(&all_mtls)], ), ) ); tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)), ); curl.delete_lock().await; tracing::info!("unblocking curl"); let (blessed_injected_status, blessed_uninjected_status) = tokio::join!(blessed_injected.exit_code(), blessed_uninjected.exit_code()); assert_eq!( blessed_injected_status, 0, "blessed injected curl pod must succeed" ); assert_eq!( blessed_uninjected_status, 0, "blessed uninjected curl pod must succeed" ); let (cursed_injected, cursed_uninjected) = tokio::join!( curl.run( "curl-cursed-injected", "http://nginx", LinkerdInject::Enabled, 
), curl.run( "curl-cursed-uninjected", "http://nginx", LinkerdInject::Disabled, ), ); let (cursed_injected_status, cursed_uninjected_status) = tokio::join!(cursed_injected.exit_code(), cursed_uninjected.exit_code()); assert_eq!( cursed_injected_status, 0, "cursed injected curl pod must succeed" ); assert_ne!( cursed_uninjected_status, 0, "cursed uninjected curl pod must fail" ); }) .await; }
fn all_authenticated(ns: &str) -> k8s::policy::MeshTLSAuthentication { k8s::policy::MeshTLSAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("all-authenticated".to_string()), ..Default::default() }, spec: k8s::policy::MeshTLSAuthenticationSpec { identity_refs: None, identities: Some(vec!["*".to_string()]), }, } } fn ns_authenticated(ns: &str) -> k8s::policy::MeshTLSAuthentication { k8s::policy::MeshTLSAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("all-authenticated".to_string()), ..Default::default() }, spec: k8s::policy::MeshTLSAuthenticationSpec { identity_refs: Some(vec![NamespacedTargetRef { group: None, kind: "Namespace".to_string(), name: ns.to_string(), namespace: None, }]), identities: None, }, } } fn allow_ips( ns: &str, ips: impl IntoIterator<Item = std::net::IpAddr>, ) -> k8s::policy::NetworkAuthentication { k8s::policy::NetworkAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("allow-pod".to_string()), ..Default::default() }, spec: k8s::policy::NetworkAuthenticationSpec { networks: ips .into_iter() .map(|ip| k8s::policy::Network { cidr: ip.into(), except: None, }) .collect(), }, } }
fn authz_policy( ns: &str, name: &str, target: LocalTargetRef, authns: impl IntoIterator<Item = NamespacedTargetRef>, ) -> k8s::policy::AuthorizationPolicy { k8s::policy::AuthorizationPolicy { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some(name.to_string()), ..Default::default() }, spec: k8s::policy::AuthorizationPolicySpec { target_ref: target, required_authentication_refs: authns.into_iter().collect(), }, } }
function_block-full_function
[ { "content": "pub fn pod(ns: &str) -> k8s::Pod {\n\n k8s::Pod {\n\n metadata: k8s::ObjectMeta {\n\n namespace: Some(ns.to_string()),\n\n name: Some(\"nginx\".to_string()),\n\n annotations: Some(convert_args!(btreemap!(\n\n \"linkerd.io/inject\" => \"enab...
Rust
modules/fdb/src/ro/mod.rs
enteryournamehere/assembly_rs
dd5250abb586e135b59bf574543c386a4c89cbd9
use std::{ops::Deref, sync::Arc}; use assembly_core::buffer::{CastError, MinimallyAligned, Repr}; use self::buffer::Buffer; use super::file::ArrayHeader; pub mod buffer; pub mod handle; pub mod slice; pub type ArcHandle<B, T> = BaseHandle<Arc<B>, T>; impl<B: AsRef<[u8]>> ArcHandle<B, ()> { pub fn new_arc(inner: B) -> Self { Self::new(Arc::new(inner)) } } impl<B: AsRef<[u8]>, T: Copy> ArcHandle<B, T> { pub fn as_bytes_handle(&self) -> Handle<T> { BaseHandle { mem: Buffer::new(self.mem.as_ref().as_ref()), raw: self.raw, } } } #[derive(Clone, Debug)] pub struct BaseHandle<P: Deref, T> where <P as Deref>::Target: AsRef<[u8]>, { pub(super) mem: P, pub(super) raw: T, } impl<P, T> Copy for BaseHandle<P, T> where P: Deref + Copy, T: Copy, <P as Deref>::Target: AsRef<[u8]>, { } impl<P: Deref> BaseHandle<P, ()> where <P as Deref>::Target: AsRef<[u8]>, { pub fn new(mem: P) -> Self { Self { mem, raw: () } } } impl<T, P: Deref> BaseHandle<P, Option<T>> where <P as Deref>::Target: AsRef<[u8]>, { pub fn transpose(self) -> Option<BaseHandle<P, T>> { if let Some(raw) = self.raw { Some(BaseHandle { mem: self.mem, raw }) } else { None } } } impl<P: Deref, T> BaseHandle<P, T> where <P as Deref>::Target: AsRef<[u8]>, { pub fn raw(&self) -> &T { &self.raw } pub fn raw_mut(&mut self) -> &mut T { &mut self.raw } pub fn as_bytes(&self) -> &[u8] { self.mem.deref().as_ref() } pub fn replace<O>(self, raw: O) -> BaseHandle<P, O> { BaseHandle { mem: self.mem, raw } } } pub type Handle<'a, T> = BaseHandle<Buffer<'a>, T>; impl<'a, T> Handle<'a, T> { pub fn buf(self) -> Buffer<'a> { self.mem } pub fn into_raw(self) -> T { self.raw } pub(crate) fn wrap<R>(&self, raw: R) -> Handle<'a, R> { Handle { mem: self.mem, raw } } pub(crate) fn try_map_cast<R: MinimallyAligned>( &self, offset: u32, ) -> Result<RefHandle<'a, R>, CastError> { let raw: &'a R = self.mem.try_cast(offset)?; Ok(self.wrap(raw)) } pub(crate) fn try_map_cast_slice<R: MinimallyAligned>( &self, offset: u32, count: u32, ) -> 
Result<RefHandle<'a, [R]>, CastError> { let raw: &'a [R] = self.mem.try_cast_slice(offset, count)?; Ok(self.wrap(raw)) } pub(crate) fn try_map_cast_array<R: MinimallyAligned>( &self, array: ArrayHeader, ) -> Result<RefHandle<'a, [R]>, CastError> { let raw: &'a [R] = self.mem.try_cast_slice(array.base_offset, array.count)?; Ok(self.wrap(raw)) } pub fn map<X>(self, mapper: impl Fn(Buffer<'a>, T) -> X) -> Handle<'a, X> { let raw = mapper(self.mem, self.raw); Handle { mem: self.mem, raw } } pub fn map_val<X>(self, mapper: impl Fn(T) -> X) -> Handle<'a, X> { let raw = mapper(self.raw); Handle { mem: self.mem, raw } } pub fn try_map<X, E>( self, mapper: impl Fn(Buffer<'a>, T) -> Result<X, E>, ) -> Result<Handle<'a, X>, E> { let raw = mapper(self.mem, self.raw)?; Ok(Handle { mem: self.mem, raw }) } } impl<'a, T> Iterator for Handle<'a, T> where T: Iterator, { type Item = Handle<'a, T::Item>; fn next(&mut self) -> Option<Self::Item> { self.raw.next().map(|raw| Handle { mem: self.mem, raw }) } } impl<'a, T> RefHandle<'a, [T]> { pub fn get(self, index: usize) -> Option<RefHandle<'a, T>> { self.raw.get(index).map(|raw| self.wrap(raw)) } } pub type RefHandle<'a, T> = Handle<'a, &'a T>; impl<'a, T: Repr> RefHandle<'a, T> { pub fn map_extract(self) -> Handle<'a, T::Value> { self.wrap(self.raw.extract()) } } pub type SliceHandle<'a, T> = RefHandle<'a, [T]>; pub type SliceIterHandle<'a, T> = Handle<'a, std::slice::Iter<'a, T>>;
use std::{ops::Deref, sync::Arc}; use assembly_core::buffer::{CastError, MinimallyAligned, Repr}; use self::buffer::Buffer; use super::file::ArrayHeader; pub mod buffer; pub mod handle; pub mod slice; pub type ArcHandle<B, T> = BaseHandle<Arc<B>, T>; impl<B: AsRef<[u8]>> ArcHandle<B, ()> { pub fn new_arc(inner: B) -> Self { Self::new(Arc::new(inner)) } } impl<B: AsRef<[u8]>, T: Copy> ArcHandle<B, T> { pub f
} #[derive(Clone, Debug)] pub struct BaseHandle<P: Deref, T> where <P as Deref>::Target: AsRef<[u8]>, { pub(super) mem: P, pub(super) raw: T, } impl<P, T> Copy for BaseHandle<P, T> where P: Deref + Copy, T: Copy, <P as Deref>::Target: AsRef<[u8]>, { } impl<P: Deref> BaseHandle<P, ()> where <P as Deref>::Target: AsRef<[u8]>, { pub fn new(mem: P) -> Self { Self { mem, raw: () } } } impl<T, P: Deref> BaseHandle<P, Option<T>> where <P as Deref>::Target: AsRef<[u8]>, { pub fn transpose(self) -> Option<BaseHandle<P, T>> { if let Some(raw) = self.raw { Some(BaseHandle { mem: self.mem, raw }) } else { None } } } impl<P: Deref, T> BaseHandle<P, T> where <P as Deref>::Target: AsRef<[u8]>, { pub fn raw(&self) -> &T { &self.raw } pub fn raw_mut(&mut self) -> &mut T { &mut self.raw } pub fn as_bytes(&self) -> &[u8] { self.mem.deref().as_ref() } pub fn replace<O>(self, raw: O) -> BaseHandle<P, O> { BaseHandle { mem: self.mem, raw } } } pub type Handle<'a, T> = BaseHandle<Buffer<'a>, T>; impl<'a, T> Handle<'a, T> { pub fn buf(self) -> Buffer<'a> { self.mem } pub fn into_raw(self) -> T { self.raw } pub(crate) fn wrap<R>(&self, raw: R) -> Handle<'a, R> { Handle { mem: self.mem, raw } } pub(crate) fn try_map_cast<R: MinimallyAligned>( &self, offset: u32, ) -> Result<RefHandle<'a, R>, CastError> { let raw: &'a R = self.mem.try_cast(offset)?; Ok(self.wrap(raw)) } pub(crate) fn try_map_cast_slice<R: MinimallyAligned>( &self, offset: u32, count: u32, ) -> Result<RefHandle<'a, [R]>, CastError> { let raw: &'a [R] = self.mem.try_cast_slice(offset, count)?; Ok(self.wrap(raw)) } pub(crate) fn try_map_cast_array<R: MinimallyAligned>( &self, array: ArrayHeader, ) -> Result<RefHandle<'a, [R]>, CastError> { let raw: &'a [R] = self.mem.try_cast_slice(array.base_offset, array.count)?; Ok(self.wrap(raw)) } pub fn map<X>(self, mapper: impl Fn(Buffer<'a>, T) -> X) -> Handle<'a, X> { let raw = mapper(self.mem, self.raw); Handle { mem: self.mem, raw } } pub fn map_val<X>(self, mapper: impl Fn(T) -> X) 
-> Handle<'a, X> { let raw = mapper(self.raw); Handle { mem: self.mem, raw } } pub fn try_map<X, E>( self, mapper: impl Fn(Buffer<'a>, T) -> Result<X, E>, ) -> Result<Handle<'a, X>, E> { let raw = mapper(self.mem, self.raw)?; Ok(Handle { mem: self.mem, raw }) } } impl<'a, T> Iterator for Handle<'a, T> where T: Iterator, { type Item = Handle<'a, T::Item>; fn next(&mut self) -> Option<Self::Item> { self.raw.next().map(|raw| Handle { mem: self.mem, raw }) } } impl<'a, T> RefHandle<'a, [T]> { pub fn get(self, index: usize) -> Option<RefHandle<'a, T>> { self.raw.get(index).map(|raw| self.wrap(raw)) } } pub type RefHandle<'a, T> = Handle<'a, &'a T>; impl<'a, T: Repr> RefHandle<'a, T> { pub fn map_extract(self) -> Handle<'a, T::Value> { self.wrap(self.raw.extract()) } } pub type SliceHandle<'a, T> = RefHandle<'a, [T]>; pub type SliceIterHandle<'a, T> = Handle<'a, std::slice::Iter<'a, T>>;
n as_bytes_handle(&self) -> Handle<T> { BaseHandle { mem: Buffer::new(self.mem.as_ref().as_ref()), raw: self.raw, } }
function_block-function_prefixed
[ { "content": "/// Expect an opening tag `<{key}>`\n\npub fn expect_elem<B: BufRead>(\n\n xml: &mut Reader<B>,\n\n buf: &mut Vec<u8>,\n\n key: &'static str,\n\n) -> Result<()> {\n\n if let Event::Start(start) = xml.read_event(buf)? {\n\n if start.name() == key.as_bytes() {\n\n buf.c...
Rust
src/main.rs
cspital/lsplit
dcab20d5aef4ff8ec4a35e57e28adbc18d3240b7
extern crate clap; use clap::{App, Arg, ArgMatches}; use std::env; use std::error; use std::error::Error; use std::fmt; use std::fs; use std::fs::File; use std::io; use std::io::{BufRead, BufReader, BufWriter, Write}; use std::path::PathBuf; use std::str::FromStr; use std::sync::mpsc::{channel, Receiver, RecvError, SendError, Sender}; use std::thread; fn main() { let matches = App::new("By Line File Splitter") .version("0.1.0") .author("Cliff Spital <cspital@uw.edu>") .about("Splits a file on line ending, to chunks of specified size.") .arg( Arg::with_name("bytes") .value_name("bytes") .short("b") .long("bytes") .help("Specify the maximum size of a chunk in bytes, [k|m] may be appended to the end of this number to indicate [k]ilobytes or [m]egabytes.") .required(true) ).arg( Arg::with_name("file") .help("Specifies the file to split.") .required(true) .index(1), ).arg( Arg::with_name("dir") .help("Optionally specify the directory into which the files will be added.") .required(false) .index(2), ).get_matches(); let config = match Config::new(&matches) { Ok(c) => c, Err(e) => { println!("{}", e); return; } }; let splitter = Splitter::new(config); match splitter.split() { Ok(()) => return, Err(e) => println!("{}", e.description()), } } #[derive(Debug)] struct Config { size: u32, pwd: PathBuf, target: PathBuf, dir: Option<PathBuf>, } impl Config { fn new(matches: &ArgMatches) -> ConfigResult<Config> { let presize = matches.value_of("bytes").unwrap(); let size = Config::parse_size(presize)?; let pwd = env::current_dir()?; let target = PathBuf::from(matches.value_of("file").unwrap()); if !target.is_file() { return Err(ConfigError::StateError("target must be a file".to_owned())); } Ok(Config { size, pwd, target, dir: match matches.value_of("dir") { Some(s) => Some(PathBuf::from(s)), None => None, }, }) } #[inline] fn parse_size(arg: &str) -> ConfigResult<u32> { match arg.parse::<ByteSize>() { Ok(b) => { let ByteSize(s) = b; Ok(s) } Err(e) => Err(e), } } } type 
ConfigResult<T> = std::result::Result<T, ConfigError>; #[derive(Debug)] enum ConfigError { ByteSizeError(String), DirError(io::Error), StateError(String), } impl fmt::Display for ConfigError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ConfigError::ByteSizeError(msg) => write!(f, "{}", msg), ConfigError::DirError(err) => err.fmt(f), ConfigError::StateError(msg) => write!(f, "{}", msg), } } } impl error::Error for ConfigError { fn description(&self) -> &str { match self { ConfigError::ByteSizeError(msg) => msg, ConfigError::DirError(err) => err.description(), ConfigError::StateError(msg) => msg, } } fn cause(&self) -> Option<&error::Error> { match self { ConfigError::ByteSizeError(_) => None, ConfigError::DirError(err) => Some(err), ConfigError::StateError(_) => None, } } } impl From<io::Error> for ConfigError { fn from(err: io::Error) -> Self { ConfigError::DirError(err) } } #[derive(Debug)] struct ByteSize(u32); impl FromStr for ByteSize { type Err = ConfigError; fn from_str(arg: &str) -> Result<Self, Self::Err> { match arg.parse::<u32>() { Ok(s) => Ok(ByteSize(s)), _ => { let pivot = arg.len() - 1; let prefix = &arg[..pivot]; match prefix.parse::<u32>() { Ok(s) => { let last = &arg[pivot..]; match last { "k" => Ok(ByteSize(s * 1_000)), "m" => Ok(ByteSize(s * 1_000_000)), _ => Err(ConfigError::ByteSizeError(format!( "{} is not a support size suffix", last ))), } } _ => Err(ConfigError::ByteSizeError(format!( "{} is not numeric, only k or m is a supported size suffix", prefix ))), } } } } } type SplitterResult = Result<(), SplitterError>; type SplitterHandle = thread::JoinHandle<SplitterResult>; #[derive(Debug)] enum SplitterError { IOError(io::Error), SendError(SendError<Line>), RecvError(RecvError), Temp(String), } impl fmt::Display for SplitterError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { SplitterError::IOError(e) => e.fmt(f), SplitterError::Temp(s) => write!(f, "{}", s), SplitterError::SendError(e) => 
e.fmt(f), SplitterError::RecvError(e) => e.fmt(f), } } } impl error::Error for SplitterError { fn description(&self) -> &str { match self { SplitterError::IOError(e) => e.description(), SplitterError::Temp(s) => s, SplitterError::SendError(e) => e.description(), SplitterError::RecvError(e) => e.description(), } } fn cause(&self) -> Option<&error::Error> { match self { SplitterError::IOError(e) => Some(e), SplitterError::Temp(_) => None, SplitterError::SendError(e) => Some(e), SplitterError::RecvError(e) => Some(e), } } } impl From<io::Error> for SplitterError { fn from(err: io::Error) -> Self { SplitterError::IOError(err) } } impl From<SendError<Line>> for SplitterError { fn from(err: SendError<Line>) -> Self { SplitterError::SendError(err) } } impl From<RecvError> for SplitterError { fn from(err: RecvError) -> Self { SplitterError::RecvError(err) } } struct Line { content: String, size: u32, } impl Line { fn new(content: String, size: usize) -> Self { Line { content: content, size: size as u32, } } } impl<'a> From<&'a Line> for &'a [u8] { fn from(line: &'a Line) -> &'a [u8] { line.content.as_bytes() } } impl AsRef<Line> for Line { fn as_ref(&self) -> &Line { &self } } struct Splitter { chunk_size: u32, read: PathBuf, write_dir: PathBuf, } impl Splitter { fn new(cfg: Config) -> Self { Splitter { chunk_size: cfg.size, read: cfg.target, write_dir: match cfg.dir { Some(d) => d, None => cfg.pwd, }, } } fn split(&self) -> Result<(), SplitterError> { let (sender, receiver) = channel::<Line>(); let target = fs::File::open(&self.read)?; let split_reader = SplitReader::new(target); let split_writer = SplitWriter::new(self); let _read_result: SplitterHandle = thread::spawn(move || Ok(split_reader.stream(sender)?)); Ok(split_writer.stream(receiver)?) 
} } struct SplitWriter<'s> { splitter: &'s Splitter, } impl<'s> SplitWriter<'s> { fn new(splitter: &'s Splitter) -> Self { SplitWriter { splitter } } fn stream(&self, receiver: Receiver<Line>) -> SplitterResult { if let Ok(mut line) = receiver.recv() { let mut progress = 0; let mut file_num = 1; fs::create_dir_all(&self.splitter.write_dir)?; let mut writer = new_writer(file_num, self.splitter)?; while line.size > 0 { progress += line.size; if progress > self.splitter.chunk_size { if line.size > self.splitter.chunk_size { return Err(SplitterError::Temp( "line size exceeds maximum allowed chunk size".to_owned(), )); } file_num += 1; progress = line.size; writer.flush()?; writer = new_writer(file_num, self.splitter)?; } writer.write_all(line.as_ref().into())?; line = receiver.recv()?; } } Ok(()) } } fn new_writer(file_num: i32, splitter: &Splitter) -> Result<BufWriter<File>, SplitterError> { if let Some(new_path) = derive_new_path(file_num, splitter) { let new_file = File::create(new_path)?; return Ok(BufWriter::new(new_file)); } Err(SplitterError::Temp("Invalid filename.".to_string())) } fn derive_new_path(file_num: i32, splitter: &Splitter) -> Option<PathBuf> { match splitter.read.file_name() { None => None, Some(oss) => match oss.to_str() { None => None, Some(s) => { let dir = PathBuf::from(&splitter.write_dir); Some(dir.join(format!("{}_{}", file_num, s))) } }, } } #[derive(Debug)] struct SplitReader { read: File, } impl SplitReader { fn new(read: File) -> Self { SplitReader { read } } fn stream(&self, send: Sender<Line>) -> SplitterResult { let mut reader = BufReader::new(&self.read); let mut first = String::new(); if let Ok(mut count) = reader.read_line(&mut first) { send.send(Line::new(first, count))?; while count > 0 { let mut subs = String::new(); count = reader.read_line(&mut subs)?; send.send(Line::new(subs, count))?; } } Ok(send.send(Line::new(String::new(), 0))?) 
} } #[cfg(test)] mod tests { use super::*; #[test] fn bytesize_fromstr_numeric_ok() { let input = "2000"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2000); } #[test] fn bytesize_fromstr_kilo_ok() { let input = "2k"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2000); } #[test] fn bytesize_fromstr_mega_ok() { let input = "2m"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2_000_000); } #[test] fn bytesize_fromstr_invalid() { let input = "2km"; let size = input.parse::<ByteSize>(); assert!(size.is_err()); } }
extern crate clap; use clap::{App, Arg, ArgMatches}; use std::env; use std::error; use std::error::Error; use std::fmt; use std::fs; use std::fs::File; use std::io; use std::io::{BufRead, BufReader, BufWriter, Write}; use std::path::PathBuf; use std::str::FromStr; use std::sync::mpsc::{channel, Receiver, RecvError, SendError, Sender}; use std::thread; fn main() { let matches = App::new("By Line File Splitter") .version("0.1.0") .author("Cliff Spital <cspital@uw.edu>") .about("Splits a file on line ending, to chunks of specified size.") .arg( Arg::with_name("bytes") .value_name("bytes") .short("b") .long("bytes") .help("Specify the maximum size of a chunk in bytes, [k|m] may be appended to the end of this number to indicate [k]ilobytes or [m]egabytes.") .required(true) ).arg( Arg::with_name("file") .help("Specifies the file to split.") .required(true) .index(1), ).arg( Arg::with_name("dir") .help("Optionally specify the directory into which the files will be added.") .required(false) .index(2), ).get_matches(); let config = match Config::new(&matches) { Ok(c) => c, Err(e) => { println!("{}", e); return; } }; let splitter = Splitter::new(config); match splitter.split() { Ok(()) => return, Err(e) => println!("{}", e.description()), } } #[derive(Debug)] struct Config { size: u32, pwd: PathBuf, target: PathBuf, dir: Option<PathBuf>, } impl Config { fn new(matches: &ArgMatches) -> ConfigResult<Config> { let presize = matches.value_of("bytes").unwrap(); let size = Config::parse_size(presize)?; let pwd = env::current_dir()?; let target = PathBuf::from(matches.value_of("file").unwrap()); if !target.is_file() { return Err(ConfigError::StateError("target must be a file".to_owned())); } Ok(Config { size, pwd, target, dir: match matches.value_of("dir") { Some(s) => Some(PathBuf::from(s)), None => None, }, }) } #[inline] fn parse_size(arg: &str) -> ConfigResult<u32> { match arg.parse::<ByteSize>() { Ok(b) => { let ByteSize(s) = b; Ok(s) } Err(e) => Err(e), } } } type 
ConfigResult<T> = std::result::Result<T, ConfigError>; #[derive(Debug)] enum ConfigError { ByteSizeError(String), DirError(io::Error), StateError(String), } impl fmt::Display for ConfigError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ConfigError::ByteSizeError(msg) => write!(f, "{}", msg), ConfigError::DirError(err) => err.fmt(f), ConfigError::StateError(msg) => write!(f, "{}", msg), } } } impl error::Error for ConfigError { fn description(&self) -> &str { match self { ConfigError::ByteSizeError(msg) => msg, ConfigError::DirError(err) => err.description(), ConfigError::StateError(msg) => msg, } } fn cause(&self) -> Option<&error::Error> { match self { ConfigError::ByteSizeError(_) => None, ConfigError::DirError(err) => Some(err), ConfigError::StateError(_) => None, } } } impl From<io::Error> for ConfigError { fn from(err: io::Error) -> Self { ConfigError::DirError(err) } } #[derive(Debug)] struct ByteSize(u32); impl FromStr for ByteSize { type Err = ConfigError; fn from_str(arg: &str) -> Result<Self, Self::Err> { match arg.parse::<u32>() { Ok(s) => Ok(ByteSize(s)), _ => { let pivot = arg.len() - 1; let prefix = &arg[..pivot]; match prefix.parse::<u32>() { Ok(s) => { let last = &arg[pivot..]; match last { "k" => Ok(ByteSize(s * 1_000)), "m" => Ok(ByteSize(s * 1_000_000)), _ => Err(ConfigError::ByteSizeError(format!( "{} is not a support size suffix", last ))), } } _ => Err(ConfigError::ByteSizeError(format!( "{} is not numeric, only k or m is a supported size suffix", prefix ))), } } } } } type SplitterResult = Result<(), SplitterError>; type SplitterHandle = thread::JoinHandle<SplitterResult>; #[derive(Debug)] enum SplitterError { IOError(io::Error), SendError(SendError<Line>), RecvError(RecvError), Temp(String), } impl fmt::Display for SplitterError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { SplitterError::IOError(e) => e.fmt(f), SplitterError::Temp(s) => write!(f, "{}", s), SplitterError::SendError(e) => 
e.fmt(f), SplitterError::RecvError(e) => e.fmt(f), } } } impl error::Error for SplitterError { fn description(&self) -> &str { match self { SplitterError::IOError(e) => e.description(), SplitterError::Temp(s) => s, SplitterError::SendError(e) => e.description(), SplitterError::RecvError(e) => e.description(), } } fn cause(&self) -> Option<&error::Error> { match self { SplitterError::IOError(e) => Some(e), SplitterError::Temp(_) => None, SplitterError::SendError(e) => Some(e), SplitterError::RecvError(e) => Some(e), } } } impl From<io::Error> for SplitterError { fn from(err: io::Error) -> Self { SplitterError::IOError(err) } } impl From<SendError<Line>> for SplitterError { fn from(err: SendError<Line>) -> Self { SplitterError::SendError(err) } } impl From<RecvError> for SplitterError { fn from(err: RecvError) -> Self { SplitterError::RecvError(err) } } struct Line { content: String, size: u32, } impl Line { fn new(content: String, size: usize) -> Self { Line { content: content, size: size as u32, } } } impl<'a> From<&'a Line> for &'a [u8] { fn from(line: &'a Line) -> &'a [u8] { line.content.as_bytes() } } impl AsRef<Line> for Line { fn as_ref(&self) -> &Line { &self } } struct Splitter { chunk_size: u32, read: PathBuf, write_dir: PathBuf, } impl Splitter { fn new(cfg: Config) -> Self { Splitter { chunk_size: cfg.size, read: cfg.target, write_dir: match cfg.dir { Some(d) => d, None => cfg.pwd, }, } } fn split(&self) -> Result<(), SplitterError> { let (sender, receiver) = channel::<Line>(); let target = fs::File::open(&self.read)?; let split_reader = SplitReader::new(target); let split_writer = SplitWriter::new(self); let _read_result: SplitterHandle = thread::spawn(move || Ok(split_reader.stream(sender)?)); Ok(split_writer.stream(receiver)?) } } struct SplitWriter<'s> { splitter: &'s Splitter, } impl<'s> SplitWriter<'s> { fn new(splitter: &'s Splitter) -> Self { SplitWriter { splitter } }
} fn new_writer(file_num: i32, splitter: &Splitter) -> Result<BufWriter<File>, SplitterError> { if let Some(new_path) = derive_new_path(file_num, splitter) { let new_file = File::create(new_path)?; return Ok(BufWriter::new(new_file)); } Err(SplitterError::Temp("Invalid filename.".to_string())) } fn derive_new_path(file_num: i32, splitter: &Splitter) -> Option<PathBuf> { match splitter.read.file_name() { None => None, Some(oss) => match oss.to_str() { None => None, Some(s) => { let dir = PathBuf::from(&splitter.write_dir); Some(dir.join(format!("{}_{}", file_num, s))) } }, } } #[derive(Debug)] struct SplitReader { read: File, } impl SplitReader { fn new(read: File) -> Self { SplitReader { read } } fn stream(&self, send: Sender<Line>) -> SplitterResult { let mut reader = BufReader::new(&self.read); let mut first = String::new(); if let Ok(mut count) = reader.read_line(&mut first) { send.send(Line::new(first, count))?; while count > 0 { let mut subs = String::new(); count = reader.read_line(&mut subs)?; send.send(Line::new(subs, count))?; } } Ok(send.send(Line::new(String::new(), 0))?) } } #[cfg(test)] mod tests { use super::*; #[test] fn bytesize_fromstr_numeric_ok() { let input = "2000"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2000); } #[test] fn bytesize_fromstr_kilo_ok() { let input = "2k"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2000); } #[test] fn bytesize_fromstr_mega_ok() { let input = "2m"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2_000_000); } #[test] fn bytesize_fromstr_invalid() { let input = "2km"; let size = input.parse::<ByteSize>(); assert!(size.is_err()); } }
fn stream(&self, receiver: Receiver<Line>) -> SplitterResult { if let Ok(mut line) = receiver.recv() { let mut progress = 0; let mut file_num = 1; fs::create_dir_all(&self.splitter.write_dir)?; let mut writer = new_writer(file_num, self.splitter)?; while line.size > 0 { progress += line.size; if progress > self.splitter.chunk_size { if line.size > self.splitter.chunk_size { return Err(SplitterError::Temp( "line size exceeds maximum allowed chunk size".to_owned(), )); } file_num += 1; progress = line.size; writer.flush()?; writer = new_writer(file_num, self.splitter)?; } writer.write_all(line.as_ref().into())?; line = receiver.recv()?; } } Ok(()) }
function_block-full_function
[ { "content": "Split a file into byte sized chunks by line.\n\n\n", "file_path": "README.md", "rank": 24, "score": 11.377523898280417 } ]
Rust
clef/src/math/fraction.rs
dukguru/clef
edd54db5cd36ce41218453cd6c4d13e08da76310
use crate::math; use contracts::requires; use std::cmp::Ordering; use std::fmt; use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; #[derive(Clone, Copy, Debug, Eq, Ord)] pub struct Fraction { numerator: i32, denominator: i32, } impl Fraction { pub const ZERO: Fraction = Fraction { numerator: 0, denominator: 1, }; pub const ONE: Fraction = Fraction { numerator: 1, denominator: 1, }; pub const HALF: Fraction = Fraction { numerator: 1, denominator: 2, }; } impl Fraction { #[requires(denominator != 0, "denominator must not be zero")] pub fn new(numerator: i32, denominator: i32) -> Fraction { Self { numerator, denominator, } } pub fn numerator(&self) -> i32 { self.numerator } pub fn denominator(&self) -> i32 { self.denominator } pub fn signum(&self) -> i32 { self.numerator.signum() * self.denominator.signum() } pub fn to_irreducible(&self) -> Self { let gcd = math::gcd(self.numerator, self.denominator); Self { numerator: (self.numerator / gcd).abs() * self.signum(), denominator: (self.denominator / gcd).abs(), } } pub fn to_float(&self) -> f32 { self.numerator as f32 / self.denominator as f32 } } impl fmt::Display for Fraction { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}/{}", self.numerator, self.denominator) } } impl PartialEq for Fraction { fn eq(&self, other: &Self) -> bool { let l = self.to_irreducible(); let r = other.to_irreducible(); l.numerator == r.numerator && l.denominator == r.denominator } } impl PartialOrd for Fraction { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.to_float().partial_cmp(&other.to_float()) } } impl Add for Fraction { type Output = Self; fn add(self, other: Self) -> Self::Output { if self.denominator == other.denominator { Self::new(self.numerator + other.numerator, self.denominator).to_irreducible() } else { let numerator = self.numerator * other.denominator + other.numerator * self.denominator; let denominator = self.denominator * other.denominator; 
Self::new(numerator, denominator).to_irreducible() } } } impl Add<i32> for Fraction { type Output = Self; fn add(self, other: i32) -> Self::Output { self + Fraction::new(other, 1) } } impl AddAssign for Fraction { fn add_assign(&mut self, other: Self) { *self = *self + other; } } impl AddAssign<i32> for Fraction { fn add_assign(&mut self, other: i32) { *self = *self + other; } } impl Div for Fraction { type Output = Self; fn div(self, other: Self) -> Self::Output { Self::new( self.numerator * other.denominator, self.denominator * other.numerator, ) .to_irreducible() } } impl Div<i32> for Fraction { type Output = Self; fn div(self, other: i32) -> Self::Output { Self::new(self.numerator * other, self.denominator * other).to_irreducible() } } impl DivAssign for Fraction { fn div_assign(&mut self, other: Self) { *self = *self / other; } } impl DivAssign<i32> for Fraction { fn div_assign(&mut self, other: i32) { *self = *self / other; } } impl Mul for Fraction { type Output = Self; fn mul(self, other: Self) -> Self::Output { Self::new( self.numerator * other.numerator, self.denominator * other.denominator, ) .to_irreducible() } } impl Mul<i32> for Fraction { type Output = Self; fn mul(self, other: i32) -> Self::Output { Self::new(self.numerator * other, self.denominator).to_irreducible() } } impl MulAssign for Fraction { fn mul_assign(&mut self, other: Self) { *self = *self * other; } } impl MulAssign<i32> for Fraction { fn mul_assign(&mut self, other: i32) { *self = *self * other; } } impl Neg for Fraction { type Output = Self; fn neg(self) -> Self::Output { Self::new(self.numerator * -1, self.denominator).to_irreducible() } } impl Sub for Fraction { type Output = Self; fn sub(self, other: Self) -> Self::Output { self + -other } } impl Sub<i32> for Fraction { type Output = Self; fn sub(self, other: i32) -> Self::Output { self + -other } } impl SubAssign for Fraction { fn sub_assign(&mut self, other: Self) { *self += -other; } } impl SubAssign<i32> for Fraction { fn 
sub_assign(&mut self, other: i32) { *self += -other; } } #[cfg(test)] mod tests { use super::*; #[test] #[should_panic] fn test_zero_denominator() { let _illegal = Fraction::new(1, 0); } #[test] fn test_signum() { assert_eq!(Fraction::new(1, 2).signum(), 1); assert_eq!(Fraction::new(-1, 2).signum(), -1); assert_eq!(Fraction::new(1, -2).signum(), -1); assert_eq!(Fraction::new(-1, -2).signum(), 1); assert_eq!(Fraction::new(0, 2).signum(), 0); } #[test] fn test_to_irreducible() { assert_eq!(Fraction::new(3, 9).to_irreducible(), Fraction::new(1, 3)); assert_eq!(Fraction::new(27, 9).to_irreducible(), Fraction::new(3, 1)); assert_eq!( Fraction::new(11, 13).to_irreducible(), Fraction::new(11, 13) ); assert_eq!(Fraction::new(-3, 9).to_irreducible(), Fraction::new(-1, 3)); assert_eq!(Fraction::new(3, -9).to_irreducible(), Fraction::new(-1, 3)); assert_eq!(Fraction::new(-3, -9).to_irreducible(), Fraction::new(1, 3)); } #[test] fn test_to_float() { assert_eq!(0.25, Fraction::new(1, 4).to_float()); assert_eq!(-0.25, Fraction::new(-1, 4).to_float()); } #[test] fn test_op_eq() { assert!(Fraction::new(5, -10) == Fraction::new(-1, 2)); assert!(Fraction::new(5, 10) != Fraction::new(1, 3)); } #[test] fn test_op_ord() { assert!(Fraction::new(3, 5) < Fraction::new(4, 5)); assert!(Fraction::new(3, 5) <= Fraction::new(15, 25)); } #[test] fn test_op_add() { assert_eq!( Fraction::new(5, 10) + Fraction::new(5, 20), Fraction::new(3, 4) ); assert_eq!( Fraction::new(-5, 10) + Fraction::new(5, 20), Fraction::new(-1, 4) ); assert_eq!( Fraction::new(5, -10) + Fraction::new(5, 20), Fraction::new(-1, 4) ); assert_eq!( Fraction::new(5, 10) + Fraction::new(0, 20), Fraction::new(1, 2) ); assert_eq!(Fraction::ZERO + Fraction::new(3, 4), Fraction::new(3, 4)); assert_eq!(Fraction::new(3, 4) + Fraction::ZERO, Fraction::new(3, 4)); let mut a = Fraction::new(5, 10); let b = Fraction::new(5, 20); a += b; assert_eq!(a, Fraction::new(3, 4)); } #[test] fn test_op_sub() { assert_eq!( Fraction::new(5, 10) - 
Fraction::new(5, 20), Fraction::new(1, 4) ); assert_eq!( Fraction::new(5, 20) - Fraction::new(5, 10), Fraction::new(-1, 4) ); assert_eq!(Fraction::new(5, 20) - Fraction::new(5, 20), Fraction::ZERO); } #[test] fn test_op_neg() { assert_eq!(-Fraction::new(5, 10), Fraction::new(-1, 2)); assert_eq!(-Fraction::new(-5, 10), Fraction::new(1, 2)); assert_eq!(-Fraction::new(5, -10), Fraction::new(1, 2)); assert_eq!(-Fraction::new(-5, -10), Fraction::new(-1, 2)); } }
use crate::math; use contracts::requires; use std::cmp::Ordering; use std::fmt; use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; #[derive(Clone, Copy, Debug, Eq, Ord)] pub struct Fraction { numerator: i32, denominator: i32, } impl Fraction { pub const ZERO: Fraction = Fraction { numerator: 0, denominator: 1, }; pub const ONE: Fraction = Fraction { numerator: 1, denominator: 1, }; pub const HALF: Fraction = Fraction { numerator: 1, denominator: 2, }; } impl Fraction { #[requires(denominator != 0, "denominator must not be zero")] pub fn new(numerator: i32, denominator: i32) -> Fraction { Self { numerator, denominator, } } pub fn numerator(&self) -> i32 { self.numerator } pub fn denominator(&self) -> i32 { self.denominator } pub fn signum(&self) -> i32 { self.numerator.signum() * self.denominator.signum() } pub fn to_irreducible(&self) -> Self { let gcd = math::gcd(self.numerator, self.denominator); Self { numerator: (self.numerator / gcd).abs() * self.signum(), denominator: (self.denominator / gcd).abs(), } } pub fn to_float(&self) -> f32 { self.numerator as f32 / self.denominator as f32 } } impl fmt::Display for Fraction { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}/{}", self.numerator, self.denominator) } } impl PartialEq for Fraction { fn eq(&self, other: &Self) -> bool { let l = self.to_irreducible(); let r = other.to_irreducible(); l.numerator == r.numerator && l.denominator == r.denominator } } impl PartialOrd for Fraction { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.to_float().partial_cmp(&other.to_float()) } } impl Add for Fraction { type Output = Self; fn add(self, other: Self) -> Self::Output { if self.denominator == other.denominator { Self::new(self.numerator + other.numerator, self.denominator).to_irreducible() } else { let numerator = self.numerator * other.denominator + other.numerator * self.denominator; let denominator = self.denominator * other.denominator; 
Self::new(numerator, denominator).to_irreducible() } } } impl Add<i32> for Fraction { type Output = Self; fn add(self, other: i32) -> Self::Output { self + Fraction::new(other, 1) } } impl AddAssign for Fraction { fn add_assign(&mut self, other: Self) { *self = *self + other; } } impl AddAssign<i32> for Fraction { fn add_assign(&mut self, other: i32) { *self = *self + other; } } impl Div for Fraction { type Output = Self; fn div(self, other: Self) -> Self::Output { Self::new( self.numerator * other.denominator, self.denominator * other.numerator, ) .to_irreducible() } } impl Div<i32> for Fraction { type Output = Self; fn div(self, other: i32) -> Self::Output { Self::new(self.numerator * other, self.denominator * other).to_irreducible() } } impl DivAssign for Fraction { fn div_assign(&mut self, other: Self) { *self = *self / other; } } impl DivAssign<i32> for Fraction { fn div_assign(&mut self, other: i32) { *self = *self / other; } } impl Mul for Fraction { type Output = Self; fn mul(self, other: Self) -> Self::Output { Self::new( self.numerator * other.numerator, self.denominator * other.denominator, ) .to_irreducible() } } impl Mul<i32> for Fraction { type Output = Self; fn mul(self, other: i32) -> Self::Output { Self::new(self.numerator * other, self.denominator).to_irreducible() } } impl MulAssign for Fraction { fn mul_assign(&mut self, other: Self) { *self = *self * other; } } impl MulAssign<i32> for Fraction { fn mul_assign(&mut self, other: i32) { *self = *self * other; } } impl Neg for Fraction { type Output = Self; fn neg(self) -> Self::Output { Self::new(self.numerator * -1, self.denominator).to_irreducible() } } impl Sub for Fraction { type Output = Self; fn sub(self, other: Self) -> Self::Output { self + -other } } impl Sub<i32> for Fraction { type Output = Self; fn sub(self, other: i32) -> Self::Output { self + -other } } impl SubAssign for Fraction { fn sub_assign(&mut self, other: Self) { *self += -other; } } impl SubAssign<i32> for Fraction { fn 
sub_assign(&mut self, other: i32) { *self += -other; } } #[cfg(test)] mod tests { use super::*; #[test] #[should_panic] fn test_zero_denominator() { let _illegal = Fraction::new(1, 0); } #[test] fn test_signum() { assert_eq!(Fraction::new(1, 2).signum(), 1); assert_eq!(Fraction::new(-1, 2).signum(), -1); assert_eq!(Fraction::new(1, -2).signum(), -1); assert_eq!(Fraction::new(-1, -2).signum(), 1); assert_eq!(Fraction::new(0, 2).signum(), 0); } #[test] fn test_to_irreducible() { assert_eq!(Fraction::new(3, 9).to_irreducible(), Fraction::new(1, 3)); assert_eq!(Fraction::new(27, 9).to_irreducible(), Fraction::new(3, 1)); assert_eq!( Fraction::new(11, 13).to_irreducible(), Fraction::new(11, 13) ); assert_eq!(Fraction::new(-3, 9).to_irreducible(), Fraction::new(-1, 3)); assert_eq!(Fraction::new(3, -9).to_irreducible(), Fraction::new(-1, 3)); assert_eq!(Fraction::new(-3, -9).to_irreducible(), Fraction::new(1, 3)); } #[test] fn test_to_float() { assert_eq!(0.25, Fraction::new(1, 4).to_float()); assert_eq!(-0.25, Fraction::new(-1, 4).to_float()); } #[test] fn test_op_eq() { assert!(Fraction::new(5, -10) == Fraction::new(-1, 2)); assert!(Fraction::new(5, 10) != Fraction::new(1, 3)); } #[test] fn test_op_ord() { assert!(Fraction::new(3, 5) < Fraction::new(4, 5)); assert!(Fraction::new(3, 5) <= Fraction::new(15, 25)); } #[test] fn test_op_add() { assert_eq!( Fraction::new(5, 10) + Fraction::new(5, 20), Fraction::new(3, 4) ); assert_eq!( Fraction::new(-5, 10) + Fraction::new(5, 20), Fraction::new(-1, 4) ); assert_eq!( Fraction::new(5, -10) + Fraction::new(5, 20), Fraction::new(-1, 4) ); assert_eq!( Fraction::new(5, 10) + Fraction::new(0, 20), Fraction::new(1, 2) ); assert_eq!(Fraction::ZERO + Fraction::new(3, 4), Fraction::new(3, 4)); assert_eq!(Fraction::new(3, 4) + Fraction::ZERO, Fraction::new(3, 4)); let mut a = Fraction::new(5, 10); let b = Fraction::new(5, 20); a += b; assert_eq!(a, Fraction::new(3, 4)); } #[test] fn test_op_sub() { assert_eq!( Fraction::new(5, 10) - 
Fraction::new(5, 20), Fraction::new(1, 4) ); assert_eq!( Fraction::new(5, 20) - Fraction::new(5, 10), Fraction::new(-1, 4) ); assert_eq!(Fraction::new(5, 20) - Fraction::new(5, 20), Fraction::ZERO); } #[test] fn test_op_neg() { assert_eq!(-Fraction::new(5, 10), Fraction::new(-1, 2)); assert_eq!(-Fraction::new(-5, 10), Fraction::new(1,
}
2)); assert_eq!(-Fraction::new(5, -10), Fraction::new(1, 2)); assert_eq!(-Fraction::new(-5, -10), Fraction::new(-1, 2)); }
function_block-function_prefixed
[ { "content": "pub fn gcd(a: i32, b: i32) -> i32 {\n\n let mut a = a.abs();\n\n let mut b = b.abs();\n\n\n\n while a != 0 && b != 0 {\n\n if a > b {\n\n a %= b;\n\n } else {\n\n b %= a;\n\n }\n\n }\n\n\n\n cmp::max(a, b)\n\n}\n\n\n", "file_path": "cle...
Rust
src/arena.rs
scottjmaddox/rust-memory-arena
66dfdf6a683cd2d0066ab1742100ade8256d3a7b
use core::cell::Cell; use arena_box::ArenaBox; pub struct Arena { size: usize, used: Cell<usize>, mem: *mut u8, } impl Arena { pub fn new(size: usize, alignment: usize) -> Result<Self, ::alloc::AllocError> { if size == 0 { Ok(Self { size: size, used: Cell::new(0), mem: 1 as *mut u8, }) } else { unsafe { let mem = ::alloc::aligned_alloc(size, alignment)?; Ok(Self { size: size, used: Cell::new(0), mem: mem, }) } } } fn aligned_alloc(&self, size: usize, alignment: usize) -> Option<*mut u8> { assert!(alignment.count_ones() == 1); let unaligned_p = self.mem as usize + self.used.get(); let aligned_p = (unaligned_p + alignment - 1) & !(alignment - 1); let offset = aligned_p - unaligned_p; if self.used.get() + size + offset > self.size { return None; } self.used.set(self.used.get() + size + offset); Some(aligned_p as *mut u8) } fn alloc<T>(&self) -> Option<*mut T> { let size = ::core::mem::size_of::<T>(); if size == 0 { return Some(::core::mem::align_of::<T>() as *mut T); } let alignment = ::core::mem::align_of::<T>(); match self.aligned_alloc(size, alignment) { None => None, Some(p) => Some(p as *mut T), } } pub fn new_box<'a, T>(&'a self, x: T) -> Result<ArenaBox<'a, T>, T> { match self.alloc::<T>() { None => Err(x), Some(p) => { unsafe { ::core::ptr::write(p, x); } Ok(unsafe { ArenaBox::from_raw(p) }) } } } } impl Drop for Arena { fn drop(&mut self) { unsafe { ::alloc::free(self.mem); } } } #[cfg(test)] mod tests { #[allow(unused_imports)] use super::*; #[test] fn arena_box() { let alignment = 1024; let size = 1024; let a = Arena::new(size, alignment).unwrap(); let mut num = a.new_box(42).unwrap(); assert_eq!(*num, 42); *num += 1; assert_eq!(*num, 43); } #[test] fn arena_out_of_memory() { let alignment = 512; let size = 1; let a = Arena::new(size, alignment).unwrap(); let i: usize = 42; assert_eq!(a.new_box(i), Err(42)); } #[test] fn arena_aligned_alloc() { let a = Arena::new(1024, 1024).unwrap(); let p1 = a.aligned_alloc(1, 1).unwrap(); let p2 = a.aligned_alloc(1, 
4).unwrap(); let p3 = a.aligned_alloc(1, 8).unwrap(); let p4 = a.aligned_alloc(1, 512).unwrap(); assert!(((p1 as usize) % 1024) == 0); assert!(((p2 as usize) % 4) == 0); assert!(((p3 as usize) % 8) == 0); assert!(((p4 as usize) % 512) == 0); } #[test] #[should_panic] fn arena_invalid_alignment() { let _ = Arena::new(1024, 1025).unwrap(); } #[test] #[should_panic] fn arena_aligned_alloc_invalid_alignment() { let a = Arena::new(1024, 1024).unwrap(); let _ = a.aligned_alloc(1, 3).unwrap(); } }
use core::cell::Cell; use arena_box::ArenaBox; pub struct Arena { size: usize, used: Cell<usize>, mem: *mut u8, } impl Arena { pub fn new(size: usize, alignment: usize) -> Result<Self, ::alloc::AllocError> { if size == 0 { Ok(Self { size: size, used: Cell::new(0), mem: 1 as *mut u8, }) } else { unsafe { let mem = ::alloc::aligned_alloc(size, alignment)?; Ok(Self { size: size, used: Cell::new(0), mem: mem, }) } } } fn aligned_alloc(&self, size: usize, alignment: usize) -> Option<*mut u8> {
fn alloc<T>(&self) -> Option<*mut T> { let size = ::core::mem::size_of::<T>(); if size == 0 { return Some(::core::mem::align_of::<T>() as *mut T); } let alignment = ::core::mem::align_of::<T>(); match self.aligned_alloc(size, alignment) { None => None, Some(p) => Some(p as *mut T), } } pub fn new_box<'a, T>(&'a self, x: T) -> Result<ArenaBox<'a, T>, T> { match self.alloc::<T>() { None => Err(x), Some(p) => { unsafe { ::core::ptr::write(p, x); } Ok(unsafe { ArenaBox::from_raw(p) }) } } } } impl Drop for Arena { fn drop(&mut self) { unsafe { ::alloc::free(self.mem); } } } #[cfg(test)] mod tests { #[allow(unused_imports)] use super::*; #[test] fn arena_box() { let alignment = 1024; let size = 1024; let a = Arena::new(size, alignment).unwrap(); let mut num = a.new_box(42).unwrap(); assert_eq!(*num, 42); *num += 1; assert_eq!(*num, 43); } #[test] fn arena_out_of_memory() { let alignment = 512; let size = 1; let a = Arena::new(size, alignment).unwrap(); let i: usize = 42; assert_eq!(a.new_box(i), Err(42)); } #[test] fn arena_aligned_alloc() { let a = Arena::new(1024, 1024).unwrap(); let p1 = a.aligned_alloc(1, 1).unwrap(); let p2 = a.aligned_alloc(1, 4).unwrap(); let p3 = a.aligned_alloc(1, 8).unwrap(); let p4 = a.aligned_alloc(1, 512).unwrap(); assert!(((p1 as usize) % 1024) == 0); assert!(((p2 as usize) % 4) == 0); assert!(((p3 as usize) % 8) == 0); assert!(((p4 as usize) % 512) == 0); } #[test] #[should_panic] fn arena_invalid_alignment() { let _ = Arena::new(1024, 1025).unwrap(); } #[test] #[should_panic] fn arena_aligned_alloc_invalid_alignment() { let a = Arena::new(1024, 1024).unwrap(); let _ = a.aligned_alloc(1, 3).unwrap(); } }
assert!(alignment.count_ones() == 1); let unaligned_p = self.mem as usize + self.used.get(); let aligned_p = (unaligned_p + alignment - 1) & !(alignment - 1); let offset = aligned_p - unaligned_p; if self.used.get() + size + offset > self.size { return None; } self.used.set(self.used.get() + size + offset); Some(aligned_p as *mut u8) }
function_block-function_prefix_line
[ { "content": "/// Types that can be \"unsized\" to a dynamically-sized type.\n\n///\n\n/// For example, the sized array type `[i8; 2]` implements `Unsize<[i8]>` and\n\n/// `Unsize<fmt::Debug>`.\n\n///\n\n/// All implementations of `Unsize` are provided automatically by the compiler.\n\n///\n\n/// `Unsize` is im...
Rust
src/bin/server.rs
srgsrg/machiavelli
ebbaae04e4867a123c652f5cad6292a176034a06
use std::process; use std::fs::File; use std::thread; use std::env; use rand::{ thread_rng, Rng }; use machiavelli::lib_server::*; const SAVE_EXTENSION: &str = ".sav"; fn get_port() -> usize { println!("Which port should I use?"); loop { match get_input() { Ok(s) => match s.trim().parse::<usize>() { Ok(p)=> return p, Err(_) => println!("Could not parse the input") } Err(_) => println!("Could not parse the input") } } } fn main() { let mut args = env::args(); args.next(); print!("\x1b[2J\x1b[1;1H"); println!("Machiavelli server\n"); let name_file_port_server = "Config/port_server.dat"; let port = match std::fs::read_to_string(name_file_port_server) { Ok(s) => match s.trim().parse::<usize>() { Ok(n) => n, Err(_) => get_port() } Err(_) => get_port() }; let load: bool; let load_from_command_line: bool; match args.next() { Some(s) => { load_from_command_line = true; match s.trim().parse::<u8>() { Ok(1) => { println!("Loading a previous game"); load = true; }, Ok(121) => { println!("Loading a previous game"); load = true; }, _ => load = false }; } None => { load_from_command_line = false; println!("Load a previous game? 
(y/n)"); load = match get_input().unwrap().trim() { "y" => true, _ => false }; } }; let mut config = Config { n_decks: 0, n_jokers: 0, n_cards_to_start: 0, custom_rule_jokers: false, n_players: 0 }; let mut savefile = "machiavelli_save".to_string(); if !load { match get_config_from_file(&"Config/config.dat") { Ok(conf) => { config = conf.0; savefile = conf.1; }, Err(_) => { println!("Could not read the config from the file!"); match get_config_and_savefile() { Ok(conf) => { config = conf.0; savefile = conf.1; }, Err(_) => { println!("Invalid input!"); process::exit(1); } } } }; } let mut starting_player: u8; let mut table = Table::new(); let mut deck: Sequence; let mut hands: Vec<Sequence>; let mut player: usize; let mut player_names = Vec::<String>::new(); let mut rng = thread_rng(); if load { let mut fname = String::new(); let mut bytes = Vec::<u8>::new(); if load_from_command_line { match args.next() { Some(s) => fname = s, None => fname = savefile.clone() + SAVE_EXTENSION }; } loop { if fname.len() == 0 { println!("Name of the save file (nothing for the default file):"); match stdin().read_line(&mut fname) { Ok(_) => (), Err(_) => { println!("Could not read the input"); continue; } }; } fname = fname.trim().to_string(); if fname.len() == 0 { fname = savefile.clone() + SAVE_EXTENSION; } let mut file: File; match File::open(fname.clone()) { Ok(f) => file = f, Err(_) => { println!("Could not open the file!"); fname.clear(); continue; } }; match file.read_to_end(&mut bytes) { Ok(_) => (), Err(_) => { println!("Could not read from the file!"); bytes.clear(); fname.clear(); continue; } }; bytes = encode::xor(&bytes, &fname.as_bytes()); match load_game(&bytes) { Ok(lg) => { config = lg.0; starting_player = lg.1; player = lg.2 as usize; table = lg.3; hands = lg.4; deck = lg.5; player_names = lg.6; }, Err(_) => { println!("Error loading the save file!"); bytes.clear(); fname.clear(); continue; } }; break; } } else { deck = Sequence::multi_deck(config.n_decks, 
config.n_jokers, &mut rng); starting_player = rng.gen_range(0..config.n_players); player = starting_player as usize; hands = vec![Sequence::new(); config.n_players as usize]; for i in 0..config.n_players { for _ in 0..config.n_cards_to_start { hands[i as usize].add_card(deck.draw_card().unwrap()); } } } let mut n_clients: u8 = 0; let mut client_threads = Vec::<thread::JoinHandle<(TcpStream, String, usize)>>::new(); let mut client_streams = Vec::<TcpStream>::new(); { let listener = TcpListener::bind(format!("0.0.0.0:{}", port)).unwrap(); let names_taken = Arc::new(Mutex::new(Vec::<String>::new())); println!("\nserver listening to port {}", port); for stream_res in listener.incoming() { match stream_res { Ok(stream) => { n_clients += 1; println!("New connection: {} (player {})", stream.peer_addr().unwrap(), n_clients); if load { let player_names_ = player_names.clone(); let arc = names_taken.clone(); client_threads.push(thread::spawn(move || { handle_client_load(stream, &player_names_, arc).unwrap() })); } else { client_threads.push(thread::spawn(move || {handle_client(stream).unwrap()})); } }, Err(e) => { println!("Error: {}", e); } } if n_clients == config.n_players { break; } } if load { for _i in 0..config.n_players { client_streams.push(TcpStream::connect(format!("0.0.0.0:{}", port)).unwrap()); } for thread in client_threads { let output = thread.join().unwrap(); client_streams[output.2] = output.0; } } else { for thread in client_threads { let output = thread.join().unwrap(); client_streams.push(output.0); player_names.push(output.1); } ensure_names_are_different(&mut player_names, &mut client_streams).unwrap(); } } let save_name = &(savefile.clone() + SAVE_EXTENSION); let backup_name = &(savefile.clone() + &"_bak" + SAVE_EXTENSION); let mut sort_modes: Vec<u8> = vec![0; config.n_players as usize]; let mut play_again = true; let mut previous_messages: Vec<Option<String>> = vec![None; config.n_players as usize]; while play_again { loop { if deck.number_cards() 
== 0 { send_message_all_players(&mut client_streams, &"\n\x1b[1mNo more cards in the deck—it's a draw!\x1b[0m\n"); break; } let mut bytes = game_to_bytes(starting_player as u8, player as u8, &table, &hands, &deck, &config, &player_names); bytes = encode::xor(&bytes, save_name.as_bytes()); match File::create(save_name) { Ok(mut f) => match f.write_all(&bytes) { Ok(_) => (), Err(_) => { println!("Could not write to the save file!"); } }, Err(_) => { println!("Could not create the save file!"); } }; match std::fs::copy(&save_name, &backup_name) { Ok(_) => (), Err(_) => println!("Could not create the backup file!") }; clear_and_send_message_all_players(&mut client_streams, &format!("\x1b[1m{}'s turn:{}", &player_names[player], &reset_style_string())); let mut string_n_cards = format!("\nNumber of cards ({} remaining in the deck):", deck.number_cards()); for i in 0..(config.n_players as usize) { string_n_cards += &format!("\n {}: {}", &player_names[i], &hands[i].number_cards()); } string_n_cards += "\n"; for i in 0..(config.n_players as usize) { loop { match send_message_to_client(&mut client_streams[i], &format!("{}{}", &string_n_cards, &situation_to_string(&table, &hands[i], &Sequence::new())) ) { Ok(_) => break, Err(_) => { send_message_all_players( &mut client_streams, &format!("{} seems to have disconnected... 
Waiting for them to reconnect.\n", &player_names[i]) ); println!("Lost connection with player {}", i + 1); wait_for_reconnection(&mut client_streams[i], &player_names[i], port).unwrap(); println!("Player {} is back", i + 1); send_message_all_players( &mut client_streams, &format!("{} is back!\n", &player_names[i]) ); } }; } if let Some(s) = &previous_messages[i] { send_message_to_client(&mut client_streams[i], &format!("\n{}", s)).unwrap(); }; } previous_messages[player] = match start_player_turn(&mut table, &mut hands, &mut deck, config.custom_rule_jokers, &player_names, player, config.n_players as usize, &mut client_streams, port, &mut sort_modes[player], &previous_messages) { Ok(o_m) => o_m, Err(err) => { println!("{}", err); process::exit(1); } }; if hands[player].number_cards() == 0 { send_message_all_players(&mut client_streams, &format!("\n\u{0007}\u{0007}\u{0007}\x1b[1m{} wins! Congratulations!\x1b[0m{}\n\n", player_names[player], &reset_style_string()) ); break; } player += 1; if player >= config.n_players as usize { player = 0; } } send_message_all_players(&mut client_streams, &"Play again? 
(‘y’ for yes)\n".to_string()); for stream in &mut client_streams { let reply = match get_string_from_client(stream) { Ok(s) => s, Err(_) => "y".to_string() }; if !is_yes(reply.trim()) { play_again = false; match stream.write(&mut [5]) { Ok(_) => {}, Err(_) => println!("Could not send the exit signal") }; } } if play_again { deck = Sequence::multi_deck(config.n_decks, config.n_jokers, &mut rng); hands = vec![Sequence::new(); config.n_players as usize]; table = Table::new(); for i in 0..config.n_players { for _ in 0..config.n_cards_to_start { hands[i as usize].add_card(deck.draw_card().unwrap()); } } starting_player += 1; if starting_player >= config.n_players { starting_player = 0; } player = starting_player as usize; } } for i in 0..config.n_players as usize { match client_streams[i].write(&mut [5]) { Ok(_) => {}, Err(_) => println!("Could not send the exit signal to client {}", i) }; } }
use std::process; use std::fs::File; use std::thread; use std::env; use rand::{ thread_rng, Rng }; use machiavelli::lib_server::*; const SAVE_EXTENSION: &str = ".sav"; fn get_port() -> usize { println!("Which port should I use?"); loop { match get_input() { Ok(s) => match s.trim().parse::<usize>() { Ok(p)=> return p, Err(_) => println!("Could not parse the input") } Err(_) => println!("Could not parse the input") } } } fn main() { let mut args = env::args(); args.next(); print!("\x1b[2J\x1b[1;1H"); println!("Machiavelli server\n"); let name_file_port_server = "Config/port_server.dat"; let port = match std::fs::read_to_string(name_file_port_server) { Ok(s) => match s.trim().parse::<usize>() { Ok(n) => n, Err(_) => get_port() } Err(_) => get_port() }; let load: bool; let load_from_command_line: bool; match args.next() { Some(s) => { load_from_command_line = true; match s.trim().parse::<u8>() { Ok(1) => { println!("Loading a previous game"); load = true; }, Ok(121) => { println!("Loading a previous game"); load = true; }, _ => load = false }; } None => { load_from_command_line = false; println!("Load a previous game? (y/n)"); load = match get_input().unwrap().trim() { "y" => true, _ => false }; } };
let mut savefile = "machiavelli_save".to_string(); if !load { match get_config_from_file(&"Config/config.dat") { Ok(conf) => { config = conf.0; savefile = conf.1; }, Err(_) => { println!("Could not read the config from the file!"); match get_config_and_savefile() { Ok(conf) => { config = conf.0; savefile = conf.1; }, Err(_) => { println!("Invalid input!"); process::exit(1); } } } }; } let mut starting_player: u8; let mut table = Table::new(); let mut deck: Sequence; let mut hands: Vec<Sequence>; let mut player: usize; let mut player_names = Vec::<String>::new(); let mut rng = thread_rng(); if load { let mut fname = String::new(); let mut bytes = Vec::<u8>::new(); if load_from_command_line { match args.next() { Some(s) => fname = s, None => fname = savefile.clone() + SAVE_EXTENSION }; } loop { if fname.len() == 0 { println!("Name of the save file (nothing for the default file):"); match stdin().read_line(&mut fname) { Ok(_) => (), Err(_) => { println!("Could not read the input"); continue; } }; } fname = fname.trim().to_string(); if fname.len() == 0 { fname = savefile.clone() + SAVE_EXTENSION; } let mut file: File; match File::open(fname.clone()) { Ok(f) => file = f, Err(_) => { println!("Could not open the file!"); fname.clear(); continue; } }; match file.read_to_end(&mut bytes) { Ok(_) => (), Err(_) => { println!("Could not read from the file!"); bytes.clear(); fname.clear(); continue; } }; bytes = encode::xor(&bytes, &fname.as_bytes()); match load_game(&bytes) { Ok(lg) => { config = lg.0; starting_player = lg.1; player = lg.2 as usize; table = lg.3; hands = lg.4; deck = lg.5; player_names = lg.6; }, Err(_) => { println!("Error loading the save file!"); bytes.clear(); fname.clear(); continue; } }; break; } } else { deck = Sequence::multi_deck(config.n_decks, config.n_jokers, &mut rng); starting_player = rng.gen_range(0..config.n_players); player = starting_player as usize; hands = vec![Sequence::new(); config.n_players as usize]; for i in 0..config.n_players { for 
_ in 0..config.n_cards_to_start { hands[i as usize].add_card(deck.draw_card().unwrap()); } } } let mut n_clients: u8 = 0; let mut client_threads = Vec::<thread::JoinHandle<(TcpStream, String, usize)>>::new(); let mut client_streams = Vec::<TcpStream>::new(); { let listener = TcpListener::bind(format!("0.0.0.0:{}", port)).unwrap(); let names_taken = Arc::new(Mutex::new(Vec::<String>::new())); println!("\nserver listening to port {}", port); for stream_res in listener.incoming() { match stream_res { Ok(stream) => { n_clients += 1; println!("New connection: {} (player {})", stream.peer_addr().unwrap(), n_clients); if load { let player_names_ = player_names.clone(); let arc = names_taken.clone(); client_threads.push(thread::spawn(move || { handle_client_load(stream, &player_names_, arc).unwrap() })); } else { client_threads.push(thread::spawn(move || {handle_client(stream).unwrap()})); } }, Err(e) => { println!("Error: {}", e); } } if n_clients == config.n_players { break; } } if load { for _i in 0..config.n_players { client_streams.push(TcpStream::connect(format!("0.0.0.0:{}", port)).unwrap()); } for thread in client_threads { let output = thread.join().unwrap(); client_streams[output.2] = output.0; } } else { for thread in client_threads { let output = thread.join().unwrap(); client_streams.push(output.0); player_names.push(output.1); } ensure_names_are_different(&mut player_names, &mut client_streams).unwrap(); } } let save_name = &(savefile.clone() + SAVE_EXTENSION); let backup_name = &(savefile.clone() + &"_bak" + SAVE_EXTENSION); let mut sort_modes: Vec<u8> = vec![0; config.n_players as usize]; let mut play_again = true; let mut previous_messages: Vec<Option<String>> = vec![None; config.n_players as usize]; while play_again { loop { if deck.number_cards() == 0 { send_message_all_players(&mut client_streams, &"\n\x1b[1mNo more cards in the deck—it's a draw!\x1b[0m\n"); break; } let mut bytes = game_to_bytes(starting_player as u8, player as u8, &table, &hands, 
&deck, &config, &player_names); bytes = encode::xor(&bytes, save_name.as_bytes()); match File::create(save_name) { Ok(mut f) => match f.write_all(&bytes) { Ok(_) => (), Err(_) => { println!("Could not write to the save file!"); } }, Err(_) => { println!("Could not create the save file!"); } }; match std::fs::copy(&save_name, &backup_name) { Ok(_) => (), Err(_) => println!("Could not create the backup file!") }; clear_and_send_message_all_players(&mut client_streams, &format!("\x1b[1m{}'s turn:{}", &player_names[player], &reset_style_string())); let mut string_n_cards = format!("\nNumber of cards ({} remaining in the deck):", deck.number_cards()); for i in 0..(config.n_players as usize) { string_n_cards += &format!("\n {}: {}", &player_names[i], &hands[i].number_cards()); } string_n_cards += "\n"; for i in 0..(config.n_players as usize) { loop { match send_message_to_client(&mut client_streams[i], &format!("{}{}", &string_n_cards, &situation_to_string(&table, &hands[i], &Sequence::new())) ) { Ok(_) => break, Err(_) => { send_message_all_players( &mut client_streams, &format!("{} seems to have disconnected... 
Waiting for them to reconnect.\n", &player_names[i]) ); println!("Lost connection with player {}", i + 1); wait_for_reconnection(&mut client_streams[i], &player_names[i], port).unwrap(); println!("Player {} is back", i + 1); send_message_all_players( &mut client_streams, &format!("{} is back!\n", &player_names[i]) ); } }; } if let Some(s) = &previous_messages[i] { send_message_to_client(&mut client_streams[i], &format!("\n{}", s)).unwrap(); }; } previous_messages[player] = match start_player_turn(&mut table, &mut hands, &mut deck, config.custom_rule_jokers, &player_names, player, config.n_players as usize, &mut client_streams, port, &mut sort_modes[player], &previous_messages) { Ok(o_m) => o_m, Err(err) => { println!("{}", err); process::exit(1); } }; if hands[player].number_cards() == 0 { send_message_all_players(&mut client_streams, &format!("\n\u{0007}\u{0007}\u{0007}\x1b[1m{} wins! Congratulations!\x1b[0m{}\n\n", player_names[player], &reset_style_string()) ); break; } player += 1; if player >= config.n_players as usize { player = 0; } } send_message_all_players(&mut client_streams, &"Play again? 
(‘y’ for yes)\n".to_string()); for stream in &mut client_streams { let reply = match get_string_from_client(stream) { Ok(s) => s, Err(_) => "y".to_string() }; if !is_yes(reply.trim()) { play_again = false; match stream.write(&mut [5]) { Ok(_) => {}, Err(_) => println!("Could not send the exit signal") }; } } if play_again { deck = Sequence::multi_deck(config.n_decks, config.n_jokers, &mut rng); hands = vec![Sequence::new(); config.n_players as usize]; table = Table::new(); for i in 0..config.n_players { for _ in 0..config.n_cards_to_start { hands[i as usize].add_card(deck.draw_card().unwrap()); } } starting_player += 1; if starting_player >= config.n_players { starting_player = 0; } player = starting_player as usize; } } for i in 0..config.n_players as usize { match client_streams[i].write(&mut [5]) { Ok(_) => {}, Err(_) => println!("Could not send the exit signal to client {}", i) }; } }
let mut config = Config { n_decks: 0, n_jokers: 0, n_cards_to_start: 0, custom_rule_jokers: false, n_players: 0 };
assignment_statement
[ { "content": "/// wait for a player to reconnect\n\npub fn wait_for_reconnection(stream: &mut TcpStream, name: &str, port: usize) \n\n -> Result<(), StreamError>\n\n{\n\n\n\n // wait for a connection\n\n\n\n // set-up the tcp listener\n\n let listener = TcpListener::bind(format!(\"0.0.0.0:{}\", port...
Rust
src/lib.rs
tstellanova/ist8310
851f9767759073520eb538fe6987e51c12b53c1b
/* Copyright (c) 2020 Todd Stellanova LICENSE: BSD3 (see LICENSE file) */ #![no_std] use embedded_hal as hal; use hal::blocking::delay::DelayMs; #[derive(Debug)] pub enum Error<CommE> { Comm(CommE), OutOfRange, Configuration, UnknownChipId, } pub const ADDR_0_0_7BIT:u8 = 0x0C; pub const ADDR_0_1_7BIT:u8 = 0x0D; pub const ADDR_1_0_7BIT:u8 = 0x0E; pub const ADDR_1_1_7BIT:u8 = 0x0F; pub const ADDR_7BIT_DEFAULT:u8 = 0x0E; pub const ADDR_0_0_8BIT:u8 = 0x18; pub const ADDR_0_1_8BIT:u8 = 0x1A; pub const ADDR_1_0_8BIT:u8 = 0x1C; pub const ADDR_1_1_8BIT:u8 = 0x1E; pub const ADDR_8BIT_DEFAULT:u8 = 0x1C; pub const DEFAULT_ADDRESS:u8 = ADDR_7BIT_DEFAULT; pub const REG_WAI:u8 = 0x00; const REG_DATA_X:u8 = 0x03; const REG_MAG_DATA_START:u8 = REG_DATA_X; const REG_CTRL1: u8 = 0x0A; pub const REG_CTRL2: u8 = 0x0B; pub const REG_AVG_CTRL:u8 = 0x41; pub const REG_SENS_MODE_SELECT:u8 = 0x42; const AVG_CTRL_16X: u8 = 0x24; const SRPD_MODE_LOW_POWER: u8 = 0xC0; const BLOCK_BUF_LEN: usize = 32; pub struct IST8310<I2C> { i2c_port: I2C, address: u8, block_buf: [u8; BLOCK_BUF_LEN], avg_ctrl_reg_set: u8, srpd_ctrl_reg_set: u8, } impl<I2C, CommE> IST8310<I2C> where I2C: hal::blocking::i2c::Write<Error = CommE> + hal::blocking::i2c::Read<Error = CommE> + hal::blocking::i2c::WriteRead<Error = CommE>, { pub fn default(i2c: I2C) -> Result<Self, Error<CommE>> { Self::new(i2c, DEFAULT_ADDRESS) } pub fn new(i2c_port: I2C, address: u8) -> Result<Self, Error<CommE>> { let mut inst = Self { i2c_port, address, block_buf: [0; BLOCK_BUF_LEN], avg_ctrl_reg_set: 0, srpd_ctrl_reg_set: 0, }; inst.reset()?; Ok(inst) } fn reset(&mut self) -> Result<(), Error<CommE>> { const SRST_POR_FLAG: u8 = 0x01 << 0; const EXPECTED_PROD_ID:u8 = 0x10; self.write_reg(REG_CTRL2, SRST_POR_FLAG)?; self.avg_ctrl_reg_set = AVG_CTRL_16X; self.write_reg(REG_AVG_CTRL, self.avg_ctrl_reg_set)?; self.srpd_ctrl_reg_set = SRPD_MODE_LOW_POWER; self.write_reg(REG_SENS_MODE_SELECT, self.srpd_ctrl_reg_set)?; let product_id = 
self.read_reg(REG_WAI)?; if product_id != EXPECTED_PROD_ID { return Err(Error::UnknownChipId) } Ok(()) } fn read_block(&mut self, reg: u8, recv_count: usize) -> Result<(), Error<CommE>> { let cmd_buf = [reg]; self.i2c_port .write_read(self.address, &cmd_buf, &mut self.block_buf[..recv_count]) .map_err(Error::Comm)?; Ok(()) } fn read_reg(&mut self, reg: u8 ) -> Result<u8, Error<CommE>> { self.read_block(reg,1)?; Ok(self.block_buf[0]) } fn write_reg(&mut self, reg: u8, val: u8) -> Result<(), Error<CommE>> { self.block_buf[0] = reg; self.block_buf[1] = val; self.i2c_port .write(self.address, &self.block_buf[..2]) .map_err(Error::Comm)?; Ok(()) } fn reading_in_range(sample: &[i16; 3]) -> bool { const MDR_XY_AXES: i16 = 1600; const MDR_Z_AXIS: i16 = 2500; const RESO_PER_BIT: f32 = 0.3; const MAX_VAL_XY: i16 = (((MDR_XY_AXES as f32) / RESO_PER_BIT) as i16) + 1; const MAX_VAL_Z: i16 = (((MDR_Z_AXIS as f32) / RESO_PER_BIT) as i16) + 1; sample[0].abs() < MAX_VAL_XY && sample[1].abs() < MAX_VAL_XY && sample[2].abs() < MAX_VAL_Z } fn raw_reading_to_i16(buf: &[u8], idx: usize) -> i16 { let val: i16 = (buf[idx] as i16) | ((buf[idx+1] as i16) << 8) ; val } pub fn get_mag_vector(&mut self, delay_source: &mut impl DelayMs<u8>) -> Result<[i16; 3], Error<CommE>> { const SINGLE_MEASURE_MODE: u8 = 0x01; const XYZ_DATA_LEN: usize = 6; self.write_reg(REG_CTRL1, SINGLE_MEASURE_MODE)?; delay_source.delay_ms(6); self.read_block(REG_MAG_DATA_START, XYZ_DATA_LEN)?; let sample_i16 = [ Self::raw_reading_to_i16(&self.block_buf, 0), Self::raw_reading_to_i16(&self.block_buf, 2), Self::raw_reading_to_i16(&self.block_buf, 4) ]; if !Self::reading_in_range(&sample_i16) { return Err(Error::OutOfRange) } Ok(sample_i16) } }
/* Copyright (c) 2020 Todd Stellanova LICENSE: BSD3 (see LICENSE file) */ #![no_std] use embedded_hal as hal; use hal::blocking::delay::DelayMs; #[derive(Debug)] pub enum Error<CommE> { Comm(CommE), OutOfRange, Configuration, UnknownChipId, } pub const ADDR_0_0_7BIT:u8 = 0x0C; pub const ADDR_0_1_7BIT:u8 = 0x0D; pub const ADDR_1_0_7BIT:u8 = 0x0E; pub const ADDR_1_1_7BIT:u8 = 0x0F; pub const ADDR_7BIT_DEFAULT:u8 = 0x0E; pub const ADDR_0_0_8BIT:u8 = 0x18; pub const ADDR_0_1_8BIT:u8 = 0x1A; pub const ADDR_1_0_8BIT:u8 = 0x1C; pub const ADDR_1_1_8BIT:u8 = 0x1E; pub const ADDR_8BIT_DEFAULT:u8 = 0x1C; pub const DEFAULT_ADDRESS:u8 = ADDR_7BIT_DEFAULT; pub const REG_WAI:u8 = 0x00; const REG_DATA_X:u8 = 0x03; const REG_MAG_DATA_START:u8 = REG_DATA_X; const REG_CTRL1: u8 = 0x0A; pub const REG_CTRL2: u8 = 0x0B; pub const REG_AVG_CTRL:u8 = 0x41; pub const REG_SENS_MODE_SELECT:u8 = 0x42; const AVG_CTRL_16X: u8 = 0x24; const SRPD_MODE_LOW_POWER: u8 = 0xC0; const BLOCK_BUF_LEN: usize = 32; pub struct IST8310<I2C> { i2c_port: I2C, address: u8, block_buf: [u8; BLOCK_BUF_LEN], avg_ctrl_reg_set: u8, srpd_ctrl_reg_set: u8, } impl<I2C, CommE> IST8310<I2C> where I2C: hal::blocking::i2c::Write<Error = CommE> + hal::blocking::i2c::Read<Error = CommE> + hal::blocking::i2c::WriteRead<Error = CommE>, { pub fn default(i2c: I2C) -> Result<Self, Error<CommE>> { Self::new(i2c, DEFAULT_ADDRESS) } pub fn new(i2c_port: I2C, address: u8) -> Result<Self, Error<CommE>> { let mut inst = Self { i2c_port, address, block_buf: [0; BLOCK_BUF_LEN], avg_ctrl_reg_set: 0, srpd_ctrl_reg_set: 0, }; inst.reset()?; Ok(inst) } fn reset(&mut self) -> Result<(), Error<CommE>> { const SRST_POR_FLAG: u8 = 0x01 << 0; const EXPECTED_PROD_ID:u8 = 0x10; self.write_reg(REG_CTRL2, SRST_POR_FLAG)?; self.avg_ctrl_reg_set = AVG_CTRL_16X; self.write_reg(REG_AVG_CTRL, self.avg_ctrl_reg_set)?; self.srpd_ctrl_reg_set = SRPD_MODE_LOW_POWER; self.write_reg(REG_SENS_MODE_SELECT, self.srpd_ctrl_reg_set)?; let product_id = 
self.read_reg(REG_WAI)?; if product_id != EXPECTED_PROD_ID { return Err(Error::UnknownChipId) } Ok(()) } fn read_block(&mut self, reg: u8, recv_count: usize) -> Result<(), Error<CommE>> { let cmd_buf = [reg]; self.i2c_port .write_read(self.address, &cmd_buf, &mut self.block_buf[..recv_count]) .map_err(Error::Comm)?; Ok(()) } fn read_reg(&mut self, reg: u8 ) -> Result<u8, Error<CommE>> { self.read_block(reg,1)?; Ok(self.block_buf[0]) } fn write_reg(&mut self, reg: u8, val: u8) -> Result<(), Error<CommE>> { self.block_buf[0] = reg; self.block_buf[1] = val; self.i2c_port .write(self.address, &self.block_buf[..2]) .map_err(Error::Comm)?; Ok(()) } fn reading_in_range(sample: &[i16; 3]) -> bool { const MDR_XY_AXES: i16 = 1600; const MDR_Z_AXIS: i16 = 2500; const RESO_PER_BIT: f32 = 0.3; const MAX_VAL_XY: i16 = (((MDR_XY_AXES as f32) / RESO_PER_BIT) as i16) + 1; const MAX_VAL_Z: i16 = (((MDR_Z_AXIS as f32) / RESO_PER_BIT) as i16) + 1; sample[0].abs() < MAX_VAL_XY && sample[1].abs() < MAX_VAL_XY && sample[2].abs() < MAX_VAL_Z } fn raw_reading_to_i16(buf: &[u8], idx: usize) -> i16 { let val: i16 = (buf[idx] as i16) | ((buf[idx+1] as i16) << 8) ; val }
}
pub fn get_mag_vector(&mut self, delay_source: &mut impl DelayMs<u8>) -> Result<[i16; 3], Error<CommE>> { const SINGLE_MEASURE_MODE: u8 = 0x01; const XYZ_DATA_LEN: usize = 6; self.write_reg(REG_CTRL1, SINGLE_MEASURE_MODE)?; delay_source.delay_ms(6); self.read_block(REG_MAG_DATA_START, XYZ_DATA_LEN)?; let sample_i16 = [ Self::raw_reading_to_i16(&self.block_buf, 0), Self::raw_reading_to_i16(&self.block_buf, 2), Self::raw_reading_to_i16(&self.block_buf, 4) ]; if !Self::reading_in_range(&sample_i16) { return Err(Error::OutOfRange) } Ok(sample_i16) }
function_block-full_function
[ { "content": "#[test]\n\nfn test_init() {\n\n const SRST_POR_FLAG: u8 = 0x01 << 0;\n\n const SRPD_MODE_LOW_POWER: u8 = 0xC0;\n\n const AVG_CTRL_16X: u8 = 0x24;\n\n\n\n let addr = ist8310::DEFAULT_ADDRESS;\n\n\n\n // Configure expectations\n\n let expectations = [\n\n I2cTransaction::wri...
Rust
src/server/rpc/client.rs
gavento/rain
9372c66d82180ecae12af065a81631565c0d40dc
use capnp::capability::Promise; use std::net::SocketAddr; use futures::{future, Future}; use common::resources::Resources; use common::id::{DataObjectId, SId, TaskId}; use common::convert::{FromCapnp, ToCapnp}; use client_capnp::client_service; use server::state::StateRef; use server::graph::{ClientRef, DataObjectRef, SessionError, TaskInput, TaskRef}; use errors::{Error, ErrorKind, Result}; use common::Attributes; use common::RcSet; use server::rpc::ClientDataStoreImpl; use common::events::{ObjectDescriptor, TaskDescriptor}; pub struct ClientServiceImpl { state: StateRef, client: ClientRef, } impl ClientServiceImpl { pub fn new(state: &StateRef, address: &SocketAddr) -> Result<Self> { Ok(Self { state: state.clone(), client: state.get_mut().add_client(address.clone())?, }) } } impl Drop for ClientServiceImpl { fn drop(&mut self) { let mut s = self.state.get_mut(); info!("Client {} disconnected", self.client.get_id()); s.remove_client(&self.client) .expect("client connection drop"); } } impl client_service::Server for ClientServiceImpl { fn get_server_info( &mut self, _: client_service::GetServerInfoParams, mut results: client_service::GetServerInfoResults, ) -> Promise<(), ::capnp::Error> { debug!("Client asked for info"); let s = self.state.get(); let futures: Vec<_> = s.graph .workers .iter() .map(|(worker_id, worker)| { let w = worker.get(); let control = w.control.as_ref().unwrap(); let worker_id = worker_id.clone(); let resources = w.resources.clone(); control .get_info_request() .send() .promise .map(move |r| (worker_id, r, resources)) }) .collect(); Promise::from_future(future::join_all(futures).map(move |rs| { let results = results.get(); let mut workers = results.init_workers(rs.len() as u32); for (i, &(ref worker_id, ref r, ref resources)) in rs.iter().enumerate() { let mut w = workers.borrow().get(i as u32); let r = r.get().unwrap(); w.set_tasks(r.get_tasks().unwrap()).unwrap(); w.set_objects(r.get_objects().unwrap()).unwrap(); 
w.set_objects_to_delete(r.get_objects_to_delete().unwrap()) .unwrap(); resources.to_capnp(&mut w.borrow().get_resources().unwrap()); worker_id.to_capnp(&mut w.get_worker_id().unwrap()); } () })) } fn new_session( &mut self, _: client_service::NewSessionParams, mut results: client_service::NewSessionResults, ) -> Promise<(), ::capnp::Error> { let mut s = self.state.get_mut(); let session = pry!(s.add_session(&self.client)); results.get().set_session_id(session.get_id()); debug!("Client asked for a new session, got {:?}", session.get_id()); Promise::ok(()) } fn close_session( &mut self, params: client_service::CloseSessionParams, _: client_service::CloseSessionResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let mut s = self.state.get_mut(); let session = pry!(s.session_by_id(params.get_session_id())); s.remove_session(&session).unwrap(); Promise::ok(()) } fn submit( &mut self, params: client_service::SubmitParams, _: client_service::SubmitResults, ) -> Promise<(), ::capnp::Error> { let mut s = self.state.get_mut(); let params = pry!(params.get()); let tasks = pry!(params.get_tasks()); let objects = pry!(params.get_objects()); info!( "New task submission ({} tasks, {} data objects) from client {}", tasks.len(), objects.len(), self.client.get_id() ); debug!("Sessions: {:?}", s.graph.sessions); let mut created_tasks = Vec::<TaskRef>::new(); let mut created_objects = Vec::<DataObjectRef>::new(); let res: Result<()> = (|| { for co in objects.iter() { let id = DataObjectId::from_capnp(&co.borrow().get_id()?); let session = s.session_by_id(id.get_session_id())?; let data = if co.get_has_data() { Some(co.get_data()?.into()) } else { None }; let attributes = Attributes::from_capnp(&co.get_attributes()?); let o = s.add_object( &session, id, co.get_keep(), co.get_label()?.to_string(), data, attributes, )?; created_objects.push(o); } for ct in tasks.iter() { let id = TaskId::from_capnp(&ct.get_id()?); let session = 
s.session_by_id(id.get_session_id())?; let attributes = Attributes::from_capnp(&ct.get_attributes().unwrap()); let resources: Resources = attributes.get("resources")?; let mut inputs = Vec::<TaskInput>::new(); for ci in ct.get_inputs()?.iter() { inputs.push(TaskInput { object: s.object_by_id(DataObjectId::from_capnp(&ci.get_id()?))?, label: ci.get_label()?.into(), path: ci.get_path()?.into(), }); } let mut outputs = Vec::<DataObjectRef>::new(); for co in ct.get_outputs()?.iter() { outputs.push(s.object_by_id(DataObjectId::from_capnp(&co))?); } let t = s.add_task( &session, id, inputs, outputs, ct.get_task_type()?.to_string(), attributes, resources, )?; created_tasks.push(t); } debug!("New tasks: {:?}", created_tasks); debug!("New objects: {:?}", created_objects); s.logger.add_client_submit_event( created_tasks .iter() .map(|t| TaskDescriptor::from(&t.get())) .collect(), created_objects .iter() .map(|o| ObjectDescriptor::from(&o.get())) .collect(), ); s.verify_submit(&created_tasks, &created_objects) })(); if res.is_err() { debug!("Error: {:?}", res); for t in created_tasks { pry!(s.remove_task(&t)); } for o in created_objects { pry!(s.remove_object(&o)); } pry!(res); } Promise::ok(()) } fn get_data_store( &mut self, _params: client_service::GetDataStoreParams, mut results: client_service::GetDataStoreResults, ) -> Promise<(), ::capnp::Error> { debug!("server data store requested from client"); let datastore = ::datastore_capnp::data_store::ToClient::new(ClientDataStoreImpl::new( &self.state, )).from_server::<::capnp_rpc::Server>(); results.get().set_store(datastore); Promise::ok(()) } fn wait( &mut self, params: client_service::WaitParams, mut result: client_service::WaitResults, ) -> Promise<(), ::capnp::Error> { fn set_error(result: &mut ::common_capnp::unit_result::Builder, error: &SessionError) { error.to_capnp(&mut result.borrow().init_error()); } let s = self.state.get_mut(); let params = pry!(params.get()); let task_ids = pry!(params.get_task_ids()); let 
object_ids = pry!(params.get_object_ids()); info!( "New wait request ({} tasks, {} data objects) from client", task_ids.len(), object_ids.len() ); if task_ids.len() == 1 && object_ids.len() == 0 && task_ids.get(0).get_id() == ::common_capnp::ALL_TASKS_ID { let session_id = task_ids.get(0).get_session_id(); debug!("Waiting for all session session_id={}", session_id); let session = match s.session_by_id(session_id) { Ok(s) => s, Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; if let &Some(ref e) = session.get().get_error() { set_error(&mut result.get(), e); return Promise::ok(()); } let session2 = session.clone(); return Promise::from_future(session.get_mut().wait().then(move |r| { match r { Ok(_) => result.get().set_ok(()), Err(_) => { set_error( &mut result.get(), session2.get().get_error().as_ref().unwrap(), ); } }; Ok(()) })); } let mut sessions = RcSet::new(); let mut task_futures = Vec::new(); for id in task_ids.iter() { match s.task_by_id_check_session(TaskId::from_capnp(&id)) { Ok(t) => { let mut task = t.get_mut(); sessions.insert(task.session.clone()); if task.is_finished() { continue; } task_futures.push(task.wait()); } Err(Error(ErrorKind::SessionErr(ref e), _)) => { set_error(&mut result.get(), e); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; } debug!("{} waiting futures", task_futures.len()); if task_futures.is_empty() { result.get().set_ok(()); return Promise::ok(()); } Promise::from_future(::futures::future::join_all(task_futures).then(move |r| { match r { Ok(_) => result.get().set_ok(()), Err(_) => { let session = sessions.iter().find(|s| s.get().is_failed()).unwrap(); set_error( &mut result.get(), session.get().get_error().as_ref().unwrap(), ); } }; Ok(()) })) } fn wait_some( &mut self, params: client_service::WaitSomeParams, _results: client_service::WaitSomeResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let 
task_ids = pry!(params.get_task_ids()); let object_ids = pry!(params.get_object_ids()); info!( "New wait_some request ({} tasks, {} data objects) from client", task_ids.len(), object_ids.len() ); Promise::err(::capnp::Error::failed( "wait_sone is not implemented yet".to_string(), )) } fn unkeep( &mut self, params: client_service::UnkeepParams, mut results: client_service::UnkeepResults, ) -> Promise<(), ::capnp::Error> { let mut s = self.state.get_mut(); let params = pry!(params.get()); let object_ids = pry!(params.get_object_ids()); debug!( "New unkeep request ({} data objects) from client", object_ids.len() ); let mut objects = Vec::new(); for oid in object_ids.iter() { let id: DataObjectId = DataObjectId::from_capnp(&oid); match s.object_by_id_check_session(id) { Ok(obj) => objects.push(obj), Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; } for o in objects.iter() { s.unkeep_object(&o); } s.logger .add_client_unkeep_event(objects.iter().map(|o| o.get().id).collect()); Promise::ok(()) } fn get_state( &mut self, params: client_service::GetStateParams, mut results: client_service::GetStateResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let task_ids = pry!(params.get_task_ids()); let object_ids = pry!(params.get_object_ids()); info!( "New get_state request ({} tasks, {} data objects) from client", task_ids.len(), object_ids.len() ); let s = self.state.get(); let tasks: Vec<_> = match task_ids .iter() .map(|id| s.task_by_id_check_session(TaskId::from_capnp(&id))) .collect() { Ok(tasks) => tasks, Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().get_state().unwrap().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; let objects: Vec<_> = match object_ids .iter() .map(|id| 
s.object_by_id_check_session(DataObjectId::from_capnp(&id))) .collect() { Ok(tasks) => tasks, Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().get_state().unwrap().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; let mut results = results.get(); { let mut task_updates = results.borrow().init_tasks(tasks.len() as u32); for (i, task) in tasks.iter().enumerate() { let mut update = task_updates.borrow().get(i as u32); let t = task.get(); t.id.to_capnp(&mut update.borrow().get_id().unwrap()); t.attributes.to_capnp(&mut update.get_attributes().unwrap()); } } { let mut obj_updates = results.borrow().init_objects(objects.len() as u32); for (i, obj) in objects.iter().enumerate() { let mut update = obj_updates.borrow().get(i as u32); let o = obj.get(); o.attributes .to_capnp(&mut update.borrow().get_attributes().unwrap()); o.id.to_capnp(&mut update.get_id().unwrap()); } } results.get_state().unwrap().set_ok(()); Promise::ok(()) } }
use capnp::capability::Promise; use std::net::SocketAddr; use futures::{future, Future}; use common::resources::Resources; use common::id::{DataObjectId, SId, TaskId}; use common::convert::{FromCapnp, ToCapnp}; use client_capnp::client_service; use server::state::StateRef; use server::graph::{ClientRef, DataObjectRef, SessionError, TaskInput, TaskRef}; use errors::{Error, ErrorKind, Result}; use common::Attributes; use common::RcSet; use server::rpc::ClientDataStoreImpl; use common::events::{ObjectDescriptor, TaskDescriptor}; pub struct ClientServiceImpl { state: StateRef, client: ClientRef, } impl ClientServiceImpl { pub fn new(state: &StateRef, address: &SocketAddr) -> Result<Self> { Ok(Self { state: state.clone(), client: state.get_mut().add_client(address.clone())?, }) } } impl Drop for ClientServiceImpl {
} impl client_service::Server for ClientServiceImpl { fn get_server_info( &mut self, _: client_service::GetServerInfoParams, mut results: client_service::GetServerInfoResults, ) -> Promise<(), ::capnp::Error> { debug!("Client asked for info"); let s = self.state.get(); let futures: Vec<_> = s.graph .workers .iter() .map(|(worker_id, worker)| { let w = worker.get(); let control = w.control.as_ref().unwrap(); let worker_id = worker_id.clone(); let resources = w.resources.clone(); control .get_info_request() .send() .promise .map(move |r| (worker_id, r, resources)) }) .collect(); Promise::from_future(future::join_all(futures).map(move |rs| { let results = results.get(); let mut workers = results.init_workers(rs.len() as u32); for (i, &(ref worker_id, ref r, ref resources)) in rs.iter().enumerate() { let mut w = workers.borrow().get(i as u32); let r = r.get().unwrap(); w.set_tasks(r.get_tasks().unwrap()).unwrap(); w.set_objects(r.get_objects().unwrap()).unwrap(); w.set_objects_to_delete(r.get_objects_to_delete().unwrap()) .unwrap(); resources.to_capnp(&mut w.borrow().get_resources().unwrap()); worker_id.to_capnp(&mut w.get_worker_id().unwrap()); } () })) } fn new_session( &mut self, _: client_service::NewSessionParams, mut results: client_service::NewSessionResults, ) -> Promise<(), ::capnp::Error> { let mut s = self.state.get_mut(); let session = pry!(s.add_session(&self.client)); results.get().set_session_id(session.get_id()); debug!("Client asked for a new session, got {:?}", session.get_id()); Promise::ok(()) } fn close_session( &mut self, params: client_service::CloseSessionParams, _: client_service::CloseSessionResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let mut s = self.state.get_mut(); let session = pry!(s.session_by_id(params.get_session_id())); s.remove_session(&session).unwrap(); Promise::ok(()) } fn submit( &mut self, params: client_service::SubmitParams, _: client_service::SubmitResults, ) -> Promise<(), ::capnp::Error> { 
let mut s = self.state.get_mut(); let params = pry!(params.get()); let tasks = pry!(params.get_tasks()); let objects = pry!(params.get_objects()); info!( "New task submission ({} tasks, {} data objects) from client {}", tasks.len(), objects.len(), self.client.get_id() ); debug!("Sessions: {:?}", s.graph.sessions); let mut created_tasks = Vec::<TaskRef>::new(); let mut created_objects = Vec::<DataObjectRef>::new(); let res: Result<()> = (|| { for co in objects.iter() { let id = DataObjectId::from_capnp(&co.borrow().get_id()?); let session = s.session_by_id(id.get_session_id())?; let data = if co.get_has_data() { Some(co.get_data()?.into()) } else { None }; let attributes = Attributes::from_capnp(&co.get_attributes()?); let o = s.add_object( &session, id, co.get_keep(), co.get_label()?.to_string(), data, attributes, )?; created_objects.push(o); } for ct in tasks.iter() { let id = TaskId::from_capnp(&ct.get_id()?); let session = s.session_by_id(id.get_session_id())?; let attributes = Attributes::from_capnp(&ct.get_attributes().unwrap()); let resources: Resources = attributes.get("resources")?; let mut inputs = Vec::<TaskInput>::new(); for ci in ct.get_inputs()?.iter() { inputs.push(TaskInput { object: s.object_by_id(DataObjectId::from_capnp(&ci.get_id()?))?, label: ci.get_label()?.into(), path: ci.get_path()?.into(), }); } let mut outputs = Vec::<DataObjectRef>::new(); for co in ct.get_outputs()?.iter() { outputs.push(s.object_by_id(DataObjectId::from_capnp(&co))?); } let t = s.add_task( &session, id, inputs, outputs, ct.get_task_type()?.to_string(), attributes, resources, )?; created_tasks.push(t); } debug!("New tasks: {:?}", created_tasks); debug!("New objects: {:?}", created_objects); s.logger.add_client_submit_event( created_tasks .iter() .map(|t| TaskDescriptor::from(&t.get())) .collect(), created_objects .iter() .map(|o| ObjectDescriptor::from(&o.get())) .collect(), ); s.verify_submit(&created_tasks, &created_objects) })(); if res.is_err() { debug!("Error: 
{:?}", res); for t in created_tasks { pry!(s.remove_task(&t)); } for o in created_objects { pry!(s.remove_object(&o)); } pry!(res); } Promise::ok(()) } fn get_data_store( &mut self, _params: client_service::GetDataStoreParams, mut results: client_service::GetDataStoreResults, ) -> Promise<(), ::capnp::Error> { debug!("server data store requested from client"); let datastore = ::datastore_capnp::data_store::ToClient::new(ClientDataStoreImpl::new( &self.state, )).from_server::<::capnp_rpc::Server>(); results.get().set_store(datastore); Promise::ok(()) } fn wait( &mut self, params: client_service::WaitParams, mut result: client_service::WaitResults, ) -> Promise<(), ::capnp::Error> { fn set_error(result: &mut ::common_capnp::unit_result::Builder, error: &SessionError) { error.to_capnp(&mut result.borrow().init_error()); } let s = self.state.get_mut(); let params = pry!(params.get()); let task_ids = pry!(params.get_task_ids()); let object_ids = pry!(params.get_object_ids()); info!( "New wait request ({} tasks, {} data objects) from client", task_ids.len(), object_ids.len() ); if task_ids.len() == 1 && object_ids.len() == 0 && task_ids.get(0).get_id() == ::common_capnp::ALL_TASKS_ID { let session_id = task_ids.get(0).get_session_id(); debug!("Waiting for all session session_id={}", session_id); let session = match s.session_by_id(session_id) { Ok(s) => s, Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; if let &Some(ref e) = session.get().get_error() { set_error(&mut result.get(), e); return Promise::ok(()); } let session2 = session.clone(); return Promise::from_future(session.get_mut().wait().then(move |r| { match r { Ok(_) => result.get().set_ok(()), Err(_) => { set_error( &mut result.get(), session2.get().get_error().as_ref().unwrap(), ); } }; Ok(()) })); } let mut sessions = RcSet::new(); let mut task_futures = Vec::new(); for id in task_ids.iter() { match s.task_by_id_check_session(TaskId::from_capnp(&id)) { Ok(t) => { let mut 
task = t.get_mut(); sessions.insert(task.session.clone()); if task.is_finished() { continue; } task_futures.push(task.wait()); } Err(Error(ErrorKind::SessionErr(ref e), _)) => { set_error(&mut result.get(), e); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; } debug!("{} waiting futures", task_futures.len()); if task_futures.is_empty() { result.get().set_ok(()); return Promise::ok(()); } Promise::from_future(::futures::future::join_all(task_futures).then(move |r| { match r { Ok(_) => result.get().set_ok(()), Err(_) => { let session = sessions.iter().find(|s| s.get().is_failed()).unwrap(); set_error( &mut result.get(), session.get().get_error().as_ref().unwrap(), ); } }; Ok(()) })) } fn wait_some( &mut self, params: client_service::WaitSomeParams, _results: client_service::WaitSomeResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let task_ids = pry!(params.get_task_ids()); let object_ids = pry!(params.get_object_ids()); info!( "New wait_some request ({} tasks, {} data objects) from client", task_ids.len(), object_ids.len() ); Promise::err(::capnp::Error::failed( "wait_sone is not implemented yet".to_string(), )) } fn unkeep( &mut self, params: client_service::UnkeepParams, mut results: client_service::UnkeepResults, ) -> Promise<(), ::capnp::Error> { let mut s = self.state.get_mut(); let params = pry!(params.get()); let object_ids = pry!(params.get_object_ids()); debug!( "New unkeep request ({} data objects) from client", object_ids.len() ); let mut objects = Vec::new(); for oid in object_ids.iter() { let id: DataObjectId = DataObjectId::from_capnp(&oid); match s.object_by_id_check_session(id) { Ok(obj) => objects.push(obj), Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; } for o in objects.iter() { s.unkeep_object(&o); 
} s.logger .add_client_unkeep_event(objects.iter().map(|o| o.get().id).collect()); Promise::ok(()) } fn get_state( &mut self, params: client_service::GetStateParams, mut results: client_service::GetStateResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let task_ids = pry!(params.get_task_ids()); let object_ids = pry!(params.get_object_ids()); info!( "New get_state request ({} tasks, {} data objects) from client", task_ids.len(), object_ids.len() ); let s = self.state.get(); let tasks: Vec<_> = match task_ids .iter() .map(|id| s.task_by_id_check_session(TaskId::from_capnp(&id))) .collect() { Ok(tasks) => tasks, Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().get_state().unwrap().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; let objects: Vec<_> = match object_ids .iter() .map(|id| s.object_by_id_check_session(DataObjectId::from_capnp(&id))) .collect() { Ok(tasks) => tasks, Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().get_state().unwrap().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; let mut results = results.get(); { let mut task_updates = results.borrow().init_tasks(tasks.len() as u32); for (i, task) in tasks.iter().enumerate() { let mut update = task_updates.borrow().get(i as u32); let t = task.get(); t.id.to_capnp(&mut update.borrow().get_id().unwrap()); t.attributes.to_capnp(&mut update.get_attributes().unwrap()); } } { let mut obj_updates = results.borrow().init_objects(objects.len() as u32); for (i, obj) in objects.iter().enumerate() { let mut update = obj_updates.borrow().get(i as u32); let o = obj.get(); o.attributes .to_capnp(&mut update.borrow().get_attributes().unwrap()); o.id.to_capnp(&mut update.get_id().unwrap()); } } results.get_state().unwrap().set_ok(()); Promise::ok(()) } }
fn drop(&mut self) { let mut s = self.state.get_mut(); info!("Client {} disconnected", self.client.get_id()); s.remove_client(&self.client) .expect("client connection drop"); }
function_block-function_prefixed
[ { "content": "pub fn task_run(state: &mut State, task_ref: TaskRef) -> TaskResult {\n\n let state_ref = state.self_ref();\n\n let config: RunConfig = task_ref.get().attributes.get(\"config\")?;\n\n\n\n let (dir, future, stderr_path) = {\n\n // Parse arguments\n\n let name = config.args.ge...
Rust
src/hierarchies.rs
esrlabs/cgroups-rs
556dea62b963fb75c3b1234f9cab6164706fc594
use std::fs; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::PathBuf; use crate::blkio::BlkIoController; use crate::cpu::CpuController; use crate::cpuacct::CpuAcctController; use crate::cpuset::CpuSetController; use crate::devices::DevicesController; use crate::freezer::FreezerController; use crate::hugetlb::HugeTlbController; use crate::memory::MemController; use crate::net_cls::NetClsController; use crate::net_prio::NetPrioController; use crate::perf_event::PerfEventController; use crate::pid::PidController; use crate::rdma::RdmaController; use crate::systemd::SystemdController; use crate::{Controllers, Hierarchy, Subsystem}; use crate::cgroup::Cgroup; #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub struct Mountinfo { pub mount_point: PathBuf, pub fs_type: (String, Option<String>), pub super_opts: Vec<String>, } pub(crate) fn parse_mountinfo_for_line(line: &str) -> Option<Mountinfo> { let s_values: Vec<_> = line.split(" - ").collect(); if s_values.len() != 2 { return None; } let s0_values: Vec<_> = s_values[0].trim().split(' ').collect(); let s1_values: Vec<_> = s_values[1].trim().split(' ').collect(); if s0_values.len() < 6 || s1_values.len() < 3 { return None; } let mount_point = PathBuf::from(s0_values[4]); let fs_type_values: Vec<_> = s1_values[0].trim().split('.').collect(); let fs_type = match fs_type_values.len() { 1 => (fs_type_values[0].to_string(), None), 2 => ( fs_type_values[0].to_string(), Some(fs_type_values[1].to_string()), ), _ => return None, }; let super_opts: Vec<String> = s1_values[2].trim().split(',').map(String::from).collect(); Some(Mountinfo { mount_point, fs_type, super_opts, }) } fn mountinfo_file(file: &mut File) -> Vec<Mountinfo> { let mut r = Vec::new(); for line in BufReader::new(file).lines() { match line { Ok(line) => { if let Some(mi) = parse_mountinfo_for_line(&line) { if mi.fs_type.0 == "cgroup" { r.push(mi); } } } Err(_) => break, } } r } pub fn mountinfo_self() -> Vec<Mountinfo> { match 
File::open("/proc/self/mountinfo") { Ok(mut file) => mountinfo_file(&mut file), Err(_) => vec![], } } #[derive(Debug, Clone)] pub struct V1 { mountinfo: Vec<Mountinfo>, } #[derive(Debug, Clone)] pub struct V2 { root: String, } impl Hierarchy for V1 { fn v2(&self) -> bool { false } fn subsystems(&self) -> Vec<Subsystem> { let mut subs = vec![]; if let Some(root) = self.get_mount_point(Controllers::BlkIo) { subs.push(Subsystem::BlkIo(BlkIoController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Mem) { subs.push(Subsystem::Mem(MemController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Pids) { subs.push(Subsystem::Pid(PidController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::CpuSet) { subs.push(Subsystem::CpuSet(CpuSetController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::CpuAcct) { subs.push(Subsystem::CpuAcct(CpuAcctController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Cpu) { subs.push(Subsystem::Cpu(CpuController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Devices) { subs.push(Subsystem::Devices(DevicesController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Freezer) { subs.push(Subsystem::Freezer(FreezerController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::NetCls) { subs.push(Subsystem::NetCls(NetClsController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::PerfEvent) { subs.push(Subsystem::PerfEvent(PerfEventController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::NetPrio) { subs.push(Subsystem::NetPrio(NetPrioController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::HugeTlb) { subs.push(Subsystem::HugeTlb(HugeTlbController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Rdma) { 
subs.push(Subsystem::Rdma(RdmaController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Systemd) { subs.push(Subsystem::Systemd(SystemdController::new(root, false))); } subs } fn root_control_group(&self) -> Cgroup { Cgroup::load(auto(), "") } fn root(&self) -> PathBuf { self.mountinfo .iter() .find_map(|m| { if m.fs_type.0 == "cgroup" { return Some(m.mount_point.parent().unwrap()); } None }) .unwrap() .to_path_buf() } } impl Hierarchy for V2 { fn v2(&self) -> bool { true } fn subsystems(&self) -> Vec<Subsystem> { let p = format!("{}/{}", UNIFIED_MOUNTPOINT, "cgroup.controllers"); let ret = fs::read_to_string(p.as_str()); if ret.is_err() { return vec![]; } let mut subs = vec![]; let controllers = ret.unwrap().trim().to_string(); let mut controller_list: Vec<&str> = controllers.split(' ').collect(); controller_list.push("freezer"); for s in controller_list { match s { "cpu" => { subs.push(Subsystem::Cpu(CpuController::new(self.root(), true))); } "io" => { subs.push(Subsystem::BlkIo(BlkIoController::new(self.root(), true))); } "cpuset" => { subs.push(Subsystem::CpuSet(CpuSetController::new(self.root(), true))); } "memory" => { subs.push(Subsystem::Mem(MemController::new(self.root(), true))); } "pids" => { subs.push(Subsystem::Pid(PidController::new(self.root(), true))); } "freezer" => { subs.push(Subsystem::Freezer(FreezerController::new( self.root(), true, ))); } "hugetlb" => { subs.push(Subsystem::HugeTlb(HugeTlbController::new( self.root(), true, ))); } _ => {} } } subs } fn root_control_group(&self) -> Cgroup { Cgroup::load(auto(), "") } fn root(&self) -> PathBuf { PathBuf::from(self.root.clone()) } } impl V1 { pub fn new() -> V1 { V1 { mountinfo: mountinfo_self(), } } pub fn get_mount_point(&self, controller: Controllers) -> Option<PathBuf> { self.mountinfo.iter().find_map(|m| { if m.fs_type.0 == "cgroup" && m.super_opts.contains(&controller.to_string()) { return Some(m.mount_point.clone()); } None }) } } impl Default for V1 { fn default() 
-> Self { Self::new() } } impl V2 { pub fn new() -> V2 { V2 { root: String::from(UNIFIED_MOUNTPOINT), } } } impl Default for V2 { fn default() -> Self { Self::new() } } pub const UNIFIED_MOUNTPOINT: &str = "/sys/fs/cgroup"; #[cfg(any( all(target_os = "linux", not(target_env = "musl")), target_os = "android" ))] pub fn is_cgroup2_unified_mode() -> bool { use nix::sys::statfs; let path = std::path::Path::new(UNIFIED_MOUNTPOINT); let fs_stat = statfs::statfs(path); if fs_stat.is_err() { return false; } fs_stat.unwrap().filesystem_type() == statfs::CGROUP2_SUPER_MAGIC } pub const INIT_CGROUP_PATHS: &str = "/proc/1/cgroup"; #[cfg(all(target_os = "linux", target_env = "musl"))] pub fn is_cgroup2_unified_mode() -> bool { let lines = fs::read_to_string(INIT_CGROUP_PATHS); if lines.is_err() { return false; } for line in lines.unwrap().lines() { let fields: Vec<&str> = line.split(':').collect(); if fields.len() != 3 { continue; } if fields[0] != "0" { return false; } } true } pub fn auto() -> Box<dyn Hierarchy> { if is_cgroup2_unified_mode() { Box::new(V2::new()) } else { Box::new(V1::new()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_mount() { let mountinfo = vec![ ("29 26 0:26 / /sys/fs/cgroup/cpuset,cpu,cpuacct rw,nosuid,nodev,noexec,relatime shared:10 - cgroup cgroup rw,cpuset,cpu,cpuacct", Mountinfo{mount_point: PathBuf::from("/sys/fs/cgroup/cpuset,cpu,cpuacct"), fs_type: ("cgroup".to_string(), None), super_opts: vec![ "rw".to_string(), "cpuset".to_string(), "cpu".to_string(), "cpuacct".to_string(), ]}), ("121 1731 0:42 / /shm rw,nosuid,nodev,noexec,relatime shared:68 master:66 - tmpfs shm rw,size=65536k", Mountinfo{mount_point: PathBuf::from("/shm"), fs_type: ("tmpfs".to_string(), None), super_opts: vec![ "rw".to_string(), "size=65536k".to_string(), ]}), ("121 1731 0:42 / /shm rw,nosuid,nodev,noexec,relatime shared:68 master:66 - tmpfs.123 shm rw,size=65536k", Mountinfo{mount_point: PathBuf::from("/shm"), fs_type: ("tmpfs".to_string(), 
Some("123".to_string())), super_opts: vec![ "rw".to_string(), "size=65536k".to_string(), ]}), ]; for mi in mountinfo { let info = parse_mountinfo_for_line(mi.0).unwrap(); assert_eq!(info, mi.1) } } }
use std::fs; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::PathBuf; use crate::blkio::BlkIoController; use crate::cpu::CpuController; use crate::cpuacct::CpuAcctController; use crate::cpuset::CpuSetController; use crate::devices::DevicesController; use crate::freezer::FreezerController; use crate::hugetlb::HugeTlbController; use crate::memory::MemController; use crate::net_cls::NetClsController; use crate::net_prio::NetPrioController; use crate::perf_event::PerfEventController; use crate::pid::PidController; use crate::rdma::RdmaController; use crate::systemd::SystemdController; use crate::{Controllers, Hierarchy, Subsystem}; use crate::cgroup::Cgroup; #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub struct Mountinfo { pub mount_point: PathBuf, pub fs_type: (String, Option<String>), pub super_opts: Vec<String>, } pub(crate) fn parse_mountinfo_for_line(line: &str) -> Option<Mountinfo> { let s_values: Vec<_> = line.split(" - ").collect(); if s_values.len() != 2 { return None; } let s0_values: Vec<_> = s_values[0].trim().split(' ').collect(); let s1_values: Vec<_> = s_values[1].trim().split(' ').collect(); if s0_values.len() < 6 || s1_values.len() < 3 { return None; } let mount_point = PathBuf::from(s0_values[4]); let fs_type_values: Vec<_> = s1_values[0].trim().split('.').collect(); let fs_type = match fs_type_values.len() { 1 => (fs_type_values[0].to_string(), None), 2 => ( fs_type_values[0].to_string(), Some(fs_type_values[1].to_string()), ), _ => return None, }; let super_opts: Vec<String> = s1_values[2].trim().split(',').map(String::from).collect(); Some(Mountinfo { mount_point, fs_type, super_opts, }) } fn mountinfo_file(file: &mut File) -> Vec<Mountinfo> { let mut r = Vec::new(); for line in BufReader::new(file).lines() { match line { Ok(line) => { if let Some(mi) = parse_mountinfo_for_line(&line) { if mi.fs_type.0 == "cgroup" { r.push(mi); } } } Err(_) => break, } } r } pub fn mountinfo_self() -> Vec<Mountinfo> { match 
File::open("/proc/self/mountinfo") { Ok(mut file) => mountinfo_file(&mut file), Err(_) => vec![], } } #[derive(Debug, Clone)] pub struct V1 { mountinfo: Vec<Mountinfo>, } #[derive(Debug, Clone)] pub struct V2 { root: String, } impl Hierarchy for V1 { fn v2(&self) -> bool { false } fn subsystems(&self) -> Vec<Subsystem> { let mut subs = vec![]; if let Some(root) = self.get_mount_point(Controllers::BlkIo) { subs.push(Subsystem::BlkIo(BlkIoController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Mem) { subs.push(Subsystem::Mem(MemController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Pids) { subs.push(Subsystem::Pid(PidController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::CpuSet) { subs.push(Subsystem::CpuSet(CpuSetController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::CpuAcct) { subs.push(Subsystem::CpuAcct(CpuAcctController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Cpu) { subs.push(Subsystem::Cpu(CpuController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Devices) { subs.push(Subsystem::Devices(DevicesController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Freezer) { subs.push(Subsystem::Freezer(FreezerController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::NetCls) { subs.push(Subsystem::NetCls(NetClsController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::PerfEvent) { subs.push(Subsystem::PerfEvent(PerfEventController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::NetPrio) { subs.push(Subsystem::NetPrio(NetPrioController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::HugeTlb) { subs.push(Subsystem::HugeTlb(HugeTlbController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Rdma) { 
subs.push(Subsystem::Rdma(RdmaController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Systemd) { subs.push(Subsystem::Systemd(SystemdController::new(root, false))); } subs } fn root_control_group(&self) -> Cgroup { Cgroup::load(auto(), "") } fn root(&self) -> PathBuf { self.mountinfo .iter() .find_map(|m| { if m.fs_type.0 == "cgroup" { return Some(m.mount_point.parent().unwrap()); } None }) .unwrap() .to_path_buf() } } impl Hierarchy for V2 { fn v2(&self) -> bool { true } fn subsystems(&self) -> Vec<Subsystem> { let p = format!("{}/{}", UNIFIED_MOUNTPOINT, "cgroup.controllers"); let ret = fs::read_to_string(p.as_str()); if ret.is_err() { return vec![]; } let mut subs = vec![]; let controllers = ret.unwrap().trim().to_string(); let mut controller_list: Vec<&str> = controllers.split(' ').collect(); controller_list.push("freezer"); for s in controller_list { match s { "cpu" => { subs.push(Subsystem::Cpu(CpuController::new(self.root(), true))); } "io" => { subs.push(Subsystem::BlkIo(BlkIoController::new(self.root(), true))); } "cpuset" => { subs.push(Subsystem::CpuSet(CpuSetController::new(self.root(), true))); } "memory" => { subs.push(Subsystem::Mem(MemController::new(self.root(), true))); } "pids" => { subs.push(Subsystem::Pid(PidController::new(self.root(), true))); } "freezer" => { subs.push(Subsystem::Freezer(FreezerController::new( self.root(), true, ))); } "hugetlb" => { subs.push(Subsystem::HugeTlb(HugeTlbController::new( self.root(), true, ))); } _ => {} } } subs } fn root_control_group(&self) -> Cgroup { Cgroup::load(auto(), "") } fn root(&self) -> PathBuf { PathBuf::from(self.root.clone()) } } impl V1 { pub fn new() -> V1 { V1 { mountinfo: mountinfo_self(), } } pub fn get_mount_point(&self, controller: Controllers) -> Option<PathBuf> { self.mountinfo.iter().find_map(|m| { if m.fs_type.0 == "cgroup" && m.super_opts.contains(&controller.to_string()) { return Some(m.mount_point.clone()); } None }) } } impl Default for V1 { fn default() 
-> Self { Self::new() } } impl V2 { pub fn new() -> V2 { V2 { root: String::from(UNIFIED_MOUNTPOINT), } } } impl Default for V2 { fn default() -> Self { Self::new() } } pub const UNIFIED_MOUNTPOINT: &str = "/sys/fs/cgroup"; #[cfg(any( all(target_os = "linux", not(target_env = "musl")), target_os = "android" ))] pub fn is_cgroup2_unified_mode() -> bool { use nix::sys::statfs; let path = std::path::Path::new(UNIFIED_MOUNTPOINT); let fs_stat = statfs::statfs(path); if fs_stat.is_err() { return false; } fs_stat.unwrap().filesystem_type() == statfs::CGROUP2_SUPER_MAGIC } pub const INIT_CGROUP_PATHS: &str = "/proc/1/cgroup"; #[cfg(all(target_os = "linux", target_env = "musl"))]
pub fn auto() -> Box<dyn Hierarchy> { if is_cgroup2_unified_mode() { Box::new(V2::new()) } else { Box::new(V1::new()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_mount() { let mountinfo = vec![ ("29 26 0:26 / /sys/fs/cgroup/cpuset,cpu,cpuacct rw,nosuid,nodev,noexec,relatime shared:10 - cgroup cgroup rw,cpuset,cpu,cpuacct", Mountinfo{mount_point: PathBuf::from("/sys/fs/cgroup/cpuset,cpu,cpuacct"), fs_type: ("cgroup".to_string(), None), super_opts: vec![ "rw".to_string(), "cpuset".to_string(), "cpu".to_string(), "cpuacct".to_string(), ]}), ("121 1731 0:42 / /shm rw,nosuid,nodev,noexec,relatime shared:68 master:66 - tmpfs shm rw,size=65536k", Mountinfo{mount_point: PathBuf::from("/shm"), fs_type: ("tmpfs".to_string(), None), super_opts: vec![ "rw".to_string(), "size=65536k".to_string(), ]}), ("121 1731 0:42 / /shm rw,nosuid,nodev,noexec,relatime shared:68 master:66 - tmpfs.123 shm rw,size=65536k", Mountinfo{mount_point: PathBuf::from("/shm"), fs_type: ("tmpfs".to_string(), Some("123".to_string())), super_opts: vec![ "rw".to_string(), "size=65536k".to_string(), ]}), ]; for mi in mountinfo { let info = parse_mountinfo_for_line(mi.0).unwrap(); assert_eq!(info, mi.1) } } }
pub fn is_cgroup2_unified_mode() -> bool { let lines = fs::read_to_string(INIT_CGROUP_PATHS); if lines.is_err() { return false; } for line in lines.unwrap().lines() { let fields: Vec<&str> = line.split(':').collect(); if fields.len() != 3 { continue; } if fields[0] != "0" { return false; } } true }
function_block-full_function
[]
Rust
stake-pool/program/tests/set_fee.rs
honeydefi/NFT-farm
dc97a5439c6ab85e7b0ed4c86f3f5c6939d07c99
#![cfg(feature = "test-bpf")] mod helpers; use { helpers::*, solana_program_test::*, solana_sdk::{ borsh::try_from_slice_unchecked, instruction::InstructionError, signature::{Keypair, Signer}, transaction::{Transaction, TransactionError}, }, spl_stake_pool::{ error, id, instruction, state::{Fee, FeeType, StakePool}, }, }; async fn setup() -> (ProgramTestContext, StakePoolAccounts, Fee) { let mut context = program_test().start_with_context().await; let stake_pool_accounts = StakePoolAccounts::new(); stake_pool_accounts .initialize_stake_pool( &mut context.banks_client, &context.payer, &context.last_blockhash, 1, ) .await .unwrap(); let new_fee = Fee { numerator: 10, denominator: 10, }; (context, stake_pool_accounts, new_fee) } #[tokio::test] async fn success() { let (mut context, stake_pool_accounts, new_fee) = setup().await; let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); let old_fee = stake_pool.fee; let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); context .banks_client .process_transaction(transaction) .await .unwrap(); let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); assert_eq!(stake_pool.fee, old_fee); assert_eq!(stake_pool.next_epoch_fee, Some(new_fee)); let first_normal_slot = context.genesis_config().epoch_schedule.first_normal_slot; let slots_per_epoch = context.genesis_config().epoch_schedule.slots_per_epoch; context .warp_to_slot(first_normal_slot + slots_per_epoch) .unwrap(); stake_pool_accounts 
.update_all( &mut context.banks_client, &context.payer, &context.last_blockhash, &[], false, ) .await; let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); assert_eq!(stake_pool.fee, new_fee); assert_eq!(stake_pool.next_epoch_fee, None); } #[tokio::test] async fn fail_wrong_manager() { let (mut context, stake_pool_accounts, new_fee) = setup().await; let wrong_manager = Keypair::new(); let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &wrong_manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &wrong_manager], context.last_blockhash, ); let error = context .banks_client .process_transaction(transaction) .await .err() .unwrap() .unwrap(); match error { TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => { let program_error = error::StakePoolError::WrongManager as u32; assert_eq!(error_index, program_error); } _ => panic!("Wrong error occurs while malicious try to set manager"), } } #[tokio::test] async fn fail_high_fee() { let (mut context, stake_pool_accounts, _new_fee) = setup().await; let new_fee = Fee { numerator: 11, denominator: 10, }; let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); let error = context .banks_client .process_transaction(transaction) .await .err() .unwrap() .unwrap(); match error { TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => { let program_error = error::StakePoolError::FeeTooHigh as u32; assert_eq!(error_index, program_error); } _ => panic!("Wrong error occurs when 
setting fee too high"), } } #[tokio::test] async fn fail_not_updated() { let mut context = program_test().start_with_context().await; let stake_pool_accounts = StakePoolAccounts::new(); stake_pool_accounts .initialize_stake_pool( &mut context.banks_client, &context.payer, &context.last_blockhash, 1, ) .await .unwrap(); let new_fee = Fee { numerator: 10, denominator: 100, }; context.warp_to_slot(50_000).unwrap(); let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); let error = context .banks_client .process_transaction(transaction) .await .err() .unwrap() .unwrap(); match error { TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => { let program_error = error::StakePoolError::StakeListAndPoolOutOfDate as u32; assert_eq!(error_index, program_error); } _ => panic!("Wrong error occurs when stake pool out of date"), } }
#![cfg(feature = "test-bpf")] mod helpers; use { helpers::*, solana_program_test::*, solana_sdk::{ borsh::try_from_slice_unchecked, instruction::InstructionError, signature::{Keypair, Signer}, transaction::{Transaction, TransactionError}, }, spl_stake_pool::{ error, id, instruction, state::{Fee, FeeType, StakePool}, }, }; async fn setup() -> (ProgramTestContext, StakePoolAccounts, Fee) { let mut context = program_test().start_with_context().await; let stake_pool_accounts = StakePoolAccounts::new(); stake_pool_accounts .initialize_stake_pool( &mut context.banks_client, &context.payer, &context.last_blockhash, 1, ) .await .unwrap(); let new_fee = Fee { numerator: 10, denominator: 10, }; (context, stake_pool_accounts, new_fee) } #[tokio::test] async fn success() { let (mut context, stake_pool_accounts, new_fee) = setup().await; let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); let old_fee = stake_pool.fee; let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); context .banks_client .process_transaction(transaction) .await .unwrap(); let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); assert_eq!(stake_pool.fee, old_fee); assert_eq!(stake_pool.next_epoch_fee, Some(new_fee)); let first_normal_slot = context.genesis_config().epoch_schedule.first_normal_slot; let slots_per_epoch = context.genesis_config().epoch_schedule.slots_per_epoch; context .warp_to_slot(first_normal_slot + slots_per_epoch) .unwrap(); stake_pool_accounts 
.update_all( &mut context.banks_client, &context.payer, &context.last_blockhash, &[], false, ) .await; let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); assert_eq!(stake_pool.fee, new_fee); assert_eq!(stake_pool.next_epoch_fee, None); } #[tokio::test] async fn fail_wrong_manager() { let (mut context, stake_pool_acco
#[tokio::test] async fn fail_high_fee() { let (mut context, stake_pool_accounts, _new_fee) = setup().await; let new_fee = Fee { numerator: 11, denominator: 10, }; let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); let error = context .banks_client .process_transaction(transaction) .await .err() .unwrap() .unwrap(); match error { TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => { let program_error = error::StakePoolError::FeeTooHigh as u32; assert_eq!(error_index, program_error); } _ => panic!("Wrong error occurs when setting fee too high"), } } #[tokio::test] async fn fail_not_updated() { let mut context = program_test().start_with_context().await; let stake_pool_accounts = StakePoolAccounts::new(); stake_pool_accounts .initialize_stake_pool( &mut context.banks_client, &context.payer, &context.last_blockhash, 1, ) .await .unwrap(); let new_fee = Fee { numerator: 10, denominator: 100, }; context.warp_to_slot(50_000).unwrap(); let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); let error = context .banks_client .process_transaction(transaction) .await .err() .unwrap() .unwrap(); match error { TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => { let program_error = error::StakePoolError::StakeListAndPoolOutOfDate as u32; assert_eq!(error_index, program_error); } _ => panic!("Wrong error occurs when stake pool out of date"), } }
unts, new_fee) = setup().await; let wrong_manager = Keypair::new(); let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &wrong_manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &wrong_manager], context.last_blockhash, ); let error = context .banks_client .process_transaction(transaction) .await .err() .unwrap() .unwrap(); match error { TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => { let program_error = error::StakePoolError::WrongManager as u32; assert_eq!(error_index, program_error); } _ => panic!("Wrong error occurs while malicious try to set manager"), } }
function_block-function_prefixed
[ { "content": "fn validate_fraction(numerator: u64, denominator: u64) -> Result<(), SwapError> {\n\n if denominator == 0 && numerator == 0 {\n\n Ok(())\n\n } else if numerator >= denominator {\n\n Err(SwapError::InvalidFee)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Fees {\n\...
Rust
samples/d3d12-hello-world/d3d12-hello-window/src/main.rs
damyanp/directx-graphics-samples-rs
d1b04382984b3ad1facf5f52ffd7901cc6d00488
use d3dx12::*; use dxsample::*; use windows::{ core::*, Win32::{ Foundation::*, Graphics::{Direct3D12::*, Dxgi::Common::*, Dxgi::*}, }, }; mod d3d12_hello_window { use std::convert::TryInto; use super::*; const FRAME_COUNT: usize = 2; pub struct Sample { dxgi_factory: IDXGIFactory4, device: ID3D12Device, resources: Option<Resources>, } struct Resources { command_queue: SynchronizedCommandQueue, swap_chain: IDXGISwapChain3, frame_index: usize, render_targets: [ID3D12Resource; FRAME_COUNT], rtv_heap: RtvDescriptorHeap, command_allocator: ID3D12CommandAllocator, command_list: ID3D12GraphicsCommandList, } impl DXSample for Sample { fn new(command_line: &SampleCommandLine) -> Result<Self> { let (dxgi_factory, device) = create_device(command_line)?; Ok(Sample { dxgi_factory, device, resources: None, }) } fn bind_to_window(&mut self, hwnd: &HWND) -> Result<()> { let command_queue = SynchronizedCommandQueue::new(&self.device, D3D12_COMMAND_LIST_TYPE_DIRECT)?; let (width, height) = self.window_size(); let swap_chain_desc = DXGI_SWAP_CHAIN_DESC1 { Width: width as u32, Height: height as u32, Format: DXGI_FORMAT_R8G8B8A8_UNORM, SampleDesc: DXGI_SAMPLE_DESC { Count: 1, ..Default::default() }, BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT, BufferCount: FRAME_COUNT as u32, SwapEffect: DXGI_SWAP_EFFECT_FLIP_DISCARD, ..Default::default() }; let swap_chain: IDXGISwapChain3 = unsafe { self.dxgi_factory.CreateSwapChainForHwnd( &command_queue.queue, hwnd, &swap_chain_desc, std::ptr::null(), None, ) }? 
.cast()?; unsafe { self.dxgi_factory .MakeWindowAssociation(hwnd, DXGI_MWA_NO_ALT_ENTER) }?; let frame_index = unsafe { swap_chain.GetCurrentBackBufferIndex() } .try_into() .unwrap(); let rtv_heap = RtvDescriptorHeap::new(&self.device, FRAME_COUNT)?; let render_targets: [ID3D12Resource; FRAME_COUNT] = array_init::try_array_init(|i: usize| -> Result<ID3D12Resource> { let render_target: ID3D12Resource = unsafe { swap_chain.GetBuffer(i as u32) }?; unsafe { rtv_heap.create_render_target_view(&self.device, &render_target, None, i); } Ok(render_target) })?; let command_allocator = unsafe { self.device .CreateCommandAllocator(D3D12_COMMAND_LIST_TYPE_DIRECT) }?; let command_list: ID3D12GraphicsCommandList = unsafe { self.device.CreateCommandList( 0, D3D12_COMMAND_LIST_TYPE_DIRECT, &command_allocator, None, ) }?; unsafe { command_list.Close() }?; self.resources = Some(Resources { command_queue, swap_chain, frame_index, render_targets, rtv_heap, command_allocator, command_list, }); Ok(()) } fn title(&self) -> String { "D3D12 Hello Window".into() } fn window_size(&self) -> (i32, i32) { (1280, 720) } fn render(&mut self) { let resources = match &mut self.resources { Some(it) => it, _ => return, }; populate_command_list(resources).unwrap(); let command_list = ID3D12CommandList::from(&resources.command_list); unsafe { resources .command_queue .ExecuteCommandLists(1, &mut Some(command_list)) }; unsafe { resources.swap_chain.Present(1, 0) }.ok().unwrap(); wait_for_previous_frame(resources); } } fn populate_command_list(resources: &Resources) -> Result<()> { unsafe { resources.command_allocator.Reset() }?; let command_list = &resources.command_list; unsafe { command_list.Reset(&resources.command_allocator, None) }?; let barrier = transition_barrier( &resources.render_targets[resources.frame_index as usize], D3D12_RESOURCE_STATE_PRESENT, D3D12_RESOURCE_STATE_RENDER_TARGET, ); unsafe { command_list.ResourceBarrier(1, &barrier) }; let rtv_handle = resources .rtv_heap 
.get_cpu_descriptor_handle(resources.frame_index); unsafe { command_list.OMSetRenderTargets(1, &rtv_handle, false, std::ptr::null()) }; unsafe { command_list.ClearRenderTargetView( rtv_handle, [0.0, 0.2, 0.4, 1.0].as_ptr(), 0, std::ptr::null(), ); command_list.ResourceBarrier( 1, &transition_barrier( &resources.render_targets[resources.frame_index as usize], D3D12_RESOURCE_STATE_RENDER_TARGET, D3D12_RESOURCE_STATE_PRESENT, ), ); } unsafe { command_list.Close() } } fn wait_for_previous_frame(resources: &mut Resources) { resources.command_queue.signal_and_wait_for_gpu().unwrap(); resources.frame_index = unsafe { resources .swap_chain .GetCurrentBackBufferIndex() .try_into() .unwrap() }; } } fn main() -> Result<()> { run_sample::<d3d12_hello_window::Sample>()?; Ok(()) }
use d3dx12::*; use dxsample::*; use windows::{ core::*, Win32::{ Foundation::*, Graphics::{Direct3D12::*, Dxgi::Common::*, Dxgi::*}, }, }; mod d3d12_hello_wi
}); Ok(()) } fn title(&self) -> String { "D3D12 Hello Window".into() } fn window_size(&self) -> (i32, i32) { (1280, 720) } fn render(&mut self) { let resources = match &mut self.resources { Some(it) => it, _ => return, }; populate_command_list(resources).unwrap(); let command_list = ID3D12CommandList::from(&resources.command_list); unsafe { resources .command_queue .ExecuteCommandLists(1, &mut Some(command_list)) }; unsafe { resources.swap_chain.Present(1, 0) }.ok().unwrap(); wait_for_previous_frame(resources); } } fn populate_command_list(resources: &Resources) -> Result<()> { unsafe { resources.command_allocator.Reset() }?; let command_list = &resources.command_list; unsafe { command_list.Reset(&resources.command_allocator, None) }?; let barrier = transition_barrier( &resources.render_targets[resources.frame_index as usize], D3D12_RESOURCE_STATE_PRESENT, D3D12_RESOURCE_STATE_RENDER_TARGET, ); unsafe { command_list.ResourceBarrier(1, &barrier) }; let rtv_handle = resources .rtv_heap .get_cpu_descriptor_handle(resources.frame_index); unsafe { command_list.OMSetRenderTargets(1, &rtv_handle, false, std::ptr::null()) }; unsafe { command_list.ClearRenderTargetView( rtv_handle, [0.0, 0.2, 0.4, 1.0].as_ptr(), 0, std::ptr::null(), ); command_list.ResourceBarrier( 1, &transition_barrier( &resources.render_targets[resources.frame_index as usize], D3D12_RESOURCE_STATE_RENDER_TARGET, D3D12_RESOURCE_STATE_PRESENT, ), ); } unsafe { command_list.Close() } } fn wait_for_previous_frame(resources: &mut Resources) { resources.command_queue.signal_and_wait_for_gpu().unwrap(); resources.frame_index = unsafe { resources .swap_chain .GetCurrentBackBufferIndex() .try_into() .unwrap() }; } } fn main() -> Result<()> { run_sample::<d3d12_hello_window::Sample>()?; Ok(()) }
ndow { use std::convert::TryInto; use super::*; const FRAME_COUNT: usize = 2; pub struct Sample { dxgi_factory: IDXGIFactory4, device: ID3D12Device, resources: Option<Resources>, } struct Resources { command_queue: SynchronizedCommandQueue, swap_chain: IDXGISwapChain3, frame_index: usize, render_targets: [ID3D12Resource; FRAME_COUNT], rtv_heap: RtvDescriptorHeap, command_allocator: ID3D12CommandAllocator, command_list: ID3D12GraphicsCommandList, } impl DXSample for Sample { fn new(command_line: &SampleCommandLine) -> Result<Self> { let (dxgi_factory, device) = create_device(command_line)?; Ok(Sample { dxgi_factory, device, resources: None, }) } fn bind_to_window(&mut self, hwnd: &HWND) -> Result<()> { let command_queue = SynchronizedCommandQueue::new(&self.device, D3D12_COMMAND_LIST_TYPE_DIRECT)?; let (width, height) = self.window_size(); let swap_chain_desc = DXGI_SWAP_CHAIN_DESC1 { Width: width as u32, Height: height as u32, Format: DXGI_FORMAT_R8G8B8A8_UNORM, SampleDesc: DXGI_SAMPLE_DESC { Count: 1, ..Default::default() }, BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT, BufferCount: FRAME_COUNT as u32, SwapEffect: DXGI_SWAP_EFFECT_FLIP_DISCARD, ..Default::default() }; let swap_chain: IDXGISwapChain3 = unsafe { self.dxgi_factory.CreateSwapChainForHwnd( &command_queue.queue, hwnd, &swap_chain_desc, std::ptr::null(), None, ) }? 
.cast()?; unsafe { self.dxgi_factory .MakeWindowAssociation(hwnd, DXGI_MWA_NO_ALT_ENTER) }?; let frame_index = unsafe { swap_chain.GetCurrentBackBufferIndex() } .try_into() .unwrap(); let rtv_heap = RtvDescriptorHeap::new(&self.device, FRAME_COUNT)?; let render_targets: [ID3D12Resource; FRAME_COUNT] = array_init::try_array_init(|i: usize| -> Result<ID3D12Resource> { let render_target: ID3D12Resource = unsafe { swap_chain.GetBuffer(i as u32) }?; unsafe { rtv_heap.create_render_target_view(&self.device, &render_target, None, i); } Ok(render_target) })?; let command_allocator = unsafe { self.device .CreateCommandAllocator(D3D12_COMMAND_LIST_TYPE_DIRECT) }?; let command_list: ID3D12GraphicsCommandList = unsafe { self.device.CreateCommandList( 0, D3D12_COMMAND_LIST_TYPE_DIRECT, &command_allocator, None, ) }?; unsafe { command_list.Close() }?; self.resources = Some(Resources { command_queue, swap_chain, frame_index, render_targets, rtv_heap, command_allocator, command_list,
random
[ { "content": "use windows::Win32::Graphics::{Direct3D12::*, Dxgi::Common::*};\n\n\n\nmod descriptor_heaps;\n\npub use descriptor_heaps::*;\n\n\n\nmod pipeline_states;\n\npub use pipeline_states::*;\n\n\n\npub mod build;\n\n\n", "file_path": "d3dx12/src/lib.rs", "rank": 0, "score": 34164.99046050119 ...
Rust
src/device/vga/crt.rs
shift-crops/x64emu
18f661a9a64bfbfce76c15dc7039abee73e4e128
use packed_struct::prelude::*; #[derive(Debug, Default)] pub(super) struct CRT { pub ccir: CRTCtrlIndex, htr: u8, pub hdeer: u8, hbsr: u8, hber: HorBlnkEnd, hssr: u8, hser: HorSyncEnd, vtr: u8, ofr: Overflow, prsr: PresetRowScan, mslr: MaxScanLine, tcsr: TextCurStart, tcer: TextCurEnd, sahr: u8, salr: u8, tclhr: u8, tcllr: u8, vssr: u8, vser: VertSyncEnd, pub vdeer: u8, or: u8, ulr: UnderLocate, vbsr: u8, vber: u8, cmr: CRTMode, lcr: u8, pub latch: u8, } impl CRT { pub fn get(&self) -> u8 { match self.ccir.idx { 0x00 => self.htr, 0x01 => self.hdeer, 0x02 => self.hbsr, 0x03 => self.hber.pack().unwrap()[0], 0x04 => self.hssr, 0x05 => self.hser.pack().unwrap()[0], 0x06 => self.vtr, 0x07 => self.ofr.pack().unwrap()[0], 0x08 => self.prsr.pack().unwrap()[0], 0x09 => self.mslr.pack().unwrap()[0], 0x0a => self.tcsr.pack().unwrap()[0], 0x0b => self.tcer.pack().unwrap()[0], 0x0c => self.sahr, 0x0d => self.salr, 0x0e => self.tclhr, 0x0f => self.tcllr, 0x10 => self.vssr, 0x11 => self.vser.pack().unwrap()[0], 0x12 => self.vdeer, 0x13 => self.or, 0x14 => self.ulr.pack().unwrap()[0], 0x15 => self.vbsr, 0x16 => self.vber, 0x17 => self.cmr.pack().unwrap()[0], 0x18 => self.lcr, 0x22 => self.latch, _ => 0, } } pub fn set(&mut self, v: u8) -> () { let data = &[v]; match self.ccir.idx { 0x00 => self.htr = v, 0x01 => self.hdeer = v, 0x02 => self.hbsr = v, 0x03 => self.hber = HorBlnkEnd::unpack(data).unwrap(), 0x04 => self.hssr = v, 0x05 => self.hser = HorSyncEnd::unpack(data).unwrap(), 0x06 => self.vtr = v, 0x07 => self.ofr = Overflow::unpack(data).unwrap(), 0x08 => self.prsr = PresetRowScan::unpack(data).unwrap(), 0x09 => self.mslr = MaxScanLine::unpack(data).unwrap(), 0x0a => self.tcsr = TextCurStart::unpack(data).unwrap(), 0x0b => self.tcer = TextCurEnd::unpack(data).unwrap(), 0x0c => self.sahr = v, 0x0d => self.salr = v, 0x0e => self.tclhr = v, 0x0f => self.tcllr = v, 0x10 => self.vssr = v, 0x11 => self.vser = VertSyncEnd::unpack(data).unwrap(), 0x12 => self.vdeer = v, 0x13 => 
self.or = v, 0x14 => self.ulr = UnderLocate::unpack(data).unwrap(), 0x15 => self.vbsr = v, 0x16 => self.vber = v, 0x17 => self.cmr = CRTMode::unpack(data).unwrap(), 0x18 => self.lcr = v, _ => {}, } } pub fn get_windowsize(&self) -> (u32, u32) { (8 * self.hdeer as u32, 8 * self.vdeer as u32) } pub fn char_height(&self) -> u8 { self.mslr.scan_count + 1 } pub fn pixel_to_pos(&self, pxl: u32) -> (u32, u32) { let (x_size, _) = self.get_windowsize(); (pxl % x_size, pxl / x_size) } pub fn pos_to_chridx(&self, x: u32, y: u32) -> u16 { (y/self.char_height() as u32 * self.hdeer as u32 + x/8) as u16 } pub fn get_cursor(&self, idx: u16) -> Option<(std::ops::RangeInclusive<u8>, u8)> { let loc = ((self.tclhr as u16) << 8) + self.tcllr as u16; if !self.tcsr.cur_off && idx == loc { let (start, end) = (self.tcsr.cur_srt, self.tcer.cur_end); if start > end { None } else { Some((start..=end, self.tcer.cur_skew)) } } else { None } } } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct CRTCtrlIndex { #[packed_field(bits="0:6")] idx: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct HorBlnkEnd { #[packed_field(bits="0:4")] bl_end: u8, #[packed_field(bits="5:6")] skew_ctrl: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct HorSyncEnd { #[packed_field(bits="0:4")] end: u8, #[packed_field(bits="5:6")] delay: u8, #[packed_field(bits="7")] bl_end: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct Overflow { #[packed_field(bits="0")] vt_total8: u8, #[packed_field(bits="1")] vt_disp_ena8: u8, #[packed_field(bits="2")] vt_sync_str8: u8, #[packed_field(bits="3")] vt_bl_str8: u8, #[packed_field(bits="4")] line_cmp8: u8, #[packed_field(bits="5")] vt_total9: u8, #[packed_field(bits="6")] vt_disp_ena9: u8, #[packed_field(bits="7")] vt_sync_str9: u8, } 
#[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct PresetRowScan { #[packed_field(bits="0:4")] scan_count: u8, #[packed_field(bits="5:6")] byte_pan: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct MaxScanLine { #[packed_field(bits="0:4")] scan_count: u8, #[packed_field(bits="5")] vt_bl_str9: u8, #[packed_field(bits="6")] line_cmp9: u8, #[packed_field(bits="7")] dbl_scan: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct TextCurStart { #[packed_field(bits="0:4")] cur_srt: u8, #[packed_field(bits="5")] cur_off: bool, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct TextCurEnd { #[packed_field(bits="0:4")] cur_end: u8, #[packed_field(bits="5:6")] cur_skew: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct VertSyncEnd { #[packed_field(bits="0:3")] end: u8, #[packed_field(bits="4")] int_clr: u8, #[packed_field(bits="5")] int_ena: u8, #[packed_field(bits="7")] prot_reg: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct UnderLocate { #[packed_field(bits="0:4")] location: u8, #[packed_field(bits="5")] count: u8, #[packed_field(bits="6")] dword: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct CRTMode { #[packed_field(bits="0")] compat: u8, #[packed_field(bits="1")] row_ctrl: u8, #[packed_field(bits="2")] hor_sel: u8, #[packed_field(bits="3")] count: u8, #[packed_field(bits="5")] addr_wrap: u8, #[packed_field(bits="6")] wb_mode: u8, #[packed_field(bits="7")] ctrl_reset: u8, }
use packed_struct::prelude::*; #[derive(Debug, Default)] pub(super) struct CRT { pub ccir: CRTCtrlIndex, htr: u8, pub hdeer: u8, hbsr: u8, hber: HorBlnkEnd, hssr: u8, hser: HorSyncEnd, vtr: u8, ofr: Overflow, prsr: PresetRowScan, mslr: MaxScanLine, tcsr: TextCurStart, tcer: TextCurEnd, sahr: u8, salr: u8, tclhr: u8, tcllr: u8, vssr: u8, vser: VertSyncEnd, pub vdeer: u8, or: u8, ulr: UnderLocate, vbsr: u8, vber: u8, cmr: CRTMode, lcr: u8, pub latch: u8, } impl CRT { pub fn get(&self) -> u8 { match self.ccir.idx { 0x00 => self.htr, 0x01 => self.hdeer, 0x02 => self.hbsr, 0x03 => self.hber.pack().unwrap()[0], 0x04 => self.hssr, 0x05 => self.hser.pack().unwrap()[0], 0x06 => self.vtr, 0x07 => self.ofr.pack().unwrap()[0], 0x08 => self.prsr.pack().unwrap()[0], 0x09 => self.mslr.pack().unwrap()[0], 0x0a => self.tcsr.pack().unwrap()[0], 0x0b => self.tcer.pack().unwrap()[0], 0x0c => self.sahr, 0x0d => self.salr, 0x0e => self.tclhr, 0x0f => self.tcllr, 0x10 => self.vssr, 0x11 => self.vser.pack().unwrap()[0], 0x12 => self.vdeer, 0x13 => self.or, 0x14 => self.ulr.pack().unwrap()[0], 0x15 => self.vbsr, 0x16 => self.vber, 0x17 => self.cmr.pack().unwrap()[0], 0x18 => self.lcr, 0x22 => self.latch, _ => 0, } } pub fn set(&mut self, v: u8) -> () { let data = &[v]; match self.ccir.idx { 0x00 => self.htr = v, 0x01 => self.hdeer = v, 0x02 => self.hbsr = v, 0x03 => self.hber = HorBlnkEnd::unpack(data).unwrap(), 0x04 => self.hssr = v, 0x05 => self.hser = HorSyncEnd::unpack(data).unwrap(), 0x06 => self.vtr = v, 0x07 => self.ofr = Overflow::unpack(data).unwrap(), 0x08 => self.prsr = PresetRowScan::unpack(data).unwrap(), 0x09 => self.mslr = MaxScanLine::unpack(data).unwrap(), 0x0a => self.tcsr = TextCurStart::unpack(data).unwrap(), 0x0b => self.tcer = TextCurEnd::unpack(data).unwrap(), 0x0c => self.sahr = v, 0x0d => self.salr = v, 0x0e => self.tclhr = v, 0x0f => self.tcllr = v, 0x10 => self.vssr = v, 0x11 => self.vser = VertSyncEnd::unpack(data).unwrap(), 0x12 => self.vdeer = v, 0x13 => 
self.or = v, 0x14 => self.ulr = UnderLocate::unpack(data).unwrap(), 0x15 => self.vbsr = v, 0x16 => self.vber = v, 0x17 => self.cmr = CRTMode::unpack(data).unwrap(), 0x18 => self.lcr = v, _ => {}, } } pub fn get_windowsize(&self) -> (u32, u32) { (8 * self.hdeer as u32, 8 * self.vdeer as u32) } pub fn char_height(&self) -> u8 { self.mslr.scan_count + 1 } pub fn pixel_to_pos(&self, pxl: u32) -> (u32, u32) { let (x_size, _) = self.get_windowsize(); (pxl % x_size, pxl / x_size) } pub fn pos_to_chridx(&self, x: u32, y: u32) -> u16 { (y/self.char_height() as u32 * self.hdeer as u32 + x/8) as u16 } pub fn get_cursor(&self, idx: u16) -> Option<(std::ops::RangeInclusive<u8>, u8)> { let loc = ((self.tclhr as u16) << 8) + self.tcllr as u16; if !self.tcsr.cur_off && idx == loc { let (start, end) = (self.tcsr.cur_srt, self.tcer.cur_end); if start > end { None } else { Some((start..=end, self.tcer.cur_skew)) } } else { None } } } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct CRTCtrlIndex { #[packed_field(bits="0:6")] idx: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct HorBlnkEnd { #[packed_field(bits="0:4")] bl_end: u8, #[packed_field(bits="5:6")] skew_ctrl: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct HorSyncEnd { #[packed_field(bits="0:4")] end: u8, #[packed_field(bits="5:6")] delay: u8, #[packed_field(bits="7")] bl_end: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct Overflow { #[packed_field(bits="0")] vt_total8: u8, #[packed_field(bits="1")] vt_disp_ena8: u8, #[packed_field(bits="2")] vt_sync_str8: u8, #[packed_field(bits="3")] vt_bl_str8: u8, #[packed_field(bits="4")] line_cmp8: u8, #[packed_field(bits="5")] vt_total9: u8, #[packed_field(bits="6")] vt_disp_ena9: u8, #[packed_field(bits="7")] vt_sync_str9: u8, } 
#[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct PresetRowScan { #[packed_field(bits="0:4")] scan_count: u8, #[packed_field(bits="5:6")] byte_pan: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct MaxScanLine { #[packed_field(bits="0:4")] scan_count: u8, #[packed_field(bits="5")] vt_bl_str9: u8, #[packed_field(bits="6")] line_cmp9:
: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct UnderLocate { #[packed_field(bits="0:4")] location: u8, #[packed_field(bits="5")] count: u8, #[packed_field(bits="6")] dword: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct CRTMode { #[packed_field(bits="0")] compat: u8, #[packed_field(bits="1")] row_ctrl: u8, #[packed_field(bits="2")] hor_sel: u8, #[packed_field(bits="3")] count: u8, #[packed_field(bits="5")] addr_wrap: u8, #[packed_field(bits="6")] wb_mode: u8, #[packed_field(bits="7")] ctrl_reset: u8, }
u8, #[packed_field(bits="7")] dbl_scan: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct TextCurStart { #[packed_field(bits="0:4")] cur_srt: u8, #[packed_field(bits="5")] cur_off: bool, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct TextCurEnd { #[packed_field(bits="0:4")] cur_end: u8, #[packed_field(bits="5:6")] cur_skew: u8, } #[derive(Debug, Default, PackedStruct)] #[packed_struct(bit_numbering="lsb0", size_bytes="1")] pub struct VertSyncEnd { #[packed_field(bits="0:3")] end: u8, #[packed_field(bits="4")] int_clr: u8, #[packed_field(bits="5")] int_ena: u8, #[packed_field(bits="7")] prot_reg
random
[ { "content": "pub fn wait_for_tcp(port: u16) -> DynResult<TcpStream> {\n\n let sockaddr = format!(\"127.0.0.1:{}\", port);\n\n eprintln!(\"Waiting for a GDB connection on {:?}...\", sockaddr);\n\n\n\n let sock = TcpListener::bind(sockaddr)?;\n\n let (stream, addr) = sock.accept()?;\n\n eprintln!(...
Rust
src/photosdir.rs
kaj/rphotos
640dc328eb6338368b66831061530d8c894722f6
use crate::models::Photo; use crate::myexif::ExifData; use image::imageops::FilterType; use image::{self, GenericImageView, ImageError, ImageFormat}; use log::{debug, info, warn}; use std::ffi::OsStr; use std::path::{Path, PathBuf}; use std::{fs, io}; use tokio::task::{spawn_blocking, JoinError}; pub struct PhotosDir { basedir: PathBuf, } impl PhotosDir { pub fn new(basedir: &Path) -> Self { PhotosDir { basedir: basedir.into(), } } pub fn get_raw_path(&self, photo: &Photo) -> PathBuf { self.basedir.join(&photo.path) } pub fn has_file<S: AsRef<OsStr> + ?Sized>(&self, path: &S) -> bool { self.basedir.join(Path::new(path)).is_file() } pub fn find_files( &self, dir: &Path, cb: &dyn Fn(&str, &ExifData), ) -> io::Result<()> { let absdir = self.basedir.join(dir); if fs::metadata(&absdir)?.is_dir() { debug!("Should look in {:?}", absdir); for entry in fs::read_dir(absdir)? { let path = entry?.path(); if fs::metadata(&path)?.is_dir() { self.find_files(&path, cb)?; } else if let Some(exif) = load_meta(&path) { cb(self.subpath(&path)?, &exif); } else { debug!("{:?} is no pic.", path) } } } Ok(()) } fn subpath<'a>(&self, fullpath: &'a Path) -> Result<&'a str, io::Error> { let path = fullpath .strip_prefix(&self.basedir) .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?; path.to_str().ok_or_else(|| { io::Error::new( io::ErrorKind::InvalidInput, format!("Non-utf8 path {:?}", path), ) }) } } fn load_meta(path: &Path) -> Option<ExifData> { if let Ok(mut exif) = ExifData::read_from(&path) { if exif.width.is_none() || exif.height.is_none() { if let Ok((width, height)) = actual_image_size(&path) { exif.width = Some(width); exif.height = Some(height); } } Some(exif) } else if let Ok((width, height)) = actual_image_size(&path) { let mut meta = ExifData::default(); meta.width = Some(width); meta.height = Some(height); Some(meta) } else { None } } fn actual_image_size(path: &Path) -> Result<(u32, u32), ImageError> { let image = image::open(&path)?; Ok((image.width(), 
image.height())) } #[derive(Debug)] pub enum ImageLoadFailed { File(io::Error), Image(image::ImageError), Join(JoinError), } impl std::error::Error for ImageLoadFailed {} impl std::fmt::Display for ImageLoadFailed { fn fmt(&self, out: &mut std::fmt::Formatter) -> std::fmt::Result { match &self { ImageLoadFailed::File(e) => e.fmt(out), ImageLoadFailed::Image(e) => e.fmt(out), ImageLoadFailed::Join(e) => e.fmt(out), } } } impl From<io::Error> for ImageLoadFailed { fn from(e: io::Error) -> ImageLoadFailed { ImageLoadFailed::File(e) } } impl From<image::ImageError> for ImageLoadFailed { fn from(e: image::ImageError) -> ImageLoadFailed { ImageLoadFailed::Image(e) } } impl From<JoinError> for ImageLoadFailed { fn from(e: JoinError) -> ImageLoadFailed { ImageLoadFailed::Join(e) } } pub async fn get_scaled_jpeg( path: PathBuf, rotation: i16, size: u32, ) -> Result<Vec<u8>, ImageLoadFailed> { spawn_blocking(move || { info!("Should open {:?}", path); let img = if is_jpeg(&path) { use std::fs::File; use std::io::BufReader; let file = BufReader::new(File::open(path)?); let mut decoder = image::jpeg::JpegDecoder::new(file)?; decoder.scale(size as u16, size as u16)?; image::DynamicImage::from_decoder(decoder)? } else { image::open(path)? }; let img = if 3 * size <= img.width() || 3 * size <= img.height() { info!("T-nail from {}x{} to {}", img.width(), img.height(), size); img.thumbnail(size, size) } else if size < img.width() || size < img.height() { info!("Scaling from {}x{} to {}", img.width(), img.height(), size); img.resize(size, size, FilterType::CatmullRom) } else { img }; let img = match rotation { _x @ 0..=44 | _x @ 315..=360 => img, _x @ 45..=134 => img.rotate90(), _x @ 135..=224 => img.rotate180(), _x @ 225..=314 => img.rotate270(), x => { warn!("Should rotate photo {} deg, which is unsupported", x); img } }; let mut buf = Vec::new(); img.write_to(&mut buf, ImageFormat::Jpeg)?; Ok(buf) }) .await? 
} fn is_jpeg(path: &Path) -> bool { if let Some(suffix) = path.extension().and_then(|s| s.to_str()) { suffix.eq_ignore_ascii_case("jpg") || suffix.eq_ignore_ascii_case("jpeg") } else { false } }
use crate::models::Photo; use crate::myexif::ExifData; use image::imageops::FilterType; use image::{self, GenericImageView, ImageError, ImageFormat}; use log::{debug, info, warn}; use std::ffi::OsStr; use std::path::{Path, PathBuf}; use std::{fs, io}; use tokio::task::{spawn_blocking, JoinError}; pub struct PhotosDir { basedir: PathBuf, } impl PhotosDir { pub fn new(basedir: &Path) -> Self { PhotosDir { basedir: basedir.into(), } } pub fn get_raw_path(&self, photo: &Photo) -> PathBuf { self.basedir.join(&photo.path) } pub fn has_file<S: AsRef<OsStr> + ?Sized>(&self, path: &S) -> bool { self.basedir.join(Path::new(path)).is_file() } pub fn find_files( &self, dir: &Path, cb: &dyn Fn(&str, &ExifData), ) -> io::Result<()> { let absdir = self.basedir.join(dir); if fs::metadata(&absdir)?.is_dir() { debug!("Should look in {:?}", absdir); for entry in fs::read_dir(absdir)? { let path = entry?.path(); if fs::metadata(&path)?.is_dir() { self.find_files(&path, cb)?; } else if let Some(exif) = load_meta(&path) { cb(self.subpath(&path)?, &exif); } else { debug!("{:?} is no pic.", path) } } } Ok(()) } fn subpath<'a>(&self, fullpath: &'a Path) -> Resu
} fn load_meta(path: &Path) -> Option<ExifData> { if let Ok(mut exif) = ExifData::read_from(&path) { if exif.width.is_none() || exif.height.is_none() { if let Ok((width, height)) = actual_image_size(&path) { exif.width = Some(width); exif.height = Some(height); } } Some(exif) } else if let Ok((width, height)) = actual_image_size(&path) { let mut meta = ExifData::default(); meta.width = Some(width); meta.height = Some(height); Some(meta) } else { None } } fn actual_image_size(path: &Path) -> Result<(u32, u32), ImageError> { let image = image::open(&path)?; Ok((image.width(), image.height())) } #[derive(Debug)] pub enum ImageLoadFailed { File(io::Error), Image(image::ImageError), Join(JoinError), } impl std::error::Error for ImageLoadFailed {} impl std::fmt::Display for ImageLoadFailed { fn fmt(&self, out: &mut std::fmt::Formatter) -> std::fmt::Result { match &self { ImageLoadFailed::File(e) => e.fmt(out), ImageLoadFailed::Image(e) => e.fmt(out), ImageLoadFailed::Join(e) => e.fmt(out), } } } impl From<io::Error> for ImageLoadFailed { fn from(e: io::Error) -> ImageLoadFailed { ImageLoadFailed::File(e) } } impl From<image::ImageError> for ImageLoadFailed { fn from(e: image::ImageError) -> ImageLoadFailed { ImageLoadFailed::Image(e) } } impl From<JoinError> for ImageLoadFailed { fn from(e: JoinError) -> ImageLoadFailed { ImageLoadFailed::Join(e) } } pub async fn get_scaled_jpeg( path: PathBuf, rotation: i16, size: u32, ) -> Result<Vec<u8>, ImageLoadFailed> { spawn_blocking(move || { info!("Should open {:?}", path); let img = if is_jpeg(&path) { use std::fs::File; use std::io::BufReader; let file = BufReader::new(File::open(path)?); let mut decoder = image::jpeg::JpegDecoder::new(file)?; decoder.scale(size as u16, size as u16)?; image::DynamicImage::from_decoder(decoder)? } else { image::open(path)? 
}; let img = if 3 * size <= img.width() || 3 * size <= img.height() { info!("T-nail from {}x{} to {}", img.width(), img.height(), size); img.thumbnail(size, size) } else if size < img.width() || size < img.height() { info!("Scaling from {}x{} to {}", img.width(), img.height(), size); img.resize(size, size, FilterType::CatmullRom) } else { img }; let img = match rotation { _x @ 0..=44 | _x @ 315..=360 => img, _x @ 45..=134 => img.rotate90(), _x @ 135..=224 => img.rotate180(), _x @ 225..=314 => img.rotate270(), x => { warn!("Should rotate photo {} deg, which is unsupported", x); img } }; let mut buf = Vec::new(); img.write_to(&mut buf, ImageFormat::Jpeg)?; Ok(buf) }) .await? } fn is_jpeg(path: &Path) -> bool { if let Some(suffix) = path.extension().and_then(|s| s.to_str()) { suffix.eq_ignore_ascii_case("jpg") || suffix.eq_ignore_ascii_case("jpeg") } else { false } }
lt<&'a str, io::Error> { let path = fullpath .strip_prefix(&self.basedir) .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?; path.to_str().ok_or_else(|| { io::Error::new( io::ErrorKind::InvalidInput, format!("Non-utf8 path {:?}", path), ) }) }
function_block-function_prefixed
[ { "content": "pub fn to_dir(dir: &str) -> Result<(), Error> {\n\n let dir: &Path = dir.as_ref();\n\n for s in STATICS {\n\n // s.name may contain directory components.\n\n if let Some(parent) = dir.join(s.name).parent() {\n\n create_dir_all(parent)?;\n\n }\n\n File::...
Rust
src/main.rs
Masorubka1/rs_graph_system
ac51d9ccdbd7f60996804287e527d9633fa5d4e9
use std::collections::HashSet; use std::thread::sleep_ms; use std::collections::HashMap; use rs_graph_system::ThreadPool; use petgraph::Graph; use petgraph::adj::NodeIndex; use petgraph::adj::IndexType; use std::collections::VecDeque; #[derive(Copy, Clone)] pub struct Xz { Xz: usize } impl Xz { fn new(num: usize) -> Xz { Xz {Xz: num} } } #[derive(Clone)] pub struct InfoNode<'a, T, V> { func: fn(&HashMap<&'a str, T>) -> V, args: HashMap<&'a str, T>, res: Box<V> } impl<'a> InfoNode<'a, usize, Xz> { fn new(name: fn(&HashMap<&'a str, usize>) -> Xz, Args: HashMap<&'a str, usize>) -> InfoNode<'a, usize, Xz> { InfoNode {func: name, args: Args, res: Box::new(Xz::new(0))} } fn execute(helper: InfoNode<'a, usize, Xz>) -> Box<Xz> { Box::new((helper.func)(&helper.args)) } fn execute_self(mut self) { self.res = Box::new((self.func)(&self.args)); } } impl<'a> Default for InfoNode<'a, usize, Xz> { fn default() -> Self { InfoNode { func: do_smth_2, args: HashMap::<&'a str, usize>::new(), res: Box::<Xz>::new(Xz::new(0)) } } } fn test_build_graph(deps: &mut Graph<InfoNode<usize, Xz>, &str>) -> HashMap<usize, NodeIndex> { let mut first_h = HashMap::new(); first_h.insert("name", 1); let mut second_h = HashMap::new(); second_h.insert("name", 2); let mut third_h = HashMap::new(); third_h.insert("name", 3); let mut fourth_h = HashMap::new(); fourth_h.insert("name", 4); let mut thith_h = HashMap::new(); thith_h.insert("name", 5); let first = InfoNode::new(do_smth_2, first_h); let second = InfoNode::new(do_smth_2, second_h); let third = InfoNode::new(do_smth_2, third_h); let fourth = InfoNode::new(do_smth_2, fourth_h); let thith = InfoNode::new(do_smth_2, thith_h); let arr = vec![first, second, third, fourth, thith]; let mut list_nodes = HashMap::<usize, NodeIndex>::new(); let mut tmp_cnt = 0; for i in arr { list_nodes.insert(tmp_cnt, deps.add_node(i).index().try_into().unwrap()); tmp_cnt += 1; } let pg = list_nodes[&0]; let fb = list_nodes[&1]; let qc = list_nodes[&2]; let rand = 
list_nodes[&3]; let libc = list_nodes[&4]; deps.extend_with_edges(&[ (pg, fb), (pg, qc), (qc, rand), (rand, libc), (qc, libc), ]); list_nodes } fn do_smth_2(tmp: &HashMap<&str, usize>) -> Xz { println!("{}", tmp["name"]); sleep_ms(400); Xz::new(5) } fn timesort(deps: &Graph::<InfoNode<usize, Xz>, &str>, ind: NodeIndex) -> Vec<isize> { let mut hash_nodes = HashSet::<usize>::new(); let mut queue_nodes = VecDeque::<usize>::new(); let mut ans = Vec::<isize>::new(); for _ in 0..deps.node_count() { ans.push(-1); } let mut cnt = 0; queue_nodes.push_back(ind.index().try_into().unwrap()); while queue_nodes.len() != 0 { let node = queue_nodes.pop_front().unwrap(); let tmp_node_index = NodeIndex::new(node); hash_nodes.insert(node); for i in deps.neighbors_directed(tmp_node_index, petgraph::EdgeDirection::Outgoing){ if ans[i.index()] == -1 { queue_nodes.push_back(i.index()); hash_nodes.insert(i.index()); ans[i.index()] = -2; } else { hash_nodes.remove(&i.index()); } } let mut f = 0; for i in deps.neighbors_directed(tmp_node_index, petgraph::EdgeDirection::Incoming) { if hash_nodes.contains(&i.index()) { f = 1; break; } } if f == 1{ queue_nodes.push_back(node); } else { ans[node] = cnt; cnt += 1; hash_nodes.remove(&node); } } ans } fn main() { let mut deps = Graph::<InfoNode<usize, Xz>, &str>::new(); let pool = ThreadPool::new(4); let list_nodes; { list_nodes = test_build_graph(&mut deps); } let sorted_nodes = timesort(&deps, NodeIndex::new(0)); println!("{:?}", sorted_nodes); for i in sorted_nodes { let id = list_nodes[&i.try_into().unwrap()]; let node_id = NodeIndex::new(id.try_into().unwrap()); { let info_node = deps.node_weight(node_id).unwrap().to_owned(); pool.execute(move || { info_node.execute_self(); }); } } }
use std::collections::HashSet; use std::thread::sleep_ms; use std::collections::HashMap; use rs_graph_system::ThreadPool; use petgraph::Graph; use petgraph::adj::NodeIndex; use petgraph::adj::IndexType; use std::collections::VecDeque; #[derive(Copy, Clone)] pub struct Xz { Xz: usize } impl Xz { fn new(num: usize) -> Xz { Xz {Xz: num} } } #[derive(Clone)] pub struct InfoNode<'a, T, V> { func: fn(&HashMap<&'a str, T>) -> V, args: HashMap<&'a str, T>, res: Box<V> } impl<'a> InfoNode<'a, usize, Xz> { fn new(name: fn(&HashMap<&'a str, usize>) -> Xz, Args: HashMap<&'a str, usize>) -> InfoNode<'a, usize, Xz> { InfoNode {func: name, args: Args, res: Box::new(Xz::new(0))} } fn execute(helper: InfoNode<'a, usize, Xz>) -> Box<Xz> { Box::new((helper.func)(&helper.args)) } fn execute_self(mut self) { self.res = Box::new((self.func)(&self.args)); } } impl<'a> Default for InfoNode<'a, usize, Xz> { fn default() -> Self { InfoNode { func: do_smth_2, args: HashMap::<&'a str, usize>::new(), res: Box::<Xz>::new(Xz::new(0)) } } } fn test_build_graph(deps: &mut Graph<InfoNode<usize, Xz>, &str>) -> HashMap<usize, NodeIndex>
); let third = InfoNode::new(do_smth_2, third_h); let fourth = InfoNode::new(do_smth_2, fourth_h); let thith = InfoNode::new(do_smth_2, thith_h); let arr = vec![first, second, third, fourth, thith]; let mut list_nodes = HashMap::<usize, NodeIndex>::new(); let mut tmp_cnt = 0; for i in arr { list_nodes.insert(tmp_cnt, deps.add_node(i).index().try_into().unwrap()); tmp_cnt += 1; } let pg = list_nodes[&0]; let fb = list_nodes[&1]; let qc = list_nodes[&2]; let rand = list_nodes[&3]; let libc = list_nodes[&4]; deps.extend_with_edges(&[ (pg, fb), (pg, qc), (qc, rand), (rand, libc), (qc, libc), ]); list_nodes } fn do_smth_2(tmp: &HashMap<&str, usize>) -> Xz { println!("{}", tmp["name"]); sleep_ms(400); Xz::new(5) } fn timesort(deps: &Graph::<InfoNode<usize, Xz>, &str>, ind: NodeIndex) -> Vec<isize> { let mut hash_nodes = HashSet::<usize>::new(); let mut queue_nodes = VecDeque::<usize>::new(); let mut ans = Vec::<isize>::new(); for _ in 0..deps.node_count() { ans.push(-1); } let mut cnt = 0; queue_nodes.push_back(ind.index().try_into().unwrap()); while queue_nodes.len() != 0 { let node = queue_nodes.pop_front().unwrap(); let tmp_node_index = NodeIndex::new(node); hash_nodes.insert(node); for i in deps.neighbors_directed(tmp_node_index, petgraph::EdgeDirection::Outgoing){ if ans[i.index()] == -1 { queue_nodes.push_back(i.index()); hash_nodes.insert(i.index()); ans[i.index()] = -2; } else { hash_nodes.remove(&i.index()); } } let mut f = 0; for i in deps.neighbors_directed(tmp_node_index, petgraph::EdgeDirection::Incoming) { if hash_nodes.contains(&i.index()) { f = 1; break; } } if f == 1{ queue_nodes.push_back(node); } else { ans[node] = cnt; cnt += 1; hash_nodes.remove(&node); } } ans } fn main() { let mut deps = Graph::<InfoNode<usize, Xz>, &str>::new(); let pool = ThreadPool::new(4); let list_nodes; { list_nodes = test_build_graph(&mut deps); } let sorted_nodes = timesort(&deps, NodeIndex::new(0)); println!("{:?}", sorted_nodes); for i in sorted_nodes { let id = 
list_nodes[&i.try_into().unwrap()]; let node_id = NodeIndex::new(id.try_into().unwrap()); { let info_node = deps.node_weight(node_id).unwrap().to_owned(); pool.execute(move || { info_node.execute_self(); }); } } }
{ let mut first_h = HashMap::new(); first_h.insert("name", 1); let mut second_h = HashMap::new(); second_h.insert("name", 2); let mut third_h = HashMap::new(); third_h.insert("name", 3); let mut fourth_h = HashMap::new(); fourth_h.insert("name", 4); let mut thith_h = HashMap::new(); thith_h.insert("name", 5); let first = InfoNode::new(do_smth_2, first_h); let second = InfoNode::new(do_smth_2, second_h
random
[ { "content": "struct Worker {\n\n id: usize,\n\n thread: Option<thread::JoinHandle<()>>,\n\n}\n\n\n\nimpl Worker {\n\n fn new(id: usize, receiver: Arc<Mutex<mpsc::Receiver<Message>>>) -> Worker {\n\n let thread = thread::spawn(move || loop {\n\n let message = receiver.lock().unwrap()....
Rust
src/colln_paint/mod.rs
pwil3058/rs_epaint
abde42728f65fc166df6416df8b3c3067faf51ed
use std::cell::RefCell; use std::cmp::Ordering; use std::fmt; use std::fmt::Debug; use std::fs::File; use std::hash::*; use std::io::Read; use std::marker::PhantomData; use std::path::Path; use std::rc::Rc; use std::str::FromStr; use pw_gix::{ gtk::{self, prelude::*}, wrapper::*, }; pub mod binder; pub mod collection; pub mod display; pub mod editor; use crate::basic_paint::*; use crate::colour::*; use crate::error::*; pub use crate::struct_traits::SimpleCreation; pub trait CollnIdInterface: Debug + PartialEq + PartialOrd + Eq + Ord + Clone + Default + Hash { fn new(colln_name: &str, colln_owner: &str) -> Self; fn colln_name_label() -> String; fn colln_owner_label() -> String; fn paint_select_label() -> String; fn paint_select_tooltip_text() -> String; fn recollection_name_for(item_name: &str) -> String; fn colln_load_image(size: i32) -> gtk::Image; fn display_current_target() -> bool { true } fn colln_name(&self) -> String; fn colln_owner(&self) -> String; fn tooltip_text(&self) -> String { format!("{}\n({})", self.colln_name(), self.colln_owner()) } fn rc_new(colln_name: &str, colln_owner: &str) -> Rc<Self> { Rc::new(Self::new(colln_name, colln_owner)) } } #[derive(PWO, Wrapper)] pub struct CollnIdEntryData<CID> where CID: CollnIdInterface, { grid: gtk::Grid, colln_name_entry: gtk::Entry, colln_owner_entry: gtk::Entry, changed_callbacks: RefCell<Vec<Box<dyn Fn()>>>, phantom_data: PhantomData<CID>, } pub type CollnIdEntry<CID> = Rc<CollnIdEntryData<CID>>; impl<CID> SimpleCreation for CollnIdEntry<CID> where CID: CollnIdInterface + 'static, { fn create() -> CollnIdEntry<CID> { let psie = Rc::new(CollnIdEntryData { grid: gtk::Grid::new(), colln_owner_entry: gtk::Entry::new(), colln_name_entry: gtk::Entry::new(), changed_callbacks: RefCell::new(Vec::new()), phantom_data: PhantomData, }); let label = gtk::Label::new(Some(CID::colln_name_label().as_str())); label.set_halign(gtk::Align::End); psie.grid.attach(&label, 0, 0, 1, 1); psie.colln_name_entry.set_hexpand(true); 
psie.grid.attach_next_to( &psie.colln_name_entry.clone(), Some(&label), gtk::PositionType::Right, 1, 1, ); let label = gtk::Label::new(Some(CID::colln_owner_label().as_str())); label.set_halign(gtk::Align::End); psie.grid.attach(&label, 0, 1, 1, 1); psie.colln_owner_entry.set_hexpand(true); psie.grid.attach_next_to( &psie.colln_owner_entry.clone(), Some(&label), gtk::PositionType::Right, 1, 1, ); let psie_c = psie.clone(); psie.colln_name_entry .connect_changed(move |_| psie_c.inform_changed()); let psie_c = psie.clone(); psie.colln_owner_entry .connect_changed(move |_| psie_c.inform_changed()); psie } } impl<CID> CollnIdEntryData<CID> where CID: CollnIdInterface, { pub fn get_colln_id(&self) -> Option<Rc<CID>> { let colln_name = self.colln_name_entry.get_text(); if colln_name.len() > 0 { let colln_owner = self.colln_owner_entry.get_text(); if colln_owner.len() > 0 { return Some(CID::rc_new(&colln_name, &colln_owner)); } }; None } pub fn set_colln_id(&self, o_cid: Option<&Rc<CID>>) { if let Some(cid) = o_cid { self.colln_name_entry.set_text(&cid.colln_name()); self.colln_owner_entry.set_text(&cid.colln_owner()); } else { self.colln_name_entry.set_text(""); self.colln_owner_entry.set_text(""); } } pub fn connect_changed<F: 'static + Fn()>(&self, callback: F) { self.changed_callbacks.borrow_mut().push(Box::new(callback)); } fn inform_changed(&self) { for callback in self.changed_callbacks.borrow().iter() { callback() } } } #[derive(Debug, Clone)] pub struct CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { colln_id: Rc<CID>, paint: BasicPaint<C>, } impl<C, CID> PartialEq for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn eq(&self, other: &CollnPaintCore<C, CID>) -> bool { if self.colln_id != other.colln_id { false } else { self.paint == other.paint } } } impl<C, CID> Eq for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { } impl<C, CID> PartialOrd for 
CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn partial_cmp(&self, other: &CollnPaintCore<C, CID>) -> Option<Ordering> { if let Some(ordering) = self.colln_id.partial_cmp(&other.colln_id) { if ordering == Ordering::Equal { self.paint.partial_cmp(&other.paint) } else { Some(ordering) } } else { None } } } impl<C, CID> Ord for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn cmp(&self, other: &CollnPaintCore<C, CID>) -> Ordering { let ordering = self.colln_id.cmp(&other.colln_id); if ordering == Ordering::Equal { self.paint.cmp(&other.paint) } else { ordering } } } pub type CollnPaint<C, CID> = Rc<CollnPaintCore<C, CID>>; impl<C, CID> ColouredItemInterface for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn colour(&self) -> Colour { self.paint.colour() } } impl<C, CID> BasicPaintInterface<C> for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn name(&self) -> String { self.paint.name() } fn notes(&self) -> String { self.paint.notes() } fn tooltip_text(&self) -> String { format!( "{}\n{}", self.paint.tooltip_text(), self.colln_id.tooltip_text() ) } fn characteristics(&self) -> C { self.paint.characteristics() } } pub trait CollnPaintInterface<C, CID>: BasicPaintInterface<C> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn create(paint: &BasicPaint<C>, cid: &Rc<CID>) -> Self; fn colln_id(&self) -> Rc<CID>; } impl<C, CID> CollnPaintInterface<C, CID> for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn create(paint: &BasicPaint<C>, cid: &Rc<CID>) -> CollnPaint<C, CID> { Rc::new(CollnPaintCore::<C, CID> { colln_id: cid.clone(), paint: paint.clone(), }) } fn colln_id(&self) -> Rc<CID> { self.colln_id.clone() } } #[derive(Debug)] pub struct PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { pub colln_id: Rc<CID>, pub paint_specs: Vec<BasicPaintSpec<C>>, } 
impl<C, CID> PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { pub fn from_file(path: &Path) -> Result<PaintCollnSpec<C, CID>, PaintError<C>> { let mut file = File::open(path)?; let mut string = String::new(); file.read_to_string(&mut string)?; PaintCollnSpec::<C, CID>::from_str(string.as_str()) } pub fn get_index_for_name(&self, name: &str) -> Option<usize> { match self .paint_specs .binary_search_by_key(&name.to_string(), |spec| spec.name.clone()) { Ok(index) => Some(index), Err(_) => None, } } } impl<C, CID> FromStr for PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { type Err = PaintError<C>; fn from_str(string: &str) -> Result<PaintCollnSpec<C, CID>, PaintError<C>> { let mut lines = string.lines(); let mut colln_name = ""; let mut colln_owner = ""; for _ in 0..2 { if let Some(line) = lines.next() { if line.starts_with(&CID::colln_name_label()) { if let Some(tail) = line.get(CID::colln_name_label().len()..) { colln_name = tail.trim(); } } else if line.starts_with(&CID::colln_owner_label()) { if let Some(tail) = line.get(CID::colln_owner_label().len()..) 
{ colln_owner = tail.trim(); } } else { return Err(PaintErrorType::MalformedText(line.to_string()).into()); } } else { return Err(PaintErrorType::MalformedText(string.to_string()).into()); } } if colln_name.len() == 0 || colln_owner.len() == 0 { return Err(PaintErrorType::MalformedText(string.to_string()).into()); }; let colln_id = Rc::new(CID::new(colln_name, colln_owner)); let mut paint_specs: Vec<BasicPaintSpec<C>> = Vec::new(); for line in lines { let spec = BasicPaintSpec::<C>::from_str(line)?; match paint_specs.binary_search_by_key(&spec.name, |bps| bps.name.clone()) { Ok(_) => return Err(PaintErrorType::AlreadyExists(spec.name).into()), Err(index) => paint_specs.insert(index, spec), } } let psc = PaintCollnSpec::<C, CID> { colln_id, paint_specs, }; Ok(psc) } } impl<C, CID> fmt::Display for PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "{} {}\n", CID::colln_name_label(), self.colln_id.colln_name() )?; write!( f, "{} {}\n", CID::colln_owner_label(), self.colln_id.colln_owner() )?; for paint_spec in self.paint_specs.iter() { write!(f, "{}\n", paint_spec)?; } Ok(()) } } #[cfg(test)] mod tests { }
use std::cell::RefCell; use std::cmp::Ordering; use std::fmt; use std::fmt::Debug; use std::fs::File; use std::hash::*; use std::io::Read; use std::marker::PhantomData; use std::path::Path; use std::rc::Rc; use std::str::FromStr; use pw_gix::{ gtk::{self, prelude::*}, wrapper::*, }; pub mod binder; pub mod collection; pub mod display; pub mod editor; use crate::basic_paint::*; use crate::colour::*; use crate::error::*; pub use crate::struct_traits::SimpleCreation; pub trait CollnIdInterface: Debug + PartialEq + PartialOrd + Eq + Ord + Clone + Default + Hash { fn new(colln_name: &str, colln_owner: &str) -> Self; fn colln_name_label() -> String; fn colln_owner_label() -> String; fn paint_select_label() -> String; fn paint_select_tooltip_text() -> String; fn recollection_name_for(item_name: &str) -> String; fn colln_load_image(size: i32) -> gtk::Image; fn display_current_target() -> bool { true } fn colln_name(&self) -> String; fn colln_owner(&self) -> String; fn tooltip_text(&self) -> String { format!("{}\n({})", self.colln_name(), self.colln_owner()) } fn rc_new(colln_name: &str, colln_owner: &str) -> Rc<Self> { Rc::new(Self::new(colln_name, colln_owner)) } } #[derive(PWO, Wrapper)] pub struct CollnIdEntryData<CID> where CID: CollnIdInterface, { grid: gtk::Grid, colln_name_entry: gtk::Entry, colln_owner_entry: gtk::Entry, changed_callbacks: RefCell<Vec<Box<dyn Fn()>>>, phantom_data: PhantomData<CID>, } pub type CollnIdEntry<CID> = Rc<CollnIdEntryData<CID>>; impl<CID> SimpleCreation for CollnIdEntry<CID> where CID: CollnIdInterface + 'static, { fn create() -> CollnIdEntry<CID> { let psie = Rc::new(CollnIdEntryData { grid: gtk::Grid::new(), colln_owner_entry: gtk::Entry::new(), colln_name_entry: gtk::Entry::new(), changed_callbacks: RefCell::new(Vec::new()), phantom_data: PhantomData, }); let label = gtk::Label::new(Some(CID::colln_name_label().as_str())); label.set_halign(gtk::Align::End); psie.grid.attach(&label, 0, 0, 1, 1); psie.colln_name_entry.set_hexpand(true); 
psie.grid.attach_next_to( &psie.colln_name_entry.clone(), Some(&label), gtk::PositionType::Right, 1, 1, ); let label = gtk::Label::new(Some(CID::colln_owner_label().as_str())); label.set_halign(gtk::Align::End); psie.grid.attach(&label, 0, 1, 1, 1); psie.colln_owner_entry.set_hexpand(true); psie.grid.attach_next_to( &psie.colln_owner_entry.clone(), Some(&label), gtk::PositionType::Right, 1, 1, ); let psie_c = psie.clone(); psie.colln_name_entry .connect_changed(move |_| psie_c.inform_changed()); let psie_c = psie.clone(); psie.colln_owner_entry .connect_changed(move |_| psie_c.inform_changed()); psie } } impl<CID> CollnIdEntryData<CID> where CID: CollnIdInterface, { pub fn get_colln_id(&self) -> Option<Rc<CID>> { let colln_name = self.colln_name_entry.get_text(); if colln_name.len() > 0 { let colln_owner = self.colln_owner_entry.get_text(); if colln_owner.len() > 0 { return Some(CID::rc_new(&colln_name, &colln_owner)); } }; None } pub fn set_colln_id(&self, o_cid: Option<&Rc<CID>>) { if let Some(cid) = o_cid { self.colln_name_entry.set_text(&cid.colln_name()); self.colln_owner_entry.set_text(&cid.colln_owner()); } else { self.colln_name_entry.set_text(""); self.colln_owner_entry.set_text(""); } } pub fn connect_changed<F: 'static + Fn()>(&self, callback: F) { self.changed_callbacks.borrow_mut().push(Box::new(callback)); } fn inform_changed(&self) { for callback in self.changed_callbacks.borrow().iter() { callback() } } } #[derive(Debug, Clone)] pub struct CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { colln_id: Rc<CID>, paint: BasicPaint<C>, } impl<C, CID> PartialEq for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn eq(&self, other: &CollnPaintCore<C, CID>) -> bool { if self.colln_id != other.colln_id { false } else { self.paint == other.paint } } } impl<C, CID> Eq for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { } impl<C, CID> PartialOrd for 
CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn partial_cmp(&self, other: &CollnPaintCore<C, CID>) -> Option<Ordering> { if let Some(ordering) = self.colln_id.partial_cmp(&other.colln_id) { if ordering == Ordering::Equal { self.paint.partial_cmp(&other.paint) } else { Some(ordering) } } else { None } } } impl<C, CID> Ord for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn cmp(&self, other: &CollnPaintCore<C, CID>) -> Ordering { let ordering = self.colln_id.cmp(&other.colln_id); if ordering == Ordering::Equal { self.paint.cmp(&other.paint) } else { ordering } } } pub type CollnPaint<C, CID> = Rc<CollnPaintCore<C, CID>>; impl<C, CID> ColouredItemInterface for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn colour(&self) -> Colour { self.paint.colour() } } impl<C, CID> BasicPaintInterface<C> for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn name(&self) -> String { self.paint.name() } fn notes(&self) -> String { self.paint.notes() } fn tooltip_text(&self) -> String { format!( "{}\n{}", self.paint.tooltip_text(), self.colln_id.tooltip_text() ) } fn characteristics(&self) -> C { self.paint.characteristics() } } pub trait CollnPaintInterface<C, CID>: BasicPaintInterface<C> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn create(paint: &BasicPaint<C>, cid: &Rc<CID>) -> Self; fn colln_id(&self) -> Rc<CID>; } impl<C, CID> CollnPaintInterface<C, CID> for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn create(paint: &BasicPaint<C>, cid: &Rc<CID>) -> CollnPaint<C, CID> { Rc::new(CollnPaintCore::<C, CID> { colln_id: cid.clone(), paint: paint.clone(), }) } fn colln_id(&self) -> Rc<CID> { self.colln_id.clone() } } #[derive(Debug)] pub struct PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { pub colln_id: Rc<CID>, pub paint_specs: Vec<BasicPaintSpec<C>>, } 
impl<C, CID> PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { pub fn from_file(path: &Path) -> Result<PaintCollnSpec<C, CID>, PaintError<C>> { let mut file = File::open(path)?; let mut string = String::new(); file.read_to_string(&mut string)?; PaintCollnSpec::<C, CID>::from_str(string.as_str()) } pub fn get_index_for_name(&self, name: &str) -> Option<usize> { match self .paint_specs .binary_search_by_key(&name.to_string(), |spec| spec.name.clone()) { Ok(index) => Some(index), Err(_) => None, } } } impl<C, CID> FromStr for PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { type Err = PaintError<C>; fn from_str(string: &str) -> Result<PaintCollnSpec<C, CID>, PaintError<C>> { let mut lines = string.lines(); let mut colln_name = ""; let mut colln_owner = ""; for _ in 0..2 { if let Some(line) = lines.next() { if line.starts_with(&CID::colln_name_label()) { if let Some(tail) = line.get(CID::colln_name_label().len()..) { colln_name = tail.trim(); } } else if line.starts_with(&CID::colln_owner_label()) { if let Some(tail) = line.get(CID::colln_owner_label().len()..) { colln_owner = tail.trim(); } } else { return Err(PaintErrorType::MalformedText(line.to_string()).into()); } } else { return Err(PaintErrorType::MalformedText(string.to_string()).into()); } }
} impl<C, CID> fmt::Display for PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "{} {}\n", CID::colln_name_label(), self.colln_id.colln_name() )?; write!( f, "{} {}\n", CID::colln_owner_label(), self.colln_id.colln_owner() )?; for paint_spec in self.paint_specs.iter() { write!(f, "{}\n", paint_spec)?; } Ok(()) } } #[cfg(test)] mod tests { }
if colln_name.len() == 0 || colln_owner.len() == 0 { return Err(PaintErrorType::MalformedText(string.to_string()).into()); }; let colln_id = Rc::new(CID::new(colln_name, colln_owner)); let mut paint_specs: Vec<BasicPaintSpec<C>> = Vec::new(); for line in lines { let spec = BasicPaintSpec::<C>::from_str(line)?; match paint_specs.binary_search_by_key(&spec.name, |bps| bps.name.clone()) { Ok(_) => return Err(PaintErrorType::AlreadyExists(spec.name).into()), Err(index) => paint_specs.insert(index, spec), } } let psc = PaintCollnSpec::<C, CID> { colln_id, paint_specs, }; Ok(psc) }
function_block-function_prefix_line
[ { "content": "pub trait BasicPaintInterface<C>: Clone + PartialEq + Ord + Debug + ColouredItemInterface\n\nwhere\n\n C: CharacteristicsInterface,\n\n{\n\n fn name(&self) -> String;\n\n fn notes(&self) -> String;\n\n fn tooltip_text(&self) -> String;\n\n fn characteristics(&self) -> C;\n\n\n\n ...
Rust
rounded-svg/src/main.rs
Ar37-rs/demos
b77283496f4076863cc16c059f1e1721932d3ea1
use fltk::{enums::*, prelude::*, *}; use std::cell::RefCell; use std::ops::{Deref, DerefMut}; use std::rc::Rc; use svg::node::element::Rectangle; use svg::Document; struct RoundedImageDisplay { frame_: frame::Frame, bordercolor_: Rc<RefCell<[u8; 3]>>, radius_: Rc<RefCell<i32>>, } impl RoundedImageDisplay { pub fn new(x: i32, y: i32, w: i32, h: i32, title: Option<&'static str>) -> Self { let mut frame_ = frame::Frame::new(x, y, w, h, title); let radius_ = 20; let bordercolor_ = [0x80, 0x80, 0x80]; frame_.set_frame(FrameType::BorderBox); let radius_ = Rc::from(RefCell::from(radius_)); let bordercolor_ = Rc::from(RefCell::from(bordercolor_)); frame_.draw({ let radius_ = radius_.clone(); let bordercolor_ = bordercolor_.clone(); move |f| { let radius_ = radius_.borrow(); let bordercolor_ = bordercolor_.borrow(); let rect = Rectangle::new() .set("x", 0 - *radius_ / 2) .set("y", 0 - *radius_ / 2) .set("rx", *radius_) .set("ry", *radius_) .set("width", f.w() + *radius_) .set("height", f.h() + *radius_) .set("fill", "none") .set( "stroke", format!( "rgb({},{},{})", bordercolor_[0], bordercolor_[1], bordercolor_[2], ), ) .set("stroke-width", *radius_); let document = Document::new() .set("viewBox", (0, 0, f.w(), f.h())) .add(rect); let mut svg = image::SvgImage::from_data(&document.to_string()).unwrap(); svg.draw(f.x(), f.y(), f.w(), f.h()) } }); Self { frame_, radius_, bordercolor_, } } pub fn bordercolor(&mut self, r: u8, g: u8, b: u8) { let mut bordercolor = self.bordercolor_.borrow_mut(); bordercolor[0] = r; bordercolor[1] = g; bordercolor[2] = b; self.frame_.parent().unwrap().redraw(); } pub fn radius(&mut self, val: i32) { *self.radius_.borrow_mut() = val; self.frame_.parent().unwrap().redraw(); } } impl Deref for RoundedImageDisplay { type Target = frame::Frame; fn deref(&self) -> &Self::Target { &self.frame_ } } impl DerefMut for RoundedImageDisplay { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.frame_ } } fn main() { let a = 
app::App::default().with_scheme(app::Scheme::Gtk); let border = [0x80, 0xa0, 0x80]; let mut win = window::Window::default() .with_size(1000, 800) .with_label("Rounded Corners"); win.set_color(Color::from_rgb(border[0], border[1], border[2])); let jpg = image::JpegImage::load("../opengl/ex.jpg").expect("Failed to open jpg file"); let mut rimage = RoundedImageDisplay::new(10, 10, jpg.w(), jpg.h(), None); rimage.bordercolor(border[0], border[1], border[2]); rimage.radius(50); rimage.set_image(Some(jpg)); let mut slider = valuator::Slider::new(1000 - 50, 10, 20, 200, "border\nradius"); slider.set_align(Align::Bottom); slider.set_bounds(0., 200.); slider.set_value(20.); slider.do_callback(); slider.set_color(Color::from_rgb( (border[0] as f64 / 1.5) as u8, (border[1] as f64 / 1.5) as u8, (border[2] as f64 / 1.5) as u8, )); slider.set_callback(move |s| { rimage.radius(s.value() as i32); }); win.end(); win.show(); a.run().unwrap(); }
use fltk::{enums::*, prelude::*, *}; use std::cell::RefCell; use std::ops::{Deref, DerefMut}; use std::rc::Rc; use svg::node::element::Rectangle; use svg::Document; struct RoundedImageDisplay { frame_: frame::Frame, bordercolor_: Rc<RefCell<[u8; 3]>>, radius_: Rc<RefCell<i32>>, } impl RoundedImageDisplay { pub fn new(x: i32, y: i32, w: i32, h: i32, title: Option<&'static str>) -> Self { let mut frame_ = frame::Frame::new(x, y, w, h, title); let radius_ = 20; let bordercolor_ = [0x80, 0x80, 0x80]; frame_.set_frame(FrameType::BorderBox); let radius_ = Rc::from(RefCell::from(radius_)); let bordercolor_ = Rc::from(RefCell::from(bordercolor_)); frame_.draw({ let radius_ = radius_.clone(); let bordercolor_ = bordercolor_.clone(); move |f| { let radius_ = radius_.borrow(); let bordercolor_ = bordercolor_.borrow(); let rect = Rectangle::new() .set("x", 0 - *radius_ / 2) .set("y", 0 - *radius_ / 2) .set("rx", *radius_) .set("ry", *radius_) .set("width", f.w() + *radius_) .set("height", f.h() + *radius_) .set("fill", "none") .set( "stroke", format!( "rgb({},{},{})", bordercolor_[0], bordercolor_[1], bordercolor_[2], ), ) .set("stroke-width", *radius_); let document = Document::new() .set("viewBox", (0, 0, f.w(), f.h())) .add(rect); let mut svg = image::SvgImage::from_data(&document.to_string()).unwrap(); svg.draw(f.x(), f.y(), f.w(), f.h()) } }); Self { frame_, radius_, bordercolor_, } } pub fn bordercolor(&mut self, r: u8, g: u8, b: u8) { let mut bordercolor = self.bordercolor_.borrow_mut(); bordercolor[0] = r; bordercolor[1] = g; bordercolor[2] = b; self.frame_.parent().unwrap().redraw(); } pub fn radius(&mut self, val: i32) { *self.radius_.borrow_mut() = val; self.frame_.parent().unwrap().redraw(); } } impl Deref for RoundedImageDisplay { type Target = frame::Frame; fn deref(&self) -> &Self::Target { &self.frame_ } } impl DerefMut for RoundedImageDisplay { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.frame_ } } fn main() { let a = 
app::App::default().with_scheme(app::Scheme::Gtk); let border = [0x80, 0xa0, 0x80]; let mut win = window::Window::default() .with_size(1000, 800) .with_label("Rounded Corners"); win.set_color(Color::from_rgb(border[0], border[1], border[2])); let jpg = image::JpegImage::load("../opengl/ex.jpg").expect("Failed to open jpg file"); let mut rimage = RoundedImageDisplay::new(10, 10, jpg.w(), jpg.h(), None); rimage.bordercolor(border[0], border[1], border[2]); rimage.radius(50); rimage.set_image(Some(jpg)); let mut slider = valuator::Slider::new(1000 - 50, 10, 20, 200, "border\nradius"); slider.set_align(Align::Bottom); slider.set_bounds(0., 200.); slider.set_value(20.); slider.do_callback(); slider.set_color(Color::from_rgb( (border[0] as f64 / 1.5) as u8, (border[1] as f64 /
1.5) as u8, (border[2] as f64 / 1.5) as u8, )); slider.set_callback(move |s| { rimage.radius(s.value() as i32); }); win.end(); win.show(); a.run().unwrap(); }
function_block-function_prefixed
[ { "content": "pub fn get_proc_address(win: &window::GlWindow, name: &str) -> *mut c_void {\n\n win.get_proc_address(name) as _\n\n}\n\n\n", "file_path": "libmpv/src/main.rs", "rank": 0, "score": 225561.0756709798 }, { "content": "// draw header with day names\n\nfn draw_header(txt: &str, ...
Rust
build/sdk/meta/src/product_bundle_container.rs
allansrc/fuchsia
a2c235b33fc4305044d496354a08775f30cdcf37
use { crate::{ common::{ElementType, Envelope}, json::{schema, JsonObject}, metadata::Metadata, ProductBundleV1, }, serde::{Deserialize, Serialize}, }; #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct WorkaroundProductBundleWrapper { pub data: ProductBundleV1, pub schema_id: String, } #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct ProductBundleContainerV1 { pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "type")] pub kind: ElementType, pub bundles: Vec<WorkaroundProductBundleWrapper>, } impl JsonObject for Envelope<ProductBundleContainerV1> { fn get_schema() -> &'static str { include_str!("../product_bundle_container-76a5c104.json") } fn get_referenced_schemata() -> &'static [&'static str] { &[ schema::COMMON, schema::HARDWARE_V1, schema::EMU_MANIFEST, schema::FLASH_MANIFEST_V1, schema::PRODUCT_BUNDLE_V1, ] } } #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct ProductBundleContainerV2 { pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "type")] pub kind: ElementType, pub fms_entries: Vec<Metadata>, } impl JsonObject for Envelope<ProductBundleContainerV2> { fn get_schema() -> &'static str { include_str!("../product_bundle_container-32z5e391.json") } fn get_referenced_schemata() -> &'static [&'static str] { &[ schema::COMMON, schema::HARDWARE_V1, schema::EMU_MANIFEST, schema::FLASH_MANIFEST_V1, schema::PRODUCT_BUNDLE_V1, schema::PHYSICAL_DEVICE_V1, schema::VIRTUAL_DEVICE_V1, ] } } #[cfg(test)] mod tests { use super::*; test_validation! 
{ name = test_validation_v1, kind = Envelope::<ProductBundleContainerV1>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-76a5c104.json", "data": { "name": "Fuchsia F1", "type": "product_bundle_container", "bundles": [ { "data": { "name": "generic-x64", "type": "product_bundle", "device_refs": ["generic-x64"], "images": [{ "base_uri": "gs://fuchsia/development/0.20201216.2.1/images/generic-x64.tgz", "format": "tgz" }], "packages": [{ "format": "tgz", "repo_uri": "gs://fuchsia/development/0.20201216.2.1/packages/generic-x64.tar.gz" }] }, "schema_id": "product_bundle-6320eef1.json#/definitions/product_bundle" } ] } } "#, valid = true, } test_validation! { name = test_validation_v1_invalid, kind = Envelope::<ProductBundleContainerV1>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-76a5c104.json", "data": { "name": "Fuchsia F1", "type": "cc_prebuilt_library", "bundles": [] } } "#, valid = false, } test_validation! { name = test_validation_v2_pbm, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "PBM container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "product_bundle", "device_refs": ["generic-x64"], "images": [{ "base_uri": "gs://fuchsia/development/0.20201216.2.1/images/generic-x64.tgz", "format": "tgz" }], "packages": [{ "format": "tgz", "repo_uri": "gs://fuchsia/development/0.20201216.2.1/packages/generic-x64.tar.gz" }] } ] } } "#, valid = true, } test_validation! 
{ name = test_validation_v2_virt_device, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "Virtual device container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "virtual_device", "hardware": { "audio": { "model": "hda" }, "cpu": { "arch": "x64" }, "inputs": { "pointing_device": "touch" }, "window_size": { "width": 640, "height": 480, "units": "pixels" }, "memory": { "quantity": 1, "units": "gigabytes" }, "storage": { "quantity": 1, "units": "gigabytes" } } } ] } } "#, valid = true, } test_validation! { name = test_validation_v2_phys_device, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "Virtual device container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "physical_device", "hardware": { "cpu": { "arch": "x64" } } } ] } } "#, valid = true, } }
use { crate::{ common::{ElementType, Envelope}, json::{schema, JsonObject}, metadata::Metadata, ProductBundleV1, }, serde::{Deserialize, Serialize}, }; #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct WorkaroundProductBundleWrapper { pub data: ProductBundleV1, pub schema_id: String, } #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct ProductBundleContainerV1 { pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>,
opment/0.20201216.2.1/packages/generic-x64.tar.gz" }] }, "schema_id": "product_bundle-6320eef1.json#/definitions/product_bundle" } ] } } "#, valid = true, } test_validation! { name = test_validation_v1_invalid, kind = Envelope::<ProductBundleContainerV1>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-76a5c104.json", "data": { "name": "Fuchsia F1", "type": "cc_prebuilt_library", "bundles": [] } } "#, valid = false, } test_validation! { name = test_validation_v2_pbm, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "PBM container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "product_bundle", "device_refs": ["generic-x64"], "images": [{ "base_uri": "gs://fuchsia/development/0.20201216.2.1/images/generic-x64.tgz", "format": "tgz" }], "packages": [{ "format": "tgz", "repo_uri": "gs://fuchsia/development/0.20201216.2.1/packages/generic-x64.tar.gz" }] } ] } } "#, valid = true, } test_validation! { name = test_validation_v2_virt_device, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "Virtual device container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "virtual_device", "hardware": { "audio": { "model": "hda" }, "cpu": { "arch": "x64" }, "inputs": { "pointing_device": "touch" }, "window_size": { "width": 640, "height": 480, "units": "pixels" }, "memory": { "quantity": 1, "units": "gigabytes" }, "storage": { "quantity": 1, "units": "gigabytes" } } } ] } } "#, valid = true, } test_validation! 
{ name = test_validation_v2_phys_device, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "Virtual device container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "physical_device", "hardware": { "cpu": { "arch": "x64" } } } ] } } "#, valid = true, } }
#[serde(rename = "type")] pub kind: ElementType, pub bundles: Vec<WorkaroundProductBundleWrapper>, } impl JsonObject for Envelope<ProductBundleContainerV1> { fn get_schema() -> &'static str { include_str!("../product_bundle_container-76a5c104.json") } fn get_referenced_schemata() -> &'static [&'static str] { &[ schema::COMMON, schema::HARDWARE_V1, schema::EMU_MANIFEST, schema::FLASH_MANIFEST_V1, schema::PRODUCT_BUNDLE_V1, ] } } #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct ProductBundleContainerV2 { pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "type")] pub kind: ElementType, pub fms_entries: Vec<Metadata>, } impl JsonObject for Envelope<ProductBundleContainerV2> { fn get_schema() -> &'static str { include_str!("../product_bundle_container-32z5e391.json") } fn get_referenced_schemata() -> &'static [&'static str] { &[ schema::COMMON, schema::HARDWARE_V1, schema::EMU_MANIFEST, schema::FLASH_MANIFEST_V1, schema::PRODUCT_BUNDLE_V1, schema::PHYSICAL_DEVICE_V1, schema::VIRTUAL_DEVICE_V1, ] } } #[cfg(test)] mod tests { use super::*; test_validation! { name = test_validation_v1, kind = Envelope::<ProductBundleContainerV1>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-76a5c104.json", "data": { "name": "Fuchsia F1", "type": "product_bundle_container", "bundles": [ { "data": { "name": "generic-x64", "type": "product_bundle", "device_refs": ["generic-x64"], "images": [{ "base_uri": "gs://fuchsia/development/0.20201216.2.1/images/generic-x64.tgz", "format": "tgz" }], "packages": [{ "format": "tgz", "repo_uri": "gs://fuchsia/devel
random
[]
Rust
src/lib.rs
Uriopass/inline_tweak
8ef340ea259854e21edfe2374c1237d1bf07a5e5
pub trait Tweakable: Sized { fn parse(x: &str) -> Option<Self>; } #[cfg(any(debug_assertions, feature = "release_tweak"))] mod itweak { use super::Tweakable; use lazy_static::*; use std::any::Any; use std::collections::{HashMap, HashSet}; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; use std::sync::Mutex; use std::time::{Instant, SystemTime}; macro_rules! impl_tweakable { ($($t: ty) +) => { $( impl Tweakable for $t { fn parse(x: &str) -> Option<$t> { x.parse().ok() } } )+ }; } impl_tweakable!(u8 u16 u32 u64 u128 i8 i16 i32 i64 i128 usize isize bool f32 f64); impl Tweakable for &'static str { fn parse(x: &str) -> Option<Self> { Some(Box::leak(Box::new(String::from( x.trim_start_matches('"').trim_end_matches('"'), )))) } } struct TweakValue { position: usize, value: Option<Box<dyn Any + Send>>, initialized: bool, last_checked: Instant, file_modified: SystemTime, } struct FileWatcher { last_checked: Instant, file_modified: SystemTime, } lazy_static! { static ref VALUES: Mutex<HashMap<(&'static str, u32, u32), TweakValue>> = Default::default(); static ref PARSED_FILES: Mutex<HashSet<&'static str>> = Default::default(); static ref WATCHERS: Mutex<HashMap<&'static str, FileWatcher>> = Default::default(); } fn try_open(file: &'static str) -> std::io::Result<File> { let p: &Path = file.as_ref(); if let Some(x) = File::open(p).ok() { return Ok(x); } let p: &Path = p .strip_prefix(p.iter().next().ok_or(std::io::Error::new( std::io::ErrorKind::Other, "path is empty", ))?) 
.map_err(|e| std::io::Error::new( std::io::ErrorKind::Other, e, ))?; File::open(&p) } fn last_modified(file: &File) -> Option<SystemTime> { file.metadata().ok()?.modified().ok() } fn parse_tweaks(fpath: &'static str) -> Option<()> { let mut fileinfos = PARSED_FILES.lock().unwrap(); if !fileinfos.contains(&fpath) { let mut values = VALUES.lock().unwrap(); let file = match try_open(fpath) { Ok(x) => x, Err(e) => { eprintln!("[inline-tweak] couldn't open file for tweaking: {}\n do you have the access rights? are you running this from the workspace root?", e); return None; } }; let file_modified = last_modified(&file).unwrap_or_else(SystemTime::now); let now = Instant::now(); let mut tweaks_seen = 0; for (line_n, line) in BufReader::new(file) .lines() .filter_map(|line| line.ok()) .enumerate() { for (column, _) in line.match_indices("tweak!(") { let path_corrected_column = line[..column] .rfind(|c: char| !(c.is_ascii_alphanumeric() || c == ':' || c == '_')) .map(|x| x + 1) .unwrap_or(0); values.insert( (fpath, line_n as u32 + 1, path_corrected_column as u32 + 1), TweakValue { position: tweaks_seen, value: None, initialized: false, last_checked: now, file_modified, }, ); tweaks_seen += 1; } } fileinfos.insert(fpath); } Some(()) } fn update_tweak<T: 'static + Tweakable + Clone + Send>( tweak: &mut TweakValue, fpath: &'static str, ) -> Option<()> { let file = try_open(fpath).ok()?; let last_modified = last_modified(&file)?; if tweak.value.is_none() || last_modified .duration_since(tweak.file_modified) .ok()? 
.as_secs_f32() > 0.5 { let mut tweaks_seen = 0; let line_str = BufReader::new(&file) .lines() .filter_map(|line| line.ok()) .find(|line| { tweaks_seen += line.matches("tweak!(").count(); tweaks_seen > tweak.position })?; let val_str = line_str .rsplit("tweak!(") .nth(tweaks_seen - tweak.position - 1)?; let mut prec = 1; let (end, _) = val_str.char_indices().find(|(_, c)| { match c { ';' | ')' if prec == 1 => { return true; } ')' => prec -= 1, '(' => prec += 1, _ => {} } false })?; let parsed: Option<T> = Tweakable::parse(&val_str[..end]); tweak.file_modified = last_modified; tweak.last_checked = Instant::now(); tweak.value = parsed.map(|inner| Box::new(inner) as Box<dyn Any + Send>); } Some(()) } pub(crate) fn get_value<T: 'static + Tweakable + Clone + Send>( initial_value: Option<T>, file: &'static str, line: u32, column: u32, ) -> Option<T> { parse_tweaks(file); let mut lock = VALUES.lock().unwrap(); let mut tweak = lock.get_mut(&(file, line, column))?; if !tweak.initialized { tweak.value = initial_value.map(|inner| Box::new(inner) as Box<dyn Any + Send>); tweak.initialized = true; } if tweak.last_checked.elapsed().as_secs_f32() > 0.5 { update_tweak::<T>(&mut tweak, file)?; } tweak.value.as_ref()?.downcast_ref().cloned() } pub fn watch_modified(file: &'static str) -> bool { let mut lock = WATCHERS.lock().unwrap(); let entry = lock.entry(file); let now = Instant::now(); let last_modified = try_open(file) .ok() .and_then(|f| last_modified(&f)) .unwrap_or_else(SystemTime::now); let watcher = entry.or_insert_with(|| FileWatcher { last_checked: now, file_modified: last_modified, }); watcher.last_checked = now; last_modified .duration_since(watcher.file_modified) .map(|time| { watcher.file_modified = last_modified; time.as_secs_f32() > 0.5 }) .unwrap_or(true) } } #[cfg(any(debug_assertions, feature = "release_tweak"))] pub fn inline_tweak<T: 'static + Tweakable + Clone + Send>( initial_value: Option<T>, file: &'static str, line: u32, column: u32, ) -> Option<T> { 
itweak::get_value(initial_value, file, line, column) } #[cfg(feature = "release_tweak")] #[macro_export] macro_rules! release_tweak { ($default:expr) => { inline_tweak::inline_tweak(None, file!(), line!(), column!()).unwrap_or_else(|| $default) }; ($value:literal; $default:expr) => { inline_tweak::inline_tweak(Some($value), file!(), line!(), column!()) .unwrap_or_else(|| $default) }; } #[cfg(debug_assertions)] #[macro_export] macro_rules! tweak { ($default:expr) => { inline_tweak::inline_tweak(None, file!(), line!(), column!()).unwrap_or_else(|| $default) }; ($value:literal; $default:expr) => { inline_tweak::inline_tweak(Some($value), file!(), line!(), column!()) .unwrap_or_else(|| $default) }; } #[cfg(not(debug_assertions))] #[macro_export] macro_rules! tweak { ($default:expr) => { $default }; ($value:literal; $default:expr) => { $default }; } #[cfg(debug_assertions)] pub fn watch_file(file: &'static str) { while !itweak::watch_modified(file) { std::thread::sleep(std::time::Duration::from_millis(500)); } } #[cfg(not(debug_assertions))] pub fn watch_file(_file: &'static str) {} #[macro_export] macro_rules! watch { () => { inline_tweak::watch_file(file!()); }; }
pub trait Tweakable: Sized { fn parse(x: &str) -> Option<Self>; } #[cfg(any(debug_assertions, feature = "release_tweak"))] mod itweak { use super::Tweakable; use lazy_static::*; use std::any::Any; use std::collections::{HashMap, HashSet}; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; use std::sync::Mutex; use std::time::{Instant, SystemTime}; macro_rules! impl_tweakable { ($($t: ty) +) => { $( impl Tweakable for $t { fn parse(x: &str) -> Option<$t> { x.parse().ok() } } )+ }; } impl_tweakable!(u8 u16 u32 u64 u128 i8 i16 i32 i64 i128 usize isize bool f32 f64); impl Tweakable for &'static str { fn parse(x: &str) -> Option<Self> { Some(Box::leak(Box::new(String::from( x.trim_start_matches('"').trim_end_matches('"'), )))) } } struct TweakValue { position: usize, value: Option<Box<dyn Any + Send>>, initialized: bool, last_checked: Instant, file_modified: SystemTime, } struct FileWatcher { last_checked: Instant, file_modified: SystemTime, } lazy_static! { static ref VALUES: Mutex<HashMap<(&'static str, u32, u32), TweakValue>> = Default::default(); static ref PARSED_FILES: Mutex<HashSet<&'static str>> = Default::default(); static ref WATCHERS: Mutex<HashMap<&'static str, FileWatcher>> = Default::default(); } fn try_open(file: &'static str) -> std::io::Result<File> { let p: &Path = file.as_ref(); if let Some(x) = File::open(p).ok() { return Ok(x); } let p: &Path = p .strip_prefix(p.iter().next().ok_or(std::io::Error::new( std::io::ErrorKind::Other, "path is empty", ))?) 
.map_err(|e| std::io::Error::new( std::io::ErrorKind::Other, e, ))?; File::open(&p) } fn last_modified(file: &File) -> Option<SystemTime> { file.metadata().ok()?.modified().ok() } fn parse_tweaks(fpath: &'static str) -> Option<()> { let mut fileinfos = PARSED_FILES.lock().unwrap(); if !fileinfos.contains(&fpath) { let mut values = VALUES.lock().unwrap(); let file = match try_open(fpath) { Ok(x) => x, Err(e) => { eprintln!("[inline-tweak] couldn't open file for tweaking: {}\n do you have the access rights? are you running this from the workspace root?", e); return None; } }; let file_modified = last_modified(&file).unwrap_or_else(SystemTime::now); let now = Instant::now(); let mut tweaks_seen = 0; for (line_n, line) in BufReader::new(file) .lines() .filter_map(|line| line.ok()) .enumerate() { for (column, _) in line.match_indices("tweak!(") { let path_corrected_column = line[..column] .rfind(|c: char| !(c.is_ascii_alphanumeric() || c == ':' || c == '_')) .map(|x| x + 1) .unwrap_or(0); values.insert( (fpath, line_n as u32 + 1, path_corrected_column as u32 + 1), TweakValue { position: tweaks_seen, value: None, initialized: false, last_checked: now, file_modified, }, );
{ let mut tweaks_seen = 0; let line_str = BufReader::new(&file) .lines() .filter_map(|line| line.ok()) .find(|line| { tweaks_seen += line.matches("tweak!(").count(); tweaks_seen > tweak.position })?; let val_str = line_str .rsplit("tweak!(") .nth(tweaks_seen - tweak.position - 1)?; let mut prec = 1; let (end, _) = val_str.char_indices().find(|(_, c)| { match c { ';' | ')' if prec == 1 => { return true; } ')' => prec -= 1, '(' => prec += 1, _ => {} } false })?; let parsed: Option<T> = Tweakable::parse(&val_str[..end]); tweak.file_modified = last_modified; tweak.last_checked = Instant::now(); tweak.value = parsed.map(|inner| Box::new(inner) as Box<dyn Any + Send>); } Some(()) } pub(crate) fn get_value<T: 'static + Tweakable + Clone + Send>( initial_value: Option<T>, file: &'static str, line: u32, column: u32, ) -> Option<T> { parse_tweaks(file); let mut lock = VALUES.lock().unwrap(); let mut tweak = lock.get_mut(&(file, line, column))?; if !tweak.initialized { tweak.value = initial_value.map(|inner| Box::new(inner) as Box<dyn Any + Send>); tweak.initialized = true; } if tweak.last_checked.elapsed().as_secs_f32() > 0.5 { update_tweak::<T>(&mut tweak, file)?; } tweak.value.as_ref()?.downcast_ref().cloned() } pub fn watch_modified(file: &'static str) -> bool { let mut lock = WATCHERS.lock().unwrap(); let entry = lock.entry(file); let now = Instant::now(); let last_modified = try_open(file) .ok() .and_then(|f| last_modified(&f)) .unwrap_or_else(SystemTime::now); let watcher = entry.or_insert_with(|| FileWatcher { last_checked: now, file_modified: last_modified, }); watcher.last_checked = now; last_modified .duration_since(watcher.file_modified) .map(|time| { watcher.file_modified = last_modified; time.as_secs_f32() > 0.5 }) .unwrap_or(true) } } #[cfg(any(debug_assertions, feature = "release_tweak"))] pub fn inline_tweak<T: 'static + Tweakable + Clone + Send>( initial_value: Option<T>, file: &'static str, line: u32, column: u32, ) -> Option<T> { 
itweak::get_value(initial_value, file, line, column) } #[cfg(feature = "release_tweak")] #[macro_export] macro_rules! release_tweak { ($default:expr) => { inline_tweak::inline_tweak(None, file!(), line!(), column!()).unwrap_or_else(|| $default) }; ($value:literal; $default:expr) => { inline_tweak::inline_tweak(Some($value), file!(), line!(), column!()) .unwrap_or_else(|| $default) }; } #[cfg(debug_assertions)] #[macro_export] macro_rules! tweak { ($default:expr) => { inline_tweak::inline_tweak(None, file!(), line!(), column!()).unwrap_or_else(|| $default) }; ($value:literal; $default:expr) => { inline_tweak::inline_tweak(Some($value), file!(), line!(), column!()) .unwrap_or_else(|| $default) }; } #[cfg(not(debug_assertions))] #[macro_export] macro_rules! tweak { ($default:expr) => { $default }; ($value:literal; $default:expr) => { $default }; } #[cfg(debug_assertions)] pub fn watch_file(file: &'static str) { while !itweak::watch_modified(file) { std::thread::sleep(std::time::Duration::from_millis(500)); } } #[cfg(not(debug_assertions))] pub fn watch_file(_file: &'static str) {} #[macro_export] macro_rules! watch { () => { inline_tweak::watch_file(file!()); }; }
tweaks_seen += 1; } } fileinfos.insert(fpath); } Some(()) } fn update_tweak<T: 'static + Tweakable + Clone + Send>( tweak: &mut TweakValue, fpath: &'static str, ) -> Option<()> { let file = try_open(fpath).ok()?; let last_modified = last_modified(&file)?; if tweak.value.is_none() || last_modified .duration_since(tweak.file_modified) .ok()? .as_secs_f32() > 0.5
random
[ { "content": "fn do_fn(item: TokenStream, release_tweak: bool) -> TokenStream {\n\n let mut v: syn::ItemFn = parse_macro_input!(item as syn::ItemFn);\n\n\n\n let fname = v.sig.ident.clone();\n\n\n\n LiteralReplacer {\n\n nth: 0,\n\n fname,\n\n release_tweak,\n\n }\n\n .visit_...
Rust
contract/ft-transfer-receiver-mock/src/lib.rs
evgenykuzyakov/oysterpack-near-stake-token
86a01e80f57780fa755bbc09e55b91714c0751d4
use near_sdk::serde::export::TryFrom; use near_sdk::{ borsh::{self, BorshDeserialize, BorshSerialize}, env, json_types::{ValidAccountId, U128}, log, near_bindgen, serde::{Deserialize, Serialize}, serde_json::{self, json}, wee_alloc, AccountId, Promise, PromiseOrValue, }; use std::{ cmp::Ordering, fmt::{self, Display, Formatter}, }; #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[near_bindgen] #[derive(BorshDeserialize, BorshSerialize, Default)] pub struct TransferReceiverMock {} const TGAS: u64 = 1_000_000_000_000; const YOCTO: u128 = 1_000_000_000_000_000_000_000_000; #[near_bindgen] impl TransferReceiver for TransferReceiverMock { fn ft_on_transfer( &mut self, #[allow(unused_variables)] sender_id: ValidAccountId, amount: TokenAmount, msg: String, ) -> PromiseOrValue<TokenAmount> { log!("{:#?}", msg); let msg = Message::try_from(msg.as_str()).expect("invalid msg"); match msg { Message::Panic => panic!("BOOM!"), Message::Accept { transfer_relay, refund_percent, } => { if let Some(relay) = transfer_relay { let transfer_relay_amount = amount.value() * relay.percent as u128 / 100; self.invoke_ft_transfer( &env::predecessor_account_id(), &relay.account_id, transfer_relay_amount.into(), ) .then(self.invoke_resolve_ft_on_transfer(amount, refund_percent)) .into() } else { let refund_amount = amount.value() * refund_percent as u128 / 100; PromiseOrValue::Value(refund_amount.into()) } } } } } #[near_bindgen] impl TransferReceiverMock { #[private] pub fn resolve_ft_on_transfer(&self, amount: TokenAmount, refund_percent: u8) -> TokenAmount { let refund_amount = amount.value() * refund_percent as u128 / 100; refund_amount.into() } pub fn register_account(&self, contract_id: ValidAccountId) -> Promise { Promise::new(contract_id.as_ref().to_string()).function_call( b"register_account".to_vec(), vec![], YOCTO, 5 * TGAS, ) } pub fn unregister_account(&self, contract_id: ValidAccountId) -> Promise { 
Promise::new(contract_id.as_ref().to_string()).function_call( b"unregister_account".to_vec(), vec![], YOCTO, 10 * TGAS, ) } pub fn ft_transfer( &self, token_contract: ValidAccountId, receiver_id: ValidAccountId, amount: TokenAmount, ) -> Promise { self.invoke_ft_transfer(token_contract.as_ref(), receiver_id.as_ref(), amount) } fn invoke_ft_transfer( &self, token_contract: &str, receiver_id: &str, amount: TokenAmount, ) -> Promise { Promise::new(token_contract.to_string()).function_call( b"ft_transfer".to_vec(), json!({ "receiver_id": receiver_id, "amount":amount }) .to_string() .into_bytes(), 1, 10 * TGAS, ) } fn invoke_resolve_ft_on_transfer(&self, amount: TokenAmount, refund_percent: u8) -> Promise { Promise::new(env::current_account_id()).function_call( b"resolve_ft_on_transfer".to_vec(), json!({ "amount": amount, "refund_percent": refund_percent }) .to_string() .into_bytes(), 0, 5 * TGAS, ) } } pub trait TransferReceiver { fn ft_on_transfer( &mut self, sender_id: ValidAccountId, amount: TokenAmount, msg: String, ) -> PromiseOrValue<TokenAmount>; } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] #[serde(crate = "near_sdk::serde")] pub struct TokenAmount(pub U128); impl From<u128> for TokenAmount { fn from(value: u128) -> Self { Self(U128::from(value)) } } impl TokenAmount { pub fn value(&self) -> u128 { self.0 .0 } } impl Display for TokenAmount { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { self.0 .0.fmt(f) } } impl PartialOrd for TokenAmount { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.value().partial_cmp(&other.value()) } } impl Default for TokenAmount { fn default() -> Self { Self(U128(0)) } } #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] #[serde(crate = "near_sdk::serde")] pub enum Message { Accept { refund_percent: u8, transfer_relay: Option<TransferRelay>, }, Panic, } impl TryFrom<&str> for Message { type Error = serde_json::Error; fn try_from(json: &str) -> Result<Self, Self::Error> { 
serde_json::from_str(json) } } #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] #[serde(crate = "near_sdk::serde")] pub struct TransferRelay { account_id: AccountId, percent: u8, } #[cfg(test)] mod tests { use super::*; use near_sdk::serde_json::{self, json}; #[test] fn message() { let msg = Message::Accept { refund_percent: 0, transfer_relay: None, }; let json = serde_json::to_string_pretty(&msg).unwrap(); println!("{}", json); let json = json!({ "Accept": { "refund_percent": 0, "transfer_relay": {"account_id": "account.near", "percent": 50} } }); let json = serde_json::to_string(&json).unwrap(); println!("{}", json); let msg: Message = serde_json::from_str(&json).unwrap(); match msg { Message::Accept { refund_percent, transfer_relay, } => { println!( "refund_percent={}% transfer_relay={:?}", refund_percent, transfer_relay ) } Message::Panic => panic!("expected Accept message type"), } let msg = Message::Panic; let json = serde_json::to_string_pretty(&msg).unwrap(); println!("{}", json); let msg: Message = serde_json::from_str(&json).unwrap(); println!("{:?}", msg); } }
use near_sdk::serde::export::TryFrom; use near_sdk::{ borsh::{self, BorshDeserialize, BorshSerialize}, env, json_types::{ValidAccountId, U128}, log, near_bindgen, serde::{Deserialize, Serialize}, serde_json::{self, json}, wee_alloc, AccountId, Promise, PromiseOrValue, }; use std::{ cmp::Ordering, fmt::{self, Display, Formatter}, }; #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[near_bindgen] #[derive(BorshDeserialize, BorshSerialize, Default)] pub struct TransferReceiverMock {} const TGAS: u64 = 1_000_000_000_000; const YOCTO: u128 = 1_000_000_000_000_000_000_000_000; #[near_bindgen] impl TransferReceiver for TransferReceiverMock { fn ft_on_transfer( &mut self, #[allow(unused_variables)] sender_id: ValidAccountId, amount: TokenAmount, msg: String, ) -> PromiseOrValue<TokenAmount> { log!("{:#?}", msg); let msg = Message::try_from(msg.as_str()).expect("invalid msg"); match msg { Message::Panic => panic!("BOOM!"), Message::Accept { transfer_relay, refund_percent, } => { if let Some(relay) = transfer_relay { let transfer_relay_amount = amount.value() * relay.percent as u128 / 100; self.invoke_ft_transfer( &env::predecessor_account_id(), &relay.account_id, transfer_relay_amount.into(), ) .then(self.invoke_resolve_ft_on_transfer(amount, refund_percent)) .into() } else { let refund_amount = amount.value() * refund_percent as u128 / 100; PromiseOrValue::Value(refund_amount.into()) } } } } } #[near_bindgen] impl TransferReceiverMock { #[private] pub fn resolve_ft_on_transfer(&self, amount: TokenAmount, refund_percent: u8) -> TokenAmount { let refund_amount = amount.value() * refund_percent as u128 / 100; refund_amount.into() } pub fn register_account(&self, contract_id: ValidAccountId) -> Promise { Promise::new(contract_id.as_ref().to_string()).function_call( b"register_account".to_vec(), vec![], YOCTO, 5 * TGAS, ) } pub fn unregister_account(&self, contract_id: ValidAccountId) -> Promise { 
Promise::new(contract_id.as_ref().to_string()).function_call( b"unregister_account".to_vec(), vec![], YOCTO, 10 * TGAS, ) } pub fn ft_transfer( &self, token_contract: ValidAccountId, receiver_id: ValidAccountId, amount: TokenAmount, ) -> Promise { self.invoke_ft_transfer(token_contract.as_ref(), receiver_id.as_ref(), amount) } fn invoke_ft_transfer( &self, token_contract: &str, receiver_id: &str, amount: TokenAmount, ) -> Promise { Promise::new(token_contract.to_string()).function_call( b"ft_transfer".to_vec(), json!({ "receiver_id": receiver_id, "amount":amount }) .to_string() .into_bytes(), 1, 10 * TGAS, ) } fn invoke_resolve_ft_on_transfer(&self, amount: TokenAmount, refund_percent: u8) -> Promise { Promise::new(env::current_account_id()).function_call( b"resolve_ft_on_transfer".to_vec(), json!({ "amount": amount, "refund_percent": refund_percent }) .to_string() .into_bytes(), 0, 5 * TGAS, ) } } pub trait TransferReceiver { fn ft_on_transfer( &mut self, sender_id: ValidAccountId, amount: TokenAmount, msg: String, ) -> PromiseOrValue<TokenAmount>; } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] #[serde(crate = "near_sdk::serde")] pub struct TokenAmount(pub U128); impl From<u128> for TokenAmount { fn from(value: u128) -> Self { Self(U128::from(value)) } } impl TokenAmount { pub fn value(&self) -> u128 { self.0 .0 } } impl Display for TokenAmount { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { self.0 .0.fmt(f) } } impl PartialOrd for TokenAmount { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.value().partial_cmp(&other.value()) } } impl Default for TokenAmount { fn default() -> Self { Self(U128(0)) } } #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] #[serde(crate = "near_sdk::serde")] pub enum Message { Accept { refund_percent: u8, transfer_relay: Option<TransferRelay>, }, Panic, } impl TryFrom<&str> for Message { type Error = serde_json::Error; fn try_from(json: &str) -> Result<Self, Self::Error> { 
serde_json::from_str(json) } } #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] #[serde(crate = "near_sdk::serde")] pub struct TransferRelay { account_id: AccountId, percent: u8, } #[cfg(test)] mod tests { use super::*; use near_sdk::serde_json::{self, json}; #[test] fn message() { let msg = Message::Accept { refund_percent: 0, transfer_relay: None, }; let json = serde_json::to_string_pretty(&msg).unwrap(); println!("{}", json); let json = json!({ "Accept": {
refund_percent, transfer_relay, } => { println!( "refund_percent={}% transfer_relay={:?}", refund_percent, transfer_relay ) } Message::Panic => panic!("expected Accept message type"), } let msg = Message::Panic; let json = serde_json::to_string_pretty(&msg).unwrap(); println!("{}", json); let msg: Message = serde_json::from_str(&json).unwrap(); println!("{:?}", msg); } }
"refund_percent": 0, "transfer_relay": {"account_id": "account.near", "percent": 50} } }); let json = serde_json::to_string(&json).unwrap(); println!("{}", json); let msg: Message = serde_json::from_str(&json).unwrap(); match msg { Message::Accept {
random
[ { "content": "pub fn deserialize_receipts() -> Vec<Receipt> {\n\n get_created_receipts()\n\n .iter()\n\n .map(|receipt| {\n\n let json = serde_json::to_string_pretty(receipt).unwrap();\n\n println!(\"{}\", json);\n\n let receipt: Receipt = serde_json::from_str(&...
Rust
src/articles/library.rs
tiagoamaro/pickpocket-rust
fc95d1152da1e6526e4d357896a323b3294293d8
use crate::articles::api::API; use crate::articles::article::Article; use crate::articles::inventory::Inventory; use crate::configuration::Configuration; use crate::logger; use rand::seq::SliceRandom; use serde::{Deserialize, Serialize}; use serde_yaml; use std::collections::HashMap; use std::fs::File; use std::path::Path; #[derive(Serialize, Deserialize, Debug)] pub struct Library { read: Inventory, unread: Inventory, } impl Library { pub fn new() -> Library { Library { read: Inventory::new(), unread: Inventory::new(), } } pub fn guarantee_home_folder() { let config = Configuration::default(); match std::fs::create_dir_all(config.home_folder) { Ok(_) => {} Err(error) => { let message = format!("Could not create home folder. Motive: {}", error); logger::log(&message); } }; } fn write_inventory(library: &Library) { let config = Configuration::default(); let library_string = serde_yaml::to_string(library).unwrap(); std::fs::write(config.library_file, library_string).ok(); } fn load() -> Library { let config = Configuration::default(); if !Path::new(&config.library_file).exists() { logger::log("Inventory file not found. 
Creating..."); Library::write_inventory(&Library::new()); File::open(&config.library_file).unwrap(); } let content = std::fs::read_to_string(config.library_file).unwrap(); serde_yaml::from_str::<Library>(&content).unwrap() } fn random_unread_article() -> Option<Article> { let library = Library::load(); let article_ids: Vec<&String> = library.unread.articles.keys().collect(); let mut rng = rand::thread_rng(); let choice = article_ids.choose(&mut rng); match choice { Some(article_id) => { let id = article_id.to_string(); let article = &library.unread.articles[&id]; Some(article.to_owned()) } None => None, } } fn move_to_read(article_id: String) { let mut library = Library::load(); match library.unread.articles.remove(&article_id) { Some(read_article) => { library .read .articles .insert(read_article.id.to_owned(), read_article.to_owned()); } None => {} }; Library::write_inventory(&library); } pub fn status() { let library = Library::load(); logger::log(&format!( "You have {} read articles", &library.read.articles.len() )); logger::log(&format!( "You have {} unread articles", &library.unread.articles.len() )); } pub fn pick(quantity: Option<usize>) { let quantity = quantity.unwrap_or(1); for _ in 0..quantity { match Library::random_unread_article() { Some(article) => { Library::move_to_read(article.id); open::that(article.url).ok(); } None => { logger::log("You have read all articles!"); } }; } } pub fn renew() { let api = API::new(); let library = Library::load(); let read_articles: Vec<&Article> = library.read.articles.values().collect(); api.delete(read_articles); let api_list = api.retrieve()["list"].to_owned(); let api_articles = match serde_json::from_value::<HashMap<String, serde_json::Value>>(api_list) { Ok(articles) => articles, Err(_) => HashMap::new(), }; let new_inventory: HashMap<String, Article> = api_articles .into_iter() .map(|(id, data)| { ( id.to_string(), Article { id: id.to_owned(), url: data["given_url"].as_str().unwrap().to_owned(), title: 
data["resolved_title"].as_str().unwrap().to_owned(), }, ) }) .collect(); let new_library = Library { read: Inventory::new(), unread: Inventory { articles: new_inventory, }, }; Library::write_inventory(&new_library); logger::log("Refreshed library"); } }
use crate::articles::api::API; use crate::articles::article::Article; use crate::articles::inventory::Inventory; use crate::configuration::Configuration; use crate::logger; use rand::seq::SliceRandom; use serde::{Deserialize, Serialize}; use serde_yaml; use std::collections::HashMap; use std::fs::File; use std::path::Path; #[derive(Serialize, Deserialize, Debug)] pub struct Library { read: Inventory, unread: Inventory, } impl Library { pub fn new() -> Library { Library { read: Inventory::new(), unread: Inventory::new(), } } pub fn guarantee_home_folder() { let config = Configuration::default(); match std::fs::create_dir_all(config.home_folder) { Ok(_) => {} Err(error) => { let message = format!("Could not create home folder. Motive: {}", error); logger::log(&message); } }; } fn write_inventory(library: &Library) { let config = Configuration::default(); let library_string = serde_yaml::to_string(library).unwrap(); std::fs::write(config.library_file, library_string).ok(); } fn load() -> Library { let config = Configuration::default(); if !Path::new(&config.library_file).exists() { logger::log("Inventory file not found. Creating..."); Library::write_inventory(&Library::new()); File::open(&config.library_file).unwrap(); } let content = std::fs::read_to_string(config.library_file).unwrap(); serde_yaml::from_str::<Library>(&content).unwrap() } fn random_unread_article() -> Option<Article> { let library = Library::load(); let article_ids: Vec<&String> = library.unread.articles.keys().collect(); let mut rng = rand::thread_rng(); let choice = article_ids.choose(&mut rng); match choice { Some(article_id) => { let id = article_id.to_string(); let article = &library.unread.articles[&id]; Some(article.to_owned()) } None => None, } }
pub fn status() { let library = Library::load(); logger::log(&format!( "You have {} read articles", &library.read.articles.len() )); logger::log(&format!( "You have {} unread articles", &library.unread.articles.len() )); } pub fn pick(quantity: Option<usize>) { let quantity = quantity.unwrap_or(1); for _ in 0..quantity { match Library::random_unread_article() { Some(article) => { Library::move_to_read(article.id); open::that(article.url).ok(); } None => { logger::log("You have read all articles!"); } }; } } pub fn renew() { let api = API::new(); let library = Library::load(); let read_articles: Vec<&Article> = library.read.articles.values().collect(); api.delete(read_articles); let api_list = api.retrieve()["list"].to_owned(); let api_articles = match serde_json::from_value::<HashMap<String, serde_json::Value>>(api_list) { Ok(articles) => articles, Err(_) => HashMap::new(), }; let new_inventory: HashMap<String, Article> = api_articles .into_iter() .map(|(id, data)| { ( id.to_string(), Article { id: id.to_owned(), url: data["given_url"].as_str().unwrap().to_owned(), title: data["resolved_title"].as_str().unwrap().to_owned(), }, ) }) .collect(); let new_library = Library { read: Inventory::new(), unread: Inventory { articles: new_inventory, }, }; Library::write_inventory(&new_library); logger::log("Refreshed library"); } }
fn move_to_read(article_id: String) { let mut library = Library::load(); match library.unread.articles.remove(&article_id) { Some(read_article) => { library .read .articles .insert(read_article.id.to_owned(), read_article.to_owned()); } None => {} }; Library::write_inventory(&library); }
function_block-full_function
[ { "content": "pub fn log(message: &str) -> &str {\n\n println!(\"[Pickpocket] {}\", message);\n\n message\n\n}\n", "file_path": "src/logger.rs", "rank": 0, "score": 50771.27213072561 }, { "content": "use crate::articles::article::Article;\n\nuse serde::{Deserialize, Serialize};\n\nuse ...
Rust
src/event.rs
vstojkovic/sapi-lite
202e96fd1cca47863f5eca2c9b5b82b7ea390d88
use windows as Windows; use Windows::core::{implement, IUnknown}; use Windows::Win32::Foundation::PWSTR; use Windows::Win32::Media::Speech::{ ISpEventSource, ISpNotifySink, ISpObjectToken, ISpRecoResult, SPEI_END_INPUT_STREAM, SPEI_RECOGNITION, SPEI_RESERVED1, SPEI_RESERVED2, SPET_LPARAM_IS_OBJECT, SPET_LPARAM_IS_POINTER, SPET_LPARAM_IS_STRING, SPET_LPARAM_IS_TOKEN, SPET_LPARAM_IS_UNDEFINED, SPEVENT, SPEVENTENUM, SPEVENTLPARAMTYPE, }; use crate::com_util::{next_elem, ComBox, MaybeWeak}; use crate::token::Token; use crate::Result; #[derive(Debug)] pub(crate) enum Event { Recognition(ISpRecoResult), SpeechFinished(u32), OtherObject(IUnknown), OtherToken(Token), OtherString(ComBox<PWSTR>), OtherValue(ComBox<*const std::ffi::c_void>), Other, } impl Event { pub fn from_sapi(sapi_event: SPEVENT) -> Result<Self> { use Windows::core::{Abi, Interface}; let id = SPEVENTENUM(sapi_event._bitfield & 0xffff); let lparam = sapi_event.lParam.0; match SPEVENTLPARAMTYPE(sapi_event._bitfield >> 16) { SPET_LPARAM_IS_OBJECT => { let intf = unsafe { IUnknown::from_abi(lparam as _) }?; match id { SPEI_RECOGNITION => Ok(Self::Recognition(intf.cast()?)), _ => Ok(Self::OtherObject(intf)), } } SPET_LPARAM_IS_POINTER => { Ok(Self::OtherValue(unsafe { ComBox::from_raw(lparam as _) })) } SPET_LPARAM_IS_STRING => Ok(Self::OtherString(unsafe { ComBox::from_raw(PWSTR(lparam as _)) })), SPET_LPARAM_IS_TOKEN => Ok(Self::OtherToken(Token::from_sapi(unsafe { ISpObjectToken::from_abi(lparam as _) }?))), SPET_LPARAM_IS_UNDEFINED => match id { SPEI_END_INPUT_STREAM => Ok(Self::SpeechFinished(sapi_event.ulStreamNum)), _ => Ok(Self::Other), }, _ => panic!("Unrecognized SPEVENTLPARAMTYPE value"), } } } pub(crate) struct EventSource { intf: MaybeWeak<ISpEventSource>, } impl EventSource { pub(crate) fn from_sapi(intf: ISpEventSource) -> Self { Self { intf: MaybeWeak::new(intf), } } pub(crate) fn next_event(&self) -> Result<Option<Event>> { Ok( match unsafe { next_elem(&*self.intf, ISpEventSource::GetEvents) 
}? { Some(sapi_event) => Some(Event::from_sapi(sapi_event)?), None => None, }, ) } fn downgrade(&mut self) { self.intf.set_weak(true); } } #[implement(Windows::Win32::Media::Speech::ISpNotifySink)] pub(crate) struct EventSink { source: EventSource, handler: Box<dyn Fn(Event) -> Result<()>>, } #[allow(non_snake_case)] impl EventSink { pub(crate) fn new<F: Fn(Event) -> Result<()> + 'static>( source: EventSource, handler: F, ) -> Self { Self { source, handler: Box::new(handler), } } pub(crate) fn install(self, interest: Option<&[SPEVENTENUM]>) -> Result<()> { use windows::core::ToImpl; let src_intf = self.source.intf.clone(); let sink_intf: ISpNotifySink = self.into(); unsafe { src_intf.SetNotifySink(&sink_intf) }?; unsafe { Self::to_impl(&sink_intf) }.source.downgrade(); if let Some(flags) = interest { let mut flags_arg = (1u64 << SPEI_RESERVED1.0) | (1u64 << SPEI_RESERVED2.0); for flag in flags { flags_arg |= 1u64 << flag.0; } unsafe { src_intf.SetInterest(flags_arg, flags_arg) }?; } Ok(()) } fn Notify(&self) -> Result<()> { while let Some(event) = self.source.next_event()? { (*self.handler)(event)? } Ok(()) } }
use windows as Windows; use Windows::core::{implement, IUnknown}; use Windows::Win32::Fo
{ source, handler: Box::new(handler), } } pub(crate) fn install(self, interest: Option<&[SPEVENTENUM]>) -> Result<()> { use windows::core::ToImpl; let src_intf = self.source.intf.clone(); let sink_intf: ISpNotifySink = self.into(); unsafe { src_intf.SetNotifySink(&sink_intf) }?; unsafe { Self::to_impl(&sink_intf) }.source.downgrade(); if let Some(flags) = interest { let mut flags_arg = (1u64 << SPEI_RESERVED1.0) | (1u64 << SPEI_RESERVED2.0); for flag in flags { flags_arg |= 1u64 << flag.0; } unsafe { src_intf.SetInterest(flags_arg, flags_arg) }?; } Ok(()) } fn Notify(&self) -> Result<()> { while let Some(event) = self.source.next_event()? { (*self.handler)(event)? } Ok(()) } }
undation::PWSTR; use Windows::Win32::Media::Speech::{ ISpEventSource, ISpNotifySink, ISpObjectToken, ISpRecoResult, SPEI_END_INPUT_STREAM, SPEI_RECOGNITION, SPEI_RESERVED1, SPEI_RESERVED2, SPET_LPARAM_IS_OBJECT, SPET_LPARAM_IS_POINTER, SPET_LPARAM_IS_STRING, SPET_LPARAM_IS_TOKEN, SPET_LPARAM_IS_UNDEFINED, SPEVENT, SPEVENTENUM, SPEVENTLPARAMTYPE, }; use crate::com_util::{next_elem, ComBox, MaybeWeak}; use crate::token::Token; use crate::Result; #[derive(Debug)] pub(crate) enum Event { Recognition(ISpRecoResult), SpeechFinished(u32), OtherObject(IUnknown), OtherToken(Token), OtherString(ComBox<PWSTR>), OtherValue(ComBox<*const std::ffi::c_void>), Other, } impl Event { pub fn from_sapi(sapi_event: SPEVENT) -> Result<Self> { use Windows::core::{Abi, Interface}; let id = SPEVENTENUM(sapi_event._bitfield & 0xffff); let lparam = sapi_event.lParam.0; match SPEVENTLPARAMTYPE(sapi_event._bitfield >> 16) { SPET_LPARAM_IS_OBJECT => { let intf = unsafe { IUnknown::from_abi(lparam as _) }?; match id { SPEI_RECOGNITION => Ok(Self::Recognition(intf.cast()?)), _ => Ok(Self::OtherObject(intf)), } } SPET_LPARAM_IS_POINTER => { Ok(Self::OtherValue(unsafe { ComBox::from_raw(lparam as _) })) } SPET_LPARAM_IS_STRING => Ok(Self::OtherString(unsafe { ComBox::from_raw(PWSTR(lparam as _)) })), SPET_LPARAM_IS_TOKEN => Ok(Self::OtherToken(Token::from_sapi(unsafe { ISpObjectToken::from_abi(lparam as _) }?))), SPET_LPARAM_IS_UNDEFINED => match id { SPEI_END_INPUT_STREAM => Ok(Self::SpeechFinished(sapi_event.ulStreamNum)), _ => Ok(Self::Other), }, _ => panic!("Unrecognized SPEVENTLPARAMTYPE value"), } } } pub(crate) struct EventSource { intf: MaybeWeak<ISpEventSource>, } impl EventSource { pub(crate) fn from_sapi(intf: ISpEventSource) -> Self { Self { intf: MaybeWeak::new(intf), } } pub(crate) fn next_event(&self) -> Result<Option<Event>> { Ok( match unsafe { next_elem(&*self.intf, ISpEventSource::GetEvents) }? 
{ Some(sapi_event) => Some(Event::from_sapi(sapi_event)?), None => None, }, ) } fn downgrade(&mut self) { self.intf.set_weak(true); } } #[implement(Windows::Win32::Media::Speech::ISpNotifySink)] pub(crate) struct EventSink { source: EventSource, handler: Box<dyn Fn(Event) -> Result<()>>, } #[allow(non_snake_case)] impl EventSink { pub(crate) fn new<F: Fn(Event) -> Result<()> + 'static>( source: EventSource, handler: F, ) -> Self { Self
random
[ { "content": "use std::ops::Deref;\n\n\n\nuse windows as Windows;\n\nuse Windows::core::Interface;\n\nuse Windows::Win32::Media::Speech::{SPEI_END_INPUT_STREAM, SPF_ASYNC};\n\n\n\nuse crate::event::{Event, EventSink, EventSource};\n\nuse crate::tts::Speech;\n\nuse crate::Result;\n\n\n\nuse super::Synthesizer;\n...
Rust
src/udp_mux/mod.rs
webrtc-rs/ice
ebdf3e3b6f431f0e5e59ca0be9f61d563743fb45
use std::{collections::HashMap, io::ErrorKind, net::SocketAddr, sync::Arc}; use util::{sync::RwLock, Conn, Error}; use async_trait::async_trait; use tokio::sync::{watch, Mutex}; mod udp_mux_conn; use udp_mux_conn::{UDPMuxConn, UDPMuxConnParams}; #[cfg(test)] mod udp_mux_test; mod socket_addr_ext; use stun::{ attributes::ATTR_USERNAME, message::{is_message as is_stun_message, Message as STUNMessage}, }; use crate::candidate::RECEIVE_MTU; fn normalize_socket_addr(target: &SocketAddr, socket_addr: &SocketAddr) -> SocketAddr { match (target, socket_addr) { (SocketAddr::V4(target_ipv4), SocketAddr::V6(_)) => { let ipv6_mapped = target_ipv4.ip().to_ipv6_mapped(); SocketAddr::new(std::net::IpAddr::V6(ipv6_mapped), target_ipv4.port()) } (_, _) => *target, } } #[async_trait] pub trait UDPMux { async fn close(&self) -> Result<(), Error>; async fn get_conn(self: Arc<Self>, ufrag: &str) -> Result<Arc<dyn Conn + Send + Sync>, Error>; async fn remove_conn_by_ufrag(&self, ufrag: &str); } pub struct UDPMuxParams { conn: Box<dyn Conn + Send + Sync>, } impl UDPMuxParams { pub fn new<C>(conn: C) -> Self where C: Conn + Send + Sync + 'static, { Self { conn: Box::new(conn), } } } pub struct UDPMuxDefault { params: UDPMuxParams, conns: Mutex<HashMap<String, UDPMuxConn>>, address_map: RwLock<HashMap<SocketAddr, UDPMuxConn>>, closed_watch_tx: Mutex<Option<watch::Sender<()>>>, closed_watch_rx: watch::Receiver<()>, } impl UDPMuxDefault { pub fn new(params: UDPMuxParams) -> Arc<Self> { let (closed_watch_tx, closed_watch_rx) = watch::channel(()); let mux = Arc::new(Self { params, conns: Mutex::default(), address_map: RwLock::default(), closed_watch_tx: Mutex::new(Some(closed_watch_tx)), closed_watch_rx: closed_watch_rx.clone(), }); let cloned_mux = Arc::clone(&mux); cloned_mux.start_conn_worker(closed_watch_rx); mux } pub async fn is_closed(&self) -> bool { self.closed_watch_tx.lock().await.is_none() } async fn send_to(&self, buf: &[u8], target: &SocketAddr) -> Result<usize, Error> { 
self.params .conn .send_to(buf, *target) .await .map_err(Into::into) } async fn create_muxed_conn(self: &Arc<Self>, ufrag: &str) -> Result<UDPMuxConn, Error> { let local_addr = self.params.conn.local_addr().await?; let params = UDPMuxConnParams { local_addr, key: ufrag.into(), udp_mux: Arc::clone(self), }; Ok(UDPMuxConn::new(params)) } async fn register_conn_for_address(&self, conn: &UDPMuxConn, addr: SocketAddr) { if self.is_closed().await { return; } let key = conn.key(); { let mut addresses = self.address_map.write(); addresses .entry(addr) .and_modify(|e| { if e.key() != key { e.remove_address(&addr); *e = conn.clone() } }) .or_insert_with(|| conn.clone()); } log::debug!("Registered {} for {}", addr, key); } async fn conn_from_stun_message(&self, buffer: &[u8], addr: &SocketAddr) -> Option<UDPMuxConn> { let (result, message) = { let mut m = STUNMessage::new(); (m.unmarshal_binary(buffer), m) }; match result { Err(err) => { log::warn!("Failed to handle decode ICE from {}: {}", addr, err); None } Ok(_) => { let (attr, found) = message.attributes.get(ATTR_USERNAME); if !found { log::warn!("No username attribute in STUN message from {}", &addr); return None; } let s = match String::from_utf8(attr.value) { Err(err) => { log::warn!( "Failed to decode USERNAME from STUN message as UTF-8: {}", err ); return None; } Ok(s) => s, }; let conns = self.conns.lock().await; let conn = s .split(':') .next() .and_then(|ufrag| conns.get(ufrag)) .map(Clone::clone); conn } } } fn start_conn_worker(self: Arc<Self>, mut closed_watch_rx: watch::Receiver<()>) { tokio::spawn(async move { let mut buffer = [0u8; RECEIVE_MTU]; loop { let loop_self = Arc::clone(&self); let conn = &loop_self.params.conn; tokio::select! 
{ res = conn.recv_from(&mut buffer) => { match res { Ok((len, addr)) => { let conn = { let address_map = loop_self .address_map .read(); address_map.get(&addr).map(Clone::clone) }; let conn = match conn { None if is_stun_message(&buffer) => { loop_self.conn_from_stun_message(&buffer, &addr).await } s @ Some(_) => s, _ => None, }; match conn { None => { log::trace!("Dropping packet from {}", &addr); } Some(conn) => { if let Err(err) = conn.write_packet(&buffer[..len], addr).await { log::error!("Failed to write packet: {}", err); } } } } Err(Error::Io(err)) if err.0.kind() == ErrorKind::TimedOut => continue, Err(err) => { log::error!("Could not read udp packet: {}", err); break; } } } _ = closed_watch_rx.changed() => { return; } } } }); } } #[async_trait] impl UDPMux for UDPMuxDefault { async fn close(&self) -> Result<(), Error> { if self.is_closed().await { return Err(Error::ErrAlreadyClosed); } let mut closed_tx = self.closed_watch_tx.lock().await; if let Some(tx) = closed_tx.take() { let _ = tx.send(()); drop(closed_tx); let old_conns = { let mut conns = self.conns.lock().await; std::mem::take(&mut (*conns)) }; for (_, conn) in old_conns.into_iter() { conn.close(); } { let mut address_map = self.address_map.write(); let _ = std::mem::take(&mut (*address_map)); } } Ok(()) } async fn get_conn(self: Arc<Self>, ufrag: &str) -> Result<Arc<dyn Conn + Send + Sync>, Error> { if self.is_closed().await { return Err(Error::ErrUseClosedNetworkConn); } { let mut conns = self.conns.lock().await; if let Some(conn) = conns.get(ufrag) { return Ok(Arc::new(conn.clone()) as Arc<dyn Conn + Send + Sync>); } let muxed_conn = self.create_muxed_conn(ufrag).await?; let mut close_rx = muxed_conn.close_rx(); let cloned_self = Arc::clone(&self); let cloned_ufrag = ufrag.to_string(); tokio::spawn(async move { let _ = close_rx.changed().await; cloned_self.remove_conn_by_ufrag(&cloned_ufrag).await; }); conns.insert(ufrag.into(), muxed_conn.clone()); Ok(Arc::new(muxed_conn) as Arc<dyn Conn + 
Send + Sync>) } } async fn remove_conn_by_ufrag(&self, ufrag: &str) { let removed_conn = { let mut conns = self.conns.lock().await; conns.remove(ufrag) }; if let Some(conn) = removed_conn { let mut address_map = self.address_map.write(); for address in conn.get_addresses() { address_map.remove(&address); } } } }
use std::{collections::HashMap, io::ErrorKind, net::SocketAddr, sync::Arc}; use util::{sync::RwLock, Conn, Error}; use async_trait::async_trait; use tokio::sync::{watch, Mutex}; mod udp_mux_conn; use udp_mux_conn::{UDPMuxConn, UDPMuxConnParams}; #[cfg(test)] mod udp_mux_test; mod socket_addr_ext; use stun::{ attributes::ATTR_USERNAME, message::{is_message as is_stun_message, Message as STUNMessage}, }; use crate::candidate::RECEIVE_MTU; fn normalize_socket_addr(target: &SocketAddr, socket_addr: &SocketAddr) -> SocketAddr { match (target, socket_addr) { (SocketAddr::V4(target_ipv4), SocketAddr::V6(_)) => { let ipv6_mapped = target_ipv4.ip().to_ipv6_mapped(); SocketAddr::new(std::net::IpAddr::V6(ipv6_mapped), target_ipv4.port()) } (_, _) => *target, } } #[async_trait] pub trait UDPMux { async fn close(&self) -> Result<(), Error>; async fn get_conn(self: Arc<Self>, ufrag: &str) -> Result<Arc<dyn Conn + Send + Sync>, Error>; async fn remove_conn_by_ufrag(&self, ufrag: &str); } pub struct UDPMuxParams { conn: Box<dyn Conn + Send + Sync>, } impl UDPMuxParams { pub fn new<C>(conn: C) -> Self where C: Conn + Send + Sync + 'static, { Self { conn: Box::new(conn), } } } pub struct UDPMuxDefault { params: UDPMuxParams, conns: Mutex<HashMap<String, UDPMuxConn>>, address_map: RwLock<HashMap<SocketAddr, UDPMuxConn>>, closed_watch_tx: Mutex<Option<watch::Sender<()>>>, closed_watch_rx: watch::Receiver<()>, } impl UDPMuxDefault { pub fn new(params: UDPMuxParams) -> Arc<Self> { let (closed_watch_tx, closed_watch_rx) = watch::channel(()); let mux = Arc::new(Self { params, conns: Mutex::default(), address_map: RwLock::default(), closed_watch_tx: Mutex::new(Some(closed_watch_tx)), closed_watch_rx: closed_watch_rx.clone(), }); let cloned_mux = Arc::clone(&mux); cloned_mux.start_conn_worker(closed_watch_rx); mux } pub async fn is_closed(&self) -> bool { self.closed_watch_tx.lock().await.is_none() } async fn send_to(&self, buf: &[u8], target: &SocketAddr) -> Result<usize, Error> { 
self.params .conn .send_to(buf, *target) .await .map_err(Into::into) } async fn create_muxed_conn(self: &Arc<Self>, ufrag: &str) -> Result<UDPMuxConn, Error> { let local_addr = self.params.conn.local_addr().await?; let params = UDPMuxConnParams { local_addr, key: ufrag.into(), udp_mux: Arc::clone(self), }; Ok(UDPMuxConn::new(params)) } async fn register_conn_for_address(&self, conn: &UDPMuxConn, addr: SocketAddr) { if self.is_closed().await { return; } let key = conn.key(); { let mut addresses = self.address_map.write(); addresses .entry(addr) .and_modify(|e| { if e.key() != key { e.remove_address(&addr); *e = conn.clone() } }) .or_insert_with(|| conn.clone()); } log::debug!("Registered {} for {}", addr, key); } async fn conn_from_stun_message(&self, buffer: &[u8], addr: &SocketAddr) -> Option<UDPMuxConn> { let (result, message) = { let mut m = STUNMessage::new(); (m.unmarshal_binary(buffer), m) }; match result { Err(err) => { log::warn!("Failed to handle decode ICE from {}: {}", addr, err); None } Ok(_) => { let (attr, found) = message.attributes.get(ATTR_USERNAME); if !found { log::warn!("No username attribute in STUN message from {}", &addr); return None; } let s = match String::from_utf8(attr.value) { Err(err) => { log::warn!( "Failed to decode USERNAME from STUN message as UTF-8: {}", err ); return None; } Ok(s) => s, }; let conns = self.conns.lock().await; let conn = s .split(':') .next() .and_then(|ufrag| conns.get(ufrag)) .map(Clone::clone); conn } } } fn start_conn_worker(self: Arc<Self>, mut closed_watch_rx: watch::Receiver<()>) { tokio::spawn(async move { let mut buffer = [0u8; RECEIVE_MTU]; loop { let loop_self = Arc::clone(&self); let conn = &loop_self.params.conn; tokio::select! 
{ res = conn.recv_from(&mut buffer) => { match res { Ok((len, addr)) => { let conn = { let address_map = loop_self .address_map .read(); address_map.get(&addr).map(Clone::clone) }; let conn = match conn { None if is_stun_message(&buffer) => { loop_self.conn_from_stun_message(&buffer, &addr).await } s @ Some(_) => s, _ => None, }; match conn { None => { log::trace!("Dropping packet from {}", &addr); } Some(conn) => { if let Err(err) = conn.write_packet(&buffer[..len], addr).await { log::error!("Failed to write packet: {}", err); } } } } Err(Error::Io(err)) if err.0.kind() == ErrorKind::TimedOut => continue, Err(err) => { log::error!("Could not read udp packet: {}", err); break; } } } _ = closed_watch_rx.changed() => { return; } } } }); } } #[async_trait] impl UDPMux for UDPMuxDefault { async fn close(&self) -> Result<(), Error> { if self.is_closed().await { return Err(Error::ErrAlreadyClosed); } let mut closed_tx = self.closed_watch_tx.lock().await; if let Some(tx) = closed_tx.take() { let _ = tx.send(()); drop(closed_tx); let old_conns = { let mut conns = self.conns.lock().await; std::mem::take(&mut (*conns)) };
async fn get_conn(self: Arc<Self>, ufrag: &str) -> Result<Arc<dyn Conn + Send + Sync>, Error> { if self.is_closed().await { return Err(Error::ErrUseClosedNetworkConn); } { let mut conns = self.conns.lock().await; if let Some(conn) = conns.get(ufrag) { return Ok(Arc::new(conn.clone()) as Arc<dyn Conn + Send + Sync>); } let muxed_conn = self.create_muxed_conn(ufrag).await?; let mut close_rx = muxed_conn.close_rx(); let cloned_self = Arc::clone(&self); let cloned_ufrag = ufrag.to_string(); tokio::spawn(async move { let _ = close_rx.changed().await; cloned_self.remove_conn_by_ufrag(&cloned_ufrag).await; }); conns.insert(ufrag.into(), muxed_conn.clone()); Ok(Arc::new(muxed_conn) as Arc<dyn Conn + Send + Sync>) } } async fn remove_conn_by_ufrag(&self, ufrag: &str) { let removed_conn = { let mut conns = self.conns.lock().await; conns.remove(ufrag) }; if let Some(conn) = removed_conn { let mut address_map = self.address_map.write(); for address in conn.get_addresses() { address_map.remove(&address); } } } }
for (_, conn) in old_conns.into_iter() { conn.close(); } { let mut address_map = self.address_map.write(); let _ = std::mem::take(&mut (*address_map)); } } Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn assert_inbound_message_integrity(m: &mut Message, key: &[u8]) -> Result<()> {\n\n let message_integrity_attr = MessageIntegrity(key.to_vec());\n\n Ok(message_integrity_attr.check(m)?)\n\n}\n\n\n\n/// Initiates a stun requests to `server_addr` using conn, reads the response and returns...
Rust
src/header.rs
ssands1/elf2tab
9ba7a8dd3832d4edd1e323ee62a660313ccdf8e0
use std::fmt; use std::io; use std::io::{Read, Seek, SeekFrom, Write}; use std::mem; use std::vec; use util; #[repr(u16)] #[derive(Clone, Copy, Debug)] #[allow(dead_code)] enum TbfHeaderTypes { Main = 1, WriteableFlashRegions = 2, PackageName = 3, PicOption1 = 4, } #[repr(C)] #[derive(Clone, Copy, Debug)] struct TbfHeaderTlv { tipe: TbfHeaderTypes, length: u16, } #[repr(C)] #[derive(Clone, Copy, Debug)] struct TbfHeaderBase { version: u16, header_size: u16, total_size: u32, flags: u32, checksum: u32, } #[repr(C)] #[derive(Clone, Copy, Debug)] struct TbfHeaderMain { base: TbfHeaderTlv, init_fn_offset: u32, protected_size: u32, minimum_ram_size: u32, } #[repr(C)] #[derive(Clone, Copy, Debug)] struct TbfHeaderWriteableFlashRegion { base: TbfHeaderTlv, offset: u32, size: u32, } impl fmt::Display for TbfHeaderBase { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, " version: {:>8} {:>#10X} header_size: {:>8} {:>#10X} total_size: {:>8} {:>#10X} flags: {:>8} {:>#10X} ", self.version, self.version, self.header_size, self.header_size, self.total_size, self.total_size, self.flags, self.flags, ) } } impl fmt::Display for TbfHeaderMain { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, " init_fn_offset: {:>8} {:>#10X} protected_size: {:>8} {:>#10X} minimum_ram_size: {:>8} {:>#10X} ", self.init_fn_offset, self.init_fn_offset, self.protected_size, self.protected_size, self.minimum_ram_size, self.minimum_ram_size, ) } } impl fmt::Display for TbfHeaderWriteableFlashRegion { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, " flash region: offset: {:>8} {:>#10X} size: {:>8} {:>#10X} ", self.offset, self.offset, self.size, self.size, ) } } pub struct TbfHeader { hdr_base: TbfHeaderBase, hdr_main: TbfHeaderMain, hdr_pkg_name_tlv: Option<TbfHeaderTlv>, hdr_wfr: Vec<TbfHeaderWriteableFlashRegion>, package_name: String, package_name_pad: usize, } impl TbfHeader { pub fn new() -> Self { Self { hdr_base: TbfHeaderBase { version: 2, 
header_size: 0, total_size: 0, flags: 0, checksum: 0, }, hdr_main: TbfHeaderMain { base: TbfHeaderTlv { tipe: TbfHeaderTypes::Main, length: (mem::size_of::<TbfHeaderMain>() - mem::size_of::<TbfHeaderTlv>()) as u16, }, init_fn_offset: 0, protected_size: 0, minimum_ram_size: 0, }, hdr_pkg_name_tlv: None, hdr_wfr: Vec::new(), package_name: String::new(), package_name_pad: 0, } } pub fn create( &mut self, minimum_ram_size: u32, writeable_flash_regions: usize, package_name: String, ) -> usize { let mut header_length = mem::size_of::<TbfHeaderBase>() + mem::size_of::<TbfHeaderMain>(); self.package_name_pad = if !package_name.is_empty() { header_length += mem::size_of::<TbfHeaderTlv>() + package_name.len(); let pad = align4needed!(header_length); header_length += pad; pad } else { 0 }; header_length += mem::size_of::<TbfHeaderWriteableFlashRegion>() * writeable_flash_regions; let flags = 0x0000_0001; self.hdr_base.header_size = header_length as u16; self.hdr_base.flags = flags; self.hdr_main.minimum_ram_size = minimum_ram_size; self.package_name = package_name; if !self.package_name.is_empty() { self.hdr_pkg_name_tlv = Some(TbfHeaderTlv { tipe: TbfHeaderTypes::PackageName, length: self.package_name.len() as u16, }); } for _ in 0..writeable_flash_regions { self.hdr_wfr.push(TbfHeaderWriteableFlashRegion { base: TbfHeaderTlv { tipe: TbfHeaderTypes::WriteableFlashRegions, length: 8, }, offset: 0, size: 0, }); } self.generate().expect("No header was generated").get_ref().len() } pub fn set_protected_size(&mut self, protected_size: u32) { self.hdr_main.protected_size = protected_size; } pub fn set_total_size(&mut self, total_size: u32) { self.hdr_base.total_size = total_size; } pub fn set_init_fn_offset(&mut self, init_fn_offset: u32) { self.hdr_main.init_fn_offset = init_fn_offset; } pub fn set_writeable_flash_region_values(&mut self, offset: u32, size: u32) { for wfr in &mut self.hdr_wfr { if wfr.size == 0 { wfr.offset = offset; wfr.size = size; break; } } } pub fn 
generate(&self) -> io::Result<(io::Cursor<vec::Vec<u8>>)> { let mut header_buf = io::Cursor::new(Vec::new()); header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_base) })?; header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_main) })?; if !self.package_name.is_empty() { header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_pkg_name_tlv) })?; header_buf.write_all(self.package_name.as_ref())?; util::do_pad(&mut header_buf, self.package_name_pad)?; } for wfr in &self.hdr_wfr { header_buf.write_all(unsafe { util::as_byte_slice(wfr) })?; } let current_length = header_buf.get_ref().len(); util::do_pad(&mut header_buf, align4needed!(current_length))?; self.inject_checksum(header_buf) } fn inject_checksum( &self, mut header_buf: io::Cursor<vec::Vec<u8>>, ) -> io::Result<(io::Cursor<vec::Vec<u8>>)> { header_buf.seek(SeekFrom::Start(0))?; let mut wordbuf = [0_u8; 4]; let mut checksum: u32 = 0; loop { let count = header_buf.read(&mut wordbuf)?; let mut word = 0; for (i, c) in wordbuf.iter().enumerate().take(count) { word |= u32::from(*c) << (8 * i); } checksum ^= word; if count != 4 { break; } } header_buf.seek(io::SeekFrom::Start(12))?; wordbuf[0] = (checksum & 0xFF) as u8; wordbuf[1] = ((checksum >> 8) & 0xFF) as u8; wordbuf[2] = ((checksum >> 16) & 0xFF) as u8; wordbuf[3] = ((checksum >> 24) & 0xFF) as u8; header_buf.write(&wordbuf)?; header_buf.seek(io::SeekFrom::Start(0))?; Ok(header_buf) } } impl fmt::Display for TbfHeader { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "TBF Header:")?; write!(f, "{}", self.hdr_base)?; write!(f, "{}", self.hdr_main)?; for wfr in &self.hdr_wfr { write!(f, "{}", wfr)?; } Ok(()) } }
use std::fmt; use std::io; use std::io::{Read, Seek, SeekFrom, Write}; use std::mem; use std::vec; use util; #[repr(u16)] #[derive(Clone, Copy, Debug)] #[allow(dead_code)] enum TbfHeaderTypes { Main = 1, WriteableFlashRegions = 2, PackageName = 3, PicOption1 = 4, } #[repr(C)] #[derive(Clone, Copy, Debug)] struct TbfHeaderTlv { tipe: TbfHeaderTypes, length: u16, } #[repr(C)] #[derive(Clone, Copy, Debug)] struct TbfHeaderBase { version: u16, header_size: u16, total_size: u32, flags: u32, checksum: u32, } #[repr(C)] #[derive(Clone, Copy, Debug)] struct TbfHeaderMain { base: TbfHeaderTlv, init_fn_offset: u32, protected_size: u32, minimum_ram_size: u32, } #[repr(C)] #[derive(Clone, Copy, Debug)] struct TbfHeaderWriteableFlashRegion { base: TbfHeaderTlv, offset: u32, size: u32, } impl fmt::Display for TbfHeaderBase { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, " version: {:>8} {:>#10X} header_size: {:>8} {:>#10X} total_size: {:>8} {:>#10X} flags: {:>8} {:>#10X} ", self.version, self.version, self.header_size, self.header_size, self.total_size, self.total_size, self.flags, self.flags, ) } } impl fmt::Display for TbfHeaderMain { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, " init_fn_offset: {:>8} {:>#10X} protected_size: {:>8} {:>#10X} minimum_ram_size: {:>8} {:>#10X} ", self.init_fn_offset, self.init_fn_offset, self.protected_size, self.protected_size, self.minimum_ram_size, self.minimum_ram_size, ) } } impl fmt::Display for TbfHeaderWriteableFlashRegion { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, " flash region: offset: {:>8} {:>#10X} size: {:>8} {:>#10X} ", self.offset, self.offset, self.size, self.size, ) } } pub struct TbfHeader { hdr_base: TbfHeaderBase, hdr_main: TbfHeaderMain, hdr_pkg_name_tlv: Option<TbfHeaderTlv>, hdr_wfr: Vec<TbfHeaderWriteableFlashRegion>, package_name: String, package_name_pad: usize, } impl TbfHeader { pub fn new() -> Self { Self { hdr_base: TbfHeaderBase { version: 2, 
header_size: 0, total_size: 0, flags: 0, checksum: 0, }, hdr_main: TbfHeaderMain { base: TbfHeaderTlv { tipe: TbfHeaderTypes::Main, length: (mem::size_of::<TbfHeaderMain>() - mem::size_of::<TbfHeaderTlv>()) as u16, }, init_fn_offset: 0, protected_size: 0, minimum_ram_size: 0, }, hdr_pkg_name_tlv: None, hdr_wfr: Vec::new(), package_name: String::new(), package_name_pad: 0, } } pub fn create( &mut self, minimum_ram_size: u32, writeable_flash_regions: usize, package_name: String, ) -> usize { let mut header_length = mem::size_of::<TbfHeaderBase>() + mem::size_of::<TbfHeaderMain>(); self.package_name_pad = if !package_name.is_empty() { header_length += mem::size_of::<TbfHeaderTlv>() + package_name.len(); let pad = align4needed!(header_length); header_length += pad; pad } else { 0 };
pub fn set_protected_size(&mut self, protected_size: u32) { self.hdr_main.protected_size = protected_size; } pub fn set_total_size(&mut self, total_size: u32) { self.hdr_base.total_size = total_size; } pub fn set_init_fn_offset(&mut self, init_fn_offset: u32) { self.hdr_main.init_fn_offset = init_fn_offset; } pub fn set_writeable_flash_region_values(&mut self, offset: u32, size: u32) { for wfr in &mut self.hdr_wfr { if wfr.size == 0 { wfr.offset = offset; wfr.size = size; break; } } } pub fn generate(&self) -> io::Result<(io::Cursor<vec::Vec<u8>>)> { let mut header_buf = io::Cursor::new(Vec::new()); header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_base) })?; header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_main) })?; if !self.package_name.is_empty() { header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_pkg_name_tlv) })?; header_buf.write_all(self.package_name.as_ref())?; util::do_pad(&mut header_buf, self.package_name_pad)?; } for wfr in &self.hdr_wfr { header_buf.write_all(unsafe { util::as_byte_slice(wfr) })?; } let current_length = header_buf.get_ref().len(); util::do_pad(&mut header_buf, align4needed!(current_length))?; self.inject_checksum(header_buf) } fn inject_checksum( &self, mut header_buf: io::Cursor<vec::Vec<u8>>, ) -> io::Result<(io::Cursor<vec::Vec<u8>>)> { header_buf.seek(SeekFrom::Start(0))?; let mut wordbuf = [0_u8; 4]; let mut checksum: u32 = 0; loop { let count = header_buf.read(&mut wordbuf)?; let mut word = 0; for (i, c) in wordbuf.iter().enumerate().take(count) { word |= u32::from(*c) << (8 * i); } checksum ^= word; if count != 4 { break; } } header_buf.seek(io::SeekFrom::Start(12))?; wordbuf[0] = (checksum & 0xFF) as u8; wordbuf[1] = ((checksum >> 8) & 0xFF) as u8; wordbuf[2] = ((checksum >> 16) & 0xFF) as u8; wordbuf[3] = ((checksum >> 24) & 0xFF) as u8; header_buf.write(&wordbuf)?; header_buf.seek(io::SeekFrom::Start(0))?; Ok(header_buf) } } impl fmt::Display for TbfHeader { fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result { write!(f, "TBF Header:")?; write!(f, "{}", self.hdr_base)?; write!(f, "{}", self.hdr_main)?; for wfr in &self.hdr_wfr { write!(f, "{}", wfr)?; } Ok(()) } }
header_length += mem::size_of::<TbfHeaderWriteableFlashRegion>() * writeable_flash_regions; let flags = 0x0000_0001; self.hdr_base.header_size = header_length as u16; self.hdr_base.flags = flags; self.hdr_main.minimum_ram_size = minimum_ram_size; self.package_name = package_name; if !self.package_name.is_empty() { self.hdr_pkg_name_tlv = Some(TbfHeaderTlv { tipe: TbfHeaderTypes::PackageName, length: self.package_name.len() as u16, }); } for _ in 0..writeable_flash_regions { self.hdr_wfr.push(TbfHeaderWriteableFlashRegion { base: TbfHeaderTlv { tipe: TbfHeaderTypes::WriteableFlashRegions, length: 8, }, offset: 0, size: 0, }); } self.generate().expect("No header was generated").get_ref().len() }
function_block-function_prefix_line
[ { "content": "pub fn do_pad(output: &mut io::Write, length: usize) -> io::Result<()> {\n\n let mut pad = length;\n\n let zero_buf = [0_u8; 512];\n\n while pad > 0 {\n\n let amount_to_write = cmp::min(zero_buf.len(), pad);\n\n pad -= output.write(&zero_buf[..amount_to_write])?;\n\n }\n\...
Rust
src/solution/string/str_str.rs
smallswan/leetcode-rust
9b8bb3f91bec613de61f1cfdd203dd9eeda23ebe
pub fn str_str(haystack: String, needle: String) -> i32 { let source = haystack.as_bytes(); let target = needle.as_bytes(); let source_offset = 0usize; let source_count = source.len(); let target_offset = 0usize; let target_count = target.len(); let from_index = 0usize; if target_count == 0usize { return 0; } if target_count > source_count { return -1; } let first = target[target_offset]; let max = source_offset + (source_count - target_count); let mut i = source_offset + from_index; while i <= max { while source[i] != first { i += 1; if i <= max { continue; } else { break; } } if i <= max { let mut j = i + 1; let end = j + target_count - 1; let mut k = target_offset + 1; while j < end && source[j] == target[k] { j += 1; k += 1; } if j == end { return (i - source_offset) as i32; } } i += 1; } -1 } pub fn str_str_v2(haystack: String, needle: String) -> i32 { match haystack.find(&needle) { Some(index) => index as i32, None => -1, } } pub fn str_str_v3(haystack: String, needle: String) -> i32 { let (m, n) = (needle.len(), haystack.len()); if m == 0 { return 0; } let haystack_chars = haystack.chars().collect::<Vec<char>>(); let needle_chars = needle.chars().collect::<Vec<char>>(); let mut pi = vec![0; m]; let (mut i, mut j) = (1, 0); while i < m { while j > 0 && (needle_chars[i] != needle_chars[j]) { j = pi[j - 1]; } if needle_chars[i] == needle_chars[j] { j += 1; } pi[i] = j; i += 1; } let (mut i, mut j) = (0, 0); while i < n { while j > 0 && (haystack_chars[i] != needle_chars[j]) { j = pi[j - 1]; } if haystack_chars[i] == needle_chars[j] { j += 1; } if (j == m) { return (i - m + 1) as i32; } i += 1; } -1 } pub fn find_substring(s: String, words: Vec<String>) -> Vec<i32> { use std::collections::HashMap; let mut bytes = s.chars().collect::<Vec<char>>(); let mut result: Vec<i32> = Vec::new(); if s.is_empty() || words.is_empty() { return result; } let mut map: HashMap<String, i32> = HashMap::new(); let one_word = words[0].len(); let words_len = words.len(); for word in 
words { let counter = map.entry(word).or_insert(0); *counter += 1; } for i in 0..one_word { let (mut left, mut right, mut count) = (i, i, 0); let mut tmp_map: HashMap<String, i32> = HashMap::new(); while right + one_word <= s.len() { let w: String = bytes.iter().skip(right).take(one_word).collect(); right += one_word; if !map.contains_key(&w.clone()) { count = 0; left = right; tmp_map.clear(); } else { let w_str = w.clone(); let mut counter = tmp_map.entry(w_str).or_insert(0); *counter += 1; count += 1; while tmp_map.get(&w.clone()).unwrap_or(&0) > map.get(&w.clone()).unwrap_or(&0) { let t_w: String = bytes.iter().skip(left).take(one_word).collect(); count -= 1; let t_w_str = t_w.clone(); let mut counter = tmp_map.entry(t_w_str).or_insert(0); *counter -= 1; left += one_word; } if count == words_len { result.push(left as i32); } } } } result } pub fn knuth_morris_pratt(st: String, pat: String) -> Vec<usize> { if st.is_empty() || pat.is_empty() { return vec![]; } let string = st.into_bytes(); let pattern = pat.into_bytes(); let mut partial = vec![0]; for i in 1..pattern.len() { let mut j = partial[i - 1]; while j > 0 && pattern[j] != pattern[i] { j = partial[j - 1]; } partial.push(if pattern[j] == pattern[i] { j + 1 } else { j }); } let mut ret = vec![]; let mut j = 0; for (i, &c) in string.iter().enumerate() { while j > 0 && c != pattern[j] { j = partial[j - 1]; } if c == pattern[j] { j += 1; } if j == pattern.len() { ret.push(i + 1 - j); j = partial[j - 1]; } } ret } pub fn rabin_karp(target: String, pattern: String) -> Vec<usize> { if target.is_empty() || pattern.is_empty() || pattern.len() > target.len() { return vec![]; } let string: String = (&pattern[0..pattern.len()]).to_string(); let hash_pattern = hash(string.clone()); let mut ret = vec![]; for i in 0..(target.len() - pattern.len() + 1) { let s = (&target[i..(i + pattern.len())]).to_string(); let string_hash = hash(s.clone()); if string_hash == hash_pattern && s == string { ret.push(i); } } ret } fn 
hash(mut s: String) -> u16 { let prime: u16 = 101; let last_char = s .drain(s.len() - 1..) .next() .expect("Failed to get the last char of the string"); let mut res: u16 = 0; for (i, &c) in s.as_bytes().iter().enumerate() { if i == 0 { res = (c as u16 * 256) % prime; } else { res = (((res + c as u16) % 101) * 256) % 101; } } (res + last_char as u16) % prime } use std::cmp::Ordering; pub fn is_subsequence(s: String, t: String) -> bool { let mut s_chars: Vec<char> = s.chars().collect::<Vec<char>>(); let mut t_chars: Vec<char> = t.chars().collect::<Vec<char>>(); let (s_len, t_len) = (s_chars.len(), t_chars.len()); match s_len.cmp(&t_len) { Ordering::Greater => return false, Ordering::Equal => return s == t, Ordering::Less => (), } let (mut i, mut j) = (0, 0); while i < s_len && j < t_len { if s_chars[i] == t_chars[j] { i += 1; } j += 1; } i == s_len } use std::mem; pub fn num_distinct(s: String, t: String) -> i32 { let mut cache = vec![0; t.len() + 1]; let mut temp = vec![0; t.len() + 1]; cache[0] = 1; temp[0] = 1; for c_1 in s.into_bytes() { for (i, c_2) in t.bytes().enumerate() { temp[i + 1] = if c_2 == c_1 { cache[i] + cache[i + 1] } else { cache[i + 1] }; } mem::swap(&mut cache, &mut temp); } cache[t.len()] } pub fn repeated_substring_pattern(s: String) -> bool { let mut t = String::with_capacity(s.len() * 2 - 2); t.push_str(&s[1..]); t.push_str(&s[..s.len() - 1]); t.contains(&s) } #[cfg(test)] mod tests { use super::*; #[test] fn sub_string() { let haystack = String::from("aaacaaab"); let needle = String::from("aaab"); dbg!(str_str(haystack, needle)); let index = knuth_morris_pratt("Rust is a programming language empowering everyone to build reliable and efficient software".to_string(),"everyone".to_string()); println!("{:?}", index); dbg!(is_subsequence("acb".to_string(), "ahbgdc".to_string())); } mod kmp { use super::*; #[test] fn each_letter_matches() { let index = knuth_morris_pratt("aaa".to_string(), "a".to_string()); assert_eq!(index, vec![0, 1, 2]); } 
#[test] fn a_few_separate_matches() { let index = knuth_morris_pratt("abababa".to_string(), "ab".to_string()); assert_eq!(index, vec![0, 2, 4]); } #[test] fn one_match() { let index = knuth_morris_pratt("ABC ABCDAB ABCDABCDABDE".to_string(), "ABCDABD".to_string()); assert_eq!(index, vec![15]); } #[test] fn lots_of_matches() { let index = knuth_morris_pratt("aaabaabaaaaa".to_string(), "aa".to_string()); assert_eq!(index, vec![0, 1, 4, 7, 8, 9, 10]); } #[test] fn lots_of_intricate_matches() { let index = knuth_morris_pratt("ababababa".to_string(), "aba".to_string()); assert_eq!(index, vec![0, 2, 4, 6]); } #[test] fn not_found0() { let index = knuth_morris_pratt("abcde".to_string(), "f".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found1() { let index = knuth_morris_pratt("abcde".to_string(), "ac".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found2() { let index = knuth_morris_pratt("ababab".to_string(), "bababa".to_string()); assert_eq!(index, vec![]); } #[test] fn empty_string() { let index = knuth_morris_pratt("".to_string(), "abcdef".to_string()); assert_eq!(index, vec![]); } } mod rabin_karp { use super::*; #[test] fn hi_hash() { let hash_result = hash("hi".to_string()); assert_eq!(hash_result, 65); } #[test] fn abr_hash() { let hash_result = hash("abr".to_string()); assert_eq!(hash_result, 4); } #[test] fn bra_hash() { let hash_result = hash("bra".to_string()); assert_eq!(hash_result, 30); } #[test] fn each_letter_matches() { let index = rabin_karp("aaa".to_string(), "a".to_string()); assert_eq!(index, vec![0, 1, 2]); } #[test] fn a_few_separate_matches() { let index = rabin_karp("abababa".to_string(), "ab".to_string()); assert_eq!(index, vec![0, 2, 4]); } #[test] fn one_match() { let index = rabin_karp("ABC ABCDAB ABCDABCDABDE".to_string(), "ABCDABD".to_string()); assert_eq!(index, vec![15]); } #[test] fn lots_of_matches() { let index = rabin_karp("aaabaabaaaaa".to_string(), "aa".to_string()); assert_eq!(index, vec![0, 1, 4, 7, 8, 9, 
10]); } #[test] fn lots_of_intricate_matches() { let index = rabin_karp("ababababa".to_string(), "aba".to_string()); assert_eq!(index, vec![0, 2, 4, 6]); } #[test] fn not_found0() { let index = rabin_karp("abcde".to_string(), "f".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found1() { let index = rabin_karp("abcde".to_string(), "ac".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found2() { let index = rabin_karp("ababab".to_string(), "bababa".to_string()); assert_eq!(index, vec![]); } #[test] fn empty_string() { let index = rabin_karp("".to_string(), "abcdef".to_string()); assert_eq!(index, vec![]); } } }
pub fn str_str(haystack: String, needle: String) -> i32 { let source = haystack.as_bytes(); let target = needle.as_bytes(); let source_offset = 0usize; let source_count = source.len(); let target_offset = 0usize; let target_count = target.len(); let from_index = 0usize; if target_count == 0usize { return 0; } if target_count > source_count { return -1; } let first = target[target_offset]; let max = source_offset + (source_count - target_count); let mut i = source_offset + from_index; while i <= max { while source[i] != first { i += 1; if i <= max { continue; } else { break; } } if i <= max { let mut j = i + 1; let end = j + target_count - 1; let mut k = target_offset + 1; while j < end && source[j] == target[k] { j += 1; k += 1; } if j == end { return (i - source_offset) as i32; } } i += 1; } -1 } pub fn str_str_v2(haystack: String, needle: String) -> i32 { match haystack.find(&needle) { Some(index) => index as i32, None => -1, } } pub fn str_str_v3(haystack: String, needle: String) -> i32 { let (m, n) = (needle.len(), haystack.len()); if m == 0 { return 0; } let haystack_chars = haystack.chars().collect::<Vec<char>>(); let needle_chars = needle.chars().collect::<Vec<char>>(); let mut pi = vec![0; m]; let (mut i, mut j) = (1, 0); while i < m { while j > 0 && (needle_chars[i] != needle_chars[j]) { j = pi[j - 1]; } if needle_chars[i] == needle_chars[j] { j += 1; } pi[i] = j; i += 1; } let (mut i, mut j) = (0, 0); while i < n { while j > 0 && (haystack_chars[i] != needle_chars[j]) { j = pi[j - 1]; } if haystack_chars[i] == needle_chars[j] { j += 1; } if (j == m) { return (i - m + 1) as i32; } i += 1; } -1 } pub fn find_substring(s: String, words: Vec<String>) -> Vec<i32> { use std::collections::HashMap; let mut bytes = s.chars().collect::<Vec<char>>(); let mut result: Vec<i32> = Vec::new(); if s.is_empty() || words.is_empty() { return result; } let mut map: HashMap<String, i32> = HashMap::new(); let one_word = words[0].len(); let words_len = words.len(); for word in 
words { let counter = map.entry(word).or_insert(0); *counter += 1; } for i in 0..one_word { let (mut left, mut right, mut count) = (i, i, 0); let mut tmp_map: HashMap<String, i32> = HashMap::new(); while right + one_word <= s.len() { let w: String = bytes.iter().skip(right).take(one_word).collect(); right += one_word; if !map.contains_key(&w.clone()) { count = 0; left = right; tmp_map.clear(); } else { let w_str = w.clone(); let mut counter = tmp_map.entry(w_str).or_insert(0); *counter += 1; count += 1; while tmp_map.get(&w.clone()).unwrap_or(&0) > map.get(&w.clone()).unwrap_or(&0) { let t_w: String = bytes.iter().skip(left).take(one_word).collect(); count -= 1; let t_w_str = t_w.clone(); let mut counter = tmp_map.entry(t_w_str).or_insert(0); *counter -= 1; left += one_word; } if count == words_len { result.push(left as i32); } } } } result } pub fn knuth_morris_pratt(st: String, pat: String) -> Vec<usize> { if st.is_empty() || pat.
pub fn rabin_karp(target: String, pattern: String) -> Vec<usize> { if target.is_empty() || pattern.is_empty() || pattern.len() > target.len() { return vec![]; } let string: String = (&pattern[0..pattern.len()]).to_string(); let hash_pattern = hash(string.clone()); let mut ret = vec![]; for i in 0..(target.len() - pattern.len() + 1) { let s = (&target[i..(i + pattern.len())]).to_string(); let string_hash = hash(s.clone()); if string_hash == hash_pattern && s == string { ret.push(i); } } ret } fn hash(mut s: String) -> u16 { let prime: u16 = 101; let last_char = s .drain(s.len() - 1..) .next() .expect("Failed to get the last char of the string"); let mut res: u16 = 0; for (i, &c) in s.as_bytes().iter().enumerate() { if i == 0 { res = (c as u16 * 256) % prime; } else { res = (((res + c as u16) % 101) * 256) % 101; } } (res + last_char as u16) % prime } use std::cmp::Ordering; pub fn is_subsequence(s: String, t: String) -> bool { let mut s_chars: Vec<char> = s.chars().collect::<Vec<char>>(); let mut t_chars: Vec<char> = t.chars().collect::<Vec<char>>(); let (s_len, t_len) = (s_chars.len(), t_chars.len()); match s_len.cmp(&t_len) { Ordering::Greater => return false, Ordering::Equal => return s == t, Ordering::Less => (), } let (mut i, mut j) = (0, 0); while i < s_len && j < t_len { if s_chars[i] == t_chars[j] { i += 1; } j += 1; } i == s_len } use std::mem; pub fn num_distinct(s: String, t: String) -> i32 { let mut cache = vec![0; t.len() + 1]; let mut temp = vec![0; t.len() + 1]; cache[0] = 1; temp[0] = 1; for c_1 in s.into_bytes() { for (i, c_2) in t.bytes().enumerate() { temp[i + 1] = if c_2 == c_1 { cache[i] + cache[i + 1] } else { cache[i + 1] }; } mem::swap(&mut cache, &mut temp); } cache[t.len()] } pub fn repeated_substring_pattern(s: String) -> bool { let mut t = String::with_capacity(s.len() * 2 - 2); t.push_str(&s[1..]); t.push_str(&s[..s.len() - 1]); t.contains(&s) } #[cfg(test)] mod tests { use super::*; #[test] fn sub_string() { let haystack = 
String::from("aaacaaab"); let needle = String::from("aaab"); dbg!(str_str(haystack, needle)); let index = knuth_morris_pratt("Rust is a programming language empowering everyone to build reliable and efficient software".to_string(),"everyone".to_string()); println!("{:?}", index); dbg!(is_subsequence("acb".to_string(), "ahbgdc".to_string())); } mod kmp { use super::*; #[test] fn each_letter_matches() { let index = knuth_morris_pratt("aaa".to_string(), "a".to_string()); assert_eq!(index, vec![0, 1, 2]); } #[test] fn a_few_separate_matches() { let index = knuth_morris_pratt("abababa".to_string(), "ab".to_string()); assert_eq!(index, vec![0, 2, 4]); } #[test] fn one_match() { let index = knuth_morris_pratt("ABC ABCDAB ABCDABCDABDE".to_string(), "ABCDABD".to_string()); assert_eq!(index, vec![15]); } #[test] fn lots_of_matches() { let index = knuth_morris_pratt("aaabaabaaaaa".to_string(), "aa".to_string()); assert_eq!(index, vec![0, 1, 4, 7, 8, 9, 10]); } #[test] fn lots_of_intricate_matches() { let index = knuth_morris_pratt("ababababa".to_string(), "aba".to_string()); assert_eq!(index, vec![0, 2, 4, 6]); } #[test] fn not_found0() { let index = knuth_morris_pratt("abcde".to_string(), "f".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found1() { let index = knuth_morris_pratt("abcde".to_string(), "ac".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found2() { let index = knuth_morris_pratt("ababab".to_string(), "bababa".to_string()); assert_eq!(index, vec![]); } #[test] fn empty_string() { let index = knuth_morris_pratt("".to_string(), "abcdef".to_string()); assert_eq!(index, vec![]); } } mod rabin_karp { use super::*; #[test] fn hi_hash() { let hash_result = hash("hi".to_string()); assert_eq!(hash_result, 65); } #[test] fn abr_hash() { let hash_result = hash("abr".to_string()); assert_eq!(hash_result, 4); } #[test] fn bra_hash() { let hash_result = hash("bra".to_string()); assert_eq!(hash_result, 30); } #[test] fn each_letter_matches() { let index = 
rabin_karp("aaa".to_string(), "a".to_string()); assert_eq!(index, vec![0, 1, 2]); } #[test] fn a_few_separate_matches() { let index = rabin_karp("abababa".to_string(), "ab".to_string()); assert_eq!(index, vec![0, 2, 4]); } #[test] fn one_match() { let index = rabin_karp("ABC ABCDAB ABCDABCDABDE".to_string(), "ABCDABD".to_string()); assert_eq!(index, vec![15]); } #[test] fn lots_of_matches() { let index = rabin_karp("aaabaabaaaaa".to_string(), "aa".to_string()); assert_eq!(index, vec![0, 1, 4, 7, 8, 9, 10]); } #[test] fn lots_of_intricate_matches() { let index = rabin_karp("ababababa".to_string(), "aba".to_string()); assert_eq!(index, vec![0, 2, 4, 6]); } #[test] fn not_found0() { let index = rabin_karp("abcde".to_string(), "f".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found1() { let index = rabin_karp("abcde".to_string(), "ac".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found2() { let index = rabin_karp("ababab".to_string(), "bababa".to_string()); assert_eq!(index, vec![]); } #[test] fn empty_string() { let index = rabin_karp("".to_string(), "abcdef".to_string()); assert_eq!(index, vec![]); } } }
is_empty() { return vec![]; } let string = st.into_bytes(); let pattern = pat.into_bytes(); let mut partial = vec![0]; for i in 1..pattern.len() { let mut j = partial[i - 1]; while j > 0 && pattern[j] != pattern[i] { j = partial[j - 1]; } partial.push(if pattern[j] == pattern[i] { j + 1 } else { j }); } let mut ret = vec![]; let mut j = 0; for (i, &c) in string.iter().enumerate() { while j > 0 && c != pattern[j] { j = partial[j - 1]; } if c == pattern[j] { j += 1; } if j == pattern.len() { ret.push(i + 1 - j); j = partial[j - 1]; } } ret }
function_block-function_prefixed
[ { "content": "/// 剑指 Offer 58 - II. 左旋转字符串 https://leetcode-cn.com/problems/zuo-xuan-zhuan-zi-fu-chuan-lcof/\n\npub fn reverse_left_words(s: String, n: i32) -> String {\n\n let mut chars: Vec<char> = s.chars().collect();\n\n chars.rotate_left(n as usize);\n\n chars.iter().collect()\n\n}\n\n\n\nuse std:...
Rust
src/enclave_proc/socket.rs
bercarug/aws-nitro-enclaves-cli-1
ab4e03bc37fc7fcf6f98c42d416612976299989a
#![deny(missing_docs)] #![deny(warnings)] use inotify::{EventMask, Inotify, WatchMask}; use log::{debug, warn}; use std::path::{Path, PathBuf}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::thread::{self, JoinHandle}; use crate::common::get_socket_path; use crate::common::{ExitGracefully, NitroCliErrorEnum, NitroCliFailure, NitroCliResult}; use crate::new_nitro_cli_failure; #[derive(Default)] pub struct EnclaveProcSock { socket_path: PathBuf, remove_listener_thread: Option<JoinHandle<()>>, requested_remove: Arc<AtomicBool>, } impl Clone for EnclaveProcSock { fn clone(&self) -> Self { EnclaveProcSock { socket_path: self.socket_path.clone(), remove_listener_thread: None, requested_remove: self.requested_remove.clone(), } } } impl Drop for EnclaveProcSock { fn drop(&mut self) { self.close_mut() .ok_or_exit_with_errno(Some("Failed to drop socket")); } } impl EnclaveProcSock { pub fn new(enclave_id: &str) -> NitroCliResult<Self> { let socket_path = get_socket_path(enclave_id).map_err(|_| { new_nitro_cli_failure!( "Failed to create enclave process socket", NitroCliErrorEnum::SocketPathNotFound ) })?; Ok(EnclaveProcSock { socket_path, remove_listener_thread: None, requested_remove: Arc::new(AtomicBool::new(false)), }) } pub fn get_path(&self) -> &Path { &self.socket_path.as_path() } pub fn set_path(&mut self, socket_path: PathBuf) { self.socket_path = socket_path; } pub fn start_monitoring(&mut self, exit_on_delete: bool) -> NitroCliResult<()> { let path_clone = self.socket_path.clone(); let requested_remove_clone = self.requested_remove.clone(); let mut socket_inotify = Inotify::init().map_err(|e| { new_nitro_cli_failure!( &format!("Failed to initialize socket notifications: {:?}", e), NitroCliErrorEnum::InotifyError ) })?; socket_inotify .add_watch( self.socket_path.as_path(), WatchMask::ATTRIB | WatchMask::DELETE_SELF, ) .map_err(|e| { new_nitro_cli_failure!( &format!("Failed to add watch to inotify: {:?}", e), 
NitroCliErrorEnum::InotifyError ) })?; self.remove_listener_thread = Some(thread::spawn(move || { socket_removal_listener( path_clone, requested_remove_clone, socket_inotify, exit_on_delete, ) })); Ok(()) } fn close_mut(&mut self) -> NitroCliResult<()> { self.requested_remove.store(true, Ordering::SeqCst); if self.socket_path.exists() { std::fs::remove_file(&self.socket_path).map_err(|e| { new_nitro_cli_failure!( &format!( "Failed to remove socket file {:?} from disk: {:?}", self.socket_path, e ), NitroCliErrorEnum::FileOperationFailure ) })?; } if self.remove_listener_thread.is_some() { self.remove_listener_thread .take() .unwrap() .join() .map_err(|e| { new_nitro_cli_failure!( &format!("Failed to join socket notification thread: {:?}", e), NitroCliErrorEnum::ThreadJoinFailure ) })?; } Ok(()) } pub fn close(mut self) -> NitroCliResult<()> { self.close_mut() .map_err(|e| e.add_subaction("Close socket".to_string())) } } fn socket_removal_listener( socket_path: PathBuf, requested_remove: Arc<AtomicBool>, mut socket_inotify: Inotify, exit_on_delete: bool, ) { let mut buffer = [0u8; 4096]; let mut done = false; debug!("Socket file event listener started for {:?}.", socket_path); while !done { let events = socket_inotify .read_events_blocking(&mut buffer) .map_err(|e| { new_nitro_cli_failure!( &format!("Socket removal listener error: {:?}", e), NitroCliErrorEnum::InotifyError ) .set_action("Run Enclave".to_string()) }) .ok_or_exit_with_errno(Some("Failed to read inotify events")); for event in events { if (event.mask.contains(EventMask::ATTRIB) || event.mask.contains(EventMask::DELETE_SELF)) && !socket_path.exists() { if requested_remove.load(Ordering::SeqCst) { debug!("The enclave process socket has deleted itself."); done = true; } else { warn!("The enclave process socket has been deleted!"); if exit_on_delete { std::process::exit(1); } done = true; } } } } debug!("Enclave process socket monitoring is done."); } #[cfg(test)] mod tests { use super::*; use 
std::os::unix::net::UnixListener; use std::process::Command; const DUMMY_ENCLAVE_ID: &str = "i-0000000000000000-enc0123456789012345"; const THREADS_STR: &str = "Threads:"; const WAIT_REMOVE_MILLIS: u64 = 10; fn get_num_threads_from_status_output(status_str: String) -> u32 { let start_idx = status_str.find(THREADS_STR); let mut iter = status_str.chars(); iter.by_ref().nth(start_idx.unwrap() + THREADS_STR.len()); let slice = iter.as_str(); let new_str = slice.to_string(); let end_idx = new_str.find('\n'); let substr = &slice[..end_idx.unwrap()]; substr.parse().unwrap() } #[test] fn test_enclaveprocsock_init() { let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string()); assert!(socket.is_ok()); if let Ok(socket) = socket { assert!(socket .socket_path .as_path() .to_str() .unwrap() .contains("0123456789012345")); assert!(socket.remove_listener_thread.is_none()); assert!(!socket.requested_remove.load(Ordering::SeqCst)); } } #[test] fn test_start_monitoring() { let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string()); assert!(socket.is_ok()); if let Ok(mut socket) = socket { UnixListener::bind(socket.get_path()) .map_err(|e| { new_nitro_cli_failure!( &format!("Failed to bind to socket: {:?}", e), NitroCliErrorEnum::SocketError ) }) .ok_or_exit_with_errno(Some("Error binding")); let result = socket.start_monitoring(false); assert!(result.is_ok()); std::fs::remove_file(&socket.socket_path).unwrap(); std::thread::sleep(std::time::Duration::from_millis(WAIT_REMOVE_MILLIS)); assert!(!socket.requested_remove.load(Ordering::SeqCst)); } } #[test] fn test_close() { let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string()); assert!(socket.is_ok()); let out_cmd0 = Command::new("cat") .arg(format!("/proc/{}/status", std::process::id())) .output() .expect("Failed to run cat"); let out0 = std::str::from_utf8(&out_cmd0.stdout).unwrap(); let crt_num_threads0 = get_num_threads_from_status_output(out0.to_string()); if let Ok(mut socket) = socket { let _ = 
UnixListener::bind(socket.get_path()) .map_err(|e| { new_nitro_cli_failure!( &format!("Failed to bind to socket: {:?}", e), NitroCliErrorEnum::SocketError ) }) .ok_or_exit_with_errno(Some("Error binding")); let result = socket.start_monitoring(true); assert!(result.is_ok()); let result = socket.close_mut(); assert!(result.is_ok()); assert!(socket.requested_remove.load(Ordering::SeqCst)); } let out_cmd1 = Command::new("cat") .arg(format!("/proc/{}/status", std::process::id())) .output() .expect("Failed to run cat"); let out1 = std::str::from_utf8(&out_cmd1.stdout).unwrap(); let crt_num_threads1 = get_num_threads_from_status_output(out1.to_string()); assert_eq!(crt_num_threads0, crt_num_threads1); } }
#![deny(missing_docs)] #![deny(warnings)] use inotify::{EventMask, Inotify, WatchMask}; use log::{debug, warn}; use std::path::{Path, PathBuf}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::thread::{self, JoinHandle}; use crate::common::get_socket_path; use crate::common::{ExitGracefully, NitroCliErrorEnum, NitroCliFailure, NitroCliResult}; use crate::new_nitro_cli_failure; #[derive(Default)] pub struct EnclaveProcSock { socket_path: PathBuf, remove_listener_thread: Option<JoinHandle<()>>, requested_remove: Arc<AtomicBool>, } impl Clone for EnclaveProcSock { fn clone(&self) -> Self { EnclaveProcSock { socket_path: self.socket_path.clone(), remove_listener_thread: None, requested_remove: self.requested_remove.clone(), } } } impl Drop for EnclaveProcSock { fn drop(&mut self) { self.close_mut() .ok_or_exit_with_errno(Some("Failed to drop socket")); } } impl EnclaveProcSock { pub fn new(enclave_id: &str) -> NitroCliResult<Self> { let socket_path = get_socket_path(enclave_id).map_err(|_| { new_nitro_cli_failure!( "Failed to create enclave process socket", NitroCliErrorEnum::SocketPathNotFound ) })?; Ok(EnclaveProcSock { socket_path, remove_listener_thread: None, requested_remove: Arc::new(AtomicBool::new(false)), }) } pub fn get_path(&self) -> &Path { &self.socket_path.as_path() } pub fn set_path(&mut self, socket_path: PathBuf) { self.socket_path = socket_path; } pub fn start_monitoring(&mut self, exit_on_delete: bool) -> NitroCliResult<()> { let path_clone = self.socket_path.clone(); let requested_remove_clone = self.requested_remove.clone(); let mut socket_inotify = Inotify::init().map_err(|e| { new_nitro_cli_failure!( &format!("Failed to initialize socket notifications: {:?}", e), NitroCliErrorEnum::InotifyError ) })?; socket_inotify .add_watch( self.socket_path.as_path(), WatchMask::ATTRIB | WatchMask::DELETE_SELF, ) .map_err(|e| { new_nitro_cli_failure!( &format!("Failed to add watch to inotify: {:?}", e), 
NitroCliErrorEnum::InotifyError ) })?; self.remove_listener_thread = Some(thread::spawn(move || { socket_removal_listener( path_clone, requested_remove_clone, socket_inotify, exit_on_delete, ) })); Ok(()) } fn close_mut(&mut self) -> NitroCliResult<()> { self.requested_remove.store(true, Ordering::SeqCst); if self.socket_path.exists() { std::fs::remove_file(&self.socket_path).map_err(|e| { new_nitro_cli_failure!( &format!( "Failed to remove socket file {:?} from disk: {:?}", self.socket_path, e ), NitroCliErrorEnum::FileOperationFailure ) })?; } if self.remove_listener_thread.is_some() { self.remove_listener_thread .take() .unwrap() .join() .map_err(|e| { new_nitro_cli_failure!( &format!("Failed to join socket notification thread: {:?}", e), NitroCliErrorEnum::ThreadJoinFailure ) })?; } Ok(()) } pub fn close(mut self) -> NitroCliResult<()> { self.close_mut() .map_err(|e| e.add_subaction("Close socket".to_string())) } }
#[cfg(test)] mod tests { use super::*; use std::os::unix::net::UnixListener; use std::process::Command; const DUMMY_ENCLAVE_ID: &str = "i-0000000000000000-enc0123456789012345"; const THREADS_STR: &str = "Threads:"; const WAIT_REMOVE_MILLIS: u64 = 10; fn get_num_threads_from_status_output(status_str: String) -> u32 { let start_idx = status_str.find(THREADS_STR); let mut iter = status_str.chars(); iter.by_ref().nth(start_idx.unwrap() + THREADS_STR.len()); let slice = iter.as_str(); let new_str = slice.to_string(); let end_idx = new_str.find('\n'); let substr = &slice[..end_idx.unwrap()]; substr.parse().unwrap() } #[test] fn test_enclaveprocsock_init() { let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string()); assert!(socket.is_ok()); if let Ok(socket) = socket { assert!(socket .socket_path .as_path() .to_str() .unwrap() .contains("0123456789012345")); assert!(socket.remove_listener_thread.is_none()); assert!(!socket.requested_remove.load(Ordering::SeqCst)); } } #[test] fn test_start_monitoring() { let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string()); assert!(socket.is_ok()); if let Ok(mut socket) = socket { UnixListener::bind(socket.get_path()) .map_err(|e| { new_nitro_cli_failure!( &format!("Failed to bind to socket: {:?}", e), NitroCliErrorEnum::SocketError ) }) .ok_or_exit_with_errno(Some("Error binding")); let result = socket.start_monitoring(false); assert!(result.is_ok()); std::fs::remove_file(&socket.socket_path).unwrap(); std::thread::sleep(std::time::Duration::from_millis(WAIT_REMOVE_MILLIS)); assert!(!socket.requested_remove.load(Ordering::SeqCst)); } } #[test] fn test_close() { let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string()); assert!(socket.is_ok()); let out_cmd0 = Command::new("cat") .arg(format!("/proc/{}/status", std::process::id())) .output() .expect("Failed to run cat"); let out0 = std::str::from_utf8(&out_cmd0.stdout).unwrap(); let crt_num_threads0 = get_num_threads_from_status_output(out0.to_string()); if let 
Ok(mut socket) = socket { let _ = UnixListener::bind(socket.get_path()) .map_err(|e| { new_nitro_cli_failure!( &format!("Failed to bind to socket: {:?}", e), NitroCliErrorEnum::SocketError ) }) .ok_or_exit_with_errno(Some("Error binding")); let result = socket.start_monitoring(true); assert!(result.is_ok()); let result = socket.close_mut(); assert!(result.is_ok()); assert!(socket.requested_remove.load(Ordering::SeqCst)); } let out_cmd1 = Command::new("cat") .arg(format!("/proc/{}/status", std::process::id())) .output() .expect("Failed to run cat"); let out1 = std::str::from_utf8(&out_cmd1.stdout).unwrap(); let crt_num_threads1 = get_num_threads_from_status_output(out1.to_string()); assert_eq!(crt_num_threads0, crt_num_threads1); } }
fn socket_removal_listener( socket_path: PathBuf, requested_remove: Arc<AtomicBool>, mut socket_inotify: Inotify, exit_on_delete: bool, ) { let mut buffer = [0u8; 4096]; let mut done = false; debug!("Socket file event listener started for {:?}.", socket_path); while !done { let events = socket_inotify .read_events_blocking(&mut buffer) .map_err(|e| { new_nitro_cli_failure!( &format!("Socket removal listener error: {:?}", e), NitroCliErrorEnum::InotifyError ) .set_action("Run Enclave".to_string()) }) .ok_or_exit_with_errno(Some("Failed to read inotify events")); for event in events { if (event.mask.contains(EventMask::ATTRIB) || event.mask.contains(EventMask::DELETE_SELF)) && !socket_path.exists() { if requested_remove.load(Ordering::SeqCst) { debug!("The enclave process socket has deleted itself."); done = true; } else { warn!("The enclave process socket has been deleted!"); if exit_on_delete { std::process::exit(1); } done = true; } } } } debug!("Enclave process socket monitoring is done."); }
function_block-full_function
[ { "content": "/// Get the path to the Unix socket owned by an enclave process which also owns the enclave with the given ID.\n\npub fn get_socket_path(enclave_id: &str) -> NitroCliResult<PathBuf> {\n\n // The full enclave ID is \"i-(...)-enc<enc_id>\" and we want to extract only <enc_id>.\n\n let tokens: ...
Rust
src/component/sum_of_best.rs
AntyMew/livesplit-core
b59a45ddd85c914121d279df38ad5b0e581bd512
use Timer; use time::formatter::{Accuracy, Regular, TimeFormatter}; use serde_json::{to_writer, Result}; use analysis::sum_of_segments::calculate_best; use std::io::Write; use std::borrow::Cow; use settings::{Color, Field, Gradient, SettingsDescription, Value}; use super::DEFAULT_INFO_TEXT_GRADIENT; #[derive(Default, Clone)] pub struct Component { settings: Settings, } #[derive(Clone, Serialize, Deserialize)] #[serde(default)] pub struct Settings { pub background: Gradient, pub label_color: Option<Color>, pub value_color: Option<Color>, pub accuracy: Accuracy, } impl Default for Settings { fn default() -> Self { Self { background: DEFAULT_INFO_TEXT_GRADIENT, label_color: None, value_color: None, accuracy: Accuracy::Seconds, } } } #[derive(Serialize, Deserialize)] pub struct State { pub background: Gradient, pub label_color: Option<Color>, pub value_color: Option<Color>, pub text: String, pub time: String, } impl State { pub fn write_json<W>(&self, writer: W) -> Result<()> where W: Write, { to_writer(writer, self) } } impl Component { pub fn new() -> Self { Default::default() } pub fn with_settings(settings: Settings) -> Self { Self { settings, ..Default::default() } } pub fn settings(&self) -> &Settings { &self.settings } pub fn settings_mut(&mut self) -> &mut Settings { &mut self.settings } pub fn name(&self) -> Cow<str> { "Sum of Best Segments".into() } pub fn state(&self, timer: &Timer) -> State { let time = calculate_best( timer.run().segments(), false, true, timer.current_timing_method(), ); State { background: self.settings.background, label_color: self.settings.label_color, value_color: self.settings.value_color, text: String::from("Sum of Best Segments"), time: Regular::with_accuracy(self.settings.accuracy) .format(time) .to_string(), } } pub fn settings_description(&self) -> SettingsDescription { SettingsDescription::with_fields(vec![ Field::new("Background".into(), self.settings.background.into()), Field::new("Label Color".into(), 
self.settings.label_color.into()), Field::new("Value Color".into(), self.settings.value_color.into()), Field::new("Accuracy".into(), self.settings.accuracy.into()), ]) } pub fn set_value(&mut self, index: usize, value: Value) { match index { 0 => self.settings.background = value.into(), 1 => self.settings.label_color = value.into(), 2 => self.settings.value_color = value.into(), 3 => self.settings.accuracy = value.into(), _ => panic!("Unsupported Setting Index"), } } }
use Timer; use time::formatter::{Accuracy, Regular, TimeFormatter}; use serde_json::{to_writer, Result}; use analysis::sum_of_segments::calculate_best; use std::io::Write; use std::borrow::Cow; use settings::{Color, Field, Gradient, SettingsDescription, Value}; use super::DEFAULT_INFO_TEXT_GRADIENT; #[derive(Default, Clone)] pub struct Component { settings: Settings, } #[derive(Clone, Serialize, Deserialize)] #[serde(default)] pub struct Settings { pub background: Gradient, pub label_color: Option<Color>, pub value_color: Option<Color>, pub accuracy: Accuracy, } impl Default for Settings { fn default() -> Self { Self { background: DEFAULT_INFO_TEXT_GRADIENT, label_color: None, value_color: None, accuracy: Accuracy::Seconds, } } } #[derive(Serialize, Deserialize)] pub struct State { pub background: Gradient, pub label_color: Option<Color>, pub value_color: Option<Color>, pub text: String, pub time: String, } impl State { pub fn write_json<W>(&self, writer: W) -> Result<()> where W: Write, { to_writer(writer, self) } } impl Component { pub fn new() -> Self { Default::default() } pub fn with_settings(settings: Settings) -> Self { Self { settings, ..Default::default() } } pub fn settings(&self) -> &Settings { &self.settings } pub fn settings_mut(&mut self) -> &mut Settings { &mut self.settings } pub fn name(&self) -> Cow<str> { "Sum of Best Segments".into() } pub fn state(&self, timer: &Timer) -> State { let time =
; State { background: self.settings.background, label_color: self.settings.label_color, value_color: self.settings.value_color, text: String::from("Sum of Best Segments"), time: Regular::with_accuracy(self.settings.accuracy) .format(time) .to_string(), } } pub fn settings_description(&self) -> SettingsDescription { SettingsDescription::with_fields(vec![ Field::new("Background".into(), self.settings.background.into()), Field::new("Label Color".into(), self.settings.label_color.into()), Field::new("Value Color".into(), self.settings.value_color.into()), Field::new("Accuracy".into(), self.settings.accuracy.into()), ]) } pub fn set_value(&mut self, index: usize, value: Value) { match index { 0 => self.settings.background = value.into(), 1 => self.settings.label_color = value.into(), 2 => self.settings.value_color = value.into(), 3 => self.settings.accuracy = value.into(), _ => panic!("Unsupported Setting Index"), } } }
calculate_best( timer.run().segments(), false, true, timer.current_timing_method(), )
call_expression
[ { "content": "pub fn write<W: Write>(mut writer: W, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n write!(\n\n writer,\n\n \"{}\",\n\n r#\"#ifndef LIVESPLIT_CORE_H\n\n#define LIVESPLIT_CORE_H\n\n\n\n#ifdef __cplusplus\n\n#define restrict __restrict\n\nnamespace LiveSplit {\n\next...
Rust
weight-gen/src/main.rs
ImbueNetwork/open-runtime-module-library
c439a50e01944aedeef33231e0824a17ed1813bc
use clap::{App, Arg}; use serde::{Deserialize, Serialize}; use std::io::Read; #[derive(Serialize, Deserialize, Default, Debug, Clone)] pub struct BenchData { pub name: String, pub weight: u64, pub reads: u32, pub writes: u32, pub comments: Vec<String>, } #[derive(Serialize, Default, Debug, Clone)] struct TemplateData { pub header: String, pub benchmarks: Vec<BenchData>, } #[derive(Clone, Copy)] struct UnderscoreHelper; impl handlebars::HelperDef for UnderscoreHelper { fn call<'reg: 'rc, 'rc>( &self, h: &handlebars::Helper, _: &handlebars::Handlebars, _: &handlebars::Context, _rc: &mut handlebars::RenderContext, out: &mut dyn handlebars::Output, ) -> handlebars::HelperResult { use handlebars::JsonRender; let param = h.param(0).expect("Unable to retrieve param from handlebars helper"); let underscore_param = underscore(param.value().render()); out.write(&underscore_param)?; Ok(()) } } fn underscore<Number>(i: Number) -> String where Number: std::string::ToString, { let mut s = String::new(); let i_str = i.to_string(); let a = i_str.chars().rev().enumerate(); for (idx, val) in a { if idx != 0 && idx % 3 == 0 { s.insert(0, '_'); } s.insert(0, val); } s } #[derive(Clone, Copy)] struct JoinHelper; impl handlebars::HelperDef for JoinHelper { fn call<'reg: 'rc, 'rc>( &self, h: &handlebars::Helper, _: &handlebars::Handlebars, _: &handlebars::Context, _rc: &mut handlebars::RenderContext, out: &mut dyn handlebars::Output, ) -> handlebars::HelperResult { use handlebars::JsonRender; let param = h.param(0).expect("Unable to retrieve param from handlebars helper"); let value = param.value(); let joined = if value.is_array() { value .as_array() .unwrap() .iter() .map(|v| v.render()) .collect::<Vec<String>>() .join(" ") } else { value.render() }; out.write(&joined)?; Ok(()) } } fn parse_stdio() -> Option<Vec<BenchData>> { let mut buffer = String::new(); std::io::stdin() .read_to_string(&mut buffer) .expect("Unable to read from stdin"); let file_path = buffer 
.split_ascii_whitespace() .last() .expect("Last line must be JOSN file path."); let reader = std::fs::File::open(std::path::Path::new(file_path)).unwrap(); serde_json::from_reader(&reader).ok() } fn main() { let matches = App::new("Weight Generator") .version("1.0") .author("Laminar Developers <hello@laminar.one>") .about("Generate rust weight info source file from JSON data generated by ORML bencher") .arg( Arg::with_name("input") .short("i") .long("input") .value_name("PATH") .help("Input JSON data file") .takes_value(true), ) .arg( Arg::with_name("template") .short("t") .long("template") .value_name("PATH") .help("Handlebars template file") .takes_value(true), ) .arg( Arg::with_name("header") .long("header") .value_name("PATH") .help("Header file path") .takes_value(true), ) .arg( Arg::with_name("output") .short("o") .long("output") .value_name("PATH") .help("Output file path") .takes_value(true), ) .get_matches(); let mut benchmarks: Vec<BenchData> = { if let Some(input_path) = matches.value_of("input") { let reader = std::fs::File::open(std::path::Path::new(&input_path.trim())).unwrap(); serde_json::from_reader(&reader).expect("Could not parse JSON data") } else { parse_stdio().expect("Could not parse JSON data") } }; benchmarks.iter_mut().for_each(|x| { x.comments.sort(); }); let mut handlebars = handlebars::Handlebars::new(); handlebars.register_helper("underscore", Box::new(UnderscoreHelper)); handlebars.register_helper("join", Box::new(JoinHelper)); handlebars.register_escape_fn(|s| -> String { s.to_string() }); let header = { if let Some(path) = matches.value_of("header") { ::std::fs::read_to_string(&path).expect("Header file not found") } else { String::from("") } }; let hbs_data = TemplateData { header, benchmarks }; const DEFAULT_TEMPLATE: &str = include_str!("./template.hbs"); let template = { if let Some(path) = matches.value_of("template") { ::std::fs::read_to_string(&path).expect("Template file not found") } else { String::from(DEFAULT_TEMPLATE) } 
}; if let Some(path) = matches.value_of("output") { let mut output_file = ::std::fs::File::create(&path).expect("Could not create output file"); handlebars .render_template_to_write(&template, &hbs_data, &mut output_file) .expect("Unable to render template"); println!(); println!("Weights file `{}` was generated.", path); } else { let template_string = handlebars .render_template(&template, &hbs_data) .expect("Unable to render template"); println!("{}", template_string); } }
use clap::{App, Arg}; use serde::{Deserialize, Serialize}; use std::io::Read; #[derive(Serialize, Deserialize, Default, Debug, Clone)] pub struct BenchData { pub name: String, pub weight: u64, pub reads: u32, pub writes: u32, pub comments: Vec<String>, } #[derive(Serialize, Default, Debug, Clone)] struct TemplateData { pub header: String, pub benchmarks: Vec<BenchData>, } #[derive(Clone, Copy)] struct UnderscoreHelper; impl handlebars::HelperDef for UnderscoreHelper { fn call<'reg: 'rc, 'rc>( &self, h: &handlebars::Helper, _: &handlebars::Handlebars, _: &handlebars::Context, _rc: &mut handlebars::RenderContext, out: &mut dyn handlebars::Output, ) -> handlebars::HelperResult { use handlebars::JsonRender; let param = h.param(0).expect("Unable to retrieve param from handlebars helper"); let underscore_param = underscore(param.value().render()); out.write(&underscore_param)?; Ok(()) } } fn underscore<Number>(i: Number) -> String where Number: std::string::ToString, { let mut s = String::new(); let i_str = i.to_string(); let a = i_str.chars().rev().enumerate(); for (idx, val) in a { if idx != 0 && idx % 3 == 0 { s.insert(0, '_'); } s.insert(0, val); } s } #[derive(Clone, Copy)] struct JoinHelper; impl handlebars::HelperDef for JoinHelper {
} fn parse_stdio() -> Option<Vec<BenchData>> { let mut buffer = String::new(); std::io::stdin() .read_to_string(&mut buffer) .expect("Unable to read from stdin"); let file_path = buffer .split_ascii_whitespace() .last() .expect("Last line must be JOSN file path."); let reader = std::fs::File::open(std::path::Path::new(file_path)).unwrap(); serde_json::from_reader(&reader).ok() } fn main() { let matches = App::new("Weight Generator") .version("1.0") .author("Laminar Developers <hello@laminar.one>") .about("Generate rust weight info source file from JSON data generated by ORML bencher") .arg( Arg::with_name("input") .short("i") .long("input") .value_name("PATH") .help("Input JSON data file") .takes_value(true), ) .arg( Arg::with_name("template") .short("t") .long("template") .value_name("PATH") .help("Handlebars template file") .takes_value(true), ) .arg( Arg::with_name("header") .long("header") .value_name("PATH") .help("Header file path") .takes_value(true), ) .arg( Arg::with_name("output") .short("o") .long("output") .value_name("PATH") .help("Output file path") .takes_value(true), ) .get_matches(); let mut benchmarks: Vec<BenchData> = { if let Some(input_path) = matches.value_of("input") { let reader = std::fs::File::open(std::path::Path::new(&input_path.trim())).unwrap(); serde_json::from_reader(&reader).expect("Could not parse JSON data") } else { parse_stdio().expect("Could not parse JSON data") } }; benchmarks.iter_mut().for_each(|x| { x.comments.sort(); }); let mut handlebars = handlebars::Handlebars::new(); handlebars.register_helper("underscore", Box::new(UnderscoreHelper)); handlebars.register_helper("join", Box::new(JoinHelper)); handlebars.register_escape_fn(|s| -> String { s.to_string() }); let header = { if let Some(path) = matches.value_of("header") { ::std::fs::read_to_string(&path).expect("Header file not found") } else { String::from("") } }; let hbs_data = TemplateData { header, benchmarks }; const DEFAULT_TEMPLATE: &str = 
include_str!("./template.hbs"); let template = { if let Some(path) = matches.value_of("template") { ::std::fs::read_to_string(&path).expect("Template file not found") } else { String::from(DEFAULT_TEMPLATE) } }; if let Some(path) = matches.value_of("output") { let mut output_file = ::std::fs::File::create(&path).expect("Could not create output file"); handlebars .render_template_to_write(&template, &hbs_data, &mut output_file) .expect("Unable to render template"); println!(); println!("Weights file `{}` was generated.", path); } else { let template_string = handlebars .render_template(&template, &hbs_data) .expect("Unable to render template"); println!("{}", template_string); } }
fn call<'reg: 'rc, 'rc>( &self, h: &handlebars::Helper, _: &handlebars::Handlebars, _: &handlebars::Context, _rc: &mut handlebars::RenderContext, out: &mut dyn handlebars::Output, ) -> handlebars::HelperResult { use handlebars::JsonRender; let param = h.param(0).expect("Unable to retrieve param from handlebars helper"); let value = param.value(); let joined = if value.is_array() { value .as_array() .unwrap() .iter() .map(|v| v.render()) .collect::<Vec<String>>() .join(" ") } else { value.render() }; out.write(&joined)?; Ok(()) }
function_block-full_function
[ { "content": "/// Increment used weight\n\npub fn using(weight: Weight) {\n\n\tMETER.with(|v| {\n\n\t\tlet mut meter = v.borrow_mut();\n\n\t\tmeter.used_weight = meter.used_weight.saturating_add(weight);\n\n\t})\n\n}\n\n\n", "file_path": "weight-meter/src/meter_std.rs", "rank": 0, "score": 271039.88...
Rust
src/lib.rs
kneasle/goldilocks-json-fmt
41a84437e933c67365e874b73405d4d1fc935849
/*! [![crates.io](https://img.shields.io/crates/v/goldilocks-json-fmt.svg)](https://crates.io/crates/goldilocks-json-fmt) A simple, portable, fast, pretty JSON formatter. No dependencies or unsafe code. The resulting JSON strikes a balance between 'too wide' (i.e. minified, all on one line) and 'too tall' (e.g. `serde_json`'s `pretty_print`). You give the formatter a line limit (defaults to 100 chars), and it keeps things as wide as possible whilst preserving that limit. Sometimes the limit is impossible to achieve (e.g. you have a string that's longer than the line limit), in which case the formatter will break the limit by as little as possible. The throughput of the Goldilocks formatter is about 300MB/s, which should be enough for most situations. It's about as fast as you can get without cracking out the big guns and using SIMD, which would break both simplicity and portability. # Example: ``` // Ewww so horrible let json = r#"{"test/cases/87s-at-back.toml":{"comps":[{"length":32, "string":"sHsH","avg_score":-0.45625},{"length":64,"string":"sHWsMH", "avg_score":-0.44062495},{"length":96,"string":"WMsWMHsH","avg_score": -0.33124998},{"length":96,"string":"WsMHWsMH","avg_score":-0.33124998}, {"length":96,"string":"sHWMsWMH","avg_score":-0.33124995},{"length":64, "string":"WsMHsH","avg_score":-0.284375}]}}"#; let perfect_json = goldilocks_json_fmt::format(&json).expect("Invalid JSON"); assert_eq!( &perfect_json, // So perfect! 
r#"{ "test/cases/87s-at-back.toml": { "comps": [ { "length": 32, "string": "sHsH", "avg_score": -0.45625 }, { "length": 64, "string": "sHWsMH", "avg_score": -0.44062495 }, { "length": 96, "string": "WMsWMHsH", "avg_score": -0.33124998 }, { "length": 96, "string": "WsMHWsMH", "avg_score": -0.33124998 }, { "length": 96, "string": "sHWMsWMH", "avg_score": -0.33124995 }, { "length": 64, "string": "WsMHsH", "avg_score": -0.284375 } ] } }"#, ); ``` */ mod formatting; mod parsing; pub fn format(s: &str) -> Result<String> { format_with_config(s, &Config::default()) } pub fn format_within_width(s: &str, width_limit: usize) -> Result<String> { let config = Config { width_limit, ..Config::default() }; format_with_config(s, &config) } pub fn format_with_config(s: &str, config: &Config) -> Result<String> { Node::parse(s).map(|ast| ast.format(config)) } #[derive(Debug, Clone)] pub struct Config { pub width_limit: usize, pub indent_width: usize, } impl Default for Config { fn default() -> Self { Self { width_limit: 100, indent_width: 2, } } } #[derive(Debug, Clone, PartialEq, Eq)] pub enum Error { /* Misc parsing */ ExpectedXsFoundY(Item, usize, &'static [Expected], char), ExpectedXsFoundEof(Item, &'static [Expected]), InvalidTrailingWhitespace(usize, char), /* String parsing */ EofDuringString(usize), InvalidEscape(usize, char), InvalidHexEscape(usize, usize, char), ControlCharInString(usize, char), /* Number parsing */ LeadingZero(usize), SecondDecimalPoint(usize), InvalidCharInExponent(usize, char), EmptyExponent(usize), } pub type Result<T> = std::result::Result<T, Error>; #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Item { TopLevelValue, Literal(&'static str), Number, Array(usize), Object(usize), } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Expected { Key, Value, Char(char), Digit, } impl Error { fn expected_xs_found( item: Item, expected: &'static [Expected], v: Option<(usize, char)>, ) -> Self { match v { Some((idx, c)) => Error::ExpectedXsFoundY(item, 
idx, expected, c), None => Error::ExpectedXsFoundEof(item, expected), } } } #[derive(Debug, Clone, PartialEq, Eq)] struct Node<'source> { unsplit_width: usize, kind: NodeKind<'source>, } #[derive(Debug, Clone, PartialEq, Eq)] enum NodeKind<'source> { Atom(&'source str), Array(Vec<Node<'source>>), Object(Vec<(&'source str, Node<'source>)>), } impl<'source> Node<'source> { fn new_atom(s: &'source str) -> Self { Self { unsplit_width: s.len(), kind: NodeKind::Atom(s), } } }
/*! [![crates.io](https://img.shields.io/crates/v/goldilocks-json-fmt.svg)](https://crates.io/crates/goldilocks-json-fmt) A simple, portable, fast, pretty JSON formatter. No dependencies or unsafe code. The resulting JSON strikes a balance between 'too wide' (i.e. minified, all on one line) and 'too tall' (e.g. `serde_json`'s `pretty_print`). You give the formatter a line limit (defaults to 100 chars), and it keeps things as wide as possible whilst preserving that limit. Sometimes the limit is impossible to achieve (e.g. you have a string that's longer than the line limit), in which case the formatter will break the limit by as little as possible. The throughput of the Goldilocks formatter is about 300MB/s, which should be enough for most situations. It's about as fast as you can get without cracking out the big guns and using SIMD, which would break both simplicity and portability. # Example: ``` // Ewww so horrible let json = r#"{"test/cases/87s-at-back.toml":{"comps":[{"length":32, "string":"sHsH","avg_score":-0.45625},{"length":64,"string":"sHWsMH", "avg_score":-0.44062495},{"length":96,"string":"WMsWMHsH","avg_score": -0.33124998},{"length":96,"string":"WsMHWsMH","avg_score":-0.33124998}, {"length":96,"string":"sHWMsWMH","avg_score":-0.33124995},{"length":64, "string":"WsMHsH","avg_score":-0.284375}]}}"#; let perfect_json = goldilocks_json_fmt::format(&json).expect("Invalid JSON"); assert_eq!( &perfect_json, // So perfect! 
r#"{ "test/cases/87s-at-back.toml": { "comps": [ { "length": 32, "string": "sHsH", "avg_score": -0.45625 }, { "length": 64, "string": "sHWsMH", "avg_score": -0.44062495 }, { "length": 96, "string": "WMsWMHsH", "avg_score": -0.33124998 }, { "length": 96, "string": "WsMHWsMH", "avg_score": -0.33124998 }, { "length": 96, "string": "sHWMsWMH", "avg_score": -0.33124995 }, { "length": 64, "string": "WsMHsH", "avg_score": -0.284375 } ] } }"#, ); ``` */ mod formatting; mod parsing; pub fn format(s: &str) -> Result<String> { format_with_config(s, &Config::default()) } pub fn format_within_width(s: &str, width_limit: usize) -> Result<String> { let config = Config { width_limit, ..Config::default() }; format_with_config(s, &config) } pub fn format_with_config(s: &str, config: &Config) -> Result<String> { Node::parse(s).map(|ast| ast.format(config)) } #[derive(Debug, Clone)] pub struct Config { pub width_limit: usize, pub indent_width: usize, } impl Default for Config { fn default() -> Self { Self { width_limit: 100, indent_width: 2, } } } #[derive(Debug, Clone, PartialEq, Eq)] pub enum Error { /* Misc parsing */ ExpectedXsFoundY(Item, usize, &'static [Expected], char), ExpectedXsFoundEof(Item, &'static [Expected]), InvalidTrailingWhitespace(usize, char), /* String parsing */ EofDuringString(usize), InvalidEscape(usize, char), InvalidHexEscape(usize, usize, char), ControlCharInStr
py, PartialEq, Eq)] pub enum Expected { Key, Value, Char(char), Digit, } impl Error { fn expected_xs_found( item: Item, expected: &'static [Expected], v: Option<(usize, char)>, ) -> Self { match v { Some((idx, c)) => Error::ExpectedXsFoundY(item, idx, expected, c), None => Error::ExpectedXsFoundEof(item, expected), } } } #[derive(Debug, Clone, PartialEq, Eq)] struct Node<'source> { unsplit_width: usize, kind: NodeKind<'source>, } #[derive(Debug, Clone, PartialEq, Eq)] enum NodeKind<'source> { Atom(&'source str), Array(Vec<Node<'source>>), Object(Vec<(&'source str, Node<'source>)>), } impl<'source> Node<'source> { fn new_atom(s: &'source str) -> Self { Self { unsplit_width: s.len(), kind: NodeKind::Atom(s), } } }
ing(usize, char), /* Number parsing */ LeadingZero(usize), SecondDecimalPoint(usize), InvalidCharInExponent(usize, char), EmptyExponent(usize), } pub type Result<T> = std::result::Result<T, Error>; #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Item { TopLevelValue, Literal(&'static str), Number, Array(usize), Object(usize), } #[derive(Debug, Clone, Co
random
[ { "content": "/// Attempt to parse the chars in `iter` as an string, **assuming that the initial `\"` has\n\n/// been consumed**. This returns a string slice **from the JSON source code**, i.e. the fully\n\n/// escaped string complete with the enclosing `\"`s. We do not attempt to decode the string, we\n\n///...
Rust
crates/wasi-common/cap-std-sync/src/dir.rs
dheaton-arm/wasmtime
86611d3bbc92b781ed136dcda7cdba9ec2c1cbee
use crate::file::{filetype_from, File}; use cap_fs_ext::{DirEntryExt, DirExt, MetadataExt, SystemTimeSpec}; use std::any::Any; use std::path::{Path, PathBuf}; use system_interface::fs::GetSetFdFlags; use wasi_common::{ dir::{ReaddirCursor, ReaddirEntity, WasiDir}, file::{FdFlags, FileType, Filestat, OFlags, WasiFile}, Error, ErrorExt, }; pub struct Dir(cap_std::fs::Dir); impl Dir { pub fn from_cap_std(dir: cap_std::fs::Dir) -> Self { Dir(dir) } pub fn open_file_( &self, symlink_follow: bool, path: &str, oflags: OFlags, read: bool, write: bool, fdflags: FdFlags, ) -> Result<File, Error> { use cap_fs_ext::{FollowSymlinks, OpenOptionsFollowExt}; let mut opts = cap_std::fs::OpenOptions::new(); if oflags.contains(OFlags::CREATE | OFlags::EXCLUSIVE) { opts.create_new(true); opts.write(true); } else if oflags.contains(OFlags::CREATE) { opts.create(true); opts.write(true); } if oflags.contains(OFlags::TRUNCATE) { opts.truncate(true); } if read { opts.read(true); } if write { opts.write(true); } else { opts.read(true); } if fdflags.contains(FdFlags::APPEND) { opts.append(true); } if symlink_follow { opts.follow(FollowSymlinks::Yes); } else { opts.follow(FollowSymlinks::No); } if fdflags.intersects( wasi_common::file::FdFlags::DSYNC | wasi_common::file::FdFlags::SYNC | wasi_common::file::FdFlags::RSYNC, ) { return Err(Error::not_supported().context("SYNC family of FdFlags")); } let mut f = self.0.open_with(Path::new(path), &opts)?; if fdflags.contains(wasi_common::file::FdFlags::NONBLOCK) { let set_fd_flags = f.new_set_fd_flags(system_interface::fs::FdFlags::NONBLOCK)?; f.set_fd_flags(set_fd_flags)?; } Ok(File::from_cap_std(f)) } pub fn open_dir_(&self, symlink_follow: bool, path: &str) -> Result<Self, Error> { let d = if symlink_follow { self.0.open_dir(Path::new(path))? } else { self.0.open_dir_nofollow(Path::new(path))? 
}; Ok(Dir::from_cap_std(d)) } pub fn rename_(&self, src_path: &str, dest_dir: &Self, dest_path: &str) -> Result<(), Error> { self.0 .rename(Path::new(src_path), &dest_dir.0, Path::new(dest_path))?; Ok(()) } pub fn hard_link_( &self, src_path: &str, target_dir: &Self, target_path: &str, ) -> Result<(), Error> { let src_path = Path::new(src_path); let target_path = Path::new(target_path); self.0.hard_link(src_path, &target_dir.0, target_path)?; Ok(()) } } #[async_trait::async_trait] impl WasiDir for Dir { fn as_any(&self) -> &dyn Any { self } async fn open_file( &self, symlink_follow: bool, path: &str, oflags: OFlags, read: bool, write: bool, fdflags: FdFlags, ) -> Result<Box<dyn WasiFile>, Error> { let f = self.open_file_(symlink_follow, path, oflags, read, write, fdflags)?; Ok(Box::new(f)) } async fn open_dir(&self, symlink_follow: bool, path: &str) -> Result<Box<dyn WasiDir>, Error> { let d = self.open_dir_(symlink_follow, path)?; Ok(Box::new(d)) } async fn create_dir(&self, path: &str) -> Result<(), Error> { self.0.create_dir(Path::new(path))?; Ok(()) } async fn readdir( &self, cursor: ReaddirCursor, ) -> Result<Box<dyn Iterator<Item = Result<ReaddirEntity, Error>> + Send>, Error> { let dir_meta = self.0.dir_metadata()?; let rd = vec![ { let name = ".".to_owned(); Ok((FileType::Directory, dir_meta.ino(), name)) }, { let name = "..".to_owned(); Ok((FileType::Directory, dir_meta.ino(), name)) }, ] .into_iter() .chain({ let entries = self.0.entries()?.map(|entry| { let entry = entry?; let meta = entry.full_metadata()?; let inode = meta.ino(); let filetype = filetype_from(&meta.file_type()); let name = entry .file_name() .into_string() .map_err(|_| Error::illegal_byte_sequence().context("filename"))?; Ok((filetype, inode, name)) }); #[cfg(windows)] let entries = entries.filter(|entry: &Result<_, wasi_common::Error>| { use winapi::shared::winerror::{ERROR_ACCESS_DENIED, ERROR_SHARING_VIOLATION}; if let Err(err) = entry { if let Some(err) = 
err.downcast_ref::<std::io::Error>() { if err.raw_os_error() == Some(ERROR_SHARING_VIOLATION as i32) || err.raw_os_error() == Some(ERROR_ACCESS_DENIED as i32) { return false; } } } true }); entries }) .enumerate() .map(|(ix, r)| match r { Ok((filetype, inode, name)) => Ok(ReaddirEntity { next: ReaddirCursor::from(ix as u64 + 1), filetype, inode, name, }), Err(e) => Err(e), }) .skip(u64::from(cursor) as usize); Ok(Box::new(rd)) } async fn symlink(&self, src_path: &str, dest_path: &str) -> Result<(), Error> { self.0.symlink(src_path, dest_path)?; Ok(()) } async fn remove_dir(&self, path: &str) -> Result<(), Error> { self.0.remove_dir(Path::new(path))?; Ok(()) } async fn unlink_file(&self, path: &str) -> Result<(), Error> { self.0.remove_file_or_symlink(Path::new(path))?; Ok(()) } async fn read_link(&self, path: &str) -> Result<PathBuf, Error> { let link = self.0.read_link(Path::new(path))?; Ok(link) } async fn get_filestat(&self) -> Result<Filestat, Error> { let meta = self.0.dir_metadata()?; Ok(Filestat { device_id: meta.dev(), inode: meta.ino(), filetype: filetype_from(&meta.file_type()), nlink: meta.nlink(), size: meta.len(), atim: meta.accessed().map(|t| Some(t.into_std())).unwrap_or(None), mtim: meta.modified().map(|t| Some(t.into_std())).unwrap_or(None), ctim: meta.created().map(|t| Some(t.into_std())).unwrap_or(None), }) } async fn get_path_filestat( &self, path: &str, follow_symlinks: bool, ) -> Result<Filestat, Error> { let meta = if follow_symlinks { self.0.metadata(Path::new(path))? } else { self.0.symlink_metadata(Path::new(path))? 
}; Ok(Filestat { device_id: meta.dev(), inode: meta.ino(), filetype: filetype_from(&meta.file_type()), nlink: meta.nlink(), size: meta.len(), atim: meta.accessed().map(|t| Some(t.into_std())).unwrap_or(None), mtim: meta.modified().map(|t| Some(t.into_std())).unwrap_or(None), ctim: meta.created().map(|t| Some(t.into_std())).unwrap_or(None), }) } async fn rename( &self, src_path: &str, dest_dir: &dyn WasiDir, dest_path: &str, ) -> Result<(), Error> { let dest_dir = dest_dir .as_any() .downcast_ref::<Self>() .ok_or(Error::badf().context("failed downcast to cap-std Dir"))?; self.rename_(src_path, dest_dir, dest_path) } async fn hard_link( &self, src_path: &str, target_dir: &dyn WasiDir, target_path: &str, ) -> Result<(), Error> { let target_dir = target_dir .as_any() .downcast_ref::<Self>() .ok_or(Error::badf().context("failed downcast to cap-std Dir"))?; self.hard_link_(src_path, target_dir, target_path) } async fn set_times( &self, path: &str, atime: Option<wasi_common::SystemTimeSpec>, mtime: Option<wasi_common::SystemTimeSpec>, follow_symlinks: bool, ) -> Result<(), Error> { if follow_symlinks { self.0.set_times( Path::new(path), convert_systimespec(atime), convert_systimespec(mtime), )?; } else { self.0.set_symlink_times( Path::new(path), convert_systimespec(atime), convert_systimespec(mtime), )?; } Ok(()) } } fn convert_systimespec(t: Option<wasi_common::SystemTimeSpec>) -> Option<SystemTimeSpec> { match t { Some(wasi_common::SystemTimeSpec::Absolute(t)) => Some(SystemTimeSpec::Absolute(t)), Some(wasi_common::SystemTimeSpec::SymbolicNow) => Some(SystemTimeSpec::SymbolicNow), None => None, } } #[cfg(test)] mod test { use super::Dir; use cap_std::ambient_authority; #[test] fn scratch_dir() { let tempdir = tempfile::Builder::new() .prefix("cap-std-sync") .tempdir() .expect("create temporary dir"); let preopen_dir = cap_std::fs::Dir::open_ambient_dir(tempdir.path(), ambient_authority()) .expect("open ambient temporary dir"); let preopen_dir = 
Dir::from_cap_std(preopen_dir); run(wasi_common::WasiDir::open_dir(&preopen_dir, false, ".")) .expect("open the same directory via WasiDir abstraction"); } #[cfg(not(windows))] #[test] fn readdir() { use std::collections::HashMap; use wasi_common::dir::{ReaddirCursor, ReaddirEntity, WasiDir}; use wasi_common::file::{FdFlags, FileType, OFlags}; fn readdir_into_map(dir: &dyn WasiDir) -> HashMap<String, ReaddirEntity> { let mut out = HashMap::new(); for readdir_result in run(dir.readdir(ReaddirCursor::from(0))).expect("readdir succeeds") { let entity = readdir_result.expect("readdir entry is valid"); out.insert(entity.name.clone(), entity); } out } let tempdir = tempfile::Builder::new() .prefix("cap-std-sync") .tempdir() .expect("create temporary dir"); let preopen_dir = cap_std::fs::Dir::open_ambient_dir(tempdir.path(), ambient_authority()) .expect("open ambient temporary dir"); let preopen_dir = Dir::from_cap_std(preopen_dir); let entities = readdir_into_map(&preopen_dir); assert_eq!( entities.len(), 2, "should just be . and .. in empty dir: {:?}", entities ); assert!(entities.get(".").is_some()); assert!(entities.get("..").is_some()); run(preopen_dir.open_file( false, "file1", OFlags::CREATE, true, false, FdFlags::empty(), )) .expect("create file1"); let entities = readdir_into_map(&preopen_dir); assert_eq!(entities.len(), 3, "should be ., .., file1 {:?}", entities); assert_eq!( entities.get(".").expect(". entry").filetype, FileType::Directory ); assert_eq!( entities.get("..").expect(".. 
entry").filetype, FileType::Directory ); assert_eq!( entities.get("file1").expect("file1 entry").filetype, FileType::RegularFile ); } fn run<F: std::future::Future>(future: F) -> F::Output { use std::pin::Pin; use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker}; let mut f = Pin::from(Box::new(future)); let waker = dummy_waker(); let mut cx = Context::from_waker(&waker); match f.as_mut().poll(&mut cx) { Poll::Ready(val) => return val, Poll::Pending => { panic!("Cannot wait on pending future: must enable wiggle \"async\" future and execute on an async Store") } } fn dummy_waker() -> Waker { return unsafe { Waker::from_raw(clone(5 as *const _)) }; unsafe fn clone(ptr: *const ()) -> RawWaker { assert_eq!(ptr as usize, 5); const VTABLE: RawWakerVTable = RawWakerVTable::new(clone, wake, wake_by_ref, drop); RawWaker::new(ptr, &VTABLE) } unsafe fn wake(ptr: *const ()) { assert_eq!(ptr as usize, 5); } unsafe fn wake_by_ref(ptr: *const ()) { assert_eq!(ptr as usize, 5); } unsafe fn drop(ptr: *const ()) { assert_eq!(ptr as usize, 5); } } } }
use crate::file::{filetype_from, File}; use cap_fs_ext::{DirEntryExt, DirExt, MetadataExt, SystemTimeSpec}; use std::any::Any; use std::path::{Path, PathBuf}; use system_interface::fs::GetSetFdFlags; use wasi_common::{ dir::{ReaddirCursor, ReaddirEntity, WasiDir}, file::{FdFlags, FileType, Filestat, OFlags, WasiFile}, Error, ErrorExt, }; pub struct Dir(cap_std::fs::Dir); impl Dir { pub fn from_cap_std(dir: cap_std::fs::Dir) -> Self { Dir(dir) } pub fn open_file_( &self, symlink_follow: bool, path: &str, oflags: OFlags, read: bool, write: bool, fdflags: FdFlags, ) -> Result<File, Error> { use cap_fs_ext::{FollowSymlinks, OpenOptionsFollowExt}; let mut opts = cap_std::fs::OpenOptions::new(); if oflags.contains(OFlags::CREATE | OFlags::EXCLUSIVE) { opts.create_new(true); opts.write(true); } else if oflags.contains(OFlags::CREATE) { opts.create(true); opts.write(true); } if oflags.contains(OFlags::TRUNCATE) { opts.truncate(true); } if read { opts.read(true); } if write { opts.write(true); } else { opts.read(true); } if fdflags.contains(FdFlags::APPEND) { opts.append(true); } if symlink_follow { opts.follow(FollowSymlinks::Yes); } else { opts.follow(FollowSymlinks::No); } if fdflags.intersects( wasi_common::file::FdFlags::DSYNC | wasi_common::file::FdFlags::SYNC | wasi_common::file::FdFlags::RSYNC, ) { return Err(Error::not_supported().context("SYNC family of FdFlags")); } let mut f = self.0.open_with(Path::new(path), &opts)?; if fdflags.contains(wasi_common::file::FdFlags::NONBLOCK) { let set_fd_flags = f.new_set_fd_flags(system_interface::fs::FdFlags::NONBLOCK)?; f.set_fd_flags(set_fd_flags)?; } Ok(File::from_cap_std(f)) } pub fn open_dir_(&self, symlink_follow: bool, path: &str) -> Result<Self, Error> { let d = if symlink_follow { self.0.open_dir(Path::new(path))? } else { self.0.open_dir_nofollow(Path::new(path))? 
}; Ok(Dir::from_cap_std(d)) } pub fn rename_(&self, src_path: &str, dest_dir: &Self, dest_path: &str) -> Result<(), Error> { self.0 .rename(Path::new(src_path), &dest_dir.0, Path::new(dest_path))?; Ok(()) } pub fn hard_link_( &self, src_path: &str, target_dir: &Self, target_path: &str, ) -> Result<(), Error> { let src_path = Path::new(src_path); let target_path = Path::new(target_path); self.0.hard_link(src_path, &target_dir.0, target_path)?; Ok(()) } } #[async_trait::async_trait] impl WasiDir for Dir { fn as_any(&self) -> &dyn Any { self } async fn open_file( &self, symlink_follow: bool, path: &str, oflags: OFlags, read: bool, write: bool, fdflags: FdFlags, ) -> Result<Box<dyn WasiFile>, Error> { let f = self.open_file_(symlink_follow, path, oflags, read, write, fdflags)?; Ok(Box::new(f)) } async fn open_dir(&self, symlink_follow: bool, path: &str) -> Result<Box<dyn WasiDir>, Error> { let d = self.open_dir_(symlink_follow, path)?; Ok(Box::new(d)) } async fn create_dir(&self, path: &str) -> Result<(), Error> { self.0.create_dir(Path::new(path))?; Ok(()) } async fn readdir( &self, cursor: ReaddirCursor, ) -> Result<Box<dyn Iterator<Item = Result<ReaddirEntity, Error>> + Send>, Error> { let dir_meta = self.0.dir_metadata()?; let rd = vec![ { let name = ".".to_owned(); Ok((FileType::Directory, dir_meta.ino(), name)) }, { let name = "..".to_owned(); Ok((FileType::Directory, dir_meta.ino(), name)) }, ] .into_iter() .chain({ let entries = self.0.entries()?.map(|entry| { let entry = entry?; let meta = entry.full_metadata()?; let inode = meta.ino(); let filetype = filetype_from(&meta.file_type()); let name = entry .file_name() .into_string() .map_err(|_| Error::illegal_byte_sequence().context("filename"))?; Ok((filetype, inode, name)) }); #[cfg(windows)] let entries = entries.filter(|entry: &Result<_, wasi_common::Error>| { use winapi::shared::winerror::{ERROR_ACCESS_DENIED, ERROR_SHARING_VIOLATION}; if let Err(err) = entry { if let Some(err) = 
err.downcast_ref::<std::io::Error>() { if err.raw_os_error() == Some(ERROR_SHARING_VIOLATION as i32) || err.raw_os_error() == Some(ERROR_ACCESS_DENIED as i32) { return false; } } } true }); entries }) .enumerate() .map(|(ix, r)| match r { Ok((filetype, inode, name)) => Ok(ReaddirEntity { next: ReaddirCursor::from(ix as u64 + 1), filetype, inode, name, }), Err(e) => Err(e), }) .skip(u64::from(cursor) as usize); Ok(Box::new(rd)) } async fn symlink(&self, src_path: &str, dest_path: &str) -> Result<(), Error> { self.0.symlink(src_path, dest_path)?; Ok(()) } async fn remove_dir(&self, path: &str) -> Result<(), Error> { self.0.remove_dir(Path::new(path))?; Ok(()) } async fn unlink_file(&self, path: &str) -> Result<(), Error> { self.0.remove_file_or_symlink(Path::new(path))?; Ok(()) } async fn read_link(&self, path: &str) -> Result<PathBuf, Error> { let link = self.0.read_link(Path::new(path))?; Ok(link) } async fn get_filestat(&self) -> Result<Filestat, Error> { let meta = self.0.dir_metadata()?; Ok(Filestat { device_id: meta.dev(), inode: meta.ino(), filetype: filetype_from(&meta.file_type()), nlink: meta.nlink(), size: meta.len(), atim: meta.accessed().map(|t| Some(t.into_std())).unwrap_or(None), mtim: meta.modified().map(|t| Some(t.into_std())).unwrap_or(None), ctim: meta.created().map(|t| Some(t.into_std())).unwrap_or(None), }) } async fn get_path_filestat( &self, path: &str, follow_symlinks: bool, ) -> Result<Filestat, Error> { let meta = if follow_symlinks { self.0.metadata(Path::new(path))? } else { self.0.symlink_metadata(Path::new(path))? };
} async fn rename( &self, src_path: &str, dest_dir: &dyn WasiDir, dest_path: &str, ) -> Result<(), Error> { let dest_dir = dest_dir .as_any() .downcast_ref::<Self>() .ok_or(Error::badf().context("failed downcast to cap-std Dir"))?; self.rename_(src_path, dest_dir, dest_path) } async fn hard_link( &self, src_path: &str, target_dir: &dyn WasiDir, target_path: &str, ) -> Result<(), Error> { let target_dir = target_dir .as_any() .downcast_ref::<Self>() .ok_or(Error::badf().context("failed downcast to cap-std Dir"))?; self.hard_link_(src_path, target_dir, target_path) } async fn set_times( &self, path: &str, atime: Option<wasi_common::SystemTimeSpec>, mtime: Option<wasi_common::SystemTimeSpec>, follow_symlinks: bool, ) -> Result<(), Error> { if follow_symlinks { self.0.set_times( Path::new(path), convert_systimespec(atime), convert_systimespec(mtime), )?; } else { self.0.set_symlink_times( Path::new(path), convert_systimespec(atime), convert_systimespec(mtime), )?; } Ok(()) } } fn convert_systimespec(t: Option<wasi_common::SystemTimeSpec>) -> Option<SystemTimeSpec> { match t { Some(wasi_common::SystemTimeSpec::Absolute(t)) => Some(SystemTimeSpec::Absolute(t)), Some(wasi_common::SystemTimeSpec::SymbolicNow) => Some(SystemTimeSpec::SymbolicNow), None => None, } } #[cfg(test)] mod test { use super::Dir; use cap_std::ambient_authority; #[test] fn scratch_dir() { let tempdir = tempfile::Builder::new() .prefix("cap-std-sync") .tempdir() .expect("create temporary dir"); let preopen_dir = cap_std::fs::Dir::open_ambient_dir(tempdir.path(), ambient_authority()) .expect("open ambient temporary dir"); let preopen_dir = Dir::from_cap_std(preopen_dir); run(wasi_common::WasiDir::open_dir(&preopen_dir, false, ".")) .expect("open the same directory via WasiDir abstraction"); } #[cfg(not(windows))] #[test] fn readdir() { use std::collections::HashMap; use wasi_common::dir::{ReaddirCursor, ReaddirEntity, WasiDir}; use wasi_common::file::{FdFlags, FileType, OFlags}; fn 
readdir_into_map(dir: &dyn WasiDir) -> HashMap<String, ReaddirEntity> { let mut out = HashMap::new(); for readdir_result in run(dir.readdir(ReaddirCursor::from(0))).expect("readdir succeeds") { let entity = readdir_result.expect("readdir entry is valid"); out.insert(entity.name.clone(), entity); } out } let tempdir = tempfile::Builder::new() .prefix("cap-std-sync") .tempdir() .expect("create temporary dir"); let preopen_dir = cap_std::fs::Dir::open_ambient_dir(tempdir.path(), ambient_authority()) .expect("open ambient temporary dir"); let preopen_dir = Dir::from_cap_std(preopen_dir); let entities = readdir_into_map(&preopen_dir); assert_eq!( entities.len(), 2, "should just be . and .. in empty dir: {:?}", entities ); assert!(entities.get(".").is_some()); assert!(entities.get("..").is_some()); run(preopen_dir.open_file( false, "file1", OFlags::CREATE, true, false, FdFlags::empty(), )) .expect("create file1"); let entities = readdir_into_map(&preopen_dir); assert_eq!(entities.len(), 3, "should be ., .., file1 {:?}", entities); assert_eq!( entities.get(".").expect(". entry").filetype, FileType::Directory ); assert_eq!( entities.get("..").expect(".. 
entry").filetype, FileType::Directory ); assert_eq!( entities.get("file1").expect("file1 entry").filetype, FileType::RegularFile ); } fn run<F: std::future::Future>(future: F) -> F::Output { use std::pin::Pin; use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker}; let mut f = Pin::from(Box::new(future)); let waker = dummy_waker(); let mut cx = Context::from_waker(&waker); match f.as_mut().poll(&mut cx) { Poll::Ready(val) => return val, Poll::Pending => { panic!("Cannot wait on pending future: must enable wiggle \"async\" future and execute on an async Store") } } fn dummy_waker() -> Waker { return unsafe { Waker::from_raw(clone(5 as *const _)) }; unsafe fn clone(ptr: *const ()) -> RawWaker { assert_eq!(ptr as usize, 5); const VTABLE: RawWakerVTable = RawWakerVTable::new(clone, wake, wake_by_ref, drop); RawWaker::new(ptr, &VTABLE) } unsafe fn wake(ptr: *const ()) { assert_eq!(ptr as usize, 5); } unsafe fn wake_by_ref(ptr: *const ()) { assert_eq!(ptr as usize, 5); } unsafe fn drop(ptr: *const ()) { assert_eq!(ptr as usize, 5); } } } }
Ok(Filestat { device_id: meta.dev(), inode: meta.ino(), filetype: filetype_from(&meta.file_type()), nlink: meta.nlink(), size: meta.len(), atim: meta.accessed().map(|t| Some(t.into_std())).unwrap_or(None), mtim: meta.modified().map(|t| Some(t.into_std())).unwrap_or(None), ctim: meta.created().map(|t| Some(t.into_std())).unwrap_or(None), })
call_expression
[ { "content": "/// Generates all the Rust source files used in Cranelift from the meta-language.\n\npub fn generate(isas: &[isa::Isa], out_dir: &str, crate_dir: &Path) -> Result<(), error::Error> {\n\n // Create all the definitions:\n\n // - common definitions.\n\n let mut shared_defs = shared::define()...
Rust
src/format/stm/load.rs
cmatsuoka/oxdz
0b7371bf63967819315316a58629881b2169f570
use format::{ProbeInfo, Format, Loader}; use format::stm::{StmData, StmPatterns, StmInstrument}; use module::{Module, Sample}; use module::sample::SampleType; use util::BinaryRead; use ::*; pub struct StmLoader; impl Loader for StmLoader { fn name(&self) -> &'static str { "Scream Tracker 2" } fn probe(&self, b: &[u8], player_id: &str) -> Result<ProbeInfo, Error> { if b.len() < 1084 { return Err(Error::Format(format!("file too short ({})", b.len()))); } player::check_accepted(player_id, "stm")?; let magic = b.read_string(20, 10)?; if magic == "!Scream!\x1a\x02" || magic == "BMOD2STM\x1a\x02" || magic == "WUZAMOD!\x1a\x02" || magic == "SWavePro\x1a\x02" { Ok(ProbeInfo{format: Format::Stm, title: b.read_string(0, 20)?}) } else { Err(Error::Format(format!("bad magic {:?}", magic))) } } fn load(self: Box<Self>, b: &[u8], info: ProbeInfo) -> Result<Module, Error> { if info.format != Format::Stm { return Err(Error::Format("unsupported format".to_owned())); } let name = b.read_string(0, 20)?; let version_major = b.read8(30)?; let version_minor = b.read8(31)?; if version_major != 2 || version_minor < 21 { return Err(Error::Format(format!("unsupported version {}.{}", version_major, version_minor))); } let speed = b.read8(32)?; let num_patterns = b.read8(33)?; let global_vol = b.read8(34)?; let origin = b.read_string(20, 8)?; let mut instruments = Vec::<StmInstrument>::new(); let mut samples = Vec::<Sample>::new(); for i in 0..31 { let ins = load_instrument(b, i)?; instruments.push(ins); } let orders = b.slice(1040, 128)?; let patterns = StmPatterns::from_slice(num_patterns as usize, b.slice(1168, 1024*num_patterns as usize)?)?; let mut ofs = 1168 + 1024*num_patterns as usize; for i in 0..31 { let size = instruments[i].size as usize; let smp = load_sample(b.slice(ofs, size)?, ofs, i, &instruments[i]); samples.push(smp); ofs += size; } let mut data = StmData{ name, speed, num_patterns, global_vol, instruments, orders: [0; 128], patterns, samples, }; 
data.orders.copy_from_slice(orders); let m = Module { format_id : "stm", description: format!("Scream Tracker 2 STM"), creator : match origin.as_ref() { "!Scream!" => format!("Scream Tracker {}.{}", version_major, version_minor), "BMOD2STM" => "BMOD2STM".to_owned(), "WUZAMOD!" => "WUZAMOD".to_owned(), "SWavePro" => "SWavePro".to_owned(), _ => "unknown".to_owned(), }, channels : 4, player : "st2", data : Box::new(data), }; Ok(m) } } fn load_instrument(b: &[u8], i: usize) -> Result<StmInstrument, Error> { let mut ins = StmInstrument::new(); let ofs = 48 + i * 32; ins.name = b.read_string(ofs, 12)?; ins.size = b.read16l(ofs + 16)?; ins.loop_start = b.read16l(ofs + 18)?; ins.loop_end = b.read16l(ofs + 20)?; ins.volume = b.read8(ofs + 22)?; ins.c2spd = b.read16l(ofs + 24)?; Ok(ins) } fn load_sample(b: &[u8], ofs: usize, i: usize, ins: &StmInstrument) -> Sample { let mut smp = Sample::new(); smp.num = i + 1; smp.address = ofs as u32; smp.name = ins.name.to_owned(); smp.rate = ins.c2spd as f64 / 8448.0; smp.size = ins.size as u32; if smp.size > 0 { smp.sample_type = SampleType::Sample8; } smp.store(b); smp }
use format::{ProbeInfo, Format, Loader}; use format::stm::{StmData, StmPatterns, StmInstrument}; use module::{Module, Sample}; use module::sample::SampleType; use util::BinaryRead; use ::*; pub struct StmLoader; impl Loader for StmLoader { fn name(&self) -> &'static str { "Scream Tracker 2" } fn probe(&self, b: &[u8], player_id: &str) -> Result<ProbeInfo, Error> { if b.len() < 1084 { return Err(Error::Format(format!("file too short ({})", b.len()))); } player::check_accepted(player_id, "stm")?; let magic = b.read_string(20, 10)?; if magic == "!Scream!\x1a\x02" || magic == "BMOD2STM\x1a\x02" || magic == "WUZAMOD!\x1a\x02" || magic == "SWavePro\x1a\x02" { Ok(ProbeInfo{format: Format::Stm, title: b.read_string(0, 20)?}) } else { Err(Error::Format(format!("bad magic {:?}", magic))) } } fn load(self: Box<Self>, b: &[u8], info: ProbeInfo) -> Result<Module, Error> { if info.format != Format::Stm { return Err(Error::Format("unsupported format".to_owned())); } let name = b.read_string(0, 20)?; let version_major = b.read8(30)?;
mPatterns::from_slice(num_patterns as usize, b.slice(1168, 1024*num_patterns as usize)?)?; let mut ofs = 1168 + 1024*num_patterns as usize; for i in 0..31 { let size = instruments[i].size as usize; let smp = load_sample(b.slice(ofs, size)?, ofs, i, &instruments[i]); samples.push(smp); ofs += size; } let mut data = StmData{ name, speed, num_patterns, global_vol, instruments, orders: [0; 128], patterns, samples, }; data.orders.copy_from_slice(orders); let m = Module { format_id : "stm", description: format!("Scream Tracker 2 STM"), creator : match origin.as_ref() { "!Scream!" => format!("Scream Tracker {}.{}", version_major, version_minor), "BMOD2STM" => "BMOD2STM".to_owned(), "WUZAMOD!" => "WUZAMOD".to_owned(), "SWavePro" => "SWavePro".to_owned(), _ => "unknown".to_owned(), }, channels : 4, player : "st2", data : Box::new(data), }; Ok(m) } } fn load_instrument(b: &[u8], i: usize) -> Result<StmInstrument, Error> { let mut ins = StmInstrument::new(); let ofs = 48 + i * 32; ins.name = b.read_string(ofs, 12)?; ins.size = b.read16l(ofs + 16)?; ins.loop_start = b.read16l(ofs + 18)?; ins.loop_end = b.read16l(ofs + 20)?; ins.volume = b.read8(ofs + 22)?; ins.c2spd = b.read16l(ofs + 24)?; Ok(ins) } fn load_sample(b: &[u8], ofs: usize, i: usize, ins: &StmInstrument) -> Sample { let mut smp = Sample::new(); smp.num = i + 1; smp.address = ofs as u32; smp.name = ins.name.to_owned(); smp.rate = ins.c2spd as f64 / 8448.0; smp.size = ins.size as u32; if smp.size > 0 { smp.sample_type = SampleType::Sample8; } smp.store(b); smp }
let version_minor = b.read8(31)?; if version_major != 2 || version_minor < 21 { return Err(Error::Format(format!("unsupported version {}.{}", version_major, version_minor))); } let speed = b.read8(32)?; let num_patterns = b.read8(33)?; let global_vol = b.read8(34)?; let origin = b.read_string(20, 8)?; let mut instruments = Vec::<StmInstrument>::new(); let mut samples = Vec::<Sample>::new(); for i in 0..31 { let ins = load_instrument(b, i)?; instruments.push(ins); } let orders = b.slice(1040, 128)?; let patterns = St
function_block-random_span
[ { "content": "pub fn load(b: &[u8], player_id: &str) -> Result<Module, Error> {\n\n\n\n for f in loader_list() {\n\n debug!(\"Probing format: {}\", f.name());\n\n\n\n let info = match f.probe(b, player_id) {\n\n Ok(val) => val,\n\n Err(_) => continue,\n\n };\n\n\n\...
Rust
common/rs/src/mtc/battle/organizer.rs
OpenEmojiBattler/open-emoji-battler
c5054753525d2880602cd406837f01a8a82c7577
use crate::{ codec_types::*, mtc::battle::{common::BattleEmo, march::march}, }; use anyhow::{anyhow, ensure, Result}; use rand::{seq::SliceRandom, SeedableRng}; use rand_pcg::Pcg64Mcg; use sp_std::{cmp, prelude::*}; pub fn battle_all( board: &mtc::Board, health: &mut u8, ghost_states: &mut [mtc::GhostState], grade: u8, ghosts: &[mtc::Ghost], battle_ghost_index: u8, turn: u8, seed: u64, emo_bases: &emo::Bases, ) -> Result<Option<u8>> { let pre_health = *health; let pre_ghost_states = ghost_states.to_vec(); if ghost_states .iter() .filter(|s| matches!(s, mtc::GhostState::Active { health: _ })) .count() > 1 { battle_pvg_and_gvg( board, grade, health, ghosts, ghost_states, battle_ghost_index, turn, seed, emo_bases, )?; } else { let (ghost_index, ghost_state) = ghost_states .iter_mut() .enumerate() .find(|(_, s)| matches!(s, mtc::GhostState::Active { health: _ })) .ok_or_else(|| anyhow!("battle_all: invalid"))?; battle_pvg( grade, health, board, ghost_state, &ghosts[ghost_index].history, turn, seed, emo_bases, )?; }; let final_place = calc_final_place(*health, pre_health, ghost_states, &pre_ghost_states); Ok(final_place) } pub fn march_pvg( board: &mtc::Board, ghost_board: &mtc::GhostBoard, seed: u64, emo_bases: &emo::Bases, ) -> Result<(u8, u8, mtc::battle::Logs)> { march( build_battle_emos_from_board(board, emo_bases)?, build_battle_emos_from_ghost_board(ghost_board, emo_bases)?, seed, emo_bases, ) } pub fn march_gvg( ghost_board0: &mtc::GhostBoard, ghost_board1: &mtc::GhostBoard, seed: u64, emo_bases: &emo::Bases, ) -> Result<(u8, u8, mtc::battle::Logs)> { march( build_battle_emos_from_ghost_board(ghost_board0, emo_bases)?, build_battle_emos_from_ghost_board(ghost_board1, emo_bases)?, seed, emo_bases, ) } static EMPTY_GRADE_AND_GHOST_BOARD: mtc::GradeAndGhostBoard = mtc::GradeAndGhostBoard { grade: 1, board: mtc::GhostBoard(vec![]), }; pub fn get_grade_and_ghost_board<'a>( grade_and_ghost_boards: &'a [mtc::GradeAndGhostBoard], state: &mtc::GhostState, turn: u8, ) -> 
&'a mtc::GradeAndGhostBoard { let hist_len = grade_and_ghost_boards.len() as u8; if hist_len == 0 { &EMPTY_GRADE_AND_GHOST_BOARD } else { let effective_turn = if let mtc::GhostState::Retired { final_turn } = state { cmp::min(turn, *final_turn) } else { turn }; if hist_len >= effective_turn { &grade_and_ghost_boards[effective_turn as usize - 1] } else { &grade_and_ghost_boards[hist_len as usize - 1] } } } pub fn select_battle_ghost_index( states: &[mtc::GhostState], previous_index: u8, seed: u64, ) -> Result<u8> { let live_indexes = states .iter() .zip(0u8..) .filter(|(s, _)| matches!(s, mtc::GhostState::Active { health: _ })) .map(|(_, i)| i) .collect::<Vec<_>>(); let len = live_indexes.len(); ensure!(len != 0, "select_battle_ghost_index: live zero"); if len == 1 { return Ok(live_indexes[0]); } let mut rng = Pcg64Mcg::seed_from_u64(seed); live_indexes .into_iter() .filter(|&i| i != previous_index) .collect::<Vec<_>>() .choose(&mut rng) .copied() .ok_or_else(|| anyhow!("choose failed")) } fn build_battle_emos_from_board( board: &mtc::Board, emo_bases: &emo::Bases, ) -> Result<Vec<BattleEmo>> { let mut emos = Vec::with_capacity(board.0.len()); for emo in board.0.iter() { emos.push(BattleEmo::new_with_attributes( emo_bases.find(emo.base_id)?, emo.attributes.clone(), )); } Ok(emos) } fn build_battle_emos_from_ghost_board( ghost_board: &mtc::GhostBoard, emo_bases: &emo::Bases, ) -> Result<Vec<BattleEmo>> { let mut emos = Vec::with_capacity(ghost_board.0.len()); for emo in ghost_board.0.iter() { emos.push(BattleEmo::new_with_attributes( emo_bases.find(emo.base_id)?, emo.attributes.clone(), )); } Ok(emos) } fn calc_final_place( health: u8, pre_health: u8, ghost_states: &[mtc::GhostState], pre_ghost_states: &[mtc::GhostState], ) -> Option<u8> { let are_all_ghosts_retired = ghost_states .iter() .all(|s| matches!(s, mtc::GhostState::Retired { final_turn: _ })); if health == 0 || are_all_ghosts_retired { if are_all_ghosts_retired { Some(1) } else { let mut place = 1; for (i, 
ghost_state) in ghost_states.iter().enumerate() { if let mtc::GhostState::Active { health: _ } = ghost_state { place += 1; continue; } if let mtc::GhostState::Active { health: g_health } = pre_ghost_states[i] { if g_health > pre_health { place += 1; continue; } } } Some(place) } } else { None } } struct GhostSet<'a> { index: u8, ghost: mtc::Ghost, state: &'a mut mtc::GhostState, } fn battle_pvg_and_gvg( board: &mtc::Board, grade: u8, health: &mut u8, ghosts: &[mtc::Ghost], ghost_states: &mut [mtc::GhostState], battle_ghost_index: u8, turn: u8, seed: u64, emo_bases: &emo::Bases, ) -> Result<()> { let mut ghost_sets = ghosts .iter() .zip(ghost_states.iter_mut()) .enumerate() .map(|(i, (g, gs))| GhostSet { index: i as u8, ghost: g.clone(), state: gs, }) .collect::<Vec<GhostSet>>(); if battle_ghost_index != 0 { ghost_sets.swap(0, battle_ghost_index as usize); } let (ghost_set0, gs) = ghost_sets .split_first_mut() .ok_or_else(|| anyhow!("failed to split ghost_sets"))?; let (ghost_set1, gs) = gs .split_first_mut() .ok_or_else(|| anyhow!("failed to split ghost_sets"))?; let (ghost_set2, _) = gs .split_first_mut() .ok_or_else(|| anyhow!("failed to split ghost_sets"))?; battle_pvg( grade, health, board, ghost_set0.state, &ghost_set0.ghost.history, turn, seed, emo_bases, )?; battle_gvg( ghost_set1.state, &ghost_set1.ghost.history, ghost_set2.state, &ghost_set2.ghost.history, turn, seed, emo_bases, )?; ghost_sets.sort_unstable_by_key(|g| g.index); Ok(()) } fn damage_ghost_health(board_grade: u8, grade: u8, ghost_state: &mut mtc::GhostState, turn: u8) { if let mtc::GhostState::Active { ref mut health } = ghost_state { damage_health(board_grade, grade, health); if *health == 0 { *ghost_state = mtc::GhostState::Retired { final_turn: turn }; } } } fn damage_player_health(board_grade: u8, grade: u8, health: &mut u8) { damage_health(board_grade, grade, health) } fn damage_health(board_grade: u8, grade: u8, health: &mut u8) { if board_grade > 0 { *health = 
health.saturating_sub(board_grade + grade); } } fn battle_pvg( grade: u8, health: &mut u8, board: &mtc::Board, ghost_state: &mut mtc::GhostState, ghost_history: &[mtc::GradeAndGhostBoard], turn: u8, seed: u64, emo_bases: &emo::Bases, ) -> Result<()> { let ghost_grade_and_ghost_board = get_grade_and_ghost_board(ghost_history, ghost_state, turn); let (player_board_grade, ghost_board_grade, _) = march_pvg(board, &ghost_grade_and_ghost_board.board, seed, emo_bases)?; damage_ghost_health(player_board_grade, grade, ghost_state, turn); damage_player_health(ghost_board_grade, ghost_grade_and_ghost_board.grade, health); Ok(()) } fn battle_gvg( ghost0_state: &mut mtc::GhostState, ghost0_history: &[mtc::GradeAndGhostBoard], ghost1_state: &mut mtc::GhostState, ghost1_history: &[mtc::GradeAndGhostBoard], turn: u8, seed: u64, emo_bases: &emo::Bases, ) -> Result<()> { let ghost0_grade_and_ghost_board = get_grade_and_ghost_board(ghost0_history, ghost0_state, turn); let ghost1_grade_and_ghost_board = get_grade_and_ghost_board(ghost1_history, ghost1_state, turn); let (ghost0_board_grade, ghost1_board_grade, _) = march_gvg( &ghost0_grade_and_ghost_board.board, &ghost1_grade_and_ghost_board.board, seed, emo_bases, )?; damage_ghost_health( ghost1_board_grade, ghost1_grade_and_ghost_board.grade, ghost0_state, turn, ); damage_ghost_health( ghost0_board_grade, ghost0_grade_and_ghost_board.grade, ghost1_state, turn, ); Ok(()) }
use crate::{
    codec_types::*,
    mtc::battle::{common::BattleEmo, march::march},
};
use anyhow::{anyhow, ensure, Result};
use rand::{seq::SliceRandom, SeedableRng};
use rand_pcg::Pcg64Mcg;
use sp_std::{cmp, prelude::*};

/// Runs every battle for one turn of the match.
///
/// With more than one ghost still active, the player fights the ghost at
/// `battle_ghost_index` while the other two ghosts fight each other; with
/// exactly one active ghost, the player fights it directly.
///
/// `health` and `ghost_states` are updated in place. Returns
/// `Some(final_place)` once the match is decided for this player (player at
/// 0 health, or all ghosts retired), `None` while the match continues.
pub fn battle_all(
    board: &mtc::Board,
    health: &mut u8,
    ghost_states: &mut [mtc::GhostState],
    grade: u8,
    ghosts: &[mtc::Ghost],
    battle_ghost_index: u8,
    turn: u8,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<Option<u8>> {
    // Snapshot pre-battle values: placement is computed against them.
    let pre_health = *health;
    let pre_ghost_states = ghost_states.to_vec();
    if ghost_states
        .iter()
        .filter(|s| matches!(s, mtc::GhostState::Active { health: _ }))
        .count()
        > 1
    {
        // At least two ghosts alive: player-vs-ghost plus ghost-vs-ghost.
        battle_pvg_and_gvg(
            board,
            grade,
            health,
            ghosts,
            ghost_states,
            battle_ghost_index,
            turn,
            seed,
            emo_bases,
        )?;
    } else {
        // Exactly one ghost left (zero alive is an error): fight it directly.
        let (ghost_index, ghost_state) = ghost_states
            .iter_mut()
            .enumerate()
            .find(|(_, s)| matches!(s, mtc::GhostState::Active { health: _ }))
            .ok_or_else(|| anyhow!("battle_all: invalid"))?;
        battle_pvg(
            grade,
            health,
            board,
            ghost_state,
            &ghosts[ghost_index].history,
            turn,
            seed,
            emo_bases,
        )?;
    };
    let final_place = calc_final_place(*health, pre_health, ghost_states, &pre_ghost_states);
    Ok(final_place)
}

/// Marches the player's board against a ghost board.
///
/// Returns `(player_board_grade, ghost_board_grade, logs)` — presumably the
/// surviving strength of each side plus the battle log; see `march` for the
/// exact contract.
pub fn march_pvg(
    board: &mtc::Board,
    ghost_board: &mtc::GhostBoard,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<(u8, u8, mtc::battle::Logs)> {
    march(
        build_battle_emos_from_board(board, emo_bases)?,
        build_battle_emos_from_ghost_board(ghost_board, emo_bases)?,
        seed,
        emo_bases,
    )
}

/// Marches two ghost boards against each other; same return shape as
/// `march_pvg`, with the tuple ordered (board0, board1, logs).
pub fn march_gvg(
    ghost_board0: &mtc::GhostBoard,
    ghost_board1: &mtc::GhostBoard,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<(u8, u8, mtc::battle::Logs)> {
    march(
        build_battle_emos_from_ghost_board(ghost_board0, emo_bases)?,
        build_battle_emos_from_ghost_board(ghost_board1, emo_bases)?,
        seed,
        emo_bases,
    )
}

// Fallback snapshot used when a ghost has no recorded history yet:
// a grade-1, empty board.
static EMPTY_GRADE_AND_GHOST_BOARD: mtc::GradeAndGhostBoard = mtc::GradeAndGhostBoard {
    grade: 1,
    board: mtc::GhostBoard(vec![]),
};

/// Picks the ghost's board snapshot for `turn`.
///
/// A retired ghost is frozen at its `final_turn`; the lookup is then clamped
/// to the length of the recorded history (turns are 1-based, hence the `- 1`).
pub fn get_grade_and_ghost_board<'a>(
    grade_and_ghost_boards: &'a [mtc::GradeAndGhostBoard],
    state: &mtc::GhostState,
    turn: u8,
) -> &'a mtc::GradeAndGhostBoard {
    let hist_len = grade_and_ghost_boards.len() as u8;
    if hist_len == 0 {
        &EMPTY_GRADE_AND_GHOST_BOARD
    } else {
        let effective_turn = if let mtc::GhostState::Retired { final_turn } = state {
            cmp::min(turn, *final_turn)
        } else {
            turn
        };
        if hist_len >= effective_turn {
            &grade_and_ghost_boards[effective_turn as usize - 1]
        } else {
            &grade_and_ghost_boards[hist_len as usize - 1]
        }
    }
}

/// Selects the index of the ghost the player will battle next.
///
/// Chooses uniformly (seeded PCG, deterministic per `seed`) among the active
/// ghosts, excluding `previous_index` when more than one is alive; errors if
/// no ghost is active.
pub fn select_battle_ghost_index(
    states: &[mtc::GhostState],
    previous_index: u8,
    seed: u64,
) -> Result<u8> {
    let live_indexes = states
        .iter()
        .zip(0u8..)
        .filter(|(s, _)| matches!(s, mtc::GhostState::Active { health: _ }))
        .map(|(_, i)| i)
        .collect::<Vec<_>>();
    let len = live_indexes.len();
    ensure!(len != 0, "select_battle_ghost_index: live zero");
    if len == 1 {
        return Ok(live_indexes[0]);
    }
    let mut rng = Pcg64Mcg::seed_from_u64(seed);
    // With >= 2 live ghosts, at most one equals previous_index, so the
    // filtered list is never empty here.
    live_indexes
        .into_iter()
        .filter(|&i| i != previous_index)
        .collect::<Vec<_>>()
        .choose(&mut rng)
        .copied()
        .ok_or_else(|| anyhow!("choose failed"))
}

/// Builds the battle-ready EMO list from a player board, resolving each
/// EMO's base through `emo_bases`.
fn build_battle_emos_from_board(
    board: &mtc::Board,
    emo_bases: &emo::Bases,
) -> Result<Vec<BattleEmo>> {
    let mut emos = Vec::with_capacity(board.0.len());
    for emo in board.0.iter() {
        emos.push(BattleEmo::new_with_attributes(
            emo_bases.find(emo.base_id)?,
            emo.attributes.clone(),
        ));
    }
    Ok(emos)
}

/// Same as `build_battle_emos_from_board`, but for a ghost board.
fn build_battle_emos_from_ghost_board(
    ghost_board: &mtc::GhostBoard,
    emo_bases: &emo::Bases,
) -> Result<Vec<BattleEmo>> {
    let mut emos = Vec::with_capacity(ghost_board.0.len());
    for emo in ghost_board.0.iter() {
        emos.push(BattleEmo::new_with_attributes(
            emo_bases.find(emo.base_id)?,
            emo.attributes.clone(),
        ));
    }
    Ok(emos)
}

/// Computes the player's final placement, or `None` while the match is still
/// undecided.
///
/// All ghosts retired => first place. Otherwise (player at 0 health) the
/// place starts at 1 and is pushed down by every ghost still active, plus
/// every ghost that retired this battle but entered it with more health than
/// the player had.
fn calc_final_place(
    health: u8,
    pre_health: u8,
    ghost_states: &[mtc::GhostState],
    pre_ghost_states: &[mtc::GhostState],
) -> Option<u8> {
    let are_all_ghosts_retired = ghost_states
        .iter()
        .all(|s| matches!(s, mtc::GhostState::Retired { final_turn: _ }));
    if health == 0 || are_all_ghosts_retired {
        if are_all_ghosts_retired {
            Some(1)
        } else {
            let mut place = 1;
            for (i, ghost_state) in ghost_states.iter().enumerate() {
                // Still-active ghosts outrank the player.
                if let mtc::GhostState::Active { health: _ } = ghost_state {
                    place += 1;
                    continue;
                }
                // Ghosts that went out this same battle but had more
                // pre-battle health than the player also outrank it.
                if let mtc::GhostState::Active { health: g_health } = pre_ghost_states[i] {
                    if g_health > pre_health {
                        place += 1;
                        continue;
                    }
                }
            }
            Some(place)
        }
    } else {
        None
    }
}

/// One ghost paired with its mutable battle state. `index` remembers the
/// ghost's original slot so the ordering can be restored after swapping.
struct GhostSet<'a> {
    index: u8,
    ghost: mtc::Ghost,
    state: &'a mut mtc::GhostState,
}

/// Runs the player-vs-ghost battle against the ghost at `battle_ghost_index`
/// and a ghost-vs-ghost battle between the remaining two ghosts.
// NOTE(review): the three chained `split_first_mut` calls mean this assumes
// at least three ghosts; fewer entries yield the "failed to split" error.
fn battle_pvg_and_gvg(
    board: &mtc::Board,
    grade: u8,
    health: &mut u8,
    ghosts: &[mtc::Ghost],
    ghost_states: &mut [mtc::GhostState],
    battle_ghost_index: u8,
    turn: u8,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<()> {
    // Pair each ghost with its state, tagging the original position.
    let mut ghost_sets = ghosts
        .iter()
        .zip(ghost_states.iter_mut())
        .enumerate()
        .map(|(i, (g, gs))| GhostSet {
            index: i as u8,
            ghost: g.clone(),
            state: gs,
        })
        .collect::<Vec<GhostSet>>();
    // Put the player's opponent in slot 0 so the splits below line up.
    if battle_ghost_index != 0 {
        ghost_sets.swap(0, battle_ghost_index as usize);
    }
    // Borrow-split the vec into three disjoint mutable entries.
    let (ghost_set0, gs) = ghost_sets
        .split_first_mut()
        .ok_or_else(|| anyhow!("failed to split ghost_sets"))?;
    let (ghost_set1, gs) = gs
        .split_first_mut()
        .ok_or_else(|| anyhow!("failed to split ghost_sets"))?;
    let (ghost_set2, _) = gs
        .split_first_mut()
        .ok_or_else(|| anyhow!("failed to split ghost_sets"))?;
    battle_pvg(
        grade,
        health,
        board,
        ghost_set0.state,
        &ghost_set0.ghost.history,
        turn,
        seed,
        emo_bases,
    )?;
    battle_gvg(
        ghost_set1.state,
        &ghost_set1.ghost.history,
        ghost_set2.state,
        &ghost_set2.ghost.history,
        turn,
        seed,
        emo_bases,
    )?;
    // Restore the caller-visible ghost ordering (undoes the swap above).
    ghost_sets.sort_unstable_by_key(|g| g.index);
    Ok(())
}

/// Applies battle damage to a ghost; a ghost reaching 0 health retires,
/// recording the turn it went out on.
fn damage_ghost_health(board_grade: u8, grade: u8, ghost_state: &mut mtc::GhostState, turn: u8) {
    if let mtc::GhostState::Active { ref mut health } = ghost_state {
        damage_health(board_grade, grade, health);
        if *health == 0 {
            *ghost_state = mtc::GhostState::Retired { final_turn: turn };
        }
    }
}

/// Applies battle damage to the player's health (no retirement bookkeeping).
fn damage_player_health(board_grade: u8, grade: u8, health: &mut u8) {
    damage_health(board_grade, grade, health)
}

/// Damage formula: losing side takes `board_grade + grade`, saturating at 0.
/// A `board_grade` of 0 deals no damage at all.
fn damage_health(board_grade: u8, grade: u8, health: &mut u8) {
    if board_grade > 0 {
        *health = health.saturating_sub(board_grade + grade);
    }
}

/// Player-versus-ghost battle.
// NOTE(review): this chunk is cut mid-signature — the rest of `battle_pvg`'s
// parameter list and its body continue outside this view. Tokens preserved
// verbatim.
fn battle_pvg(
    grade: u8,
    health: &mut u8,
    board: &mtc::Board,
    ghost_state: &mut mtc::GhostState,
    ghost_history: &[mtc::GradeAndGhostBoard],
fn battle_gvg( ghost0_state: &mut mtc::GhostState, ghost0_history: &[mtc::GradeAndGhostBoard], ghost1_state: &mut mtc::GhostState, ghost1_history: &[mtc::GradeAndGhostBoard], turn: u8, seed: u64, emo_bases: &emo::Bases, ) -> Result<()> { let ghost0_grade_and_ghost_board = get_grade_and_ghost_board(ghost0_history, ghost0_state, turn); let ghost1_grade_and_ghost_board = get_grade_and_ghost_board(ghost1_history, ghost1_state, turn); let (ghost0_board_grade, ghost1_board_grade, _) = march_gvg( &ghost0_grade_and_ghost_board.board, &ghost1_grade_and_ghost_board.board, seed, emo_bases, )?; damage_ghost_health( ghost1_board_grade, ghost1_grade_and_ghost_board.grade, ghost0_state, turn, ); damage_ghost_health( ghost0_board_grade, ghost0_grade_and_ghost_board.grade, ghost1_state, turn, ); Ok(()) }
turn: u8, seed: u64, emo_bases: &emo::Bases, ) -> Result<()> { let ghost_grade_and_ghost_board = get_grade_and_ghost_board(ghost_history, ghost_state, turn); let (player_board_grade, ghost_board_grade, _) = march_pvg(board, &ghost_grade_and_ghost_board.board, seed, emo_bases)?; damage_ghost_health(player_board_grade, grade, ghost_state, turn); damage_player_health(ghost_board_grade, ghost_grade_and_ghost_board.grade, health); Ok(()) }
function_block-function_prefix_line
[ { "content": "#[wasm_bindgen]\n\npub fn march_pvg(board: &[u8], ghost_board: &[u8], seed: &str, emo_bases: &[u8]) -> Vec<u8> {\n\n mtc::battle::organizer::march_pvg(\n\n &mtc::decoders::decode_board(board),\n\n &mtc::decoders::decode_ghost_board(ghost_board),\n\n seed.parse().unwrap(),\n...