text
stringlengths
8
4.13M
//! Hooks which can be execute by the native os shell. use std::io::{BufRead as _, BufReader}; use std::path::Path; use std::process::{Command, Stdio}; use color_eyre::eyre::Result; use serde::{Deserialize, Serialize}; use thiserror::Error; /// An enum of errors which can occur during the execution of a [`Hook`]. #[derive(Error, Debug)] pub enum HookError { /// An [`std::io::Error`] which occurred during the execution of a hook. #[error("IO Error")] IoError(#[from] std::io::Error), /// The hook failed to execute successfully. #[error("Process failed with status `{0}`")] ExitStatusError(std::process::ExitStatus), } impl From<std::process::ExitStatus> for HookError { fn from(value: std::process::ExitStatus) -> Self { Self::ExitStatusError(value) } } // TODO: Replace once `exit_ok` becomes stable /// Maps a value to an Result. This is mainly used as a replacement for /// [`std::process::ExitStatus::exit_ok`] until it becomes stable. trait ExitOk { /// Error type of the returned result. type Error; /// Converts `self` to an result. fn exit_ok(self) -> Result<(), Self::Error>; } impl ExitOk for std::process::ExitStatus { type Error = HookError; fn exit_ok(self) -> Result<(), <Self as ExitOk>::Error> { if self.success() { Ok(()) } else { Err(self.into()) } } } /// Implements the `Hook` trait, which is used to run a command after or before a build. #[derive(Default, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] pub struct Hook(String); impl Hook { /// Creates a new Hook for the given command. The command must be executable by the native shell. pub fn new<S: Into<String>>(command: S) -> Self { Self(command.into()) } /// Runs the hook command. pub fn command(&self) -> &str { &self.0 } /// Executes the hook command. pub fn execute(&self, cwd: &Path) -> Result<()> { let mut child = self .prepare_command()? 
.current_dir(cwd) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn()?; // No need to call kill here as the program will immediately exit // and thereby kill all spawned children let stdout = child.stdout.take().expect("Failed to get stdout from hook"); for line in BufReader::new(stdout).lines() { match line { Ok(line) => log::info!("hook::stdout > {}", line), Err(err) => { // Result is explicitly ignored as an error was already // encountered let _ = child.kill(); return Err(err.into()); } } } // No need to call kill here as the program will immediately exit // and thereby kill all spawned children let stderr = child.stderr.take().expect("Failed to get stderr from hook"); for line in BufReader::new(stderr).lines() { match line { Ok(line) => log::error!("hook::stderr > {}", line), Err(err) => { // Result is explicitly ignored as an error was already // encountered let _ = child.kill(); return Err(err.into()); } } } child .wait_with_output()? .status .exit_ok() .map_err(Into::into) } /// Prepares the command for execution depending on the platform. fn prepare_command(&self) -> Result<Command> { cfg_if::cfg_if! { if #[cfg(target_family = "windows")] { let mut cmd = Command::new("cmd"); cmd.args(["/C", &self.0]); Ok(cmd) } else if #[cfg(target_family = "unix")] { let mut cmd = Command::new("sh"); cmd.args(["-c", &self.0]); Ok(cmd) } else { Err(std::io::Error::new(std::io::ErrorKind::Other, "Hooks are only supported on Windows and Unix-based systems")) } } } }
use crate::code_generator::intermediate::{Access, Constant, Context, Instruction, Label, UniqueVariable, Variable, VariableIndex, OperationType}; use ::virtual_machine::instruction::Instruction as VmInstruction; use std::cmp::Ordering; use std::collections::BTreeMap; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] struct MemoryLocation(pub u64); impl std::ops::Add<u64> for MemoryLocation { type Output = MemoryLocation; fn add(self, rhs: u64) -> Self::Output { MemoryLocation(self.0 + rhs) } } impl std::ops::AddAssign<u64> for MemoryLocation { fn add_assign(&mut self, rhs: u64) { self.0 += rhs } } type MemoryStorage = BTreeMap<VariableIndex, (MemoryLocation, Option<i64>)>; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] struct MemoryRange(MemoryLocation, MemoryLocation); impl MemoryRange { pub fn new(start: u64, end: u64) -> Self { MemoryRange(MemoryLocation(start), MemoryLocation(end)) } } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] struct Segments { arrays: Option<MemoryRange>, variables: Option<MemoryRange>, temporaries: Option<MemoryRange>, } #[derive(Debug)] struct Memory { storage: MemoryStorage, segments: Segments, } impl Memory { pub fn new() -> Self { Memory { storage: MemoryStorage::new(), segments: Segments { arrays: None, variables: None, temporaries: None, }, } } fn add_variable(&mut self, index: VariableIndex, value: Option<i64>) -> MemoryLocation { let last = if let Some(MemoryRange(_, ref mut end)) = self.segments.variables { *end += 1; *end } else { let last = self .segments .arrays .as_ref() .map_or(MemoryLocation(0), |s| s.1) + 1; self.segments.variables = Some(MemoryRange(last, last)); last }; self.storage.insert(index, (last, value)); last } fn get_location(&self, index: VariableIndex) -> MemoryLocation { self.storage[&index].0 } } fn compare_variables(a: &UniqueVariable, b: &UniqueVariable) -> Ordering { match (a.variable(), b.variable()) { (v1 @ Variable::Array { .. }, v2 @ Variable::Array { .. 
}) => v1.size().cmp(&v2.size()), (Variable::Unit { .. }, Variable::Unit { .. }) => Ordering::Equal, (Variable::Array { .. }, Variable::Unit { .. }) => Ordering::Less, (Variable::Unit { .. }, Variable::Array { .. }) => Ordering::Greater, } } struct InstructionManager { target_instructions: Vec<VmInstruction>, label_positions: BTreeMap<Label, u64>, back_patches_list: BTreeMap<Label, Vec<usize>>, } #[allow(non_snake_case)] impl InstructionManager { fn fix_label(&mut self, instruction_ptr: usize, target_pointer: u64) { match self.target_instructions[instruction_ptr] { VmInstruction::Jump(ref mut target) | VmInstruction::Jpos(ref mut target) | VmInstruction::Jneg(ref mut target) | VmInstruction::Jzero(ref mut target) => { *target = target_pointer; } _ => unreachable!(), } } fn translate_jump<F: FnOnce(u64) -> VmInstruction>(&mut self, label: &Label, create: F) { if let Some(pos) = self.label_positions.get(label) { self.target_instructions.push(create(*pos)); } else { let pos = self.target_instructions.len(); self.back_patches_list.entry(*label).or_default().push(pos); self.target_instructions.push(create(u64::max_value())); } } fn translate_label(&mut self, label: &Label) { let target = self.target_instructions.len() as u64; self.label_positions.insert(*label, target); if let Some(backlist) = self.back_patches_list.remove(&label) { for pos in backlist { self.fix_label(pos, target); } } } fn instr_Get(&mut self) { self.target_instructions.push(VmInstruction::Get); } fn instr_Put(&mut self) { self.target_instructions.push(VmInstruction::Put); } fn instr_Load(&mut self, operand: MemoryLocation) { self.target_instructions .push(VmInstruction::Load(operand.0)); } fn instr_Loadi(&mut self, operand: MemoryLocation) { self.target_instructions .push(VmInstruction::Loadi(operand.0)); } fn instr_Store(&mut self, operand: MemoryLocation) { self.target_instructions .push(VmInstruction::Store(operand.0)); } fn instr_Storei(&mut self, operand: MemoryLocation) { 
self.target_instructions .push(VmInstruction::Storei(operand.0)); } fn instr_Add(&mut self, operand: MemoryLocation) { self.target_instructions.push(VmInstruction::Add(operand.0)); } fn instr_Sub(&mut self, operand: MemoryLocation) { self.target_instructions.push(VmInstruction::Sub(operand.0)); } fn instr_Shift(&mut self, operand: MemoryLocation) { self.target_instructions .push(VmInstruction::Shift(operand.0)); } fn instr_Inc(&mut self) { self.target_instructions.push(VmInstruction::Inc); } fn instr_Dec(&mut self) { self.target_instructions.push(VmInstruction::Dec); } fn instr_Halt(&mut self) { self.target_instructions.push(VmInstruction::Halt); } } pub struct Generator { context: Context, memory: Memory, instruction_manager: InstructionManager, } #[allow(dead_code)] impl Generator { pub fn new(context: Context) -> Self { let cap = context.instructions().len() * 4; Generator { context, memory: Memory::new(), instruction_manager: InstructionManager { target_instructions: Vec::with_capacity(cap), label_positions: BTreeMap::new(), back_patches_list: BTreeMap::new(), }, } } fn allocate_memory(&mut self) { if self.context.variables().is_empty() { return; } let mut variables: Vec<_> = self.context.variables().iter().map(|v| v).collect(); variables.sort_unstable_by(|&a, &b| compare_variables(a, b)); let middle = variables .binary_search_by(|&a| match a.variable() { Variable::Array { .. } => Ordering::Less, Variable::Unit { .. } => Ordering::Greater, }) .expect_err("incorrect ordering function"); let iter = if middle > 0 { let arrays_segment_end: usize = variables .iter() .take(middle) .map(|&arr| arr.variable().size()) .sum(); self.memory.segments.arrays = Some(MemoryRange::new(1, arrays_segment_end as u64)); let mut iter = variables.iter(); let arrays = iter.by_ref().take(middle); let array_base_indexes = arrays.scan(1, |first, &arr| { let start_index = *first; *first += arr.variable().size(); if let Variable::Array { start, .. 
} = arr.variable() { Some((arr, start_index as i64 - *start)) } else { panic!("incorrect variable order"); } }); for (arr, base_index) in array_base_indexes { self.memory.add_variable(arr.id(), Some(base_index)); } iter } else { variables.iter() }; for &var in iter { self.memory.add_variable(var.id(), None); } } fn get_constant_location(&self, value: i64) -> MemoryLocation { let ind = self.context.get_constant_index(&Constant(value)); self.memory .storage .get(&ind) .expect(format!("constant {} has not been generated", value).as_str()) .0 } fn generate_constant(&mut self, value: i64, location: MemoryLocation) { let abs = value.abs() as u64; if abs < 10 { let (grow_instr, shrink_instr) = if value.is_positive() { (VmInstruction::Inc, VmInstruction::Dec) } else { (VmInstruction::Dec, VmInstruction::Inc) }; for _ in 0..value.abs() { self.instruction_manager .target_instructions .push(grow_instr); } self.instruction_manager.instr_Store(location); for _ in 0..value.abs() { self.instruction_manager .target_instructions .push(shrink_instr); } } else { let leading_zeros = abs.leading_zeros(); let mut abs = abs.reverse_bits(); let one_const = self.get_constant_location(1); let grow_instr = if value.is_positive() { VmInstruction::Inc } else { VmInstruction::Dec }; while abs & 1 == 0 { abs >>= 1; } for _ in 0..(64 - leading_zeros - 1) { if abs & 1 == 1 { self.instruction_manager .target_instructions .push(grow_instr); } self.instruction_manager.instr_Shift(one_const); abs >>= 1; } if abs & 1 == 1 { self.instruction_manager .target_instructions .push(grow_instr); } self.instruction_manager.instr_Store(location); self.instruction_manager.instr_Sub(MemoryLocation(0)); } } fn generate_constants(&mut self) { for (constant, index) in self.context.constants() { let value = self .memory .storage .get_mut(index) .expect("constant not in memory"); value.1 = Some(constant.value()); } let mut to_generate: Vec<_> = self .memory .storage .iter() .filter_map(|(_, &(loc, val))| val.map(|val| 
(loc, val))) .collect(); to_generate.sort_unstable_by(|(_, val1), (_, val2)| { let cmp = val1.abs().cmp(&val2.abs()); if cmp == Ordering::Equal { val1.cmp(val2) } else { cmp } }); if cfg!(debug_assertions) { println!("generating constants: {:?}", to_generate); } self.instruction_manager.instr_Sub(MemoryLocation(0)); for (loc, val) in to_generate { self.generate_constant(val, loc); } } fn register_temp(&mut self, name: &str) -> VariableIndex { self.context.add_variable(Variable::Unit { name: String::from("tmp$") + name, }) } fn get_or_register_temp(&mut self, name: &str) -> MemoryLocation { let location = self .context .find_variable_by_name((String::from("tmp$") + name).as_str()) .map(|v| self.memory.get_location(v.id())) .unwrap_or_else(|| { let new_ind = self.register_temp(name); self.memory.add_variable(new_ind, None) }); location } fn translate_load_zero(&mut self) { self.instruction_manager.instr_Sub(MemoryLocation(0)); } fn translate_optimized_multiplication(&mut self, left: &Access, right: &Access) -> bool { match (left, right) { (Access::Constant(c), other) | (other, Access::Constant(c)) => { match c.value() { 0 => { self.instruction_manager.instr_Sub(MemoryLocation(0)); true }, 1 => { self.translate_load_access(other); true }, -1 => { self.translate_load_access(other); self.translate_neg_tmp(); true } 2 => { self.translate_load_access(other); self.instruction_manager.instr_Shift(self.get_constant_location(1)); true } -2 => { self.translate_load_access(other); self.instruction_manager.instr_Shift(self.get_constant_location(1)); self.translate_neg_tmp(); true } // n if (n.abs() as u64).is_power_of_two() => {} _ => false } } _ => false, } } fn translate_optimized_div_mod(&mut self, left: &Access, right: &Access, div: bool) -> bool { match (left, right) { (Access::Constant(c1), Access::Constant(c2)) => { if div { let v1 = c1.value(); let v2 = c2.value(); match (v1.signum(), v2.signum()) { (0, _) | (_, 0) => { self.translate_load_zero(); true }, _ => false } } 
else { false } } (Access::Constant(c), _other) => { match c.value() { 0 => { self.instruction_manager.instr_Sub(MemoryLocation(0)); true } // n if (n.abs() as u64).is_power_of_two() => {} _ => false } } (other, Access::Constant(c)) => { match c.value() { 0 => { self.translate_load_zero(); true }, 1 => { self.translate_load_access(other); true }, -1 => { self.translate_load_access(other); self.translate_neg_tmp(); true } 2 => { if div { self.translate_load_access(other); self.instruction_manager.instr_Shift(self.get_constant_location(-1)); true } else { false } } -2 => { if div { self.translate_load_access(other); self.instruction_manager.instr_Shift(self.get_constant_location(-1)); self.translate_neg_tmp(); true } else { false } } // n if (n.abs() as u64).is_power_of_two() => {} _ => false } } _ => false, } } fn translate_multiplication(&mut self, left: &Access, right: &Access) { /* if b == 0 { goto end } if b < 0 { b = -b a = -a } result = 0 while b > 0 { if lsb(b) == 1 { result += a } b >>= 1 a <<= 1 } p0 = result end: return p0 */ if self.translate_optimized_multiplication(left, right) { return; } let left_tmp = self.get_or_register_temp("mul_left"); let right_tmp = self.get_or_register_temp("mul_right"); let tmp = self.get_or_register_temp("1"); let result = self.get_or_register_temp("mul_result"); let const_1 = self.get_constant_location(1); let const_neg_1 = self.get_constant_location(-1); let label_start = self.context.new_label(); let label_main = self.context.new_label(); let label_step = self.context.new_label(); let label_end = self.context.new_label(); let label_real_end = self.context.new_label(); self.translate_load_access(left); self.instruction_manager.instr_Store(left_tmp); self.translate_load_access(right); self.instruction_manager.instr_Store(right_tmp); self.instruction_manager .translate_jump(&label_real_end, VmInstruction::Jzero); self.instruction_manager .translate_jump(&label_start, VmInstruction::Jpos); self.translate_neg_tmp(); 
self.instruction_manager.instr_Store(right_tmp); self.instruction_manager.instr_Load(left_tmp); self.translate_neg(left_tmp); self.instruction_manager.instr_Store(left_tmp); self.instruction_manager.instr_Load(right_tmp); self.instruction_manager.translate_label(&label_start); self.instruction_manager.instr_Sub(MemoryLocation(0)); self.instruction_manager.instr_Store(result); self.instruction_manager.translate_label(&label_main); self.instruction_manager.instr_Load(right_tmp); self.instruction_manager.instr_Store(tmp); self.instruction_manager.instr_Shift(const_neg_1); self.instruction_manager.instr_Shift(const_1); self.instruction_manager.instr_Sub(tmp); self.instruction_manager .translate_jump(&label_step, VmInstruction::Jzero); self.instruction_manager.instr_Load(left_tmp); self.instruction_manager.instr_Add(result); self.instruction_manager.instr_Store(result); self.instruction_manager.translate_label(&label_step); self.instruction_manager.instr_Load(right_tmp); self.instruction_manager.instr_Shift(const_neg_1); self.instruction_manager .translate_jump(&label_end, VmInstruction::Jzero); self.instruction_manager.instr_Store(right_tmp); self.instruction_manager.instr_Load(left_tmp); self.instruction_manager.instr_Shift(const_1); self.instruction_manager.instr_Store(left_tmp); self.instruction_manager .translate_jump(&label_main, VmInstruction::Jump); self.instruction_manager.translate_label(&label_end); self.instruction_manager.instr_Load(result); self.instruction_manager.translate_label(&label_real_end); } fn translate_log(&mut self) { let num = self.get_or_register_temp("num"); let value = self.get_or_register_temp("value"); let const_neg_1 = self.get_constant_location(-1); let label_start = self.context.new_label(); let label_end = self.context.new_label(); self.instruction_manager.instr_Store(num); self.instruction_manager.instr_Sub(MemoryLocation(0)); self.instruction_manager.instr_Store(value); self.instruction_manager.translate_label(&label_start); 
self.instruction_manager.instr_Load(num); self.instruction_manager.instr_Shift(const_neg_1); self.instruction_manager.instr_Store(num); self.instruction_manager .translate_jump(&label_end, VmInstruction::Jzero); self.instruction_manager.instr_Load(value); self.instruction_manager.instr_Inc(); self.instruction_manager.instr_Store(value); self.instruction_manager .translate_jump(&label_start, VmInstruction::Jump); self.instruction_manager.translate_label(&label_end); self.instruction_manager.instr_Load(value); } fn translate_abs(&mut self, original: MemoryLocation) { let label_pos = self.context.new_label(); self.instruction_manager .translate_jump(&label_pos, VmInstruction::Jpos); self.translate_neg(original); self.instruction_manager.translate_label(&label_pos); } fn translate_abs_tmp(&mut self) { let tmp = self.get_or_register_temp("abs"); self.instruction_manager.instr_Store(tmp); self.translate_abs(tmp); } fn translate_neg(&mut self, original: MemoryLocation) { self.instruction_manager.instr_Sub(original); self.instruction_manager.instr_Sub(original); } fn translate_neg_tmp(&mut self) { let tmp = self.get_or_register_temp("neg"); self.instruction_manager.instr_Store(tmp); self.translate_neg(tmp); } fn translate_div_mod(&mut self, left: &Access, right: &Access, div: bool) { /* if(divisor == 0) return (0, 0); remain = dividend; // The left-hand side of division, i.e. what is being divided scaled_divisor = divisor; // The right-hand side of division result = 0; multiple = 1; while(scaled_divisor < dividend) { scaled_divisor = scaled_divisor + scaled_divisor; // Multiply by two. multiple = multiple + multiple; // Multiply by two. // You can also use binary shift-by-left here (i.e. multiple = multiple << 1). } do { if(remain >= scaled_divisor) { remain = remain - scaled_divisor; result = result + multiple; } scaled_divisor = scaled_divisor >> 1; // Divide by two. 
multiple = multiple >> 1; } while(multiple != 0); return (result, remain) */ if self.translate_optimized_div_mod(left, right, div) { return; } let label_while_condition = self.context.new_label(); let label_while_body = self.context.new_label(); // let label_after_while = self.context.new_label(); let label_after_if = self.context.new_label(); let label_do_body = self.context.new_label(); let label_after_do = self.context.new_label(); // let label_divisor_1 = self.context.new_label(); // let label_divisor_2 = self.context.new_label(); // let label_divisor_neg_1 = self.context.new_label(); // let label_divisor_neg_2 = self.context.new_label(); let label_end = self.context.new_label(); let const_1 = self.get_constant_location(1); let const_neg_1 = self.get_constant_location(-1); let original_dividend = self.get_or_register_temp("original_dividend"); let original_divisor = self.get_or_register_temp("original_divisor"); let dividend_abs = self.get_or_register_temp("dividend_abs"); let scaled_divisor = self.get_or_register_temp("scaled_divisor"); let remain = self.get_or_register_temp("remain"); let result = self.get_or_register_temp("div_result"); let multiple = self.get_or_register_temp("div_multiple"); self.translate_load_access(left); self.instruction_manager.instr_Store(original_dividend); self.translate_abs(original_dividend); self.instruction_manager.instr_Store(dividend_abs); self.instruction_manager.instr_Store(remain); self.translate_load_access(right); self.instruction_manager .translate_jump(&label_end, VmInstruction::Jzero); // self.instruction_manager.instr_Dec(); // self.instruction_manager.translate_jump(&label_divisor_1, VmInstruction::Jzero); // self.instruction_manager.instr_Dec(); // self.instruction_manager.translate_jump(&label_divisor_2, VmInstruction::Jzero); // self.instruction_manager.instr_Inc(); // self.instruction_manager.instr_Inc(); // self.instruction_manager.instr_Inc(); // self.instruction_manager.translate_jump(&label_divisor_neg_1, 
VmInstruction::Jzero); // self.instruction_manager.instr_Inc(); // self.instruction_manager.translate_jump(&label_divisor_neg_2, VmInstruction::Jzero); // self.instruction_manager.instr_Dec(); // self.instruction_manager.instr_Dec(); self.instruction_manager.instr_Store(original_divisor); self.translate_abs(original_divisor); self.instruction_manager.instr_Store(scaled_divisor); self.instruction_manager.instr_Sub(MemoryLocation(0)); self.instruction_manager.instr_Store(result); self.instruction_manager.instr_Inc(); self.instruction_manager.instr_Store(multiple); self.instruction_manager.instr_Load(scaled_divisor); self.instruction_manager .translate_jump(&label_while_condition, VmInstruction::Jump); self.instruction_manager.translate_label(&label_while_body); self.instruction_manager.instr_Load(multiple); self.instruction_manager.instr_Shift(const_1); self.instruction_manager.instr_Store(multiple); self.instruction_manager.instr_Load(scaled_divisor); self.instruction_manager.instr_Shift(const_1); self.instruction_manager.instr_Store(scaled_divisor); self.instruction_manager .translate_label(&label_while_condition); // assert(p0 == scaled_divisor) self.instruction_manager.instr_Sub(dividend_abs); self.instruction_manager .translate_jump(&label_while_body, VmInstruction::Jneg); // self.instruction_manager.translate_jump(&label_after_while, VmInstruction::Jump); // self.instruction_manager.translate_label(&label_after_while); self.instruction_manager.translate_label(&label_do_body); self.instruction_manager.instr_Load(remain); self.instruction_manager.instr_Sub(scaled_divisor); self.instruction_manager .translate_jump(&label_after_if, VmInstruction::Jneg); // assert(p0 == remain - scaled_divisor) self.instruction_manager.instr_Store(remain); self.instruction_manager.instr_Load(result); self.instruction_manager.instr_Add(multiple); self.instruction_manager.instr_Store(result); self.instruction_manager.translate_label(&label_after_if); 
self.instruction_manager.instr_Load(multiple); self.instruction_manager.instr_Shift(const_neg_1); self.instruction_manager .translate_jump(&label_after_do, VmInstruction::Jzero); self.instruction_manager.instr_Store(multiple); self.instruction_manager.instr_Load(scaled_divisor); self.instruction_manager.instr_Shift(const_neg_1); self.instruction_manager.instr_Store(scaled_divisor); self.instruction_manager .translate_jump(&label_do_body, VmInstruction::Jump); self.instruction_manager.translate_label(&label_after_do); if div { let label_remain_zero = self.context.new_label(); let label_dividend_neg = self.context.new_label(); let label_only_divisor_neg = self.context.new_label(); let label_both_neg = self.context.new_label(); self.instruction_manager.instr_Load(original_dividend); self.instruction_manager .translate_jump(&label_dividend_neg, VmInstruction::Jneg); // fallthrough // (+ / ?) self.instruction_manager.instr_Load(original_divisor); self.instruction_manager .translate_jump(&label_only_divisor_neg, VmInstruction::Jneg); // fallthrough // (+ / +) or (- / -) self.instruction_manager.translate_label(&label_both_neg); self.instruction_manager.instr_Load(result); self.instruction_manager .translate_jump(&label_end, VmInstruction::Jump); // (- / ?) 
self.instruction_manager .translate_label(&label_dividend_neg); self.instruction_manager.instr_Load(original_divisor); self.instruction_manager .translate_jump(&label_both_neg, VmInstruction::Jneg); // fallthrough // (+ / -) or (- / +) self.instruction_manager .translate_label(&label_only_divisor_neg); self.instruction_manager.instr_Load(remain); self.instruction_manager .translate_jump(&label_remain_zero, VmInstruction::Jzero); self.instruction_manager.instr_Load(result); self.translate_neg(result); self.instruction_manager.instr_Dec(); self.instruction_manager .translate_jump(&label_end, VmInstruction::Jump); self.instruction_manager.translate_label(&label_remain_zero); self.instruction_manager.instr_Load(result); self.translate_neg(result); self.instruction_manager .translate_jump(&label_end, VmInstruction::Jump); } else { let label_dividend_neg = self.context.new_label(); let label_only_divisor_neg = self.context.new_label(); let label_both_neg = self.context.new_label(); self.instruction_manager.instr_Load(remain); self.instruction_manager .translate_jump(&label_end, VmInstruction::Jzero); self.instruction_manager.instr_Load(original_dividend); self.instruction_manager .translate_jump(&label_dividend_neg, VmInstruction::Jneg); // fallthrough // (+ % ?) self.instruction_manager.instr_Load(original_divisor); self.instruction_manager .translate_jump(&label_only_divisor_neg, VmInstruction::Jneg); // fallthrough // (+ % +) self.instruction_manager.instr_Load(remain); self.instruction_manager .translate_jump(&label_end, VmInstruction::Jump); // (- % ?) 
self.instruction_manager .translate_label(&label_dividend_neg); self.instruction_manager.instr_Load(original_divisor); self.instruction_manager .translate_jump(&label_both_neg, VmInstruction::Jneg); // fallthrough // (- % +) self.instruction_manager.instr_Load(remain); self.instruction_manager.instr_Sub(original_divisor); self.translate_neg_tmp(); self.instruction_manager .translate_jump(&label_end, VmInstruction::Jump); // (+ % -) self.instruction_manager .translate_label(&label_only_divisor_neg); self.instruction_manager.instr_Load(remain); self.instruction_manager.instr_Add(original_divisor); self.instruction_manager .translate_jump(&label_end, VmInstruction::Jump); // (- % -) self.instruction_manager.translate_label(&label_both_neg); self.instruction_manager.instr_Load(remain); self.translate_neg(remain); self.instruction_manager .translate_jump(&label_end, VmInstruction::Jump); } self.instruction_manager.translate_label(&label_end); } fn translate_load_access(&mut self, access: &Access) { match access { Access::Constant(c) => { let loc = self.get_constant_location(c.value()); self.instruction_manager.instr_Load(loc); } Access::Variable(ind) => { let loc = self.memory.get_location(*ind); self.instruction_manager.instr_Load(loc); } Access::ArrayStatic(arr, c) => { let real_arr_loc = self.memory.storage[arr].1.expect("unallocated array"); self.instruction_manager .instr_Load(MemoryLocation((real_arr_loc + c.value()) as u64)); } Access::ArrayDynamic(arr, ind) => { let arr_loc = self.memory.get_location(*arr); let ind_loc = self.memory.get_location(*ind); self.instruction_manager.instr_Load(arr_loc); self.instruction_manager.instr_Add(ind_loc); self.instruction_manager.instr_Loadi(MemoryLocation(0)); } } } fn translate_store_access(&mut self, access: &Access) { match access { Access::Constant(_) => panic!("can't store into a constant"), Access::Variable(ind) => { let loc = self.memory.get_location(*ind); self.instruction_manager.instr_Store(loc); } 
Access::ArrayStatic(arr, c) => { let real_arr_loc = self.memory.storage[arr].1.expect("unallocated array"); self.instruction_manager .instr_Store(MemoryLocation((real_arr_loc + c.value()) as u64)); } Access::ArrayDynamic(arr, ind) => { let tmp1 = self.get_or_register_temp("store_tmp1"); self.instruction_manager.instr_Store(tmp1); let arr_loc = self.memory.get_location(*arr); let ind_loc = self.memory.get_location(*ind); self.instruction_manager.instr_Load(arr_loc); self.instruction_manager.instr_Add(ind_loc); let tmp2 = self.get_or_register_temp("store_tmp2"); self.instruction_manager.instr_Store(tmp2); self.instruction_manager.instr_Load(tmp1); self.instruction_manager.instr_Storei(tmp2); } } } fn translate_plus(&mut self, left: &Access, right: &Access) { let optimized = match (left, right) { (Access::Constant(c), other) | (other, Access::Constant(c)) => { match c.value() { 0 => { self.translate_load_access(other); true }, n if n > 0 && n <= 10 => { self.translate_load_access(other); for _ in 0..n { self.instruction_manager.instr_Inc(); } true } n if n < 0 && n >= -10 => { self.translate_load_access(other); for _ in 0..n.abs() { self.instruction_manager.instr_Dec(); } true } _ => false } } _ => false, }; if !optimized { self.translate_simple_bin_op(left, right, InstructionManager::instr_Add); } } fn translate_minus(&mut self, left: &Access, right: &Access) { let optimized = match (left, right) { (other, Access::Constant(c)) => { match c.value() { 0 => { self.translate_load_access(other); true }, n if n > 0 && n <= 10 => { self.translate_load_access(other); for _ in 0..n { self.instruction_manager.instr_Dec(); } true } n if n < 0 && n >= -10 => { self.translate_load_access(other); for _ in 0..n.abs() { self.instruction_manager.instr_Inc(); } true } _ => false } } (Access::Constant(c), other) => { match c.value() { 0 => { self.translate_load_access(other); self.translate_neg_tmp(); true } _ => false, } } _ => false, }; if !optimized { 
self.translate_simple_bin_op(left, right, InstructionManager::instr_Sub); } } fn translate_simple_bin_op(&mut self, left: &Access, right: &Access, op: fn(&mut InstructionManager, MemoryLocation)) { self.translate_load_access(right); let tmp = self.get_or_register_temp("bin_op"); self.instruction_manager.instr_Store(tmp); self.translate_load_access(left); op(&mut self.instruction_manager, tmp); } pub fn translate(mut self) -> Vec<VmInstruction> { let simple_constants = vec![ Constant(0), Constant(1), Constant(-1), Constant(2), Constant(-2), ]; for c in &simple_constants { self.context.register_constant(c.clone()); } self.allocate_memory(); self.generate_constants(); let ir_instructions = self.context.instructions().to_vec(); for instruction in &ir_instructions { match instruction { Instruction::Label { label } => { self.instruction_manager.translate_label(label); } Instruction::Load { access } => self.translate_load_access(access), Instruction::PreStore { access } => { match access { Access::Constant(_) | Access::Variable(_) | Access::ArrayStatic(_, _) => (), Access::ArrayDynamic(_, _) => (), // unimplemented!(), } } Instruction::Store { access } => self.translate_store_access(access), Instruction::Operation { left, op, right } => { match op { OperationType::Plus => { self.translate_plus(left, right) } OperationType::Minus => { self.translate_minus(left, right) } OperationType::Shift => { self.translate_simple_bin_op(left, right, InstructionManager::instr_Shift) } OperationType::Times => { self.translate_multiplication(left, right); } OperationType::Div => { self.translate_div_mod(left, right, true); } OperationType::Mod => { self.translate_div_mod(left, right, false); } } } Instruction::Jump { label } => { self.instruction_manager .translate_jump(label, VmInstruction::Jump); } Instruction::JNegative { label } => { self.instruction_manager .translate_jump(label, VmInstruction::Jneg); } Instruction::JPositive { label } => { self.instruction_manager 
.translate_jump(label, VmInstruction::Jpos); } Instruction::JZero { label } => { self.instruction_manager .translate_jump(label, VmInstruction::Jzero); } Instruction::Get => self.instruction_manager.instr_Get(), Instruction::Put => self.instruction_manager.instr_Put(), } } self.instruction_manager.instr_Halt(); if cfg!(debug_assertions) { let vars = self .context .variables() .iter() .map(|v| (v, self.memory.get_location(v.id()))); for x in vars { println!("{:?}", x); } println!("{:?}", self.instruction_manager.label_positions); } self.instruction_manager.target_instructions } }
// Copyright 2020 IOTA Stiftung // SPDX-License-Identifier: Apache-2.0 #![allow(dead_code)] use wasmlib::*; pub const SC_NAME: &str = "dividend"; pub const SC_HNAME: ScHname = ScHname(0xcce2e239); pub const PARAM_ADDRESS: &str = "address"; pub const PARAM_FACTOR: &str = "factor"; pub const VAR_MEMBERS: &str = "members"; pub const VAR_TOTAL_FACTOR: &str = "totalFactor"; pub const FUNC_DIVIDE: &str = "divide"; pub const FUNC_MEMBER: &str = "member"; pub const HFUNC_DIVIDE: ScHname = ScHname(0xc7878107); pub const HFUNC_MEMBER: ScHname = ScHname(0xc07da2cb);
//! Contexts-related tests. #![feature(plugin)] #![plugin(stainless)] describe! memory { // TODO: Test memory usage / allocations / freeing }
#[doc = "Reader of register LTDC_GC2R"] pub type R = crate::R<u32, super::LTDC_GC2R>; #[doc = "Reader of field `EDCEN`"] pub type EDCEN_R = crate::R<bool, bool>; #[doc = "Reader of field `STSAEN`"] pub type STSAEN_R = crate::R<bool, bool>; #[doc = "Reader of field `DVAEN`"] pub type DVAEN_R = crate::R<bool, bool>; #[doc = "Reader of field `DPAEN`"] pub type DPAEN_R = crate::R<bool, bool>; #[doc = "Reader of field `BW`"] pub type BW_R = crate::R<u8, u8>; #[doc = "Reader of field `EDCA`"] pub type EDCA_R = crate::R<bool, bool>; impl R { #[doc = "Bit 0 - EDCEN"] #[inline(always)] pub fn edcen(&self) -> EDCEN_R { EDCEN_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - STSAEN"] #[inline(always)] pub fn stsaen(&self) -> STSAEN_R { STSAEN_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - DVAEN"] #[inline(always)] pub fn dvaen(&self) -> DVAEN_R { DVAEN_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - DPAEN"] #[inline(always)] pub fn dpaen(&self) -> DPAEN_R { DPAEN_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bits 4:6 - BW"] #[inline(always)] pub fn bw(&self) -> BW_R { BW_R::new(((self.bits >> 4) & 0x07) as u8) } #[doc = "Bit 7 - EDCA"] #[inline(always)] pub fn edca(&self) -> EDCA_R { EDCA_R::new(((self.bits >> 7) & 0x01) != 0) } }
pub mod http_check; pub mod http_analyzer;
// NOTE(review): svd2rust-generated register accessor (see the docs.rs link in
// the DMACR_SPEC doc attribute below).
#[doc = "Register `DMACR` reader"]
pub type R = crate::R<DMACR_SPEC>;
#[doc = "Register `DMACR` writer"]
pub type W = crate::W<DMACR_SPEC>;
#[doc = "Field `DIEN` reader - DMA input enable"]
pub type DIEN_R = crate::BitReader;
#[doc = "Field `DIEN` writer - DMA input enable"]
pub type DIEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DOEN` reader - DMA output enable"]
pub type DOEN_R = crate::BitReader;
#[doc = "Field `DOEN` writer - DMA output enable"]
pub type DOEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 0 - DMA input enable"]
    #[inline(always)]
    pub fn dien(&self) -> DIEN_R {
        DIEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - DMA output enable"]
    #[inline(always)]
    pub fn doen(&self) -> DOEN_R {
        DOEN_R::new(((self.bits >> 1) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - DMA input enable"]
    #[inline(always)]
    #[must_use]
    pub fn dien(&mut self) -> DIEN_W<DMACR_SPEC, 0> {
        DIEN_W::new(self)
    }
    #[doc = "Bit 1 - DMA output enable"]
    #[inline(always)]
    #[must_use]
    pub fn doen(&mut self) -> DOEN_W<DMACR_SPEC, 1> {
        DOEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DMA control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dmacr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dmacr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DMACR_SPEC;
impl crate::RegisterSpec for DMACR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`dmacr::R`](R) reader structure"]
impl crate::Readable for DMACR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`dmacr::W`](W) writer structure"]
impl crate::Writable for DMACR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DMACR to value 0"]
impl crate::Resettable for DMACR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use crate::utils::wait_until; use crate::{Net, Spec}; use ckb_app_config::CKBAppConfig; use log::info; pub struct Discovery; impl Spec for Discovery { crate::name!("discovery"); crate::setup!(num_nodes: 3); fn run(&self, net: &mut Net) { let node0_id = net.nodes[0].node_id(); let node2 = &net.nodes[2]; let rpc_client = node2.rpc_client(); info!("Waiting for discovering"); let ret = wait_until(10, || { rpc_client .get_peers() .iter() .any(|peer| peer.node_id == node0_id) }); assert!( ret, "the address of node0 should be discovered by node2 and connected" ); } fn modify_ckb_config(&self) -> Box<dyn Fn(&mut CKBAppConfig) -> ()> { // enable outbound peer service to connect discovered peers Box::new(|config| { config.network.connect_outbound_interval_secs = 1; config.network.discovery_local_address = true; }) } }
use super::bicycle::BicycleRepoInterface;
use crate::{
    bikes::BicycleDomain,
    datasource::db,
    diesel::RunQueryDsl,
    error::Error,
    schema::{bicycles, bicycles::dsl::*},
};
use diesel::{ExpressionMethods, QueryDsl};
use serde::{Deserialize, Serialize};

/// Insertable row for the `bicycles` table; the id is assigned by the
/// database on insert.
#[derive(Serialize, Deserialize, Insertable)]
#[table_name = "bicycles"]
pub struct NewBicycle {
    pub wheel_size: i32,
    pub description: String,
}

impl NewBicycle {
    // Maps a not-yet-persisted domain object onto an insertable row.
    fn from_domain(origin: BicycleDomain) -> Self {
        Self {
            description: origin.description,
            wheel_size: origin.wheel_size,
        }
    }
}

/// Full row mapping used for reads and updates.
#[derive(Serialize, Deserialize, Queryable, Identifiable, AsChangeset)]
struct Bicycle {
    #[primary_key]
    pub id: i32,
    pub wheel_size: i32,
    pub description: String,
}

impl Bicycle {
    // NOTE(review): unwraps `origin.id`, so callers must only pass persisted
    // domain objects (id = Some) here.
    fn from_domain(origin: &BicycleDomain) -> Self {
        Self {
            id: origin.id.unwrap(),
            description: String::from(&origin.description),
            wheel_size: origin.wheel_size,
        }
    }
}

impl BicycleDomain {
    // Row -> domain conversion; the persisted id becomes `Some(id)`.
    fn from_bicycle(origin: Bicycle) -> Self {
        Self {
            id: Some(origin.id),
            description: origin.description,
            wheel_size: origin.wheel_size,
        }
    }
}

/// Postgres-backed implementation of the bicycle repository.
#[derive(Clone)]
pub struct BicycleRepoPostgres {}

impl BicycleRepoInterface for BicycleRepoPostgres {
    fn create(&self, bike: BicycleDomain) -> Result<BicycleDomain, Error> {
        let conn = db::connection()?;
        let result = diesel::insert_into(bicycles)
            .values(NewBicycle::from_domain(bike))
            .get_result(&conn)?;
        Ok(BicycleDomain::from_bicycle(result))
    }

    fn update(&self, bike: BicycleDomain) -> Result<BicycleDomain, Error> {
        let conn = db::connection()?;
        let to_update = Bicycle::from_domain(&bike);
        let updated = diesel::update(bicycles.filter(bicycles::id.eq(to_update.id)))
            .set(to_update)
            .get_result(&conn)?;
        Ok(BicycleDomain::from_bicycle(updated))
    }

    fn delete(&self, bike_id: i32) -> Result<bool, Error> {
        let conn = db::connection()?;
        // `execute` returns the affected-row count; `true` means a row was
        // actually deleted.
        let result = diesel::delete(bicycles.filter(bicycles::id.eq(bike_id))).execute(&conn)?;
        Ok(result > 0)
    }

    fn find_all(&self) -> Result<Vec<BicycleDomain>, Error> {
        let conn = db::connection()?;
        let db_results = bicycles.load::<Bicycle>(&conn)?;
        let results: Vec<BicycleDomain> = db_results
            .into_iter()
            .map(|db_data| BicycleDomain {
                id: Some(db_data.id),
                description: db_data.description,
                wheel_size: db_data.wheel_size,
            })
            .collect();
        Ok(results)
    }

    fn find_by_id(&self, bike_id: i32) -> Result<BicycleDomain, Error> {
        let conn = db::connection()?;
        let db_result = bicycles.find(bike_id).first(&conn)?;
        Ok(BicycleDomain::from_bicycle(db_result))
    }
}
use std::convert::TryFrom;

static FILENAME: &str = "input/data";

// Grid cell markers from the puzzle input; any other character ('.') is
// floor and passes through every generation unchanged.
const OCCUPIED: char = '#';
const EMPTY: char = 'L';

type Matrix = Vec<Vec<char>>;

trait MatrixMethods {
    fn get_char(&self, x: isize, y: isize) -> Option<char>;
}

impl MatrixMethods for Matrix {
    /// Bounds-checked cell lookup; `None` when (x, y) lies outside the grid.
    fn get_char(&self, x: isize, y: isize) -> Option<char> {
        fn in_bounds(matrix: &Matrix, x: isize, y: isize) -> bool {
            return (y >= 0 && y < matrix.len() as isize)
                && (x >= 0 && x < matrix[y as usize].len() as isize);
        }
        if !in_bounds(self, x, y) {
            return None;
        }
        Some(self[y as usize][x as usize])
    }
}

fn main() {
    let data = read_input(FILENAME);
    println!("part one: {}", part_one(&data));
    println!("part two: {}", part_two(&data));
}

// Part one: direct neighbours, seats empty at >= 4 occupied.
fn part_one(data: &str) -> usize {
    find_occupied(data, 4, occupied_adjacent_seats)
}

// Part two: line-of-sight neighbours, seats empty at >= 5 occupied.
fn part_two(data: &str) -> usize {
    find_occupied(data, 5, occupied_seats)
}

/// Runs the seating automaton until a fixed point is reached and returns the
/// number of occupied seats at equilibrium.
fn find_occupied(
    data: &str,
    occupied_seats_threshold: usize,
    occupied_seats_fn: fn(y: usize, x: usize, matrix: &Matrix) -> usize,
) -> usize {
    let mut seatings = data
        .split('\n')
        .map(|row| row.chars().collect())
        .collect::<Matrix>();
    let y_size = seatings.len();
    let x_size = seatings[0].len();
    let mut count = 0;
    let mut equilibrium = false;
    while !equilibrium {
        seatings = shift(
            &seatings,
            y_size,
            x_size,
            occupied_seats_threshold,
            occupied_seats_fn,
        );
        // Equilibrium is detected when the occupied-seat count stops changing
        // between generations.
        let occupied = seatings.iter().flatten().fold(0, |count, &seat| {
            count + if seat == OCCUPIED { 1 } else { 0 }
        });
        equilibrium = occupied == count;
        count = occupied;
    }
    count
}

/// Computes one generation of the automaton into a fresh matrix.
fn shift(
    matrix: &Matrix,
    y_size: usize,
    x_size: usize,
    occupied_seats_threshold: usize,
    occupied_seats_fn: fn(y: usize, x: usize, matrix: &Matrix) -> usize,
) -> Matrix {
    let mut next_matrix: Vec<Vec<char>> = Vec::new();
    for y in 0..y_size {
        let mut nxt_vec: Vec<char> = Vec::new();
        for x in 0..x_size {
            let occupied_seats = occupied_seats_fn(y, x, &matrix);
            match matrix[y][x] {
                // An empty seat fills when it sees no occupied neighbours.
                EMPTY => {
                    if occupied_seats == 0 {
                        nxt_vec.push(OCCUPIED);
                    } else {
                        nxt_vec.push(matrix[y][x])
                    }
                }
                // An occupied seat empties at or above the threshold.
                OCCUPIED => {
                    if occupied_seats >= occupied_seats_threshold {
                        nxt_vec.push(EMPTY);
                    } else {
                        nxt_vec.push(matrix[y][x])
                    }
                }
                _ => nxt_vec.push(matrix[y][x]),
            }
        }
        next_matrix.push(nxt_vec);
    }
    next_matrix
}

/// Counts occupied seats among the eight direct neighbours (part one rule).
fn occupied_adjacent_seats(y: usize, x: usize, matrix: &Matrix) -> usize {
    let ix = isize::try_from(x).unwrap();
    let iy = isize::try_from(y).unwrap();
    let mut occupied = 0;
    for dy in -1..=1 {
        for dx in -1..=1 {
            if dx == 0 && dy == 0 {
                continue;
            }
            if let Some(c) = matrix.get_char(ix + dx, iy + dy) {
                if c == OCCUPIED {
                    occupied += 1
                }
            }
        }
    }
    occupied
}

/// Counts occupied seats visible along the eight rays (part two rule): the
/// first seat seen in a direction decides; floor is looked through.
fn occupied_seats(y: usize, x: usize, matrix: &Matrix) -> usize {
    let ix = isize::try_from(x).unwrap();
    let iy = isize::try_from(y).unwrap();
    let mut occupied = 0;
    // Walks one ray recursively until a seat or the grid edge is found.
    fn occupied_seat(y: isize, x: isize, y_dir: isize, x_dir: isize, matrix: &Matrix) -> usize {
        match matrix.get_char(x, y) {
            Some(c) => match c {
                EMPTY => return 0,
                OCCUPIED => return 1,
                _ => return occupied_seat(y + y_dir, x + x_dir, y_dir, x_dir, matrix),
            },
            None => return 0,
        }
    };
    for dy in -1..=1 {
        for dx in -1..=1 {
            if dx == 0 && dy == 0 {
                continue;
            }
            occupied += occupied_seat(iy + dy, ix + dx, dy, dx, matrix);
        }
    }
    occupied
}

fn read_input(filename: &str) -> String {
    let mut data = std::fs::read_to_string(filename).expect("could not read file");
    // Drop the trailing newline. NOTE(review): panics on an empty file and
    // assumes the file ends with a single one-byte newline.
    data.truncate(data.len() - 1);
    data
}

// NOTE(review): missing `#[cfg(test)]`, so this module is compiled into
// non-test builds as well.
mod tests {
    #[test]
    fn test_occupied_seats() {
        assert_eq!(
            8,
            super::occupied_seats(
                4,
                3,
                &vec![
                    vec!['.', '.', '.', '.', '.', '.', '.', '#', '.'],
                    vec!['.', '.', '.', '#', '.', '.', '.', '.', '.'],
                    vec!['.', '#', '.', '.', '.', '.', '.', '.', '.'],
                    vec!['.', '.', '.', '.', '.', '.', '.', '.', '.'],
                    vec!['.', '.', '#', 'L', '.', '.', '.', '.', '#'],
                    vec!['.', '.', '.', '.', '#', '.', '.', '.', '.'],
                    vec!['.', '.', '.', '.', '.', '.', '.', '.', '.'],
                    vec!['#', '.', '.', '.', '.', '.', '.', '.', '.'],
                    vec!['.', '.', '.', '#', '.', '.', '.', '.', '.'],
                ],
            )
        );
    }

    #[test]
    fn test_part_one() {
        let data = super::read_input("input/data");
        assert_eq!(2406, super::part_one(&data));
    }

    #[test]
    fn test_part_two() {
        let data = super::read_input("input/data");
        assert_eq!(2149, super::part_two(&data));
    }
}
use anyhow::Result; use pcd_rs::{PcdDeserialize, Reader}; #[derive(PcdDeserialize)] pub struct Point { pub x: f32, pub y: f32, pub z: f32, pub rgb: f32, } pub fn main() -> Result<()> { let reader = Reader::open("test_files/ascii.pcd")?; let points: Result<Vec<Point>> = reader.collect(); println!("{} points found", points?.len()); Ok(()) }
pub mod print;
pub mod branching_simplify; pub mod const_folding; pub mod dead_code_eliminator; pub mod loop_unroll; pub mod util;
pub mod blocks; pub mod movies; pub mod requests;
use crate::crypto::multi_party_schnorr::Parameters; use crate::federation::{Federation, Federations}; use crate::net::SignerID; use crate::rpc::TapyrusApi; use std::convert::TryInto; use std::sync::Arc; use tapyrus::{Address, PublicKey}; pub struct NodeParameters<T: TapyrusApi> { pub rpc: std::sync::Arc<T>, pub address: Address, /// Own Signer ID. Actually it is signer own public key. pub signer_id: SignerID, pub round_duration: u64, pub round_limit: u64, pub skip_waiting_ibd: bool, federations: Federations, } impl<T: TapyrusApi> NodeParameters<T> { pub fn new( to_address: Address, public_key: PublicKey, rpc: T, round_duration: u64, round_limit: u64, skip_waiting_ibd: bool, federations: Federations, ) -> NodeParameters<T> { let signer_id = SignerID { pubkey: public_key }; NodeParameters { rpc: Arc::new(rpc), address: to_address, signer_id, round_duration, round_limit, skip_waiting_ibd, federations, } } pub fn get_federation_by_block_height(&self, block_height: u64) -> &Federation { self.federations.get_by_block_height(block_height) } pub fn get_signer_id_by_index(&self, block_height: u64, index: usize) -> SignerID { SignerID { pubkey: self.pubkey_list(block_height)[index].clone(), } } pub fn sharing_params(&self, block_height: u64) -> Parameters { let t = (self.threshold(block_height) - 1 as u8).try_into().unwrap(); let n: usize = (self.pubkey_list(block_height).len() as u8) .try_into() .unwrap(); Parameters { threshold: t, share_count: n.clone(), } } pub fn sort_publickey(pubkeys: &mut Vec<PublicKey>) { pubkeys.sort_by(|a, b| { let a = a.key.serialize(); let b = b.key.serialize(); Ord::cmp(&a[..], &b[..]) }); } pub fn threshold(&self, block_height: u64) -> u8 { let federation = self.get_federation_by_block_height(block_height); federation .threshold() .expect("threshold should not be None") } pub fn self_node_index(&self, block_height: u64) -> usize { let federation = self.get_federation_by_block_height(block_height); federation.node_index() } pub fn 
pubkey_list(&self, block_height: u64) -> Vec<PublicKey> { let federation = self.get_federation_by_block_height(block_height); federation.signers().iter().map(|s| s.pubkey).collect() } pub fn aggregated_public_key(&self, block_height: u64) -> PublicKey { let federation = self.get_federation_by_block_height(block_height); federation.aggregated_public_key() } } #[cfg(test)] mod tests { use crate::signer_node::NodeParameters; use crate::tests::helper::keys::TEST_KEYS; use crate::tests::helper::rpc::MockRpc; use tapyrus::PublicKey; #[test] fn test_sort_publickey() { let mut pubkeys: Vec<PublicKey> = TEST_KEYS.unsorted_pubkeys(); NodeParameters::<MockRpc>::sort_publickey(&mut pubkeys); assert_eq!(pubkeys, TEST_KEYS.pubkeys()); } }
// Copyright 2018-2020 Parity Technologies (UK) Ltd.
//
// Licensed under the Apache License, Version 2.0 or MIT license, at your option.
//
// A copy of the Apache License, Version 2.0 is included in the software as
// LICENSE-APACHE and a copy of the MIT license is included in the software
// as LICENSE-MIT. You may also obtain a copy of the Apache License, Version 2.0
// at https://www.apache.org/licenses/LICENSE-2.0 and a copy of the MIT license
// at https://opensource.org/licenses/MIT.

#![cfg_attr(not(feature = "std"), no_std)]

use core::{fmt, hash::{BuildHasherDefault, Hasher}, marker::PhantomData};

/// A `HashMap` with an integer domain, using `NoHashHasher` to perform no hashing at all.
///
/// # Examples
///
/// See [`IsEnabled`] for use with custom types.
///
/// ```
/// use nohash::IntMap;
///
/// let mut m: IntMap<u32, bool> = IntMap::default();
///
/// m.insert(0, false);
/// m.insert(1, true);
///
/// assert!(m.contains_key(&0));
/// assert!(m.contains_key(&1));
/// ```
#[cfg(feature = "std")]
pub type IntMap<K, V> = std::collections::HashMap<K, V, BuildNoHashHasher<K>>;

/// A `HashSet` of integers, using `NoHashHasher` to perform no hashing at all.
///
/// # Examples
///
/// See [`IsEnabled`] for use with custom types.
///
/// ```
/// use nohash::IntSet;
///
/// let mut m = IntSet::default();
///
/// m.insert(0u32);
/// m.insert(1u32);
///
/// assert!(m.contains(&0));
/// assert!(m.contains(&1));
/// ```
#[cfg(feature = "std")]
pub type IntSet<T> = std::collections::HashSet<T, BuildNoHashHasher<T>>;

/// An alias for `BuildHasherDefault` for use with `NoHashHasher`.
///
/// # Examples
///
/// See also [`IntMap`] and [`IntSet`] for some easier usage examples.
///
/// ```
/// use nohash::BuildNoHashHasher;
/// use std::collections::HashMap;
///
/// let mut m: HashMap::<u8, char, BuildNoHashHasher<u8>> =
///     HashMap::with_capacity_and_hasher(2, BuildNoHashHasher::default());
///
/// m.insert(0, 'a');
/// m.insert(1, 'b');
///
/// assert_eq!(Some(&'a'), m.get(&0));
/// assert_eq!(Some(&'b'), m.get(&1));
/// ```
pub type BuildNoHashHasher<T> = BuildHasherDefault<NoHashHasher<T>>;

/// For an enabled type `T`, a `NoHashHasher<T>` implements `std::hash::Hasher` and
/// uses the value set by one of the `write_{u8, u16, u32, u64, usize, i8, i16, i32,
/// i64, isize}` methods as its hash output.
///
/// `NoHashHasher` does not implement any hashing algorithm and can only be used
/// with types which can be mapped directly to a numeric value. Out of the box
/// `NoHashHasher` is enabled for `u8`, `u16`, `u32`, `u64`, `usize`, `i8`, `i16`,
/// `i32`, `i64`, and `isize`. Types that should be used with `NoHashHasher` need
/// to implement [`IsEnabled`] and by doing so assert that their `Hash` impl invokes
/// *only one* of the `Hasher::write_{u8, u16, u32, u64, usize, i8, i16, i32, i64,
/// isize}` methods *exactly once*.
///
/// # Examples
///
/// See also [`BuildNoHashHasher`], [`IntMap`] and [`IntSet`] for some easier
/// usage examples. See [`IsEnabled`] for use with custom types.
///
/// ```
/// use nohash::NoHashHasher;
/// use std::{collections::HashMap, hash::BuildHasherDefault};
///
/// let mut m: HashMap::<u8, char, BuildHasherDefault<NoHashHasher<u8>>> =
///     HashMap::with_capacity_and_hasher(2, BuildHasherDefault::default());
///
/// m.insert(0, 'a');
/// m.insert(1, 'b');
///
/// assert_eq!(Some(&'a'), m.get(&0));
/// assert_eq!(Some(&'b'), m.get(&1));
/// ```
// Debug builds carry an extra `bool` (field 1) recording whether a `write_*`
// has already happened, so a second write can be detected.
#[cfg(debug_assertions)]
pub struct NoHashHasher<T>(u64, bool, PhantomData<T>);

#[cfg(not(debug_assertions))]
pub struct NoHashHasher<T>(u64, PhantomData<T>);

impl<T> fmt::Debug for NoHashHasher<T> {
    // Debug builds also show the "already written" flag.
    #[cfg(debug_assertions)]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("NoHashHasher").field(&self.0).field(&self.1).finish()
    }

    #[cfg(not(debug_assertions))]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("NoHashHasher").field(&self.0).finish()
    }
}

impl<T> Default for NoHashHasher<T> {
    #[cfg(debug_assertions)]
    fn default() -> Self {
        NoHashHasher(0, false, PhantomData)
    }

    #[cfg(not(debug_assertions))]
    fn default() -> Self {
        NoHashHasher(0, PhantomData)
    }
}

// Manual Clone/Copy impls: deriving would add `T: Clone` / `T: Copy` bounds
// even though `T` is only a phantom parameter.
impl<T> Clone for NoHashHasher<T> {
    #[cfg(debug_assertions)]
    fn clone(&self) -> Self {
        NoHashHasher(self.0, self.1, self.2)
    }

    #[cfg(not(debug_assertions))]
    fn clone(&self) -> Self {
        NoHashHasher(self.0, self.1)
    }
}

impl<T> Copy for NoHashHasher<T> {}

/// Types which are safe to use with `NoHashHasher`.
///
/// This marker trait is an option for types to enable themselves for use
/// with `NoHashHasher`. In order to be safe, the `Hash` impl needs to
/// satisfy the following constraint:
///
/// > **One of the `Hasher::write_{u8,u16,u32,u64,usize,i8,i16,i32,i64,isize}`
/// methods is invoked exactly once.**
///
/// The best way to ensure this is to write a custom `Hash` impl even when
/// deriving `Hash` for a simple newtype of a single type which itself
/// implements `IsEnabled` may work as well.
///
/// # Example
///
/// ```
/// #[derive(PartialEq, Eq)]
/// struct SomeType(u32);
///
/// impl std::hash::Hash for SomeType {
///     fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) {
///         hasher.write_u32(self.0)
///     }
/// }
///
/// impl nohash::IsEnabled for SomeType {}
///
/// let mut m = nohash::IntMap::default();
///
/// m.insert(SomeType(1), 't');
/// m.insert(SomeType(0), 'f');
///
/// assert_eq!(Some(&'t'), m.get(&SomeType(1)));
/// assert_eq!(Some(&'f'), m.get(&SomeType(0)));
/// ```
pub trait IsEnabled {}

impl IsEnabled for u8 {}
impl IsEnabled for u16 {}
impl IsEnabled for u32 {}
impl IsEnabled for u64 {}
impl IsEnabled for usize {}
impl IsEnabled for i8 {}
impl IsEnabled for i16 {}
impl IsEnabled for i32 {}
impl IsEnabled for i64 {}
impl IsEnabled for isize {}

// Release builds: each `write_*` simply records the value as the final hash.
// Raw-byte `write` is unsupported because an arbitrary byte slice cannot be
// mapped to a single numeric value.
#[cfg(not(debug_assertions))]
impl<T: IsEnabled> Hasher for NoHashHasher<T> {
    fn write(&mut self, _: &[u8]) {
        panic!("Invalid use of NoHashHasher")
    }

    fn write_u8(&mut self, n: u8) { self.0 = u64::from(n) }
    fn write_u16(&mut self, n: u16) { self.0 = u64::from(n) }
    fn write_u32(&mut self, n: u32) { self.0 = u64::from(n) }
    fn write_u64(&mut self, n: u64) { self.0 = n }
    fn write_usize(&mut self, n: usize) { self.0 = n as u64 }
    fn write_i8(&mut self, n: i8) { self.0 = n as u64 }
    fn write_i16(&mut self, n: i16) { self.0 = n as u64 }
    fn write_i32(&mut self, n: i32) { self.0 = n as u64 }
    fn write_i64(&mut self, n: i64) { self.0 = n as u64 }
    fn write_isize(&mut self, n: isize) { self.0 = n as u64 }
    fn finish(&self) -> u64 { self.0 }
}

// Debug builds: additionally assert that at most one `write_*` call happens,
// catching `Hash` impls that violate the `IsEnabled` contract.
#[cfg(debug_assertions)]
impl<T: IsEnabled> Hasher for NoHashHasher<T> {
    fn write(&mut self, _: &[u8]) {
        panic!("Invalid use of NoHashHasher")
    }

    fn write_u8(&mut self, n: u8) {
        assert!(!self.1, "NoHashHasher: second write attempt detected.");
        self.0 = u64::from(n);
        self.1 = true
    }

    fn write_u16(&mut self, n: u16) {
        assert!(!self.1, "NoHashHasher: second write attempt detected.");
        self.0 = u64::from(n);
        self.1 = true
    }

    fn write_u32(&mut self, n: u32) {
        assert!(!self.1, "NoHashHasher: second write attempt detected.");
        self.0 = u64::from(n);
        self.1 = true
    }

    fn write_u64(&mut self, n: u64) {
        assert!(!self.1, "NoHashHasher: second write attempt detected.");
        self.0 = n;
        self.1 = true
    }

    fn write_usize(&mut self, n: usize) {
        assert!(!self.1, "NoHashHasher: second write attempt detected.");
        self.0 = n as u64;
        self.1 = true
    }

    fn write_i8(&mut self, n: i8) {
        assert!(!self.1, "NoHashHasher: second write attempt detected.");
        self.0 = n as u64;
        self.1 = true
    }

    fn write_i16(&mut self, n: i16) {
        assert!(!self.1, "NoHashHasher: second write attempt detected.");
        self.0 = n as u64;
        self.1 = true
    }

    fn write_i32(&mut self, n: i32) {
        assert!(!self.1, "NoHashHasher: second write attempt detected.");
        self.0 = n as u64;
        self.1 = true
    }

    fn write_i64(&mut self, n: i64) {
        assert!(!self.1, "NoHashHasher: second write attempt detected.");
        self.0 = n as u64;
        self.1 = true
    }

    fn write_isize(&mut self, n: isize) {
        assert!(!self.1, "NoHashHasher: second write attempt detected.");
        self.0 = n as u64;
        self.1 = true
    }

    fn finish(&self) -> u64 { self.0 }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Every integer type's `write_*` must surface the written value as the
    // final hash.
    #[test]
    fn ok() {
        let mut h1 = NoHashHasher::<u8>::default();
        h1.write_u8(42);
        assert_eq!(42, h1.finish());

        let mut h2 = NoHashHasher::<u16>::default();
        h2.write_u16(42);
        assert_eq!(42, h2.finish());

        let mut h3 = NoHashHasher::<u32>::default();
        h3.write_u32(42);
        assert_eq!(42, h3.finish());

        let mut h4 = NoHashHasher::<u64>::default();
        h4.write_u64(42);
        assert_eq!(42, h4.finish());

        let mut h5 = NoHashHasher::<usize>::default();
        h5.write_usize(42);
        assert_eq!(42, h5.finish());

        let mut h6 = NoHashHasher::<i8>::default();
        h6.write_i8(42);
        assert_eq!(42, h6.finish());

        let mut h7 = NoHashHasher::<i16>::default();
        h7.write_i16(42);
        assert_eq!(42, h7.finish());

        let mut h8 = NoHashHasher::<i32>::default();
        h8.write_i32(42);
        assert_eq!(42, h8.finish());

        let mut h9 = NoHashHasher::<i64>::default();
        h9.write_i64(42);
        assert_eq!(42, h9.finish());

        let mut h10 = NoHashHasher::<isize>::default();
        h10.write_isize(42);
        assert_eq!(42, h10.finish())
    }

    // The double-write guard only exists (and panics) in debug builds.
    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn u8_double_usage() {
        let mut h = NoHashHasher::<u8>::default();
        h.write_u8(42);
        h.write_u8(43);
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn u16_double_usage() {
        let mut h = NoHashHasher::<u16>::default();
        h.write_u16(42);
        h.write_u16(43);
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn u32_double_usage() {
        let mut h = NoHashHasher::<u32>::default();
        h.write_u32(42);
        h.write_u32(43);
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn u64_double_usage() {
        let mut h = NoHashHasher::<u64>::default();
        h.write_u64(42);
        h.write_u64(43);
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn usize_double_usage() {
        let mut h = NoHashHasher::<usize>::default();
        h.write_usize(42);
        h.write_usize(43);
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn i8_double_usage() {
        let mut h = NoHashHasher::<i8>::default();
        h.write_i8(42);
        h.write_i8(43);
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn i16_double_usage() {
        let mut h = NoHashHasher::<i16>::default();
        h.write_i16(42);
        h.write_i16(43);
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn i32_double_usage() {
        let mut h = NoHashHasher::<i32>::default();
        h.write_i32(42);
        h.write_i32(43);
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn i64_double_usage() {
        let mut h = NoHashHasher::<i64>::default();
        h.write_i64(42);
        h.write_i64(43);
    }

    #[cfg(debug_assertions)]
    #[test]
    #[should_panic]
    fn isize_double_usage() {
        let mut h = NoHashHasher::<isize>::default();
        h.write_isize(42);
        h.write_isize(43);
    }
}
use crate::protocol::Command; use bytes::{ Bytes, BytesMut, BufMut }; use std::sync::Arc; pub fn generate_response(val: Bytes, erorr_code: u16) -> crate::Result<Bytes> { /* Generate status code and header for the response. */ let resp_str = format!("HTTP/1.1 {}\r\nContent-Length: {}\r\n\r\n", erorr_code, val.len()); /* Now add the actual response body. */ let resp_bin = resp_str.as_bytes(); let mut response = BytesMut::with_capacity(resp_bin.len() + val.len() + 5); response.put(resp_str.as_bytes()); response.put(val); Ok(response.freeze()) } pub fn generate_request(cmd: Arc<Command>) -> crate::Result<Option<Bytes>> { let body = match cmd.as_ref() { Command::Set {..} => { generate_set_request_body(cmd)? }, Command::ReplPing {..} => { generate_repl_ping_request_body(cmd)? } _ => { return Ok(None); } }; let header = format!("POST / HTTP/1.1\r\nContent-Type: application/json\r\nConnection: keep-alive\r\nContent-Length: {}\r\n\r\n", body.len()); let request = [Bytes::from(header), body].concat(); Ok(Some(Bytes::from(request))) } fn generate_set_request_body(cmd: Arc<Command>) -> crate::Result<Bytes> { if let Command::Set{ key, value, id } = cmd.as_ref() { let body = [ Bytes::from(format!("{{\"command\":\"SET\",\"key\":\"{}\",\"value\":\"", key)), value.clone(), Bytes::from(format!("\",\"id\":{}}}", id)) ].concat(); return Ok(Bytes::from(body)); } /* Actually this shouldn't have happened because we have filtered command type * from generate_request_body().*/ Err("Invalid command type".into()) } fn generate_repl_ping_request_body(cmd: Arc<Command>) -> crate::Result<Bytes> { if let Command::ReplPing{ id } = cmd.as_ref() { let body = format!("{{\"command\":\"REPL_PING\",\"id\":{}}}", id); return Ok(Bytes::from(body)); } /* Actually this shouldn't have happened because we have filtered command type * from generate_request_body().*/ Err("Invalid command type".into()) }
use std::fs::File; use std::io::{Cursor, Read, Write}; use std::path::{Path, PathBuf}; use actix_web::http::header::ContentEncoding; use libflate::gzip::Encoder; use serde::Deserialize; use strum::{Display, EnumIter, EnumString}; use tar::Builder; use zip::{write, ZipWriter}; use crate::errors::ContextualError; /// Available archive methods #[derive(Deserialize, Clone, Copy, EnumIter, EnumString, Display)] #[serde(rename_all = "snake_case")] #[strum(serialize_all = "snake_case")] pub enum ArchiveMethod { /// Gzipped tarball TarGz, /// Regular tarball Tar, /// Regular zip Zip, } impl ArchiveMethod { pub fn extension(self) -> String { match self { ArchiveMethod::TarGz => "tar.gz", ArchiveMethod::Tar => "tar", ArchiveMethod::Zip => "zip", } .to_string() } pub fn content_type(self) -> String { match self { ArchiveMethod::TarGz => "application/gzip", ArchiveMethod::Tar => "application/tar", ArchiveMethod::Zip => "application/zip", } .to_string() } pub fn content_encoding(self) -> ContentEncoding { match self { ArchiveMethod::TarGz => ContentEncoding::Gzip, ArchiveMethod::Tar => ContentEncoding::Identity, ArchiveMethod::Zip => ContentEncoding::Identity, } } pub fn is_enabled(self, tar_enabled: bool, tar_gz_enabled: bool, zip_enabled: bool) -> bool { match self { ArchiveMethod::TarGz => tar_gz_enabled, ArchiveMethod::Tar => tar_enabled, ArchiveMethod::Zip => zip_enabled, } } /// Make an archive out of the given directory, and write the output to the given writer. /// /// Recursively includes all files and subdirectories. /// /// If `skip_symlinks` is `true`, symlinks fill not be followed and will just be ignored. 
pub fn create_archive<T, W>( self, dir: T, skip_symlinks: bool, out: W, ) -> Result<(), ContextualError> where T: AsRef<Path>, W: std::io::Write, { let dir = dir.as_ref(); match self { ArchiveMethod::TarGz => tar_gz(dir, skip_symlinks, out), ArchiveMethod::Tar => tar_dir(dir, skip_symlinks, out), ArchiveMethod::Zip => zip_dir(dir, skip_symlinks, out), } } } /// Write a gzipped tarball of `dir` in `out`. fn tar_gz<W>(dir: &Path, skip_symlinks: bool, out: W) -> Result<(), ContextualError> where W: std::io::Write, { let mut out = Encoder::new(out).map_err(|e| ContextualError::IoError("GZIP".to_string(), e))?; tar_dir(dir, skip_symlinks, &mut out)?; out.finish() .into_result() .map_err(|e| ContextualError::IoError("GZIP finish".to_string(), e))?; Ok(()) } /// Write a tarball of `dir` in `out`. /// /// The target directory will be saved as a top-level directory in the archive. /// /// For example, consider this directory structure: /// /// ```ignore /// a /// └── b /// └── c /// ├── e /// ├── f /// └── g /// ``` /// /// Making a tarball out of `"a/b/c"` will result in this archive content: /// /// ```ignore /// c /// ├── e /// ├── f /// └── g /// ``` fn tar_dir<W>(dir: &Path, skip_symlinks: bool, out: W) -> Result<(), ContextualError> where W: std::io::Write, { let inner_folder = dir.file_name().ok_or_else(|| { ContextualError::InvalidPathError("Directory name terminates in \"..\"".to_string()) })?; let directory = inner_folder.to_str().ok_or_else(|| { ContextualError::InvalidPathError( "Directory name contains invalid UTF-8 characters".to_string(), ) })?; tar(dir, directory.to_string(), skip_symlinks, out) .map_err(|e| ContextualError::ArchiveCreationError("tarball".to_string(), Box::new(e))) } /// Writes a tarball of `dir` in `out`. /// /// The content of `src_dir` will be saved in the archive as a folder named `inner_folder`. 
fn tar<W>( src_dir: &Path, inner_folder: String, skip_symlinks: bool, out: W, ) -> Result<(), ContextualError> where W: std::io::Write, { let mut tar_builder = Builder::new(out); tar_builder.follow_symlinks(!skip_symlinks); // Recursively adds the content of src_dir into the archive stream tar_builder .append_dir_all(inner_folder, src_dir) .map_err(|e| { ContextualError::IoError( format!( "Failed to append the content of {} to the TAR archive", src_dir.to_str().unwrap_or("file") ), e, ) })?; // Finish the archive tar_builder.into_inner().map_err(|e| { ContextualError::IoError("Failed to finish writing the TAR archive".to_string(), e) })?; Ok(()) } /// Write a zip of `dir` in `out`. /// /// The target directory will be saved as a top-level directory in the archive. /// /// For example, consider this directory structure: /// /// ```ignore /// a /// └── b /// └── c /// ├── e /// ├── f /// └── g /// ``` /// /// Making a zip out of `"a/b/c"` will result in this archive content: /// /// ```ignore /// c /// ├── e /// ├── f /// └── g /// ``` fn create_zip_from_directory<W>( out: W, directory: &Path, skip_symlinks: bool, ) -> Result<(), ContextualError> where W: std::io::Write + std::io::Seek, { let options = write::FileOptions::default().compression_method(zip::CompressionMethod::Stored); let mut paths_queue: Vec<PathBuf> = vec![directory.to_path_buf()]; let zip_root_folder_name = directory.file_name().ok_or_else(|| { ContextualError::InvalidPathError("Directory name terminates in \"..\"".to_string()) })?; let mut zip_writer = ZipWriter::new(out); let mut buffer = Vec::new(); while !paths_queue.is_empty() { let next = paths_queue.pop().ok_or_else(|| { ContextualError::ArchiveCreationDetailError("Could not get path from queue".to_string()) })?; let current_dir = next.as_path(); let directory_entry_iterator = std::fs::read_dir(current_dir) .map_err(|e| ContextualError::IoError("Could not read directory".to_string(), e))?; let zip_directory = 
Path::new(zip_root_folder_name).join( current_dir.strip_prefix(directory).map_err(|_| { ContextualError::ArchiveCreationDetailError( "Could not append base directory".to_string(), ) })?, ); for entry in directory_entry_iterator { let entry_path = entry .ok() .ok_or_else(|| { ContextualError::InvalidPathError( "Directory name terminates in \"..\"".to_string(), ) })? .path(); let entry_metadata = std::fs::metadata(entry_path.clone()).map_err(|e| { ContextualError::IoError("Could not get file metadata".to_string(), e) })?; if entry_metadata.file_type().is_symlink() && skip_symlinks { continue; } let current_entry_name = entry_path.file_name().ok_or_else(|| { ContextualError::InvalidPathError("Invalid file or directory name".to_string()) })?; if entry_metadata.is_file() { let mut f = File::open(&entry_path) .map_err(|e| ContextualError::IoError("Could not open file".to_string(), e))?; f.read_to_end(&mut buffer).map_err(|e| { ContextualError::IoError("Could not read from file".to_string(), e) })?; let relative_path = zip_directory.join(current_entry_name).into_os_string(); zip_writer .start_file(relative_path.to_string_lossy(), options) .map_err(|_| { ContextualError::ArchiveCreationDetailError( "Could not add file path to ZIP".to_string(), ) })?; zip_writer.write(buffer.as_ref()).map_err(|_| { ContextualError::ArchiveCreationDetailError( "Could not write file to ZIP".to_string(), ) })?; buffer.clear(); } else if entry_metadata.is_dir() { let relative_path = zip_directory.join(current_entry_name).into_os_string(); zip_writer .add_directory(relative_path.to_string_lossy(), options) .map_err(|_| { ContextualError::ArchiveCreationDetailError( "Could not add directory path to ZIP".to_string(), ) })?; paths_queue.push(entry_path.clone()); } } } zip_writer.finish().map_err(|_| { ContextualError::ArchiveCreationDetailError( "Could not finish writing ZIP archive".to_string(), ) })?; Ok(()) } /// Writes a zip of `dir` in `out`. 
/// /// The content of `src_dir` will be saved in the archive as the folder named . fn zip_data<W>(src_dir: &Path, skip_symlinks: bool, mut out: W) -> Result<(), ContextualError> where W: std::io::Write, { let mut data = Vec::new(); let memory_file = Cursor::new(&mut data); create_zip_from_directory(memory_file, src_dir, skip_symlinks).map_err(|e| { ContextualError::ArchiveCreationError( "Failed to create the ZIP archive".to_string(), Box::new(e), ) })?; out.write_all(data.as_mut_slice()) .map_err(|e| ContextualError::IoError("Failed to write the ZIP archive".to_string(), e))?; Ok(()) } fn zip_dir<W>(dir: &Path, skip_symlinks: bool, out: W) -> Result<(), ContextualError> where W: std::io::Write, { let inner_folder = dir.file_name().ok_or_else(|| { ContextualError::InvalidPathError("Directory name terminates in \"..\"".to_string()) })?; inner_folder.to_str().ok_or_else(|| { ContextualError::InvalidPathError( "Directory name contains invalid UTF-8 characters".to_string(), ) })?; zip_data(dir, skip_symlinks, out) .map_err(|e| ContextualError::ArchiveCreationError("zip".to_string(), Box::new(e))) }
//! Tests auto-converted from "sass-spec/spec/non_conformant/parser/interpolate/44_selector/todo_single_escape" #[allow(unused)] use super::rsass; // From "sass-spec/spec/non_conformant/parser/interpolate/44_selector/todo_single_escape/11_escaped_interpolated_value.hrx" // Ignoring "t11_escaped_interpolated_value", error tests are not supported yet. // From "sass-spec/spec/non_conformant/parser/interpolate/44_selector/todo_single_escape/21_escaped_interpolated_variable.hrx" // Ignoring "t21_escaped_interpolated_variable", error tests are not supported yet. // From "sass-spec/spec/non_conformant/parser/interpolate/44_selector/todo_single_escape/31_escaped_literal.hrx" // Ignoring "t31_escaped_literal", error tests are not supported yet.
use termion::event::Key; use widget::{Bound, BoundSize, Widget}; use {Position, Size}; use std::fmt; mod config; mod consts; mod edit; mod history; mod keymap; mod kill_ring; mod line_buffer; mod process; mod state; mod undo; use self::consts::KeyPress; pub use self::edit::Editor; use self::keymap::InputState; use self::process::{process_char, process_command}; pub use self::state::State; #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] pub struct Offset { pub col: usize, pub row: usize, } impl Offset { pub fn new(col: usize, row: usize) -> Self { Offset { col, row } } } pub struct Readline { pub state: State, pub editor: Editor, pub input_state: InputState, } impl fmt::Debug for Readline { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("Readline") .field("input_state", &self.input_state) .field("state", &self.state) .field("editor", &self.editor) .finish() } } impl Readline { pub fn new() -> Self { let mut state = State::new(0); let editor = Editor::new(); let input_state = InputState::new(&editor.config, editor.custom_bindings.clone()); state.line.set_delete_listener(editor.kill_ring.clone()); state.line.set_change_listener(state.changes.clone()); Readline { state, editor, input_state, } } pub fn update(&mut self, buf: &str, pos: usize) { self.state.line.update(buf, pos); self.state.refresh(); } pub fn width(&mut self, width: usize) { self.state.width = width; self.state.refresh(); } pub fn process_char(&mut self, ch: char) { process_char(&mut self.state, &mut self.editor, ch, &mut self.input_state); } pub fn process_keypress(&mut self, kp: KeyPress) { let cmd = self.input_state.next_cmd(kp, &mut self.state, true); process_command( &mut self.state, &mut self.editor, cmd, &mut self.input_state, ); } pub fn finalize(&mut self) -> String { let line = self.state.finalize(); self.editor.add_history_entry(line.clone()); line } pub fn set_line(&mut self, text: &str) { self.state.set_line(text); } } // text-ui integration to be broken out 
later fn key_to_keypress(key: Key) -> KeyPress { match key { Key::Backspace => KeyPress::Backspace, Key::Left => KeyPress::Left, Key::Right => KeyPress::Right, Key::Up => KeyPress::Up, Key::Down => KeyPress::Down, Key::Home => KeyPress::Home, Key::End => KeyPress::End, Key::PageUp => KeyPress::PageUp, Key::PageDown => KeyPress::PageDown, Key::Delete => KeyPress::Delete, Key::Insert => KeyPress::Insert, Key::F(u8) => KeyPress::F(u8), Key::Char(char) => KeyPress::Char(char), Key::Alt(char) => KeyPress::Meta(char.to_ascii_uppercase()), Key::Ctrl(char) => KeyPress::Ctrl(char.to_ascii_uppercase()), Key::Null => KeyPress::Null, Key::Esc => KeyPress::Esc, _ => KeyPress::UnknownEscSeq, } } impl Readline { pub fn process_key(&mut self, key: Key) { self.process_keypress(key_to_keypress(key)) } } impl Widget for Readline { fn render_content(&self, size: Size) -> Option<Vec<String>> { /*if self.state.width != size.width { self.state.width = size.width ; }*/ Some(self.state.render_width(size.width)) } fn render_bounds(&self) -> BoundSize { BoundSize { width: Bound::Fixed(self.state.width), height: Bound::Fixed(self.state.rows), } } fn render_focus(&self, _size: Size) -> Option<Position> { let pos = self.state.cursor; Some(Position::new(pos.col, pos.row)) } fn render_style(&self) -> Option<String> { Some("input".to_string()) } }
use string_repr::StringRepr; #[macro_export] macro_rules! path { ($path:expr) => { Path::new($path) }; } pub struct Path<'a>(&'a str); impl<'a> Path<'a> { /// Create new Path. /// # Example: /// ```rust /// use wdg_uri::path::Path; /// let path = Path::new("/"); /// ``` pub fn new(data: &str) -> Path { Path(data) } } impl<'a> StringRepr for Path<'a> { fn string_repr(&self) -> String { String::from(self.0) } }
use crate::parser::lexer::TokenStream;
use crate::parser::tokens::{Token, TokenType, TokenType::*};

use std::rc::Rc;
use std::result;
use std::str;
use std::{collections::HashMap, path::PathBuf};
use thiserror::Error;

use crate::parser::span::Span;

use crate::parser::ast::*;

use serde::{Deserialize, Serialize};
use std::convert::TryFrom;

use crate::rerrs::{ErrorKind, SteelErr};
use crate::rvals::SteelVal;
use crate::rvals::SteelVal::*;

use super::ast;

/// A token enriched with its source span and (optionally) the file it came
/// from — the basic syntactic atom produced by the parser.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyntaxObject {
    pub(crate) ty: TokenType,
    pub(crate) span: Span,
    // None for in-memory input; Some(path) when parsing from a file.
    pub(crate) source: Option<Rc<PathBuf>>,
}

impl PartialEq for SyntaxObject {
    // NOTE: equality deliberately compares only the token type; span and
    // source are ignored, so identical syntax from different locations
    // compares equal (this is what the parser tests rely on).
    fn eq(&self, other: &Self) -> bool {
        self.ty == other.ty
    }
}

impl SyntaxObject {
    /// Construct a syntax object with a known span and no source file.
    pub fn new(ty: TokenType, span: Span) -> Self {
        SyntaxObject {
            ty,
            span,
            source: None,
        }
    }

    /// Construct a syntax object with a zeroed span — used for synthesized
    /// tokens and in tests.
    pub fn default(ty: TokenType) -> Self {
        SyntaxObject {
            ty,
            span: Span::new(0, 0),
            source: None,
        }
    }

    pub fn set_span(&mut self, span: Span) {
        self.span = span
    }

    /// Build from a lexer token, attaching a (shared) source path.
    pub fn from_token_with_source(val: &Token, source: &Option<Rc<PathBuf>>) -> Self {
        SyntaxObject {
            ty: val.ty.clone(),
            span: val.span,
            source: source.as_ref().map(Rc::clone),
        }
    }
}

impl From<&Token<'_>> for SyntaxObject {
    fn from(val: &Token) -> SyntaxObject {
        SyntaxObject::new(val.ty.clone(), val.span)
    }
}

/// Convert a single syntax object into a runtime value.
///
/// Literals become the corresponding value, keywords become symbols, and
/// structural/reader tokens (parens, quote marks, errors, comments) are
/// rejected with an `UnexpectedToken` error carrying the span.
impl TryFrom<SyntaxObject> for SteelVal {
    type Error = SteelErr;

    fn try_from(e: SyntaxObject) -> std::result::Result<Self, Self::Error> {
        let span = e.span;
        match e.ty {
            OpenParen => {
                Err(SteelErr::new(ErrorKind::UnexpectedToken, "(".to_string()).with_span(span))
            }
            CloseParen => {
                Err(SteelErr::new(ErrorKind::UnexpectedToken, ")".to_string()).with_span(span))
            }
            CharacterLiteral(x) => Ok(CharV(x)),
            BooleanLiteral(x) => Ok(BoolV(x)),
            Identifier(x) => Ok(SymbolV(x.into())),
            NumberLiteral(x) => Ok(NumV(x)),
            IntegerLiteral(x) => Ok(IntV(x)),
            StringLiteral(x) => Ok(StringV(x.into())),
            QuoteTick => {
                Err(SteelErr::new(ErrorKind::UnexpectedToken, "'".to_string()).with_span(span))
            }
            Unquote => {
                Err(SteelErr::new(ErrorKind::UnexpectedToken, ",".to_string()).with_span(span))
            }
            QuasiQuote => {
                Err(SteelErr::new(ErrorKind::UnexpectedToken, "`".to_string()).with_span(span))
            }
            UnquoteSplice => {
                Err(SteelErr::new(ErrorKind::UnexpectedToken, ",@".to_string()).with_span(span))
            }
            Error => {
                Err(SteelErr::new(ErrorKind::UnexpectedToken, "error".to_string()).with_span(span))
            }
            Comment => Err(
                SteelErr::new(ErrorKind::UnexpectedToken, "comment".to_string()).with_span(span),
            ),
            Hash => Err(SteelErr::new(ErrorKind::UnexpectedToken, "#".to_string()).with_span(span)),
            // Keyword tokens all become plain symbols.
            If => Ok(SymbolV("if".into())),
            Define => Ok(SymbolV("define".into())),
            Let => Ok(SymbolV("let".into())),
            Transduce => Ok(SymbolV("transduce".into())),
            Execute => Ok(SymbolV("execute".into())),
            Return => Ok(SymbolV("return!".into())),
            Begin => Ok(SymbolV("begin".into())),
            Panic => Ok(SymbolV("panic!".into())),
            Lambda => Ok(SymbolV("lambda".into())),
            Quote => Ok(SymbolV("quote".into())),
            DefineSyntax => Ok(SymbolV("define-syntax".into())),
            SyntaxRules => Ok(SymbolV("syntax-rules".into())),
            Ellipses => Ok(SymbolV("...".into())),
            Struct => Ok(SymbolV("struct".into())),
            Apply => Ok(SymbolV("apply".into())),
            Set => Ok(SymbolV("set!".into())),
            Read => Ok(SymbolV("read".into())),
            Eval => Ok(SymbolV("eval".into())),
            Require => Ok(SymbolV("require".into())),
            CallCC => Ok(SymbolV("call/cc".into())),
        }
    }
}

/// Errors produced while parsing; each variant carries the (optional)
/// source path so errors can be attributed to a file.
#[derive(Clone, Debug, PartialEq, Error)]
pub enum ParseError {
    // #[error("Parse: Error reading tokens: {0}")]
    // TokenError(#[from] TokenError),
    #[error("Parse: Unexpected token: {0:?}")]
    Unexpected(TokenType, Option<Rc<PathBuf>>),
    #[error("Parse: Unexpected EOF")]
    UnexpectedEOF(Option<Rc<PathBuf>>),
    #[error("Parse: Unexpected character: {0:?}")]
    UnexpectedChar(char, Span, Option<Rc<PathBuf>>),
    #[error("Parse: Incomplete String: {0}")]
    IncompleteString(String, Span, Option<Rc<PathBuf>>),
    #[error("Parse: Syntax Error: {0}")]
    SyntaxError(String, Span, Option<Rc<PathBuf>>),
    #[error("Parse: Arity mismatch: {0}")]
    ArityMismatch(String, Span, Option<Rc<PathBuf>>),
}

impl ParseError {
    /// Span of the offending input, when the variant records one.
    pub fn span(&self) -> Option<Span> {
        match self {
            // ParseError::TokenError(_) => None,
            ParseError::Unexpected(_, _) => None,
            ParseError::UnexpectedEOF(_) => None,
            ParseError::UnexpectedChar(_, s, _) => Some(*s),
            ParseError::IncompleteString(_, s, _) => Some(*s),
            ParseError::SyntaxError(_, s, _) => Some(*s),
            ParseError::ArityMismatch(_, s, _) => Some(*s),
        }
    }

    /// Rebuild the error with the given source path attached.
    pub fn set_source(self, source: Option<Rc<PathBuf>>) -> Self {
        use ParseError::*;
        match self {
            ParseError::Unexpected(l, _) => Unexpected(l, source),
            ParseError::UnexpectedEOF(_) => UnexpectedEOF(source),
            ParseError::UnexpectedChar(l, s, _) => UnexpectedChar(l, s, source),
            ParseError::IncompleteString(l, s, _) => IncompleteString(l, s, source),
            ParseError::SyntaxError(l, s, _) => SyntaxError(l, s, source),
            ParseError::ArityMismatch(l, s, _) => ArityMismatch(l, s, source),
        }
    }
}

/// Streaming s-expression parser over a token stream.
///
/// The two stacks track nesting levels at which a `(quote …)` form or a `'`
/// shorthand was opened, so closing parens can decide how to materialize
/// the collected frame.
#[derive(Debug)]
pub struct Parser<'a> {
    tokenizer: TokenStream<'a>,
    intern: &'a mut HashMap<String, Rc<TokenType>>,
    quote_stack: Vec<usize>,
    shorthand_quote_stack: Vec<usize>,
    source_name: Option<Rc<PathBuf>>,
}

impl<'a> Parser<'a> {
    /// Test helper: parse a whole string into expressions.
    #[cfg(test)]
    pub fn parse(expr: &str) -> Result<Vec<ExprKind>> {
        let mut intern = HashMap::new();
        Parser::new(expr, &mut intern).collect()
    }
}

pub type Result<T> = result::Result<T, ParseError>;

/// Map a lexer `Error` token to the appropriate parse error: an unterminated
/// string if the lexeme starts with a quote, otherwise an unexpected char.
fn tokentype_error_to_parse_error(t: &Token) -> ParseError {
    if let TokenType::Error = t.ty {
        // NOTE(review): leftover debug output in library code — consider
        // removing or routing through a logger.
        println!("Found an error: {}", t);
        if t.source.starts_with('\"') {
            ParseError::IncompleteString(t.source.to_string(), t.span, None)
        } else {
            ParseError::UnexpectedChar(t.source.chars().next().unwrap(), t.span, None)
        }
    } else {
        ParseError::UnexpectedEOF(None)
    }
}

impl<'a> Parser<'a> {
    /// Parser over `input` with no associated source path.
    pub fn new(input: &'a str, intern: &'a mut HashMap<String, Rc<TokenType>>) -> Self {
        Parser {
            tokenizer: TokenStream::new(input, true),
            intern,
            quote_stack: Vec::new(),
            shorthand_quote_stack: Vec::new(),
            source_name: None,
        }
    }

    /// Parser over `input`, attributing errors to `source_name`.
    pub fn new_from_source(
        input: &'a str,
        intern:
&'a mut HashMap<String, Rc<TokenType>>, source_name: PathBuf, ) -> Self { Parser { tokenizer: TokenStream::new(input, true), intern, quote_stack: Vec::new(), shorthand_quote_stack: Vec::new(), source_name: Some(Rc::from(source_name)), } } // TODO this is definitely wrong fn construct_quote(&mut self, val: ExprKind, span: Span) -> ExprKind { // let q = { // let rc_val = TokenType::Quote; // ExprKind::Atom(Atom::new(SyntaxObject::new(rc_val, span))) // // let val = ExprKind::Atom(Atom::new(SyntaxObject::new(rc_val, span))); // // // self.intern.insert("quote".to_string(), rc_val); // // val // }; // ExprKind::List(List::new(vec![q, val])) ExprKind::Quote(Box::new(ast::Quote::new( val, SyntaxObject::new(TokenType::Quote, span), ))) } fn construct_quote_vec(&mut self, val: ExprKind, span: Span) -> Vec<ExprKind> { let q = { let rc_val = TokenType::Quote; ExprKind::Atom(Atom::new(SyntaxObject::new(rc_val, span))) // let val = ExprKind::Atom(Atom::new(SyntaxObject::new(rc_val, span))); // // self.intern.insert("quote".to_string(), rc_val); // val }; vec![q, val] } // Reader macro for ` fn construct_quasiquote(&mut self, val: ExprKind, span: Span) -> ExprKind { let q = { let rc_val = TokenType::Identifier("quasiquote".to_string()); ExprKind::Atom(Atom::new(SyntaxObject::new(rc_val, span))) }; ExprKind::List(List::new(vec![q, val])) } // Reader macro for , fn construct_unquote(&mut self, val: ExprKind, span: Span) -> ExprKind { let q = { let rc_val = TokenType::Identifier("unquote".to_string()); ExprKind::Atom(Atom::new(SyntaxObject::new(rc_val, span))) }; ExprKind::List(List::new(vec![q, val])) } // Reader macro for ,@ fn construct_unquote_splicing(&mut self, val: ExprKind, span: Span) -> ExprKind { let q = { let rc_val = TokenType::Identifier("unquote-splicing".to_string()); ExprKind::Atom(Atom::new(SyntaxObject::new(rc_val, span))) }; ExprKind::List(List::new(vec![q, val])) } // Reader macro for # fn construct_lambda_shorthand(&mut self, val: ExprKind, span: Span) -> 
ExprKind { let q = { let rc_val = TokenType::Identifier("lambda-hash".to_string()); ExprKind::Atom(Atom::new(SyntaxObject::new(rc_val, span))) }; ExprKind::List(List::new(vec![q, val])) } fn read_from_tokens(&mut self) -> Result<ExprKind> { let mut stack: Vec<Vec<ExprKind>> = Vec::new(); let mut current_frame: Vec<ExprKind> = Vec::new(); self.quote_stack = Vec::new(); loop { match self.tokenizer.next() { Some(token) => { match token.ty { TokenType::Error => return Err(tokentype_error_to_parse_error(&token)), // TODO TokenType::QuoteTick => { // quote_count += 1; self.quote_stack.push(current_frame.len()); let quote_inner = self .next() .unwrap_or(Err(ParseError::UnexpectedEOF(self.source_name.clone()))) .map(|x| self.construct_quote(x, token.span)); self.quote_stack.pop(); current_frame.push(quote_inner?); } TokenType::Unquote => { let quote_inner = self .next() .unwrap_or(Err(ParseError::UnexpectedEOF(self.source_name.clone()))) .map(|x| self.construct_unquote(x, token.span)); current_frame.push(quote_inner?); } TokenType::QuasiQuote => { let quote_inner = self .next() .unwrap_or(Err(ParseError::UnexpectedEOF(self.source_name.clone()))) .map(|x| self.construct_quasiquote(x, token.span)); current_frame.push(quote_inner?); } TokenType::UnquoteSplice => { let quote_inner = self .next() .unwrap_or(Err(ParseError::UnexpectedEOF(self.source_name.clone()))) .map(|x| self.construct_unquote_splicing(x, token.span)); current_frame.push(quote_inner?); } TokenType::Hash => { let quote_inner = self .next() .unwrap_or(Err(ParseError::UnexpectedEOF(self.source_name.clone()))) .map(|x| self.construct_lambda_shorthand(x, token.span)); current_frame.push(quote_inner?); } TokenType::OpenParen => { stack.push(current_frame); current_frame = Vec::new(); } TokenType::CloseParen => { if let Some(mut prev_frame) = stack.pop() { match self.quote_stack.last() { Some(last_quote_index) if stack.len() == *last_quote_index && self.quote_stack.len() > 1 => { self.quote_stack.pop(); 
prev_frame.push(ExprKind::List(List::new(current_frame))) } Some(last_quote_index) if stack.len() == *last_quote_index && self.quote_stack.len() == 1 => { self.quote_stack.pop(); match current_frame.first() { Some(ExprKind::Atom(Atom { syn: SyntaxObject { ty: TokenType::Quote, .. }, })) => { prev_frame.push( ExprKind::try_from(current_frame).map_err( |x| x.set_source(self.source_name.clone()), )?, ); } _ => prev_frame .push(ExprKind::List(List::new(current_frame))), } } Some(_) => { prev_frame.push(ExprKind::List(List::new(current_frame))) } _ => match self.shorthand_quote_stack.last() { Some(_) => prev_frame .push(ExprKind::List(List::new(current_frame))), _ => { prev_frame.push( ExprKind::try_from(current_frame).map_err(|x| { x.set_source(self.source_name.clone()) })?, ); } }, } current_frame = prev_frame; } else { match self.shorthand_quote_stack.last() { Some(last_quote_index) if stack.len() == *last_quote_index => { self.shorthand_quote_stack.pop(); return Ok(ExprKind::List(List::new(current_frame))); } _ => { return ExprKind::try_from(current_frame) .map_err(|x| x.set_source(self.source_name.clone())) } } } } _ => { if let TokenType::Quote = &token.ty { // self.quote_stack.push(current_frame.len()); self.quote_stack.push(stack.len()); } // println!("{}", token); current_frame.push(ExprKind::Atom(Atom::new( SyntaxObject::from_token_with_source( &token, &self.source_name.clone(), ), ))) } } } None => return Err(ParseError::UnexpectedEOF(self.source_name.clone())), } } } } impl<'a> Iterator for Parser<'a> { type Item = Result<ExprKind>; fn next(&mut self) -> Option<Self::Item> { // self.shorthand_quote_stack = Vec::new(); // self.quote_stack = Vec::new(); self.tokenizer.next().map(|res| match res.ty { // Err(e) => Err(ParseError::TokenError(e)), TokenType::QuoteTick => { // See if this does the job self.shorthand_quote_stack.push(0); let value = self .next() .unwrap_or(Err(ParseError::UnexpectedEOF(self.source_name.clone()))) .map(|x| self.construct_quote_vec(x, 
res.span)); self.shorthand_quote_stack.pop(); match value { Ok(v) => ExprKind::try_from(v), Err(e) => Err(e), } } TokenType::Unquote => self .next() .unwrap_or(Err(ParseError::UnexpectedEOF(self.source_name.clone()))) .map(|x| self.construct_unquote(x, res.span)), TokenType::UnquoteSplice => self .next() .unwrap_or(Err(ParseError::UnexpectedEOF(self.source_name.clone()))) .map(|x| self.construct_unquote_splicing(x, res.span)), TokenType::QuasiQuote => self .next() .unwrap_or(Err(ParseError::UnexpectedEOF(self.source_name.clone()))) .map(|x| self.construct_quasiquote(x, res.span)), TokenType::Hash => self .next() .unwrap_or(Err(ParseError::UnexpectedEOF(self.source_name.clone()))) .map(|x| self.construct_lambda_shorthand(x, res.span)), TokenType::OpenParen => self.read_from_tokens(), TokenType::CloseParen => Err(ParseError::Unexpected( TokenType::CloseParen, self.source_name.clone().clone(), )), TokenType::Error => Err(tokentype_error_to_parse_error(&res)), _ => Ok(ExprKind::Atom(Atom::new(SyntaxObject::from(&res)))), }) } } #[cfg(test)] mod parser_tests { // use super::TokenType::*; use super::*; use crate::parser::ast::ExprKind; use crate::parser::ast::{ Begin, Define, Execute, If, LambdaFunction, Panic, Quote, Return, Transduce, }; fn assert_parse(s: &str, result: &[ExprKind]) { let mut cache: HashMap<String, Rc<TokenType>> = HashMap::new(); let a: Result<Vec<ExprKind>> = Parser::new(s, &mut cache).collect(); let a = a.unwrap(); assert_eq!(a.as_slice(), result); } fn assert_parse_err(s: &str, err: ParseError) { let mut cache: HashMap<String, Rc<TokenType>> = HashMap::new(); let a: Result<Vec<ExprKind>> = Parser::new(s, &mut cache).collect(); assert_eq!(a, Err(err)); } fn assert_parse_is_err(s: &str) { let mut cache: HashMap<String, Rc<TokenType>> = HashMap::new(); let a: Result<Vec<ExprKind>> = Parser::new(s, &mut cache).collect(); assert!(a.is_err()); } #[test] fn parse_unicode() { assert_parse( "#\\¡", &[ExprKind::Atom(Atom::new(SyntaxObject::default( 
TokenType::CharacterLiteral('¡'), )))], ); assert_parse( "#\\\\u{b}", &[ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral('\u{b}'), )))], ); } #[test] fn parse_more_unicode() { assert_parse( "#\\\\u{a0}", &[ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral('\u{a0}'), )))], ); } #[test] fn parse_strange_characters() { assert_parse( "#\\^", &[ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral('^'), )))], ); } #[test] fn parse_character_sequence() { assert_parse( "#\\¡ #\\SPACE #\\g", &[ ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral('¡'), ))), ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral(' '), ))), ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral('g'), ))), ], ) } #[test] fn parse_character_sequence_inside_if() { assert_parse( "(if #\\¡ #\\SPACE #\\g)", &[ExprKind::If(Box::new(If::new( ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral('¡'), ))), ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral(' '), ))), ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral('g'), ))), SyntaxObject::default(TokenType::If), )))], ) } #[test] fn parse_close_paren_character() { assert_parse( "#\\)", &[ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral(')'), )))], ); assert_parse( "#\\]", &[ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral(']'), )))], ) } #[test] fn parse_open_paren_character() { assert_parse( "#\\(", &[ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral('('), )))], ) } #[test] fn test_error() { assert_parse_err("(", ParseError::UnexpectedEOF(None)); assert_parse_err("(abc", ParseError::UnexpectedEOF(None)); assert_parse_err("(ab 1 2", ParseError::UnexpectedEOF(None)); assert_parse_err("((((ab 1 2) (", ParseError::UnexpectedEOF(None)); assert_parse_err("())", 
ParseError::Unexpected(TokenType::CloseParen, None)); assert_parse_err("() ((((", ParseError::UnexpectedEOF(None)); assert_parse_err("')", ParseError::Unexpected(TokenType::CloseParen, None)); assert_parse_err("(')", ParseError::Unexpected(TokenType::CloseParen, None)); assert_parse_err("('", ParseError::UnexpectedEOF(None)); } #[test] fn test_panic_should_err() { assert_parse_is_err("(panic!)"); assert_parse_is_err("(panic! 1 2)") } #[test] fn test_eval_should_err() { assert_parse_is_err("(eval)"); assert_parse_is_err("(eval 1 2)"); } #[test] fn test_read_should_err() { assert_parse_is_err("(read)"); assert_parse_is_err("(read 1 2)"); } #[test] fn test_let_should_err() { assert_parse_is_err("(let)"); assert_parse_is_err("(let (a) 10)"); } #[test] fn test_execute_should_err() { assert_parse_is_err("(execute)"); assert_parse_is_err("(execute 1)"); assert_parse_is_err("(execute 1 2 3 4)"); } #[test] fn test_if_should_err() { assert_parse_is_err("(if)"); assert_parse_is_err("(if 1)"); assert_parse_is_err("(if 1 2)"); assert_parse_is_err("(if 1 2 3 4)"); } #[test] fn test_transduce_should_err() { assert_parse_is_err("(transduce)"); assert_parse_is_err("(transduce 1)"); assert_parse_is_err("(transduce 1 2)"); assert_parse_is_err("(transduce 1 2 3)"); assert_parse_is_err("(transduce 1 2 3 4 5)"); } #[test] fn test_define_should_err() { assert_parse_is_err("(define)"); assert_parse_is_err("(define blagh)"); assert_parse_is_err("(define test 1 2)"); assert_parse_is_err("(define () test"); } #[test] fn test_lambda_should_err() { assert_parse_is_err("(lambda)"); assert_parse_is_err("(lambda (x))"); } #[test] fn test_empty() { assert_parse("", &[]); assert_parse("()", &[ExprKind::List(List::new(vec![]))]); } #[test] fn test_empty_quote_inside_if() { assert_parse( "(if #\\¡ (quote ()) #\\g)", &[ExprKind::If(Box::new(If::new( ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral('¡'), ))), ExprKind::Quote( Quote::new( List::new(vec![]).into(), 
SyntaxObject::default(TokenType::Quote), ) .into(), ), ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::CharacterLiteral('g'), ))), SyntaxObject::default(TokenType::If), )))], ) } #[test] fn test_empty_quote() { assert_parse( "'()", &[ExprKind::Quote( Quote::new( List::new(vec![]).into(), SyntaxObject::default(TokenType::Quote), ) .into(), )], ) } #[test] fn test_empty_quote_nested() { assert_parse( "(list '())", &[ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::Identifier( "list".to_string(), )))), ExprKind::Quote( Quote::new( List::new(vec![]).into(), SyntaxObject::default(TokenType::Quote), ) .into(), ), ]))], ) } #[test] fn test_multi_parse_simple() { assert_parse( "a b +", &[ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "a".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "b".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ], ); } #[test] fn test_multi_parse_complicated() { assert_parse( "a b (funcall 1 (+ 2 3.5))", &[ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "a".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "b".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "funcall".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ExprKind::Atom(Atom::new(SyntaxObject::default(NumberLiteral(3.5)))), ])), ])), ], ) } #[test] fn test_parse_simple() { assert_parse( "(+ 1 2 3) (- 4 3)", &[ ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), 
ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ])), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "-".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(4)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ])), ], ); } #[test] fn test_parse_nested() { assert_parse( "(+ 1 (foo (bar 2 3)))", &[ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "foo".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "bar".to_owned(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ])), ])), ]))], ); assert_parse( "(+ 1 (+ 2 3) (foo (bar 2 3)))", &[ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ])), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "foo".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "bar".to_owned(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ])), ])), ]))], ); } #[test] fn test_if() { assert_parse( "(+ 1 (if 2 3 4) (foo (+ (bar 1 1) 3) 
5))", &[ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::If(Box::new(If::new( ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(4)))), SyntaxObject::default(If), ))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "foo".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "bar".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ])), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ])), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(5)))), ])), ]))], ); } #[test] fn test_quote() { assert_parse( "(quote (if 1 2))", &[ExprKind::Quote(Box::new(Quote::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::If))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ])), SyntaxObject::default(TokenType::Quote), )))], ) } #[test] fn test_quote_shorthand() { assert_parse( "'(if 1 2)", &[ExprKind::Quote(Box::new(Quote::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::If))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ])), SyntaxObject::default(TokenType::Quote), )))], ) } #[test] fn test_quote_nested() { assert_parse( "(quote (if (if 1 2) 3))", &[ExprKind::Quote(Box::new(Quote::new(
// NOTE(review): the tests in this region each feed a source string to `assert_parse`
// and assert the exact ExprKind AST the parser must produce. The `(quote ...)` and
// shorthand `'...` pairs deliberately expect IDENTICAL ASTs, verifying that the
// reader expands the shorthand to the long form.
ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::If))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::If))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ])), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ])), SyntaxObject::default(TokenType::Quote), )))], ) } #[test] fn test_quote_shorthand_nested() { assert_parse( "'(if (if 1 2) 3)", &[ExprKind::Quote(Box::new(Quote::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::If))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::If))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ])), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ])), SyntaxObject::default(TokenType::Quote), )))], ) } #[test] fn test_quote_shorthand_multiple_exprs() { assert_parse( "'(if (if 1 2) 3) (+ 1 (if 2 3 4) (foo (+ (bar 1 1) 3) 5))", &[ ExprKind::Quote(Box::new(Quote::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::If))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::If))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ])), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ])), SyntaxObject::default(TokenType::Quote), ))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::If(Box::new(If::new( ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))),
ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(4)))), SyntaxObject::default(If), ))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "foo".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "bar".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ])), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ])), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(5)))), ])), ])), ], ) } #[test] fn test_quote_inner() { assert_parse( "'(applesauce 'one)", &[ExprKind::Quote(Box::new(Quote::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "applesauce".to_string(), )))), ExprKind::Quote(Box::new(Quote::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "one".to_string(), )))), SyntaxObject::default(TokenType::Quote), ))), ])), SyntaxObject::default(TokenType::Quote), )))], ) } #[test] fn test_quote_inner_without_shorthand() { assert_parse( "(quote (applesauce 'one))", &[ExprKind::Quote(Box::new(Quote::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "applesauce".to_string(), )))), ExprKind::Quote(Box::new(Quote::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "one".to_string(), )))), SyntaxObject::default(TokenType::Quote), ))), ])), SyntaxObject::default(TokenType::Quote), )))], ) } #[test] fn test_quasiquote_shorthand() { assert_parse( "`(+ 1 2)", &[ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "quasiquote".to_string(), )))), ExprKind::List(List::new(vec![
// NOTE(review): unlike plain quote, the quasiquote/unquote/unquote-splicing tests
// below expect the shorthand to expand into an ordinary List whose head is the
// corresponding identifier ("quasiquote" / "unquote" / "unquote-splicing"),
// not a dedicated ExprKind variant.
ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ])), ]))], ) } #[test] fn test_quasiquote_normal() { assert_parse( "(quasiquote (+ 1 2))", &[ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "quasiquote".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ])), ]))], ) } #[test] fn test_unquote_shorthand() { assert_parse( ",(+ 1 2)", &[ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "unquote".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ])), ]))], ) } #[test] fn test_unquote_normal() { assert_parse( "(unquote (+ 1 2))", &[ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "unquote".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ])), ]))], ) } #[test] fn test_unquote_splicing_shorthand() { assert_parse( ",@(+ 1 2)", &[ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "unquote-splicing".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))),
ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ])), ]))], ) } #[test] fn test_unquote_splicing_normal() { assert_parse( "(unquote-splicing (+ 1 2))", &[ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "unquote-splicing".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ])), ]))], ) } #[test] fn test_transduce() { assert_parse( "(transduce a b c d)", &[ExprKind::Transduce(Box::new(Transduce::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "a".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "b".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "c".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "d".to_string(), )))), SyntaxObject::default(TokenType::Transduce), )))], ) } #[test] fn test_transduce_complex() { assert_parse( "(if #t (transduce a b c d) (if #f 10 20))", &[ExprKind::If(Box::new(If::new( ExprKind::Atom(Atom::new(SyntaxObject::default(BooleanLiteral(true)))), ExprKind::Transduce(Box::new(Transduce::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "a".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "b".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "c".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "d".to_string(), )))), SyntaxObject::default(TokenType::Transduce), ))), ExprKind::If(Box::new(If::new( ExprKind::Atom(Atom::new(SyntaxObject::default(BooleanLiteral(false)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(10)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(20)))), SyntaxObject::default(If), ))), SyntaxObject::default(If), )))],
) } #[test] fn test_define_simple() { assert_parse( "(define a 10)", &[ExprKind::Define(Box::new(Define::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "a".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(10)))), SyntaxObject::default(TokenType::Define), )))], ) } #[test] fn test_define_func_simple() { assert_parse( "(define (foo x) (+ x 10))", &[ExprKind::Define(Box::new(Define::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "foo".to_string(), )))), ExprKind::LambdaFunction(Box::new(LambdaFunction::new( vec![ExprKind::Atom(Atom::new(SyntaxObject::default( Identifier("x".to_string()), )))], ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "x".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(10)))), ])), SyntaxObject::default(TokenType::Lambda), ))), SyntaxObject::default(TokenType::Define), )))], ) } #[test] fn test_define_func_multiple_args() { assert_parse( "(define (foo x y z) (+ x 10))", &[ExprKind::Define(Box::new(Define::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "foo".to_string(), )))), ExprKind::LambdaFunction(Box::new(LambdaFunction::new( vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "x".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "y".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "z".to_string(), )))), ], ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "x".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(10)))), ])), SyntaxObject::default(TokenType::Lambda), ))), SyntaxObject::default(TokenType::Define), )))], ) } #[test] fn test_define_func_multiple_args_multiple_body_exprs() {
// NOTE(review): `(define (name args...) body...)` sugar is expected to desugar to
// Define + LambdaFunction, and a multi-expression body to be wrapped in a Begin.
assert_parse( "(define (foo x y z) (+ x 10) (+ y 20) (+ z 30))", &[ExprKind::Define(Box::new(Define::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "foo".to_string(), )))), ExprKind::LambdaFunction(Box::new(LambdaFunction::new( vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "x".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "y".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "z".to_string(), )))), ], ExprKind::Begin(Begin::new( vec![ ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "x".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral( 10, )))), ])), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "y".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral( 20, )))), ])), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "z".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral( 30, )))), ])), ], SyntaxObject::default(TokenType::Begin), )), SyntaxObject::default(TokenType::Lambda), ))), SyntaxObject::default(TokenType::Define), )))], ) } #[test] fn test_recursive_function() { assert_parse( "(define (test) (define (foo) (bar)) (define (bar) (foo)))", &[ExprKind::Define(Box::new(Define::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "test".to_string(), )))), ExprKind::LambdaFunction(Box::new(LambdaFunction::new( vec![], ExprKind::Begin(Begin::new( vec![ ExprKind::Define(Box::new(Define::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "foo".to_string(), )))),
// NOTE(review): continuation of the parser fixture tests. Several test inputs below
// intentionally contain raw newlines inside the source-string literal (e.g. "(return!
// 10)") -- presumably to exercise parsing across line breaks; the embedded newlines
// are part of the test data and must not be "fixed". TODO confirm against parser docs.
ExprKind::LambdaFunction(Box::new(LambdaFunction::new( vec![], ExprKind::List(List::new(vec![ExprKind::Atom(Atom::new( SyntaxObject::default(Identifier("bar".to_string())), ))])), SyntaxObject::default(TokenType::Lambda), ))), SyntaxObject::default(TokenType::Define), ))), ExprKind::Define(Box::new(Define::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "bar".to_string(), )))), ExprKind::LambdaFunction(Box::new(LambdaFunction::new( vec![], ExprKind::List(List::new(vec![ExprKind::Atom(Atom::new( SyntaxObject::default(Identifier("foo".to_string())), ))])), SyntaxObject::default(TokenType::Lambda), ))), SyntaxObject::default(TokenType::Define), ))), ], SyntaxObject::default(TokenType::Begin), )), SyntaxObject::default(TokenType::Lambda), ))), SyntaxObject::default(TokenType::Define), )))], ) } #[test] fn test_execute_two_arguments() { assert_parse( "(execute a b)", &[ExprKind::Execute(Box::new(Execute::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "a".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "b".to_string(), )))), None, SyntaxObject::default(TokenType::Execute), )))], ) } #[test] fn test_execute_three_arguments() { assert_parse( "(execute a b c)", &[ExprKind::Execute(Box::new(Execute::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "a".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "b".to_string(), )))), Some(ExprKind::Atom(Atom::new(SyntaxObject::default( Identifier("c".to_string()), )))), SyntaxObject::default(TokenType::Execute), )))], ) } #[test] fn test_return_normal() { assert_parse( "(return!
10)", &[ExprKind::Return(Box::new(Return::new( ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(10)))), SyntaxObject::default(TokenType::Return), )))], ) } #[test] fn test_begin() { assert_parse( "(begin 1 2 3)", &[ExprKind::Begin(Begin::new( vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(1)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(2)))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(3)))), ], SyntaxObject::default(TokenType::Begin), ))], ) } #[test] fn test_panic_normal() { assert_parse( "(panic! 10)", &[ExprKind::Panic(Box::new(Panic::new( ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(10)))), SyntaxObject::default(TokenType::Panic), )))], ) } #[test] fn test_lambda_function() { assert_parse( "(lambda (x) 10)", &[ExprKind::LambdaFunction(Box::new(LambdaFunction::new( vec![ExprKind::Atom(Atom::new(SyntaxObject::default( Identifier("x".to_string()), )))], ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(10)))), SyntaxObject::default(TokenType::Lambda), )))], ) } #[test] fn test_lambda_matches_let() { assert_parse( "((lambda (a) (+ a 20)) 10)", &[ExprKind::List(List::new(vec![ ExprKind::LambdaFunction(Box::new(LambdaFunction::new( vec![ExprKind::Atom(Atom::new(SyntaxObject::default( Identifier("a".to_string()), )))], ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "a".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(20)))), ])), SyntaxObject::default(TokenType::Lambda), ))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(10)))), ]))], ); } #[test] fn test_let() { assert_parse( "(let ([a 10]) (+ a 20))", &[ExprKind::List(List::new(vec![ ExprKind::LambdaFunction(Box::new(LambdaFunction::new( vec![ExprKind::Atom(Atom::new(SyntaxObject::default( Identifier("a".to_string()), )))],
// NOTE(review): `test_let` expects `let` to desugar into an immediately-applied
// lambda (same shape as `test_lambda_matches_let`), but tagged with TokenType::Let
// on the lambda's syntax object rather than TokenType::Lambda.
ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "+".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "a".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(20)))), ])), SyntaxObject::default(TokenType::Let), ))), ExprKind::Atom(Atom::new(SyntaxObject::default(IntegerLiteral(10)))), ]))], ) } #[test] fn test_execute() { assert_parse( "(execute a b)", &[ExprKind::Execute(Box::new(Execute::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "a".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "b".to_string(), )))), None, SyntaxObject::default(TokenType::Execute), )))], ) } #[test] fn test_execute_nested() { assert_parse( "(if (empty? lst) '() (execute a b))", &[ExprKind::If(Box::new(If::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "empty?".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "lst".to_string(), )))), ])), ExprKind::Quote( Quote::new( List::new(vec![]).into(), SyntaxObject::default(TokenType::Quote), ) .into(), ), ExprKind::Execute(Box::new(Execute::new( ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "a".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "b".to_string(), )))), None, SyntaxObject::default(TokenType::Execute), ))), SyntaxObject::default(TokenType::If), )))], ) } #[test] fn test_quote_with_inner_nested() { assert_parse( "'(#f '())", &[ExprKind::Quote( Quote::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::BooleanLiteral(false), ))), ExprKind::Quote( Quote::new( List::new(vec![]).into(), SyntaxObject::default(TokenType::Quote), ) .into(), ), ])), SyntaxObject::default(TokenType::Quote), ) .into(), )], ) } #[test] fn test_quote_with_inner_nested_sub_expr() { assert_parse( "(if (null?
contents) '(#f '()) (list (car contents) (cdr contents)))", &[ExprKind::If(Box::new(If::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "null?".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "contents".to_string(), )))), ])), ExprKind::Quote( Quote::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::BooleanLiteral(false), ))), ExprKind::Quote( Quote::new( List::new(vec![]).into(), SyntaxObject::default(TokenType::Quote), ) .into(), ), ])), SyntaxObject::default(TokenType::Quote), ) .into(), ), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "list".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "car".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "contents".to_string(), )))), ])), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "cdr".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "contents".to_string(), )))), ])), ])), SyntaxObject::default(TokenType::If), )))], ); } #[test] fn test_quote_normal_with_inner_nested_sub_expr() { assert_parse( "(if (null?
contents) (quote (#f '())) (list (car contents) (cdr contents)))", &[ExprKind::If(Box::new(If::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "null?".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "contents".to_string(), )))), ])), ExprKind::Quote( Quote::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::BooleanLiteral(false), ))), ExprKind::Quote( Quote::new( List::new(vec![]).into(), SyntaxObject::default(TokenType::Quote), ) .into(), ), ])), SyntaxObject::default(TokenType::Quote), ) .into(), ), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "list".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "car".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "contents".to_string(), )))), ])), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "cdr".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "contents".to_string(), )))), ])), ])), SyntaxObject::default(TokenType::If), )))], ); } #[test] fn test_quote_with_inner_sub_expr_even_more_nested() { assert_parse( "(list (if (null?
contents) '(#f '()) (list (car contents) (cdr contents))))", &[ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::Identifier( "list".to_string(), )))), ExprKind::If(Box::new(If::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "null?".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "contents".to_string(), )))), ])), ExprKind::Quote( Quote::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::BooleanLiteral(false), ))), ExprKind::Quote( Quote::new( List::new(vec![]).into(), SyntaxObject::default(TokenType::Quote), ) .into(), ), ])), SyntaxObject::default(TokenType::Quote), ) .into(), ), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "list".to_string(), )))), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "car".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "contents".to_string(), )))), ])), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "cdr".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(Identifier( "contents".to_string(), )))), ])), ])), SyntaxObject::default(TokenType::If), ))), ]))], ); } #[test] fn test_define_with_datum_syntax_name() { assert_parse( "(define (datum->syntax var) (car ret-value))", &[ExprKind::Define(Box::new(Define::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::Identifier( "datum->syntax".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::Identifier( "var".to_string(), )))), ])), ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::Identifier( "car".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::Identifier( "ret-value".to_string(), )))), ])),
// NOTE(review): the datum->syntax tests expect a `(define (datum->syntax var) ...)`
// head to stay a List (no lambda desugaring) -- the define target itself is a list.
SyntaxObject::default(TokenType::Define), )))], ) } #[test] fn test_define_with_datum_syntax_function_name() { assert_parse( "(define ((datum->syntax var) arg) 10)", &[ExprKind::Define(Box::new(Define::new( ExprKind::List(List::new(vec![ ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::Identifier( "datum->syntax".to_string(), )))), ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::Identifier( "var".to_string(), )))), ])), ExprKind::LambdaFunction(Box::new(LambdaFunction::new( vec![ExprKind::Atom(Atom::new(SyntaxObject::default( TokenType::Identifier("arg".to_string()), )))], ExprKind::Atom(Atom::new(SyntaxObject::default(TokenType::IntegerLiteral( 10, )))), SyntaxObject::default(TokenType::Lambda), ))), SyntaxObject::default(TokenType::Define), )))], ) } }
// NOTE(review): everything from here on is machine-generated (AutoRust 0.1.0) Azure
// SQL management-plane client code. Do not hand-edit logic; changes would be lost on
// regeneration. The top-level `Error` enum aggregates every operation's error type
// via `#[from]` + `#[error(transparent)]` so callers can use a single error type.
#![doc = "generated by AutoRust 0.1.0"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use super::{models, API_VERSION}; #[non_exhaustive] #[derive(Debug, thiserror :: Error)] #[allow(non_camel_case_types)] pub enum Error { #[error(transparent)] Servers_CheckNameAvailability(#[from] servers::check_name_availability::Error), #[error(transparent)] Databases_Pause(#[from] databases::pause::Error), #[error(transparent)] Databases_Resume(#[from] databases::resume::Error), #[error(transparent)] Databases_Get(#[from] databases::get::Error), #[error(transparent)] Databases_CreateOrUpdate(#[from] databases::create_or_update::Error), #[error(transparent)] Databases_Update(#[from] databases::update::Error), #[error(transparent)] Databases_Delete(#[from] databases::delete::Error), #[error(transparent)] Databases_ListByServer(#[from] databases::list_by_server::Error), #[error(transparent)] Databases_GetByElasticPool(#[from] databases::get_by_elastic_pool::Error), #[error(transparent)] Databases_ListByElasticPool(#[from] databases::list_by_elastic_pool::Error), #[error(transparent)] Databases_GetByRecommendedElasticPool(#[from] databases::get_by_recommended_elastic_pool::Error), #[error(transparent)] Databases_ListByRecommendedElasticPool(#[from] databases::list_by_recommended_elastic_pool::Error), #[error(transparent)] ElasticPoolActivities_ListByElasticPool(#[from] elastic_pool_activities::list_by_elastic_pool::Error), #[error(transparent)] ElasticPoolDatabaseActivities_ListByElasticPool(#[from] elastic_pool_database_activities::list_by_elastic_pool::Error), #[error(transparent)] ServiceTierAdvisors_Get(#[from] service_tier_advisors::get::Error), #[error(transparent)] ServiceTierAdvisors_ListByDatabase(#[from] service_tier_advisors::list_by_database::Error), #[error(transparent)] TransparentDataEncryptions_Get(#[from] transparent_data_encryptions::get::Error), #[error(transparent)] TransparentDataEncryptions_CreateOrUpdate(#[from]
transparent_data_encryptions::create_or_update::Error), #[error(transparent)] TransparentDataEncryptionActivities_ListByConfiguration(#[from] transparent_data_encryption_activities::list_by_configuration::Error), #[error(transparent)] ElasticPools_Get(#[from] elastic_pools::get::Error), #[error(transparent)] ElasticPools_CreateOrUpdate(#[from] elastic_pools::create_or_update::Error), #[error(transparent)] ElasticPools_Update(#[from] elastic_pools::update::Error), #[error(transparent)] ElasticPools_Delete(#[from] elastic_pools::delete::Error), #[error(transparent)] ElasticPools_ListByServer(#[from] elastic_pools::list_by_server::Error), #[error(transparent)] FirewallRules_Get(#[from] firewall_rules::get::Error), #[error(transparent)] FirewallRules_CreateOrUpdate(#[from] firewall_rules::create_or_update::Error), #[error(transparent)] FirewallRules_Delete(#[from] firewall_rules::delete::Error), #[error(transparent)] FirewallRules_ListByServer(#[from] firewall_rules::list_by_server::Error), #[error(transparent)] Databases_Import(#[from] databases::import::Error), #[error(transparent)] Databases_CreateImportOperation(#[from] databases::create_import_operation::Error), #[error(transparent)] Databases_Export(#[from] databases::export::Error), #[error(transparent)] RecommendedElasticPools_Get(#[from] recommended_elastic_pools::get::Error), #[error(transparent)] RecommendedElasticPools_ListByServer(#[from] recommended_elastic_pools::list_by_server::Error), #[error(transparent)] RecommendedElasticPools_ListMetrics(#[from] recommended_elastic_pools::list_metrics::Error), #[error(transparent)] ReplicationLinks_Get(#[from] replication_links::get::Error), #[error(transparent)] ReplicationLinks_Delete(#[from] replication_links::delete::Error), #[error(transparent)] ReplicationLinks_Failover(#[from] replication_links::failover::Error), #[error(transparent)] ReplicationLinks_FailoverAllowDataLoss(#[from] replication_links::failover_allow_data_loss::Error), #[error(transparent)]
// Final variants below close the aggregate enum; `mod servers` then begins.
// `check_name_availability` POSTs to the subscription-level checkNameAvailability
// endpoint: optional bearer token, `api-version` query pair, JSON-serialized body.
ReplicationLinks_Unlink(#[from] replication_links::unlink::Error), #[error(transparent)] ReplicationLinks_ListByDatabase(#[from] replication_links::list_by_database::Error), #[error(transparent)] DatabaseThreatDetectionPolicies_Get(#[from] database_threat_detection_policies::get::Error), #[error(transparent)] DatabaseThreatDetectionPolicies_CreateOrUpdate(#[from] database_threat_detection_policies::create_or_update::Error), } pub mod servers { use super::{models, API_VERSION}; pub async fn check_name_availability( operation_config: &crate::OperationConfig, subscription_id: &str, parameters: &models::CheckNameAvailabilityRequest, ) -> std::result::Result<models::CheckNameAvailabilityResponse, check_name_availability::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.Sql/checkNameAvailability", operation_config.base_path(), subscription_id ); let mut url = url::Url::parse(url_str).map_err(check_name_availability::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(check_name_availability::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(check_name_availability::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(check_name_availability::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(check_name_availability::Error::ExecuteRequestError)?; match
// Only 200 OK deserializes into the typed model; any other status is surfaced as
// UnexpectedResponse carrying the status code and the raw body bytes.
rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::CheckNameAvailabilityResponse = serde_json::from_slice(rsp_body) .map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(check_name_availability::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod check_name_availability { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod databases { use super::{models, API_VERSION}; pub async fn pause( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, database_name: &str, ) -> std::result::Result<pause::Response, pause::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/pause", operation_config.base_path(), subscription_id, resource_group_name, server_name, database_name ); let mut url = url::Url::parse(url_str).map_err(pause::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response =
token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(pause::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(pause::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(pause::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(pause::Response::Ok200), http::StatusCode::ACCEPTED => Ok(pause::Response::Accepted202), status_code => { let rsp_body = rsp.body(); Err(pause::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod pause { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn resume( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, database_name: &str, ) -> std::result::Result<resume::Response, resume::Error> { let http_client = operation_config.http_client();
// `resume` mirrors `pause`: POST with empty body + Content-Length 0; both 200 and
// 202 (long-running operation accepted) map to typed Response variants.
// NOTE(review): the source file has a physical line break inside the next
// `SerializeError` message literal ("Failed to serialize request\nbody: {0}"),
// so that error message contains an embedded newline -- likely a mangling
// artifact of this generated file; left byte-identical here. TODO confirm.
let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/resume", operation_config.base_path(), subscription_id, resource_group_name, server_name, database_name ); let mut url = url::Url::parse(url_str).map_err(resume::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(resume::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(resume::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(resume::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::ACCEPTED => Ok(resume::Response::Accepted202), http::StatusCode::OK => Ok(resume::Response::Ok200), status_code => { let rsp_body = rsp.body(); Err(resume::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod resume { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Accepted202, Ok200, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request
body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, database_name: &str, expand: Option<&str>, ) -> std::result::Result<models::Database, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}", operation_config.base_path(), subscription_id, resource_group_name, server_name, database_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::Database = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get::Error::UnexpectedResponse {
// NOTE(review): autorust-generated Azure SDK client code — this edit adds
// comments and readable formatting only; tokens are unchanged.
// Tail of `get`'s unexpected-status arm (the match begins above).
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates or updates a database: PUT `.../servers/{server}/databases/{database}`
    /// with `parameters` as the JSON body.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
        parameters: &models::Database,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        // The request body is the serialized `parameters` payload.
        let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        // 200/201 carry the resulting Database; 202 means the operation was
        // accepted and is still running (no body is parsed).
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Database = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::Database = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
            status_code => {
                let rsp_body = rsp.body();
                Err(create_or_update::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`create_or_update`].
    pub mod create_or_update {
        use super::{models, API_VERSION};
        /// 200/201 return the database; 202 indicates a long-running operation.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::Database),
            Created201(models::Database),
            Accepted202,
        }
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
// NOTE(review): autorust-generated Azure SDK client code — this edit adds
// comments and readable formatting only; tokens are unchanged.
    /// Updates a database: PATCH `.../servers/{server}/databases/{database}`
    /// with `parameters` as the JSON body.
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
        parameters: &models::DatabaseUpdate,
    ) -> std::result::Result<update::Response, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name
        );
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        // 200 carries the updated Database; 202 means accepted/long-running.
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Database =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(update::Response::Ok200(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
            status_code => {
                let rsp_body = rsp.body();
                Err(update::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`update`].
    pub mod update {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::Database),
            Accepted202,
        }
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes a database: DELETE `.../servers/{server}/databases/{database}`.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        // 204 is returned when the resource was already absent.
        match rsp.status() {
            http::StatusCode::OK => Ok(delete::Response::Ok200),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let rsp_body = rsp.body();
                Err(delete::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`delete`].
    pub mod delete {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists databases on a server: GET `.../servers/{server}/databases`.
    /// `expand` and `filter` are forwarded as optional `$expand` / `$filter`
    /// query parameters.
    pub async fn list_by_server(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        expand: Option<&str>,
        filter: Option<&str>,
    ) -> std::result::Result<models::DatabaseListResult, list_by_server::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_server::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) =
// NOTE(review): autorust-generated Azure SDK client code — this edit adds
// comments and readable formatting only; tokens are unchanged.
// Continuation of `list_by_server` (the `if let Some(token_credential) =`
// begins above this chunk).
            operation_config.token_credential()
        {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_server::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // Optional OData-style query parameters.
        if let Some(expand) = expand {
            url.query_pairs_mut().append_pair("$expand", expand);
        }
        if let Some(filter) = filter {
            url.query_pairs_mut().append_pair("$filter", filter);
        }
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_server::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_server::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DatabaseListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list_by_server::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list_by_server::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`list_by_server`].
    pub mod list_by_server {
        use super::{models, API_VERSION};
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Gets a database inside an elastic pool:
    /// GET `.../servers/{server}/elasticPools/{pool}/databases/{database}`.
    pub async fn get_by_elastic_pool(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        elastic_pool_name: &str,
        database_name: &str,
    ) -> std::result::Result<models::Database, get_by_elastic_pool::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/elasticPools/{}/databases/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            elastic_pool_name,
            database_name
        );
        let mut url = url::Url::parse(url_str).map_err(get_by_elastic_pool::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get_by_elastic_pool::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get_by_elastic_pool::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(get_by_elastic_pool::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Database = serde_json::from_slice(rsp_body)
                    .map_err(|source| get_by_elastic_pool::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(get_by_elastic_pool::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`get_by_elastic_pool`].
    pub mod get_by_elastic_pool {
        use super::{models, API_VERSION};
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists databases in an elastic pool:
    /// GET `.../servers/{server}/elasticPools/{pool}/databases`.
    pub async fn list_by_elastic_pool(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        elastic_pool_name: &str,
    ) -> std::result::Result<models::DatabaseListResult, list_by_elastic_pool::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/elasticPools/{}/databases",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            elastic_pool_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_elastic_pool::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_elastic_pool::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req =
// NOTE(review): autorust-generated Azure SDK client code — this edit adds
// comments and readable formatting only; tokens are unchanged.
// Continuation of `list_by_elastic_pool` (the `let req =` begins above).
            req_builder.body(req_body).map_err(list_by_elastic_pool::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_elastic_pool::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DatabaseListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_elastic_pool::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list_by_elastic_pool::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`list_by_elastic_pool`].
    pub mod list_by_elastic_pool {
        use super::{models, API_VERSION};
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Gets a database inside a recommended elastic pool:
    /// GET `.../servers/{server}/recommendedElasticPools/{pool}/databases/{database}`.
    pub async fn get_by_recommended_elastic_pool(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        recommended_elastic_pool_name: &str,
        database_name: &str,
    ) -> std::result::Result<models::Database, get_by_recommended_elastic_pool::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/recommendedElasticPools/{}/databases/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            recommended_elastic_pool_name,
            database_name
        );
        let mut url = url::Url::parse(url_str).map_err(get_by_recommended_elastic_pool::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get_by_recommended_elastic_pool::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(get_by_recommended_elastic_pool::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(get_by_recommended_elastic_pool::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Database = serde_json::from_slice(rsp_body)
                    .map_err(|source| get_by_recommended_elastic_pool::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(get_by_recommended_elastic_pool::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`get_by_recommended_elastic_pool`].
    pub mod get_by_recommended_elastic_pool {
        use super::{models, API_VERSION};
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists databases in a recommended elastic pool:
    /// GET `.../servers/{server}/recommendedElasticPools/{pool}/databases`.
    pub async fn list_by_recommended_elastic_pool(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        recommended_elastic_pool_name: &str,
    ) -> std::result::Result<models::DatabaseListResult, list_by_recommended_elastic_pool::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/recommendedElasticPools/{}/databases",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            recommended_elastic_pool_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_recommended_elastic_pool::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_recommended_elastic_pool::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(list_by_recommended_elastic_pool::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_recommended_elastic_pool::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DatabaseListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_recommended_elastic_pool::Error::DeserializeError(source, rsp_body.clone()))?;
// NOTE(review): autorust-generated Azure SDK client code — this edit adds
// comments and readable formatting only; tokens are unchanged.
// Tail of `list_by_recommended_elastic_pool` (the OK arm begins above).
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list_by_recommended_elastic_pool::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`list_by_recommended_elastic_pool`].
    pub mod list_by_recommended_elastic_pool {
        use super::{models, API_VERSION};
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Imports a database: POST `.../servers/{server}/import` with
    /// `parameters` as the JSON body.
    pub async fn import(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        parameters: &models::ImportRequest,
    ) -> std::result::Result<import::Response, import::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/import",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name
        );
        let mut url = url::Url::parse(url_str).map_err(import::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(import::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(import::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(import::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(import::Error::ExecuteRequestError)?;
        // 200 carries the import/export status; 202 means accepted/long-running.
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ImportExportResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| import::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(import::Response::Ok200(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(import::Response::Accepted202),
            status_code => {
                let rsp_body = rsp.body();
                Err(import::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`import`].
    pub mod import {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::ImportExportResponse),
            Accepted202,
        }
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates an import operation on an existing database:
    /// PUT `.../databases/{database}/extensions/{extension}` with
    /// `parameters` as the JSON body.
    pub async fn create_import_operation(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
        extension_name: &str,
        parameters: &models::ImportExtensionRequest,
    ) -> std::result::Result<create_import_operation::Response, create_import_operation::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/extensions/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name,
            extension_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_import_operation::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_import_operation::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(create_import_operation::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(create_import_operation::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_import_operation::Error::ExecuteRequestError)?;
        // 201 carries the import/export status; 202 means accepted/long-running.
        match rsp.status() {
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::ImportExportResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_import_operation::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_import_operation::Response::Created201(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(create_import_operation::Response::Accepted202),
            status_code => {
                let rsp_body = rsp.body();
                Err(create_import_operation::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`create_import_operation`].
    pub mod
// NOTE(review): autorust-generated Azure SDK client code — this edit adds
// comments and readable formatting only; tokens are unchanged.
// Continuation of `pub mod create_import_operation` (the `pub mod` keyword
// is on the line above).
    create_import_operation {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Created201(models::ImportExportResponse),
            Accepted202,
        }
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Exports a database: POST `.../databases/{database}/export` with
    /// `parameters` as the JSON body.
    pub async fn export(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
        parameters: &models::ExportRequest,
    ) -> std::result::Result<export::Response, export::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/export",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name
        );
        let mut url = url::Url::parse(url_str).map_err(export::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(export::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(export::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(export::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(export::Error::ExecuteRequestError)?;
        // 200 carries the import/export status; 202 means accepted/long-running.
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ImportExportResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| export::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(export::Response::Ok200(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(export::Response::Accepted202),
            status_code => {
                let rsp_body = rsp.body();
                Err(export::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`export`].
    pub mod export {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::ImportExportResponse),
            Accepted202,
        }
        /// One variant per stage of the request pipeline that can fail.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Operations for elastic pool activity (continues past this chunk).
pub mod elastic_pool_activities {
    use super::{models, API_VERSION};
    /// Lists activity for an elastic pool:
    /// GET `.../servers/{server}/elasticPools/{pool}/elasticPoolActivity`.
    pub async fn list_by_elastic_pool(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        elastic_pool_name: &str,
    ) -> std::result::Result<models::ElasticPoolActivityListResult, list_by_elastic_pool::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/elasticPools/{}/elasticPoolActivity",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            elastic_pool_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_elastic_pool::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_elastic_pool::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_elastic_pool::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_elastic_pool::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ElasticPoolActivityListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_elastic_pool::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list_by_elastic_pool::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`list_by_elastic_pool`] (enum continues past this chunk).
    pub mod list_by_elastic_pool {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod elastic_pool_database_activities { use super::{models, API_VERSION}; pub async fn list_by_elastic_pool( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, elastic_pool_name: &str, ) -> std::result::Result<models::ElasticPoolDatabaseActivityListResult, list_by_elastic_pool::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/elasticPools/{}/elasticPoolDatabaseActivity", operation_config.base_path(), subscription_id, resource_group_name, server_name, elastic_pool_name ); let mut url = url::Url::parse(url_str).map_err(list_by_elastic_pool::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_elastic_pool::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_elastic_pool::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await 
.map_err(list_by_elastic_pool::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ElasticPoolDatabaseActivityListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_elastic_pool::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_by_elastic_pool::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod list_by_elastic_pool { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod service_tier_advisors { use super::{models, API_VERSION}; pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, database_name: &str, service_tier_advisor_name: &str, ) -> std::result::Result<models::ServiceTierAdvisor, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/serviceTierAdvisors/{}", operation_config.base_path(), subscription_id, resource_group_name, server_name, database_name, service_tier_advisor_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = 
http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ServiceTierAdvisor = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_by_database( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: 
&str, server_name: &str, database_name: &str, ) -> std::result::Result<models::ServiceTierAdvisorListResult, list_by_database::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/serviceTierAdvisors", operation_config.base_path(), subscription_id, resource_group_name, server_name, database_name ); let mut url = url::Url::parse(url_str).map_err(list_by_database::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_database::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_database::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_database::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ServiceTierAdvisorListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_database::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_by_database::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod list_by_database { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed 
to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod transparent_data_encryptions { use super::{models, API_VERSION}; pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, database_name: &str, transparent_data_encryption_name: &str, ) -> std::result::Result<models::TransparentDataEncryption, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/transparentDataEncryption/{}", operation_config.base_path(), subscription_id, resource_group_name, server_name, database_name, transparent_data_encryption_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = 
http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::TransparentDataEncryption = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, database_name: &str, transparent_data_encryption_name: &str, parameters: &models::TransparentDataEncryption, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/transparentDataEncryption/{}", operation_config.base_path(), subscription_id, resource_group_name, server_name, database_name, transparent_data_encryption_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = 
http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::TransparentDataEncryption = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::TransparentDataEncryption = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); Err(create_or_update::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::TransparentDataEncryption), Created201(models::TransparentDataEncryption), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: 
bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod transparent_data_encryption_activities { use super::{models, API_VERSION}; pub async fn list_by_configuration( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, database_name: &str, transparent_data_encryption_name: &str, ) -> std::result::Result<models::TransparentDataEncryptionActivityListResult, list_by_configuration::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/transparentDataEncryption/{}/operationResults" , operation_config . 
base_path () , subscription_id , resource_group_name , server_name , database_name , transparent_data_encryption_name) ; let mut url = url::Url::parse(url_str).map_err(list_by_configuration::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_configuration::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_by_configuration::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_configuration::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::TransparentDataEncryptionActivityListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_configuration::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_by_configuration::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod list_by_configuration { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to 
serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod elastic_pools { use super::{models, API_VERSION}; pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, elastic_pool_name: &str, ) -> std::result::Result<models::ElasticPool, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/elasticPools/{}", operation_config.base_path(), subscription_id, resource_group_name, server_name, elastic_pool_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ElasticPool = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get::Error::UnexpectedResponse { status_code, body: 
rsp_body.clone(), }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, elastic_pool_name: &str, parameters: &models::ElasticPool, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/elasticPools/{}", operation_config.base_path(), subscription_id, resource_group_name, server_name, elastic_pool_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", 
"application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ElasticPool = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::ElasticPool = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Created201(rsp_value)) } http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202), status_code => { let rsp_body = rsp.body(); Err(create_or_update::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::ElasticPool), Created201(models::ElasticPool), Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] 
GetTokenError(azure_core::Error), } } pub async fn update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, elastic_pool_name: &str, parameters: &models::ElasticPoolUpdate, ) -> std::result::Result<update::Response, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/elasticPools/{}", operation_config.base_path(), subscription_id, resource_group_name, server_name, elastic_pool_name ); let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ElasticPool = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(update::Response::Ok200(rsp_value)) } http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202), status_code => { let rsp_body = rsp.body(); Err(update::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod 
update {
    use super::{models, API_VERSION};
    /// Success responses for the enclosing `update` operation.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK — the updated elastic pool is returned in the body.
        Ok200(models::ElasticPool),
        /// 202 Accepted — the update continues asynchronously on the service side.
        Accepted202,
    }
    /// One variant per stage of the request pipeline that can fail.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Deletes an elastic pool:
/// `DELETE {base}/subscriptions/{sub}/resourceGroups/{rg}/providers/Microsoft.Sql/servers/{server}/elasticPools/{pool}`.
/// Returns `Ok200` / `NoContent204`; any other status is an `UnexpectedResponse` carrying the raw body.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    server_name: &str,
    elastic_pool_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/elasticPools/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        server_name,
        elastic_pool_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Bearer auth is only attached when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(delete::Response::Ok200),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        // Unexpected statuses surface the raw body for diagnostics.
        status_code => {
            let rsp_body = rsp.body();
            Err(delete::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Response and error types for [`delete`].
pub mod delete {
    use super::{models, API_VERSION};
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        NoContent204,
    }
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists all elastic pools on a server:
/// `GET .../servers/{server}/elasticPools` → [`models::ElasticPoolListResult`].
pub async fn list_by_server(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    server_name: &str,
) -> std::result::Result<models::ElasticPoolListResult, list_by_server::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/elasticPools",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        server_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_by_server::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer auth is only attached when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_by_server::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list_by_server::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_by_server::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::ElasticPoolListResult =
                serde_json::from_slice(rsp_body).map_err(|source| list_by_server::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(list_by_server::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for [`list_by_server`].
pub mod list_by_server {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
/// Operations on server firewall rules (`Microsoft.Sql/servers/{server}/firewallRules`).
pub mod firewall_rules {
    use super::{models, API_VERSION};
    /// Fetches a single firewall rule by name.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
resource_group_name: &str,
        server_name: &str,
        firewall_rule_name: &str,
    ) -> std::result::Result<models::FirewallRule, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/firewallRules/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            firewall_rule_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::FirewallRule =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(get::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates or updates a firewall rule:
    /// `PUT .../servers/{server}/firewallRules/{rule}` with `parameters` JSON-serialized as the body.
    /// 200 → updated (`Ok200`), 201 → created (`Created201`); both carry the resulting rule.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        firewall_rule_name: &str,
        parameters: &models::FirewallRule,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/firewallRules/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            firewall_rule_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::FirewallRule = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::FirewallRule = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(create_or_update::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`create_or_update`].
    pub mod create_or_update {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::FirewallRule),
            Created201(models::FirewallRule),
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes a firewall rule: `DELETE .../servers/{server}/firewallRules/{rule}`.
    /// Returns `Ok200` / `NoContent204`.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        firewall_rule_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/firewallRules/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            firewall_rule_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(delete::Response::Ok200),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let rsp_body = rsp.body();
                Err(delete::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`delete`].
    pub mod delete {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists all firewall rules on a server:
    /// `GET .../servers/{server}/firewallRules` → [`models::FirewallRuleListResult`].
    pub async fn list_by_server(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
    ) -> std::result::Result<models::FirewallRuleListResult, list_by_server::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/firewallRules",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_server::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_server::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_server::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_server::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::FirewallRuleListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list_by_server::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list_by_server::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`list_by_server`].
    pub mod list_by_server {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body:
bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Operations on recommended elastic pools
/// (`Microsoft.Sql/servers/{server}/recommendedElasticPools`).
pub mod recommended_elastic_pools {
    use super::{models, API_VERSION};
    /// Fetches a single recommended elastic pool by name.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        recommended_elastic_pool_name: &str,
    ) -> std::result::Result<models::RecommendedElasticPool, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/recommendedElasticPools/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            recommended_elastic_pool_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::RecommendedElasticPool =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(get::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists recommended elastic pools on a server:
    /// `GET .../servers/{server}/recommendedElasticPools` → [`models::RecommendedElasticPoolListResult`].
    pub async fn list_by_server(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
    ) -> std::result::Result<models::RecommendedElasticPoolListResult, list_by_server::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/recommendedElasticPools",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_server::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_server::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_server::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_server::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::RecommendedElasticPoolListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list_by_server::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list_by_server::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`list_by_server`].
    pub mod list_by_server {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches metrics for one recommended elastic pool:
    /// `GET .../recommendedElasticPools/{pool}/metrics` → [`models::RecommendedElasticPoolListMetricsResult`].
    pub async fn list_metrics(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        recommended_elastic_pool_name: &str,
    ) -> std::result::Result<models::RecommendedElasticPoolListMetricsResult, list_metrics::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/recommendedElasticPools/{}/metrics",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            recommended_elastic_pool_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_metrics::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_metrics::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_metrics::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_metrics::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::RecommendedElasticPoolListMetricsResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list_metrics::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`list_metrics`].
    pub mod list_metrics {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Operations on database replication links
/// (`Microsoft.Sql/servers/{server}/databases/{db}/replicationLinks`).
pub mod replication_links {
    use super::{models, API_VERSION};
    /// Fetches a replication link by id.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
        link_id: &str,
    ) -> std::result::Result<models::ReplicationLink, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/replicationLinks/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name,
            link_id
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ReplicationLink =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source,
rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(get::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes a replication link:
    /// `DELETE .../databases/{db}/replicationLinks/{link}`. Returns `Ok200` / `NoContent204`.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
        link_id: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/replicationLinks/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name,
            link_id
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(delete::Response::Ok200),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let rsp_body = rsp.body();
                Err(delete::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`delete`].
    pub mod delete {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fails over to the secondary of a replication link (planned):
    /// `POST .../replicationLinks/{link}/failover`. Returns `NoContent204` / `Accepted202`.
    pub async fn failover(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
        link_id: &str,
    ) -> std::result::Result<failover::Response, failover::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/replicationLinks/{}/failover",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name,
            link_id
        );
        let mut url = url::Url::parse(url_str).map_err(failover::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(failover::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        // POST with an empty body: content-length is set explicitly to 0.
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(failover::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(failover::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::NO_CONTENT => Ok(failover::Response::NoContent204),
            http::StatusCode::ACCEPTED => Ok(failover::Response::Accepted202),
            status_code => {
                let rsp_body = rsp.body();
                Err(failover::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`failover`].
    pub mod failover {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            NoContent204,
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Forces failover of a replication link, allowing data loss:
    /// `POST .../replicationLinks/{link}/forceFailoverAllowDataLoss`.
    pub async fn failover_allow_data_loss(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
        link_id: &str,
    ) -> std::result::Result<failover_allow_data_loss::Response, failover_allow_data_loss::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/replicationLinks/{}/forceFailoverAllowDataLoss",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name,
            link_id
        );
        let mut url = url::Url::parse(url_str).map_err(failover_allow_data_loss::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(failover_allow_data_loss::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        // POST with an empty body: content-length is set explicitly to 0.
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(failover_allow_data_loss::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(failover_allow_data_loss::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::NO_CONTENT => Ok(failover_allow_data_loss::Response::NoContent204),
            http::StatusCode::ACCEPTED => Ok(failover_allow_data_loss::Response::Accepted202),
            status_code => {
                let rsp_body = rsp.body();
                Err(failover_allow_data_loss::Error::UnexpectedResponse {
                    status_code,
                    body:
rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`failover_allow_data_loss`].
    pub mod failover_allow_data_loss {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            NoContent204,
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Removes a replication link:
    /// `POST .../replicationLinks/{link}/unlink` with `parameters` JSON-serialized as the body.
    pub async fn unlink(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
        link_id: &str,
        parameters: &models::UnlinkParameters,
    ) -> std::result::Result<unlink::Response, unlink::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/replicationLinks/{}/unlink",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name,
            link_id
        );
        let mut url = url::Url::parse(url_str).map_err(unlink::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(unlink::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(unlink::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(unlink::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(unlink::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::NO_CONTENT => Ok(unlink::Response::NoContent204),
            http::StatusCode::ACCEPTED => Ok(unlink::Response::Accepted202),
            status_code => {
                let rsp_body = rsp.body();
                Err(unlink::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for [`unlink`].
    pub mod unlink {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            NoContent204,
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists all replication links on a database:
    /// `GET .../databases/{db}/replicationLinks` → [`models::ReplicationLinkListResult`].
    pub async fn list_by_database(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
    ) -> std::result::Result<models::ReplicationLinkListResult, list_by_database::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/replicationLinks",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_database::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_database::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_database::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_database::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ReplicationLinkListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_database::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list_by_database::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`list_by_database`].
    pub mod list_by_database {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Operations on database threat-detection (security alert) policies
/// (`Microsoft.Sql/servers/{server}/databases/{db}/securityAlertPolicies`).
pub mod database_threat_detection_policies {
    use super::{models, API_VERSION};
    /// Fetches a database security alert policy by name.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
        security_alert_policy_name: &str,
    ) -> std::result::Result<models::DatabaseSecurityAlertPolicy, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/securityAlertPolicies/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            server_name,
            database_name,
            security_alert_policy_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is only attached when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DatabaseSecurityAlertPolicy =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // NOTE(review): unlike every other operation in this file, unexpected
            // statuses here map to `DefaultResponse { status_code }` without the
            // response body — confirm the (truncated) `get::Error` below actually
            // declares that variant.
            status_code => Err(get::Error::DefaultResponse { status_code }),
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use
super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, server_name: &str, database_name: &str, security_alert_policy_name: &str, parameters: &models::DatabaseSecurityAlertPolicy, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Sql/servers/{}/databases/{}/securityAlertPolicies/{}", operation_config.base_path(), subscription_id, resource_group_name, server_name, database_name, security_alert_policy_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", 
"application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::DatabaseSecurityAlertPolicy = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::DatabaseSecurityAlertPolicy = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Created201(rsp_value)) } status_code => Err(create_or_update::Error::DefaultResponse { status_code }), } } pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::DatabaseSecurityAlertPolicy), Created201(models::DatabaseSecurityAlertPolicy), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
extern crate futures;
extern crate rand;

use std::time::Duration;
use std::thread;
use std::sync::mpsc::channel;

use rand::Rng;
use futures::Future;
use futures::sync::oneshot;

// Demonstrates a request/response relay built from std mpsc channels and
// futures-0.1 oneshot channels:
//   * five producer threads push strings into `data_tx` at random intervals,
//   * a relay thread pairs each incoming oneshot sender with the next data item,
//   * the main thread blocks on one oneshot receiver at a time and prints it.
fn main() {
    // Producer data flows through this channel.
    let (data_tx, data_rx) = channel();
    // Oneshot *senders* are handed to the relay thread through this channel.
    let (oneshot_tx, oneshot_rx) = channel();
    // Relay: for every oneshot sender received, forward exactly one data item.
    // Blocks on both recv calls, so it naturally paces itself.
    thread::spawn(move|| {
        loop {
            let oneshot: oneshot::Sender<_> = oneshot_rx.recv().unwrap();
            let data = data_rx.recv().unwrap();
            oneshot.send(data).unwrap();
        }
    });
    // Spawn five named producer threads; each one sends messages forever.
    for i in 0..5 {
        let data_tx = data_tx.clone();
        thread::Builder::new()
            .name(format!("Thread-{}", i))
            .spawn(move|| {
                let mut rng = rand::thread_rng();
                // Inner `i` shadows the thread index: it is a per-thread
                // message counter, while the thread name was fixed at spawn.
                for i in 0.. {
                    // Random delay of 1-4 whole seconds between messages.
                    let seconds = rng.gen_range(1,5);
                    thread::sleep(Duration::new(seconds, 0));
                    data_tx.send(format!("{}({}):{}s", thread::current().name().unwrap(), i, seconds)).unwrap();
                }
            })
            .unwrap();
    }
    // A separate thread continuously mints oneshot pairs: the sender goes to
    // the relay, the receiver goes to the main thread. `oneshot_tx.send`
    // blocks only until the relay consumes, keeping the pipeline bounded by
    // the channel semantics rather than spinning unboundedly ahead.
    let (oneshot_rx_tx, oneshot_rx_rx) = channel();
    thread::spawn(move|| {
        loop {
            let (once_tx, once_rx) = oneshot::channel();
            oneshot_tx.send(once_tx).unwrap();
            oneshot_rx_tx.send(once_rx).unwrap();
        }
    });
    // The main thread receives futures (oneshot receivers) in order and
    // blocks on each one (`wait`, futures-0.1 API), printing the payload.
    loop {
        let once_rx = oneshot_rx_rx.recv().unwrap();
        println!("{}", once_rx.wait().unwrap());
    }
}
use serde::{Serialize, Deserialize};

use crate::schema::users;

/// A full row of the `users` table (diesel `Queryable` + `Identifiable`).
#[derive(Debug, Identifiable, Queryable, Serialize, Deserialize, PartialEq)]
pub struct User {
    // Primary key.
    pub id: i32,
    pub username: String,
    // Email is optional in the schema.
    pub email: Option<String>,
    // NOTE(review): presumably a foreign key to a profile row — confirm
    // against crate::schema.
    pub profile: Option<i32>,
    // NOTE(review): assumed to hold a password *hash*, not plaintext —
    // confirm at the write site before documenting as fact.
    pub password: String,
    // Token identifying the user's current login session.
    pub login_session: String,
}

/// Insertable subset of `users` used when registering a new account.
/// Fields not listed here (id, profile, login_session) are left to the
/// database / later updates.
#[derive(Debug, Insertable, Serialize, Deserialize)]
#[table_name = "users"]
pub struct RegisterUser {
    pub username: String,
    pub email: Option<String>,
    pub password: String,
}

/// Payload describing an authenticated login: the identifier the user
/// supplied (username or email) plus the session token.
#[derive(Debug, Serialize, Deserialize)]
pub struct Login {
    pub username_or_email: String,
    pub login_session: String,
}
pub mod button;
pub mod playback;

use crate::player::PlaybackRequest;

/// A single input event for the application, unifying the two event
/// sources visible here: hardware/UI button commands and playback
/// requests coming from the player layer.
#[derive(Clone, Debug)]
pub enum Input {
    /// A button command (see [`button::Command`]).
    Button(button::Command),
    /// A playback request (see [`PlaybackRequest`]).
    Playback(PlaybackRequest),
}
use crate::simulation::agent_shader::ty::PushConstantData;
use crate::simulation::blur_fade_shader;
use crate::simulation::Simulation;
use imgui::{Context, Ui};
use imgui_vulkano_renderer::Renderer;
use imgui_winit_support::{HiDpiMode, WinitPlatform};
use std::sync::Arc;
use std::time::{Duration, Instant};
use vulkano::command_buffer::AutoCommandBufferBuilder;
use vulkano::device::{Device, DeviceExtensions, Queue};
use vulkano::image::{ImageUsage, SwapchainImage};
use vulkano::instance::{Instance, PhysicalDevice};
use vulkano::swapchain;
use vulkano::swapchain::{
    AcquireError, ColorSpace, FullscreenExclusive, PresentMode, Surface, SurfaceTransform,
    Swapchain, SwapchainCreationError,
};
use vulkano::sync;
use vulkano::sync::{FlushError, GpuFuture};
use vulkano_win::VkSurfaceBuild;
use winit::event::{Event, WindowEvent};
use winit::event_loop::{ControlFlow, EventLoop};
use winit::window::{Window, WindowBuilder};

/// Bundles every long-lived object needed to render the simulation with
/// imgui on top: the winit event loop/window surface, the Vulkan device
/// and queue, the swapchain with its images, and the imgui context,
/// platform glue and renderer.
pub struct System {
    pub event_loop: EventLoop<()>,
    pub device: Arc<Device>,
    pub queue: Arc<Queue>,
    pub surface: Arc<Surface<Window>>,
    pub swapchain: Arc<Swapchain<Window>>,
    pub images: Vec<Arc<SwapchainImage<Window>>>,
    pub imgui: Context,
    pub platform: WinitPlatform,
    pub renderer: Renderer,
}

impl System {
    /// Creates the Vulkan instance, picks the first physical device,
    /// opens a window titled `window_title`, builds a swapchain usable as
    /// a transfer destination, and initializes imgui plus its renderer.
    ///
    /// Panics (via `expect`/`unwrap`) if any Vulkan/window setup step
    /// fails — this is setup code with no recovery path.
    pub fn init(window_title: &str) -> System {
        // Basic commands taken from the vulkano imgui examples:
        // https://github.com/Tenebryo/imgui-vulkano-renderer/blob/master/examples/support/mod.rs
        let instance = {
            let extensions = vulkano_win::required_extensions();
            Instance::new(None, &extensions, None).expect("Failed to create instance.")
        };

        // First enumerated physical device; no scoring/preference logic.
        let physical = PhysicalDevice::enumerate(&instance)
            .next()
            .expect("No device available");

        let event_loop = EventLoop::new();
        let surface = WindowBuilder::new()
            .with_title(window_title.to_owned())
            .with_inner_size(winit::dpi::PhysicalSize {
                width: 2000,
                height: 1400,
            })
            .build_vk_surface(&event_loop, instance.clone())
            .unwrap();

        // Needs graphics + explicit transfer support (the frame loop copies
        // the simulation image into the swapchain image) + presentability.
        let queue_family = physical
            .queue_families()
            .find(|&q| q.supports_graphics() && q.explicitly_supports_transfers() && surface.is_supported(q).unwrap_or(false)
            )
            .expect("Device does not have a queue family that can draw to the window and supports transfers.");

        let (device, mut queues) = {
            let device_ext = DeviceExtensions {
                khr_swapchain: true,
                // Needed for compute shaders.
                khr_storage_buffer_storage_class: true,
                ..DeviceExtensions::none()
            };
            Device::new(
                physical,
                physical.supported_features(),
                &device_ext,
                [(queue_family, 0.5)].iter().cloned(),
            )
            .expect("Failed to create device")
        };
        // Only one queue was requested, so take the first.
        let queue = queues.next().unwrap();

        let format;
        let (swapchain, images) = {
            let caps = surface
                .capabilities(physical)
                .expect("Failed to get capabilities.");
            // First supported format; also reused below for the imgui renderer.
            format = caps.supported_formats[0].0;
            let dimensions = caps.current_extent.unwrap_or([1280, 1024]);
            let alpha = caps.supported_composite_alpha.iter().next().unwrap();
            // transfer_destination so the simulation result can be copied in.
            let image_usage = ImageUsage {
                transfer_destination: true,
                ..ImageUsage::color_attachment()
            };
            Swapchain::new(
                device.clone(),
                surface.clone(),
                caps.min_image_count,
                format,
                dimensions,
                1,
                image_usage,
                &queue,
                SurfaceTransform::Identity,
                alpha,
                PresentMode::Fifo,
                FullscreenExclusive::Default,
                true,
                ColorSpace::SrgbNonLinear,
            )
            .expect("Failed to create swapchain")
        };

        let mut imgui = Context::create();
        // Don't persist imgui window layout to disk.
        imgui.set_ini_filename(None);

        let mut platform = WinitPlatform::init(&mut imgui);
        platform.attach_window(imgui.io_mut(), &surface.window(), HiDpiMode::Rounded);

        let renderer = Renderer::init(&mut imgui, device.clone(), queue.clone(), format)
            .expect("Failed to initialize renderer");

        System {
            event_loop,
            device,
            queue,
            surface,
            swapchain,
            images,
            imgui,
            platform,
            renderer,
        }
    }

    /// Runs the render loop forever (winit's `EventLoop::run` never
    /// returns). Each redraw: throttle to ~60 fps, run the caller's
    /// `run_ui` closure (which may flip its `bool` argument to false to
    /// request exit and may mutate the two push-constant parameter
    /// structs), execute the simulation's three command buffers, copy the
    /// simulation result image into the swapchain image, draw the imgui
    /// overlay, and present.
    pub fn main_loop<
        F: FnMut(
                &mut bool,
                &mut PushConstantData,
                &mut blur_fade_shader::ty::PushConstantData,
                &mut Ui,
            ) + 'static,
    >(
        self,
        simulation: Simulation,
        mut run_ui: F,
    ) {
        // Destructure so the closure below can own the pieces it mutates.
        let System {
            event_loop,
            device,
            queue,
            surface,
            mut swapchain,
            mut images,
            mut imgui,
            mut platform,
            mut renderer,
            ..
        } = self;

        // Apparently there are various reasons why we might need to re-create the swapchain.
        // For example when the target surface has changed size.
        // This keeps track of whether the previous frame encountered one of those reasons.
        let mut recreate_swapchain = false;

        let mut previous_frame_end = Some(sync::now(device.clone()).boxed());

        let mut last_redraw = Instant::now();

        // Initial simulation parameters; `run_ui` may edit these each frame.
        let mut sim_parameters: PushConstantData = PushConstantData {
            // Pixels per second.
            agent_speed: 100.0,
            // Radians per second.
            agent_turn_speed: 50.0,
            sensor_radius: 1,
            // In the range [0 - PI]
            sensor_angle_spacing: 0.18,
            // Seconds per frame. (60fps)
            delta_time: 0.016667,
        };

        let mut fade_parameters: blur_fade_shader::ty::PushConstantData =
            blur_fade_shader::ty::PushConstantData {
                // Seconds per frame. (60fps)
                delta_time: 0.016667,
                evaporate_speed: 0.9,
            };

        // target 60 fps
        let target_frame_time = Duration::from_millis(1000 / 60);

        event_loop.run(move |event, _, control_flow| {
            *control_flow = ControlFlow::Wait;
            match event {
                Event::MainEventsCleared => {
                    platform
                        .prepare_frame(imgui.io_mut(), &surface.window())
                        .expect("Failed to prepare frame.");
                    surface.window().request_redraw();
                }
                Event::RedrawRequested(_) => {
                    // ---- Stick to the framerate ----
                    // Sleeps the remainder of the frame budget on the event
                    // loop thread to cap at ~60 fps.
                    let t = Instant::now();
                    let since_last = t.duration_since(last_redraw);
                    last_redraw = t;
                    if since_last < target_frame_time {
                        std::thread::sleep(target_frame_time - since_last);
                    }

                    // ---- Cleanup ----
                    previous_frame_end.as_mut().unwrap().cleanup_finished();

                    // ---- Recreate swapchain if necessary ----
                    if recreate_swapchain {
                        let dimensions: [u32; 2] = surface.window().inner_size().into();
                        let (new_swapchain, new_images) =
                            match swapchain.recreate_with_dimensions(dimensions) {
                                Ok(r) => r,
                                // Transient (e.g. mid-resize): just skip this frame.
                                Err(SwapchainCreationError::UnsupportedDimensions) => return,
                                Err(e) => panic!("Failed to recreate swapchain: {:?}", e),
                            };
                        images = new_images;
                        swapchain = new_swapchain;
                        recreate_swapchain = false;
                    }

                    // ---- Run the user's imgui code ----
                    let mut ui = imgui.frame();
                    let mut run = true;
                    run_ui(&mut run, &mut sim_parameters, &mut fade_parameters, &mut ui);
                    if !run {
                        *control_flow = ControlFlow::Exit;
                    }

                    // ---- Create draw commands ----
                    let (image_num, suboptimal, acquire_future) =
                        match swapchain::acquire_next_image(swapchain.clone(), None) {
                            Ok(r) => r,
                            Err(AcquireError::OutOfDate) => {
                                recreate_swapchain = true;
                                return;
                            }
                            Err(e) => panic!("Failed to acquire next image: {:?}", e),
                        };
                    // Still usable this frame, but schedule a rebuild.
                    if suboptimal {
                        recreate_swapchain = true;
                    }

                    platform.prepare_render(&ui, surface.window());
                    let draw_data = ui.render();

                    // Clamp the copy region to whichever is smaller: the
                    // simulation image or the swapchain image.
                    let extent_x = simulation
                        .result_image
                        .dimensions()
                        .width()
                        .min(images[image_num].dimensions()[0]);
                    let extent_y = simulation
                        .result_image
                        .dimensions()
                        .height()
                        .min(images[image_num].dimensions()[1]);

                    let mut cmd_buf_builder =
                        AutoCommandBufferBuilder::new(device.clone(), queue.family())
                            .expect("Failed to create command buffer");
                    cmd_buf_builder
                        .clear_color_image(images[image_num].clone(), [0.0; 4].into())
                        .unwrap();
                    cmd_buf_builder
                        .copy_image(
                            simulation.result_image.clone(),
                            [0; 3],
                            0,
                            0,
                            images[image_num].clone(),
                            [0; 3],
                            0,
                            0,
                            [extent_x, extent_y, 1],
                            1,
                        )
                        .expect("Failed to create image copy command");

                    // imgui overlay is drawn after the copy, into the same
                    // command buffer.
                    renderer
                        .draw_commands(
                            &mut cmd_buf_builder,
                            queue.clone(),
                            images[image_num].clone(),
                            draw_data,
                        )
                        .expect("Rendering failed");

                    let cmd_buf = cmd_buf_builder
                        .build()
                        .expect("Failed to build command buffer");

                    // ---- Execute the draw commands ----
                    // Simulation passes run first, then the copy+UI buffer,
                    // then present — chained on the GPU via futures.
                    let (buffer_1, buffer_2, buffer_3) =
                        simulation.create_command_buffers(&sim_parameters, &fade_parameters);

                    let future = previous_frame_end
                        .take()
                        .unwrap()
                        .join(acquire_future)
                        .then_execute(queue.clone(), buffer_1)
                        .unwrap()
                        .then_execute(queue.clone(), buffer_2)
                        .unwrap()
                        .then_execute(queue.clone(), buffer_3)
                        .unwrap()
                        .then_execute(queue.clone(), cmd_buf)
                        .unwrap()
                        .then_swapchain_present(queue.clone(), swapchain.clone(), image_num)
                        .then_signal_fence_and_flush();

                    match future {
                        Ok(future) => {
                            previous_frame_end = Some(future.boxed());
                        }
                        Err(FlushError::OutOfDate) => {
                            recreate_swapchain = true;
                            previous_frame_end = Some(sync::now(device.clone()).boxed());
                        }
                        Err(e) => {
                            // Non-fatal: log and start the next frame from a
                            // fresh fence.
                            println!("Failed to flush future: {:?}", e);
                            previous_frame_end = Some(sync::now(device.clone()).boxed());
                        }
                    }
                }
                Event::WindowEvent {
                    event: WindowEvent::CloseRequested,
                    ..
                } => {
                    *control_flow = ControlFlow::Exit;
                }
                event => {
                    // Pass events on to imgui.
                    platform.handle_event(imgui.io_mut(), surface.window(), &event);
                }
            }
        });
    }
}
// svd2rust-style generated register accessor for the read-only interrupt
// status register `ISR`. Each `fltN()`/`sysflt()` etc. extracts one bit of
// the raw 32-bit value as a `BitReader`. Note the gap: bits 7-15 have no
// accessor here (presumably reserved in the SVD — confirm against the
// device's reference manual).
#[doc = "Register `ISR` reader"]
pub type R = crate::R<ISR_SPEC>;
#[doc = "Field `FLT1` reader - Fault 1 Interrupt Flag"]
pub type FLT1_R = crate::BitReader;
#[doc = "Field `FLT2` reader - Fault 2 Interrupt Flag"]
pub type FLT2_R = crate::BitReader;
#[doc = "Field `FLT3` reader - Fault 3 Interrupt Flag"]
pub type FLT3_R = crate::BitReader;
#[doc = "Field `FLT4` reader - Fault 4 Interrupt Flag"]
pub type FLT4_R = crate::BitReader;
#[doc = "Field `FLT5` reader - Fault 5 Interrupt Flag"]
pub type FLT5_R = crate::BitReader;
#[doc = "Field `SYSFLT` reader - System Fault Interrupt Flag"]
pub type SYSFLT_R = crate::BitReader;
#[doc = "Field `FLT6` reader - Fault 6 Interrupt Flag"]
pub type FLT6_R = crate::BitReader;
#[doc = "Field `DLLRDY` reader - DLL Ready Interrupt Flag"]
pub type DLLRDY_R = crate::BitReader;
#[doc = "Field `BMPER` reader - Burst mode Period Interrupt Flag"]
pub type BMPER_R = crate::BitReader;
impl R {
    #[doc = "Bit 0 - Fault 1 Interrupt Flag"]
    #[inline(always)]
    pub fn flt1(&self) -> FLT1_R {
        FLT1_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Fault 2 Interrupt Flag"]
    #[inline(always)]
    pub fn flt2(&self) -> FLT2_R {
        FLT2_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Fault 3 Interrupt Flag"]
    #[inline(always)]
    pub fn flt3(&self) -> FLT3_R {
        FLT3_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Fault 4 Interrupt Flag"]
    #[inline(always)]
    pub fn flt4(&self) -> FLT4_R {
        FLT4_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Fault 5 Interrupt Flag"]
    #[inline(always)]
    pub fn flt5(&self) -> FLT5_R {
        FLT5_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - System Fault Interrupt Flag"]
    #[inline(always)]
    pub fn sysflt(&self) -> SYSFLT_R {
        SYSFLT_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Fault 6 Interrupt Flag"]
    #[inline(always)]
    pub fn flt6(&self) -> FLT6_R {
        FLT6_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 16 - DLL Ready Interrupt Flag"]
    #[inline(always)]
    pub fn dllrdy(&self) -> DLLRDY_R {
        DLLRDY_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - Burst mode Period Interrupt Flag"]
    #[inline(always)]
    pub fn bmper(&self) -> BMPER_R {
        BMPER_R::new(((self.bits >> 17) & 1) != 0)
    }
}
#[doc = "Interrupt Status Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ISR_SPEC;
impl crate::RegisterSpec for ISR_SPEC {
    // Underlying register width is 32 bits.
    type Ux = u32;
}
#[doc = "`read()` method returns [`isr::R`](R) reader structure"]
impl crate::Readable for ISR_SPEC {}
#[doc = "`reset()` method sets ISR to value 0"]
impl crate::Resettable for ISR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
#[macro_use] extern crate pest; #[macro_use] extern crate pest_derive; #[macro_use] extern crate lazy_static; mod operations; mod parse; mod print; mod serialize; mod spot; #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub enum HyperLTL { /// A quantifier, e.g. `forall pi` Quant(QuantKind, Vec<String>, Box<HyperLTL>), /// An operation, e.g., `G x`, `! a`, `a U b`, or `c && d` Appl(Op, Vec<HyperLTL>), /// A proposition Prop(String, Option<String>), } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum QuantKind { /// The existential path quantifier Exists, /// The universal path quantifier Forall, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum Op { /// The Boolean operator `!` for logical inversion Negation, /// The temporal operator `X` for next Next, /// The temporal operator `G` for globally Globally, /// The temporal operator `F` for finally Finally, /// The Boolean operator `&&` for logical conjunction Conjunction, /// The Boolean operator `||` for logical disjunction Disjunction, /// The Boolean operator `->` for logical implication Implication, /// The Boolean operator `xor` for logical exclusion Exclusion, /// The Boolean operator `<->` for logical equivalence Equivalence, /// The temporal operator `U` for until Until, /// The temporal operator `R` for release Release, /// The temporal operator `W` for the weak variant of until WeakUntil, /// Literal `true` True, /// Literal `false` False, } impl Op { /// returns the arity if operator has a fixed one and `None` otherwise fn arity(&self) -> Option<usize> { use self::Op::*; match self { Negation => Some(1), Next => Some(1), Finally => Some(1), Globally => Some(1), Conjunction => None, Disjunction => None, Implication => Some(2), Exclusion => Some(2), Equivalence => Some(2), Until => Some(2), Release => Some(2), WeakUntil => Some(2), True => Some(0), False => Some(0), } } fn is_safety(&self) -> bool { use self::Op::*; match self { Negation => true, Next => true, Finally => false, Globally 
=> true, Conjunction => true, Disjunction => true, Implication => true, Exclusion => true, Equivalence => true, Until => false, Release => true, WeakUntil => true, True => true, False => true, } } fn is_propositional(&self) -> bool { use self::Op::*; match self { Negation => true, Next => false, Finally => false, Globally => false, Conjunction => true, Disjunction => true, Implication => true, Exclusion => true, Equivalence => true, Until => false, Release => false, WeakUntil => false, True => true, False => true, } } fn is_temporal(&self) -> bool { !self.is_propositional() } fn is_chainable(&self) -> bool { use self::Op::*; match self { Negation => false, Next => false, Finally => false, Globally => false, Conjunction => true, Disjunction => true, Implication => false, Exclusion => false, Equivalence => false, Until => false, Release => false, WeakUntil => false, True => false, False => false, } } } impl HyperLTL { pub fn new_unary(op: Op, inner: Self) -> Self { HyperLTL::Appl(op, vec![inner]) } pub fn new_binary(op: Op, lhs: Self, rhs: Self) -> Self { HyperLTL::Appl(op, vec![lhs, rhs]) } fn constant_false() -> HyperLTL { HyperLTL::Appl(Op::False, Vec::new()) } fn constant_true() -> HyperLTL { HyperLTL::Appl(Op::True, Vec::new()) } }
// get an iterator if an operating system is present pub fn devices<'a>(&'a self) -> io::Result<Devices<'a>> { ... } // contains setup search handle, current index // as well as a pointer to the shared buffer pub struct Devices<'a> { ... } // close search handle and release buffer impl Drop for Devices { ... } impl<'a> Iterator for Devices<'a> { type Item = io::Result<&'a DeviceSlot>; ... } // contains usb device information handle, information // and address; the connection is not actually opened pub struct DeviceSlot { ... } impl DeviceSlot { pub fn device_descriptor(&self) -> io::Result<DeviceDescriptor> { ... } pub fn config_descriptor(&self) -> io::Result<ConfigDescriptor> { ... } pub fn open(&self) -> io::Result<Device> { ... } } // contains a device handle which is opened pub struct Device { ... } // closes this device impl Drop for Device { ... } impl Device { pub fn reset(&mut self) -> io::Result<()> { ... } // no close function here! just `drop(device)`. } /* =-=-=-=-= descriptors =-=-=-=-= */ pub struct VendorId(u16); pub struct ProductId(u16); pub struct EndpointAddr(u8); // follow usb-standard descriptor pub struct DeviceDescriptor { ... } impl DeviceDescriptor { fn vendor_id(&self) -> VendorId { ... } fn product_id(&self) -> ProductId { ... } } pub struct ConfigDescriptor { ... } impl ConfigDescriptor { ... } pub struct InterfaceDescriptor { ... } impl InterfaceDescriptor { ... } pub struct EndpointDescriptor { ... } impl EndpointDescriptor { ... } /* =-=-=-=-= example of main function =-=-=-=-= */ // example: list all devices and pipes for device in usb::devices()? { println!("Device:\t{:?}", device); let mut handle = device.open()?; for pipe in handle.pipes()? 
{ println!("Pipe:\t{:?}", pipe); } } // example: count all st-link's with proper voltage (sync) let mut buf = vec![0u8; 4096]; let cnt = usb::devices()?.filter_vid_pid(0x0483, 0x3748) .filter_map(|device| device.open().map(|handle| { let handle = handle?; block_on!(|| { handle.write_pipe(0x02, &[0xF7, 0x00, 0x00, 0x00])?; handle.read_pipe(0x81, &mut buf)?; }); let r1 = u32::from_le_bytes([buf[0], buf[1], buf[2], buf[3]]); let r2 = u32::from_le_bytes([buf[4], buf[5], buf[6], buf[7]]); Ok(8 * r2 > 5 * r1) // okay := 2.4*r2/r1 > 1.5 => r2 / r1 > 5 / 8 }).ok()) .fold(0, |prev, okay| prev + if okay { 1 } else { 0 }); // example: count all st-link's with proper voltage // in an advanced application interface let mut buf = vec![0u8; 4096]; let cnt = stlink::devices()?.filter_map(|device| device.open().map(|handle| { let volt: f32 = block_on!(handle?.query_voltage()?).into(); Ok(volt > 1.5) }).ok()) .fold(0, |prev, okay| prev + if okay { 1 } else { 0 }); // example: debug print all st-link dongles connected stlink::devices()?.for_each(|dev| println!("{:?}", dev)); // example: erase all connected stm32f103 chips via st-link // count all successes and fails let (successes, fails) = stlink::devices()? .map(|device| device.open().map(|handle| { block_on!(handle?.erase_all()).is_ok() }).ok()) .fold((0, 0), |(success, fail), ok| if ok { (success + 1, fail) } else { (success, fail + 1)} ); // example: flash one file onto one chip use nihao_stm32::DevicesExt; let source = include!("my_program"); let chip = nihao::devices()?.filter_stm32f103().next() .expect("have a device plugged in")?; block_on!(nihao::Flash::new() .erase_all() .program_entire(source) .verify(nihao_stm32::verify::CRC) .flash(chip)); // example: read bluetooth mac address from cc2640r2f use nihao_simplelink::DevicesExt; let addr = block_on!(nihao::devices().filter_cc2640r2f().next() .expect("have a device plugged in")? .query_primary_ble_mac_addr()? 
); // example: erase all chips connected via all supported // flashers and programmers nihao::devices()?.for_each(|device| block_on!(device?.open()?.erase_all()?)); // example: choose chip at runtime //todo
// 1864. Minimum Number of Swaps to Make the Binary String Alternating // https://leetcode.com/contest/weekly-contest-241/problems/minimum-number-of-swaps-to-make-the-binary-string-alternating/ // use std::cmp::Ordering; impl Solution { fn swap_counter(s: &String, result_first_char: char) -> u32 { let mut counter: u32 = 0; let mut marker: u8 = result_first_char as u8; for c in s.chars() { if c != marker as char { counter += 1; } marker ^= 1; } counter / 2 } pub fn min_swaps(s: String) -> i32 { // 1. s can be already alternated // 2. each element of s can have either 0 or 1 // 3. its possible that string cannot be alternated , return -1 // 4. can have un-even number of 1s and 0s in s // count number of chars let mut ones: u32 = 0; let mut zeros: u32 = 0; for c in s.chars() { if c == '1' { ones += 1; } else { zeros += 1; } } // impossible to do swaps and find alternating string // if diff between ones and zero is more than 1 if ((ones - zeros) as i32).abs() > 1 { return -1i32; } let result: u32 = match ones.cmp(&zeros) { Ordering::Less => Solution::swap_counter(&s, '0'), Ordering::Greater => Solution::swap_counter(&s, '1'), Ordering::Equal => std::cmp::min( Solution::swap_counter(&s, '1'), Solution::swap_counter(&s, '0'), ), }; result as i32 } }
// Copyright (c) 2017-present PyO3 Project and Contributors use crate::{ attributes::FromPyWithAttribute, method::{FnArg, FnSpec}, pyfunction::Argument, utils::{remove_lifetime, replace_self, unwrap_ty_group}, }; use proc_macro2::{Span, TokenStream}; use quote::{quote, quote_spanned}; use syn::ext::IdentExt; use syn::spanned::Spanned; use syn::Result; /// Determine if the function gets passed a *args tuple or **kwargs dict. pub fn accept_args_kwargs(attrs: &[Argument]) -> (bool, bool) { let (mut accept_args, mut accept_kwargs) = (false, false); for s in attrs { match s { Argument::VarArgs(_) => accept_args = true, Argument::KeywordArgs(_) => accept_kwargs = true, _ => continue, } } (accept_args, accept_kwargs) } /// Return true if the argument list is simply (*args, **kwds). pub fn is_forwarded_args(args: &[FnArg<'_>], attrs: &[Argument]) -> bool { args.len() == 2 && is_args(attrs, args[0].name) && is_kwargs(attrs, args[1].name) } fn is_args(attrs: &[Argument], name: &syn::Ident) -> bool { for s in attrs.iter() { if let Argument::VarArgs(path) = s { return path.is_ident(name); } } false } fn is_kwargs(attrs: &[Argument], name: &syn::Ident) -> bool { for s in attrs.iter() { if let Argument::KeywordArgs(path) = s { return path.is_ident(name); } } false } pub fn impl_arg_params( spec: &FnSpec<'_>, self_: Option<&syn::Type>, body: TokenStream, py: &syn::Ident, fastcall: bool, ) -> Result<TokenStream> { if spec.args.is_empty() { return Ok(body); } let args_array = syn::Ident::new("output", Span::call_site()); if !fastcall && is_forwarded_args(&spec.args, &spec.attrs) { // In the varargs convention, we can just pass though if the signature // is (*args, **kwds). let mut arg_convert = vec![]; for (i, arg) in spec.args.iter().enumerate() { arg_convert.push(impl_arg_param(arg, spec, i, None, &mut 0, py, &args_array)?); } return Ok(quote! 
{{ let _args = Some(_args); #(#arg_convert)* #body }}); }; let mut positional_parameter_names = Vec::new(); let mut positional_only_parameters = 0usize; let mut required_positional_parameters = 0usize; let mut keyword_only_parameters = Vec::new(); for arg in spec.args.iter() { if arg.py || is_args(&spec.attrs, arg.name) || is_kwargs(&spec.attrs, arg.name) { continue; } let name = arg.name.unraw().to_string(); let posonly = spec.is_pos_only(arg.name); let kwonly = spec.is_kw_only(arg.name); let required = !(arg.optional.is_some() || spec.default_value(arg.name).is_some()); if kwonly { keyword_only_parameters.push(quote! { ::pyo3::derive_utils::KeywordOnlyParameterDescription { name: #name, required: #required, } }); } else { if required { required_positional_parameters += 1; } if posonly { positional_only_parameters += 1; } positional_parameter_names.push(name); } } let num_params = positional_parameter_names.len() + keyword_only_parameters.len(); let mut param_conversion = Vec::new(); let mut option_pos = 0; for (idx, arg) in spec.args.iter().enumerate() { param_conversion.push(impl_arg_param( arg, spec, idx, self_, &mut option_pos, py, &args_array, )?); } let (accept_args, accept_kwargs) = accept_args_kwargs(&spec.attrs); let cls_name = if let Some(cls) = self_ { quote! { ::std::option::Option::Some(<#cls as ::pyo3::type_object::PyTypeInfo>::NAME) } } else { quote! { ::std::option::Option::None } }; let python_name = &spec.python_name; let (args_to_extract, kwargs_to_extract) = if fastcall { // _args is a &[&PyAny], _kwnames is a Option<&PyTuple> containing the // keyword names of the keyword args in _kwargs ( // need copied() for &&PyAny -> &PyAny quote! { ::std::iter::Iterator::copied(_args.iter()) }, quote! { _kwnames.map(|kwnames| { use ::std::iter::Iterator; kwnames.as_slice().iter().copied().zip(_kwargs.iter().copied()) }) }, ) } else { // _args is a &PyTuple, _kwargs is an Option<&PyDict> ( quote! { _args.iter() }, quote! 
{ _kwargs.map(|dict| dict.iter()) }, ) }; // create array of arguments, and then parse Ok(quote! {{ const DESCRIPTION: ::pyo3::derive_utils::FunctionDescription = ::pyo3::derive_utils::FunctionDescription { cls_name: #cls_name, func_name: stringify!(#python_name), positional_parameter_names: &[#(#positional_parameter_names),*], positional_only_parameters: #positional_only_parameters, required_positional_parameters: #required_positional_parameters, keyword_only_parameters: &[#(#keyword_only_parameters),*], accept_varargs: #accept_args, accept_varkeywords: #accept_kwargs, }; let mut #args_array = [::std::option::Option::None; #num_params]; let (_args, _kwargs) = DESCRIPTION.extract_arguments( #py, #args_to_extract, #kwargs_to_extract, &mut #args_array )?; #(#param_conversion)* #body }}) } /// Re option_pos: The option slice doesn't contain the py: Python argument, so the argument /// index and the index in option diverge when using py: Python fn impl_arg_param( arg: &FnArg<'_>, spec: &FnSpec<'_>, idx: usize, self_: Option<&syn::Type>, option_pos: &mut usize, py: &syn::Ident, args_array: &syn::Ident, ) -> Result<TokenStream> { // Use this macro inside this function, to ensure that all code generated here is associated // with the function argument macro_rules! quote_arg_span { ($($tokens:tt)*) => { quote_spanned!(arg.ty.span() => $($tokens)*) } } let arg_name = syn::Ident::new(&format!("arg{}", idx), Span::call_site()); if arg.py { return Ok(quote_arg_span! { let #arg_name = #py; }); } let ty = arg.ty; let name = arg.name; let transform_error = quote! { |e| ::pyo3::derive_utils::argument_extraction_error(#py, stringify!(#name), e) }; if is_args(&spec.attrs, name) { ensure_spanned!( arg.optional.is_none(), arg.name.span() => "args cannot be optional" ); return Ok(quote_arg_span! 
{ let #arg_name = _args.unwrap().extract().map_err(#transform_error)?; }); } else if is_kwargs(&spec.attrs, name) { ensure_spanned!( arg.optional.is_some(), arg.name.span() => "kwargs must be Option<_>" ); return Ok(quote_arg_span! { let #arg_name = _kwargs.map(|kwargs| kwargs.extract()) .transpose() .map_err(#transform_error)?; }); } let arg_value = quote_arg_span!(#args_array[#option_pos]); *option_pos += 1; let extract = if let Some(FromPyWithAttribute(expr_path)) = &arg.attrs.from_py_with { quote_arg_span! { #expr_path(_obj).map_err(#transform_error) } } else { quote_arg_span! { _obj.extract().map_err(#transform_error) } }; let arg_value_or_default = match (spec.default_value(name), arg.optional.is_some()) { (Some(default), true) if default.to_string() != "None" => { quote_arg_span! { #arg_value.map_or_else(|| ::std::result::Result::Ok(::std::option::Option::Some(#default)), |_obj| #extract)? } } (Some(default), _) => { quote_arg_span! { #arg_value.map_or_else(|| ::std::result::Result::Ok(#default), |_obj| #extract)? } } (None, true) => { quote_arg_span! { #arg_value.map_or(::std::result::Result::Ok(::std::option::Option::None), |_obj| #extract)? } } (None, false) => { quote_arg_span! { { let _obj = #arg_value.expect("Failed to extract required method argument"); #extract? } } } }; return if let syn::Type::Reference(tref) = unwrap_ty_group(arg.optional.unwrap_or(ty)) { let mut tref = remove_lifetime(tref); if let Some(cls) = self_ { replace_self(&mut tref.elem, cls); } let mut_ = tref.mutability; let (target_ty, borrow_tmp) = if arg.optional.is_some() { // Get Option<&T> from Option<PyRef<T>> ( quote_arg_span! { ::std::option::Option<<#tref as ::pyo3::derive_utils::ExtractExt<'_>>::Target> }, if mut_.is_some() { quote_arg_span! { _tmp.as_deref_mut() } } else { quote_arg_span! { _tmp.as_deref() } }, ) } else { // Get &T from PyRef<T> ( quote_arg_span! { <#tref as ::pyo3::derive_utils::ExtractExt<'_>>::Target }, quote_arg_span! 
{ &#mut_ *_tmp }, ) }; Ok(quote_arg_span! { let #mut_ _tmp: #target_ty = #arg_value_or_default; let #arg_name = #borrow_tmp; }) } else { Ok(quote_arg_span! { let #arg_name = #arg_value_or_default; }) }; }
// Copyright © 2020, Oracle and/or its affiliates. // // Copyright (c) 2019 Intel Corporation. All rights reserved. // Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. // // Copyright 2017 The Chromium OS Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE-BSD-3-Clause file. // // SPDX-License-Identifier: Apache-2.0 AND BSD-3-Clause //! Traits and structs for loading kernels into guest memory. //! - [KernelLoader](trait.KernelLoader.html): load kernel image into guest memory. //! - [KernelLoaderResult](struct.KernelLoaderResult.html): structure passed to the VMM to assist //! zero page construction and boot environment setup. //! - [Elf](elf/struct.Elf.html): elf image loader. //! - [BzImage](bzimage/struct.BzImage.html): bzImage loader. //! - [PE](pe/struct.PE.html): PE image loader. extern crate vm_memory; use std::fmt; use std::io::{Read, Seek}; #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] use vm_memory::ByteValued; use vm_memory::{Address, Bytes, GuestAddress, GuestMemory, GuestUsize}; #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] pub use crate::loader_gen::bootparam; pub use crate::cmdline::Cmdline; #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] mod x86_64; #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] pub use x86_64::*; #[cfg(target_arch = "aarch64")] mod aarch64; #[cfg(target_arch = "aarch64")] pub use aarch64::*; #[derive(Debug, PartialEq, Eq)] /// Kernel loader errors. pub enum Error { /// Failed to load bzimage. #[cfg(all(feature = "bzimage", any(target_arch = "x86", target_arch = "x86_64")))] Bzimage(bzimage::Error), /// Failed to load elf image. #[cfg(all(feature = "elf", any(target_arch = "x86", target_arch = "x86_64")))] Elf(elf::Error), /// Failed to load PE image. #[cfg(all(feature = "pe", target_arch = "aarch64"))] Pe(pe::Error), /// Invalid command line. 
InvalidCommandLine, /// Failed writing command line to guest memory. CommandLineCopy, /// Command line overflowed guest memory. CommandLineOverflow, /// Invalid kernel start address. InvalidKernelStartAddress, /// Memory to load kernel image is too small. MemoryOverflow, } /// A specialized [`Result`] type for the kernel loader. /// /// [`Result`]: https://doc.rust-lang.org/std/result/enum.Result.html pub type Result<T> = std::result::Result<T, Error>; impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let desc = match self { #[cfg(all(feature = "bzimage", any(target_arch = "x86", target_arch = "x86_64")))] Error::Bzimage(ref _e) => "failed to load bzImage kernel image", #[cfg(all(feature = "elf", any(target_arch = "x86", target_arch = "x86_64")))] Error::Elf(ref _e) => "failed to load ELF kernel image", #[cfg(all(feature = "pe", target_arch = "aarch64"))] Error::Pe(ref _e) => "failed to load PE kernel image", Error::InvalidCommandLine => "invalid command line provided", Error::CommandLineCopy => "failed writing command line to guest memory", Error::CommandLineOverflow => "command line overflowed guest memory", Error::InvalidKernelStartAddress => "invalid kernel start address", Error::MemoryOverflow => "memory to load kernel image is not enough", }; write!(f, "Kernel Loader: {}", desc) } } impl std::error::Error for Error { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { #[cfg(all(feature = "bzimage", any(target_arch = "x86", target_arch = "x86_64")))] Error::Bzimage(ref e) => Some(e), #[cfg(all(feature = "elf", any(target_arch = "x86", target_arch = "x86_64")))] Error::Elf(ref e) => Some(e), #[cfg(all(feature = "pe", target_arch = "aarch64"))] Error::Pe(ref e) => Some(e), Error::InvalidCommandLine => None, Error::CommandLineCopy => None, Error::CommandLineOverflow => None, Error::InvalidKernelStartAddress => None, Error::MemoryOverflow => None, } } } #[cfg(all(feature = "elf", any(target_arch = 
"x86", target_arch = "x86_64")))] impl From<elf::Error> for Error { fn from(err: elf::Error) -> Self { Error::Elf(err) } } #[cfg(all(feature = "bzimage", any(target_arch = "x86", target_arch = "x86_64")))] impl From<bzimage::Error> for Error { fn from(err: bzimage::Error) -> Self { Error::Bzimage(err) } } #[cfg(all(feature = "pe", target_arch = "aarch64"))] impl From<pe::Error> for Error { fn from(err: pe::Error) -> Self { Error::Pe(err) } } /// Result of [`KernelLoader.load()`](trait.KernelLoader.html#tymethod.load). /// /// This specifies where the kernel is loading and passes additional /// information for the rest of the boot process to be completed by /// the VMM. #[derive(Clone, Copy, Debug, Default, PartialEq)] pub struct KernelLoaderResult { /// Address in the guest memory where the kernel image starts to be loaded. pub kernel_load: GuestAddress, /// Offset in guest memory corresponding to the end of kernel image, in case the device tree /// blob and initrd will be loaded adjacent to kernel image. pub kernel_end: GuestUsize, /// Configuration for the VMM to use to fill zero page for bzImage direct boot. /// See https://www.kernel.org/doc/Documentation/x86/boot.txt. #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] pub setup_header: Option<bootparam::setup_header>, /// Availability of a PVH entry point. Only used for ELF boot, indicates whether the kernel /// supports the PVH boot protocol as described in: /// https://xenbits.xen.org/docs/unstable/misc/pvh.html #[cfg(all(feature = "elf", any(target_arch = "x86", target_arch = "x86_64")))] pub pvh_boot_cap: elf::PvhBootCapability, } /// Trait that specifies kernel image loading support. pub trait KernelLoader { /// How to load a specific kernel image format into the guest memory. /// /// # Arguments /// /// * `guest_mem`: [`GuestMemory`] to load the kernel in. /// * `kernel_offset`: Usage varies between implementations. /// * `kernel_image`: Kernel image to be loaded. 
/// * `highmem_start_address`: Address where high memory starts. /// /// [`GuestMemory`]: https://docs.rs/vm-memory/latest/vm_memory/guest_memory/trait.GuestMemory.html fn load<F, M: GuestMemory>( guest_mem: &M, kernel_offset: Option<GuestAddress>, kernel_image: &mut F, highmem_start_address: Option<GuestAddress>, ) -> Result<KernelLoaderResult> where F: Read + Seek; } #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] // SAFETY: The layout of the structure is fixed and can be initialized by // reading its content from byte array. unsafe impl ByteValued for bootparam::setup_header {} #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] // SAFETY: The layout of the structure is fixed and can be initialized by // reading its content from byte array. unsafe impl ByteValued for bootparam::boot_params {} /// Writes the command line string to the given guest memory slice. /// /// # Arguments /// /// * `guest_mem` - [`GuestMemory`] that will be partially overwritten by the command line. /// * `guest_addr` - The address in `guest_mem` at which to load the command line. /// * `cmdline` - The kernel command line. 
/// /// [`GuestMemory`]: https://docs.rs/vm-memory/latest/vm_memory/guest_memory/trait.GuestMemory.html /// /// # Examples /// /// ```rust /// # use std::ffi::CStr; /// # extern crate vm_memory; /// # use linux_loader::loader::*; /// # use vm_memory::{Bytes, GuestAddress}; /// # type GuestMemoryMmap = vm_memory::GuestMemoryMmap<()>; /// let mem_size: usize = 0x1000000; /// let gm = GuestMemoryMmap::from_ranges(&[(GuestAddress(0x0), mem_size)]).unwrap(); /// let mut cl = Cmdline::new(10).unwrap(); /// cl.insert("foo", "bar"); /// let mut buf = vec![0u8;8]; /// let result = load_cmdline(&gm, GuestAddress(0x1000), &cl).unwrap(); /// gm.read_slice(buf.as_mut_slice(), GuestAddress(0x1000)).unwrap(); /// assert_eq!(buf.as_slice(), "foo=bar\0".as_bytes()); pub fn load_cmdline<M: GuestMemory>( guest_mem: &M, guest_addr: GuestAddress, cmdline: &Cmdline, ) -> Result<()> { // We need a null terminated string because that's what the Linux // kernel expects when parsing the command line: // https://elixir.bootlin.com/linux/v5.10.139/source/kernel/params.c#L179 let cmdline_string = cmdline .as_cstring() .map_err(|_| Error::InvalidCommandLine)?; let cmdline_bytes = cmdline_string.as_bytes_with_nul(); let end = guest_addr // Underflow not possible because the cmdline contains at least // a byte (null terminator) .checked_add((cmdline_bytes.len() - 1) as u64) .ok_or(Error::CommandLineOverflow)?; if end > guest_mem.last_addr() { return Err(Error::CommandLineOverflow); } guest_mem .write_slice(cmdline_bytes, guest_addr) .map_err(|_| Error::CommandLineCopy)?; Ok(()) } #[cfg(test)] mod tests { use super::*; use vm_memory::{Address, GuestAddress}; type GuestMemoryMmap = vm_memory::GuestMemoryMmap<()>; const MEM_SIZE: u64 = 0x100_0000; fn create_guest_mem() -> GuestMemoryMmap { GuestMemoryMmap::from_ranges(&[(GuestAddress(0x0), (MEM_SIZE as usize))]).unwrap() } #[test] fn test_cmdline_overflow() { let gm = create_guest_mem(); let mut cl = Cmdline::new(10).unwrap(); 
cl.insert_str("12345").unwrap(); let cmdline_address = GuestAddress(u64::MAX - 5); assert_eq!( Err(Error::CommandLineOverflow), load_cmdline(&gm, cmdline_address, &cl) ); let cmdline_address = GuestAddress(MEM_SIZE - 5); assert_eq!( Err(Error::CommandLineOverflow), load_cmdline(&gm, cmdline_address, &cl) ); let cmdline_address = GuestAddress(MEM_SIZE - 6); assert!(load_cmdline(&gm, cmdline_address, &cl).is_ok()); } #[test] fn test_cmdline_write_end_regresion() { let gm = create_guest_mem(); let mut cmdline_address = GuestAddress(45); let sample_buf = &[1; 100]; // Fill in guest memory with non zero bytes gm.write(sample_buf, cmdline_address).unwrap(); let mut cl = Cmdline::new(10).unwrap(); // Test loading an empty cmdline load_cmdline(&gm, cmdline_address, &cl).unwrap(); let val: u8 = gm.read_obj(cmdline_address).unwrap(); assert_eq!(val, b'\0'); // Test loading an non-empty cmdline cl.insert_str("123").unwrap(); load_cmdline(&gm, cmdline_address, &cl).unwrap(); let val: u8 = gm.read_obj(cmdline_address).unwrap(); assert_eq!(val, b'1'); cmdline_address = cmdline_address.unchecked_add(1); let val: u8 = gm.read_obj(cmdline_address).unwrap(); assert_eq!(val, b'2'); cmdline_address = cmdline_address.unchecked_add(1); let val: u8 = gm.read_obj(cmdline_address).unwrap(); assert_eq!(val, b'3'); cmdline_address = cmdline_address.unchecked_add(1); let val: u8 = gm.read_obj(cmdline_address).unwrap(); assert_eq!(val, b'\0'); } }
use pancurses::{endwin, Input, Window};
use std::{thread, time};

// Shown in the main menu title line.
const VERSION: &str = "v0.2.0";

mod display;
mod interface;
mod mechanics;

/// Entry point: initialises curses, then runs the MainMenu → Game → Lost
/// state machine until the user picks Exit.
fn main() {
    let screen = display::init_curses();
    let colors = display::init_colors();
    let mut window = display::init_window(&screen);
    let mut max_yx = screen.get_max_yx();

    let mut main_menu = create_main_menu();
    // The snake always (re)starts from the same position.
    let mut snake = mechanics::Snake::new((20, 10));
    let mut fruit_manager = mechanics::FruitManager::new();
    new_fruit_wrapper(window.get_max_yx(), &snake, &mut fruit_manager);

    let mut going = true;
    let mut state = mechanics::State::MainMenu;
    while going {
        // Re-centre the play window in case the terminal was resized.
        display::recenter(&screen, &mut window, &mut max_yx);
        if state == mechanics::State::MainMenu {
            display::print_simple_menu(&window, &main_menu, &colors);
            let exec_option = main_menu.handle_input(window.getch());
            if exec_option {
                // The chosen menu entry decides the next state (Game or Quit).
                state = main_menu.options[main_menu.selected].target_state;
            }
            // don't waste cpu on refreshing this menu in a tight loop
            thread::sleep(time::Duration::from_millis(10));
        } else if state == mechanics::State::Game {
            handle_input(&window, &mut snake, &mut state);
            snake.advance();
            display::print_game(&window, &snake, &fruit_manager.fruits, false, &colors);
            // Game speed: one simulation step every 50 ms.
            thread::sleep(time::Duration::from_millis(50));
            if mechanics::check_if_lost(window.get_max_yx(), &snake) {
                state = mechanics::State::Lost;
            }
            if fruit_manager.fruit_eaten(&snake) {
                // Growth is applied over the following advance() calls.
                snake.growth += 1;
                new_fruit_wrapper(window.get_max_yx(), &snake, &mut fruit_manager);
            }
        } else if state == mechanics::State::Lost {
            // Show the final board for a second, then reset the game state.
            display::print_game(&window, &snake, &fruit_manager.fruits, true, &colors);
            thread::sleep(time::Duration::from_millis(1000));
            snake = mechanics::Snake::new((20, 10));
            fruit_manager = mechanics::FruitManager::new();
            new_fruit_wrapper(window.get_max_yx(), &snake, &mut fruit_manager);
            // Drop keys pressed during the death screen so they don't
            // immediately trigger a menu action.
            flush_input(&window);
            state = mechanics::State::MainMenu;
        } else if state == mechanics::State::Quit {
            going = false;
        }
    }
    endwin();
}

/// Applies one pending key press to the running game: arrow keys steer the
/// snake, 'q' returns to the main menu. A missing/unknown key is ignored.
fn handle_input(window: &Window, snake: &mut mechanics::Snake, state: &mut mechanics::State) {
    match window.getch() {
        Some(Input::Character('q')) => *state = mechanics::State::MainMenu,
        Some(Input::KeyUp) => snake.turn(mechanics::Direction::Up),
        Some(Input::KeyDown) => snake.turn(mechanics::Direction::Down),
        Some(Input::KeyRight) => snake.turn(mechanics::Direction::Right),
        Some(Input::KeyLeft) => snake.turn(mechanics::Direction::Left),
        _ => (),
    }
}

/// Builds the main menu with its two entries, Play and Exit.
fn create_main_menu() -> interface::SimpleMenu {
    let mut options = Vec::new();
    options.push(interface::MenuOption::new(
        "Play".to_string(),
        mechanics::State::Game,
    ));
    options.push(interface::MenuOption::new(
        "Exit".to_string(),
        mechanics::State::Quit,
    ));
    interface::SimpleMenu::new("Rusty Snake".to_string(), VERSION.to_string(), options)
}

/// Places a new fruit on the board. Curses reports sizes as (y, x) while
/// the tuple is swapped to (x, y) here — presumably `place_new` expects
/// (x, y); confirm against `FruitManager`.
fn new_fruit_wrapper(
    max_yx: (i32, i32),
    snake: &mechanics::Snake,
    fruit_manager: &mut mechanics::FruitManager,
) {
    let max_xy = (max_yx.1, max_yx.0);
    fruit_manager.place_new(max_xy, &snake);
}

/// Discards all buffered input events.
fn flush_input(window: &Window) {
    while let Some(_) = window.getch() {
        // do nothing
    }
}
pub(crate) mod year_2022;
pub mod instruction; mod test; use self::instruction::*; use std::ops::DerefMut; fn same_sign(lhs: u8, rhs: u8) -> bool { lhs & 0x80 == rhs & 0x80 } trait Address where Self: Sized { fn low_byte(self) -> u8; fn high_byte(self) -> u8; fn combine_low_high(low: u8, high: u8) -> Self; fn add_offset(self, offset: u8) -> Self; fn on_different_page(self, rhs: Self) -> bool { self.high_byte() != rhs.high_byte() } } impl Address for usize { fn low_byte(self) -> u8 { (self & 0xff) as u8 } fn high_byte(self) -> u8 { ((self & 0xff00) >> 8) as u8 } fn combine_low_high(low: u8, high: u8) -> Self { low as Self | ((high as Self) << 8) } fn add_offset(self, offset: u8) -> usize { if offset < 128 { self + offset as usize } else { self - (255 - offset) as usize } } } #[derive(Default)] pub struct StatusRegister { carry: bool, zero: bool, irq_disabled: bool, decimal_mode: bool, overflow: bool, negative: bool } impl StatusRegister { fn carry_from_u8(&mut self, val: u8) { self.carry = val != 0; } fn carry_into_u8(&self) -> u8 { if self.carry { 1 } else { 0 } } } impl From<u8> for StatusRegister { fn from(val: u8) -> Self { Self { carry: val & 1 == 1, zero: val & 2 == 1, irq_disabled: val & 4 == 1, decimal_mode: val & 8 == 1, overflow: val & 64 == 1, negative: val & 128 == 1 } } } fn from_bool(b: bool, val: u8) -> u8 { if b { val } else { 0 } } impl Into<u8> for StatusRegister { fn into(self) -> u8 { 0x30 | from_bool(self.carry, 1) | from_bool(self.zero, 2) | from_bool(self.irq_disabled, 4) | from_bool(self.decimal_mode, 8) | from_bool(self.overflow, 64) | from_bool(self.negative, 128) } } #[derive(Debug)] pub enum Error { IllegalOpcode(u8) } #[derive(Default)] pub struct Registers { p: StatusRegister, pc: usize, a: u8, x: u8, y: u8, s: usize } pub struct Cpu { memory: Vec<u8>, cycles: u64, registers: Registers, executing_read: bool } impl Default for Cpu { fn default() -> Self { Cpu { memory: vec![0; 65536], cycles: 0, registers: Default::default(), executing_read: false } } } impl Cpu 
{ pub fn new(r: Registers) -> Self { Cpu { registers: r, .. Default::default() } } pub fn fill_memory(&mut self, from: usize, with: &[u8]) { let end = from + with.len(); assert!(end <= 65536, "Filling memory would exceed 64K mark"); self.memory[from..end].clone_from_slice(with); } fn s_into_byte(&self) -> u8 { (self.registers.s & 0xff) as u8 } fn s_from_byte(&mut self, val: u8) { self.registers.s = val as usize; } fn set_zn(&mut self, val: u8) { self.registers.p.zero = val == 0; self.registers.p.negative = val & 0x80 == 0x80; } fn get_x(&self) -> u8 { self.registers.x } fn get_y(&self) -> u8 { self.registers.y } fn set_a(&mut self, val: u8) { self.registers.a = val; self.set_zn(val) } fn set_x(&mut self, val: u8) { self.registers.x = val; self.set_zn(val) } fn set_y(&mut self, val: u8) { self.registers.y = val; self.set_zn(val) } pub fn get_memory(&mut self) -> &mut [u8] { self.memory.deref_mut() } pub fn step(&mut self) -> Result<(), Error> { let opcode = self.read_pc(); self.dispatch(opcode) } fn do_asl(&mut self, val: u8) -> u8 { self.registers.p.carry_from_u8(self.registers.a & 0x80); val << 1 } fn do_lsr(&mut self, val: u8) -> u8 { self.registers.p.carry_from_u8(self.registers.a & 0x01); val >> 1 } fn do_rol(&mut self, val: u8) -> u8 { let old_c = self.registers.p.carry_into_u8(); self.registers.p.carry_from_u8(val & 0x80); (val << 1) | old_c } fn do_ror(&mut self, val: u8) -> u8 { let old_c = self.registers.p.carry_into_u8(); self.registers.p.carry_from_u8(val & 0x01); (val >> 1) | (old_c << 7) } fn mod_x<F>(&mut self, f: F) where F: Fn(&mut Self, u8) -> u8 { let x = self.registers.x; let res = f(self, x); self.set_x(res); } fn mod_y<F>(&mut self, f: F) where F: Fn(&mut Self, u8) -> u8 { let y = self.registers.y; let res = f(self, y); self.set_y(res); } fn mod_a<F>(&mut self, mut f: F) where F: FnMut(&mut Self, u8) -> u8 { let a = self.registers.a; let res = f(self, a); self.set_a(res); } fn resolve_immmediate(&mut self) -> usize { let pc = self.registers.pc; 
self.registers.pc += 1; pc } fn resolve_zero_page(&mut self) -> usize { let val = self.read_pc(); val as usize } fn resolve_zero_page_x(&mut self) -> usize { let val = self.read_pc(); let _ = self.read_byte(val as usize); let x = self.get_x(); val.wrapping_add(x) as usize } fn resolve_zero_page_y(&mut self) -> usize { let val = self.read_pc(); let _ = self.read_byte(val as usize); let y = self.get_y(); val.wrapping_add(y) as usize } fn resolve_absolute(&mut self) -> usize { let adl = self.read_pc(); let adh = self.read_pc(); Address::combine_low_high(adl, adh) } fn resolve_low_high(&mut self, adl: u8, adh: u8, val: u8) -> usize { let addr = Address::combine_low_high(adl.wrapping_add(val), adh); if adl <= 255 - val { if !self.executing_read { let _ = self.read_byte(addr); } addr } else { let _ = self.read_byte(addr); addr + 0x100 } } fn resolve_absolute_add(&mut self, val: u8) -> usize { let adl = self.read_pc(); let adh = self.read_pc(); self.resolve_low_high(adl, adh, val) } fn resolve_absolute_x(&mut self) -> usize { let x = self.get_x(); self.resolve_absolute_add(x) } fn resolve_absolute_y(&mut self) -> usize { let y = self.get_y(); self.resolve_absolute_add(y) } fn resolve_indexed_indirect(&mut self) -> usize { let bal = self.read_pc(); let _ = self.read_byte(bal as usize); let effective_bal = bal.wrapping_add(self.get_x()); let adl = self.read_byte(effective_bal as usize); let adh = self.read_byte(effective_bal.wrapping_add(1) as usize); Address::combine_low_high(adl, adh) } fn resolve_indirect_indexed(&mut self) -> usize { let ial = self.read_pc(); let bal = self.read_byte(ial as usize); let bah = self.read_byte(ial.wrapping_add(1) as usize); let y = self.get_y(); self.resolve_low_high(bal, bah, y) } fn resolve_address(&mut self, am: AddressingMode) -> usize { match am { AddressingMode::Immediate => self.resolve_immmediate(), AddressingMode::ZeroPage => self.resolve_zero_page(), AddressingMode::ZeroPageX => self.resolve_zero_page_x(), 
AddressingMode::ZeroPageY => self.resolve_zero_page_y(), AddressingMode::Absolute => self.resolve_absolute(), AddressingMode::AbsoluteX => self.resolve_absolute_x(), AddressingMode::AbsoluteY => self.resolve_absolute_y(), AddressingMode::IndexedIndirect => self.resolve_indexed_indirect(), AddressingMode::IndirectIndexed => self.resolve_indirect_indexed() } } fn fetch_operand(&mut self, am: AddressingMode) -> u8 { let addr = self.resolve_address(am); self.read_byte(addr) } fn compare(&mut self, register: u8, operand: u8) { self.registers.p.carry = register >= operand; self.registers.p.zero = register == operand; self.registers.p.negative = ((register.wrapping_sub(operand)) & 0x80) != 0; } fn compare_a(&mut self, operand: u8) { let a = self.registers.a; self.compare(a, operand); } fn compare_x(&mut self, operand: u8) { let x = self.registers.x; self.compare(x, operand); } fn compare_y(&mut self, operand: u8) { let y = self.registers.y; self.compare(y, operand); } fn bit(&mut self, operand: u8) { let a = self.registers.a; self.registers.p.overflow = operand & 0x40 != 0; self.registers.p.negative = operand & 0x80 != 0; self.registers.p.zero = a == operand; } fn sbc(&mut self, operand: u8) { // [TODO]: Add support for decimal mode let a = self.registers.a; let c = if self.registers.p.carry { 0 } else { 1 }; let effective_operand = operand.wrapping_sub(c); let res = a.wrapping_sub(effective_operand); self.registers.p.carry = a < effective_operand; self.registers.p.overflow = same_sign(a, operand) && !same_sign(a, res); self.set_a(res) } fn adc(&mut self, operand: u8) { // [TODO]: Add support for decimal mode let a = self.registers.a; let effective_operand = operand.wrapping_add(self.registers.p.carry_into_u8()); let res = a.wrapping_add(effective_operand); self.registers.p.carry = a > 255 - effective_operand; self.registers.p.overflow = same_sign(a, operand) && !same_sign(a, res); self.set_a(res); } fn execute_single_byte(&mut self, m: SingleByteMnemonic) { match m { 
SingleByteMnemonic::ASL => self.mod_a(Self::do_asl), SingleByteMnemonic::CLC => self.registers.p.carry = false, SingleByteMnemonic::CLD => self.registers.p.decimal_mode = false, SingleByteMnemonic::CLI => self.registers.p.irq_disabled = false, SingleByteMnemonic::CLV => self.registers.p.overflow = false, SingleByteMnemonic::DEX => self.mod_x(|_, x| x.wrapping_sub(1)), SingleByteMnemonic::DEY => self.mod_y(|_, y| y.wrapping_sub(1)), SingleByteMnemonic::INX => self.mod_x(|_, x| x.wrapping_add(1)), SingleByteMnemonic::INY => self.mod_y(|_, y| y.wrapping_add(1)), SingleByteMnemonic::LSR => self.mod_a(Self::do_lsr), SingleByteMnemonic::NOP => {}, SingleByteMnemonic::ROL => self.mod_a(Self::do_rol), SingleByteMnemonic::ROR => self.mod_a(Self::do_ror), SingleByteMnemonic::SEC => self.registers.p.carry = true, SingleByteMnemonic::SED => self.registers.p.decimal_mode = true, SingleByteMnemonic::SEI => self.registers.p.irq_disabled = true, SingleByteMnemonic::TAX => self.mod_x(|this, _| this.registers.a), SingleByteMnemonic::TAY => self.mod_y(|this, _| this.registers.y), SingleByteMnemonic::TSX => self.mod_x(|this, _| this.s_into_byte()), SingleByteMnemonic::TXA => self.mod_a(|this, _| this.registers.x), SingleByteMnemonic::TXS => { let x = self.registers.x; self.s_from_byte(x) }, SingleByteMnemonic::TYA => self.mod_a(|this, _| this.registers.y) } self.full_cycle(); self.bogus_read_pc(); } fn execute_read(&mut self, m: ReadMnemonic, am: AddressingMode) { self.executing_read = true; let operand = self.fetch_operand(am); match m { ReadMnemonic::ADC => self.adc(operand), ReadMnemonic::AND => self.mod_a(|_, a| a & operand), ReadMnemonic::BIT => self.bit(operand), ReadMnemonic::CMP => self.compare_a(operand), ReadMnemonic::CPX => self.compare_x(operand), ReadMnemonic::CPY => self.compare_y(operand), ReadMnemonic::EOR => self.mod_a(|_, a| a ^ operand), ReadMnemonic::LDA => self.mod_a(|_, _| operand), ReadMnemonic::LDX => self.mod_x(|_, _| operand), ReadMnemonic::LDY => 
self.mod_y(|_, _| operand), ReadMnemonic::ORA => self.mod_a(|_, a| a | operand), ReadMnemonic::SBC => self.sbc(operand) } self.executing_read = false; } fn should_branch(&self, m: BranchMnemonic) -> bool { match m { BranchMnemonic::BCC => !self.registers.p.carry, BranchMnemonic::BCS => self.registers.p.carry, BranchMnemonic::BEQ => self.registers.p.zero, BranchMnemonic::BMI => self.registers.p.negative, BranchMnemonic::BNE => !self.registers.p.zero, BranchMnemonic::BPL => !self.registers.p.negative, BranchMnemonic::BVC => self.registers.p.overflow, BranchMnemonic::BVS => !self.registers.p.overflow } } fn execute_branch(&mut self, m: BranchMnemonic) { let offset = self.read_pc(); if !self.should_branch(m) { return; } let pc = self.registers.pc; let next_pc = pc.add_offset(offset); if next_pc.on_different_page(pc) { let _ = self.read_byte(Address::combine_low_high(next_pc.low_byte(), pc.high_byte())); } let _ = self.read_byte(next_pc); self.registers.pc = next_pc; } fn dispatch(&mut self, opcode: u8) -> Result<(), Error> { let instruction = decode(opcode).ok_or(Error::IllegalOpcode(opcode))?; match instruction { Instruction::SingleByte(mnemonic) => self.execute_single_byte(mnemonic), Instruction::Read(mnemonic, am) => self.execute_read(mnemonic, am), Instruction::Branch(mnemonic) => self.execute_branch(mnemonic), _ => return Err(Error::IllegalOpcode(065)) } Ok(()) } pub fn run(&mut self, cycles: u64) { } fn full_cycle(&mut self) { self.cycles += 1; } fn read_pc(&mut self) -> u8 { let pc = self.registers.pc; let v = self.read_byte(pc); self.registers.pc += 1; v } fn read_byte(&mut self, address: usize) -> u8 { assert!(address <= 65535, "address out of bounds"); let v = self.memory[address]; self.full_cycle(); v } fn bogus_read_pc(&mut self) { let _ = self.read_pc(); self.registers.pc -= 1; } }
// Modified from: http://www.adammil.net/blog/v125_roguelike_vision_algorithms.html#diamondcode

use rl_utils::{Area, Coord};

use crate::{utils::Octant, Fov, FovCallbackEnum, FovConfig, Los, VisionShape};

/// A rational slope `y/x` bounding the sector currently swept by the
/// shadowcaster. Kept as two integers so comparisons can be done exactly
/// by cross-multiplication (no floating point).
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
struct Slope {
    x: isize,
    y: isize,
}

impl Slope {
    pub fn new(x: isize, y: isize) -> Slope { Slope { x, y } }
    // Cross-multiplied comparisons against another slope `s = s.y / s.x`.
    pub fn greater(&self, s: Slope) -> bool { self.y * s.x > self.x * s.y } // this > y/x
    pub fn greater_or_equal(&self, s: Slope) -> bool { self.y * s.x >= self.x * s.y } // this >= y/x
    pub fn less_or_equal(&self, s: Slope) -> bool { self.y * s.x <= self.x * s.y } // this <= y/x
}

impl From<(isize, isize)> for Slope {
    fn from(t: (isize, isize)) -> Self { Self::new(t.0, t.1) }
}

/// Opacity of the previously visited tile while scanning down a column;
/// `Uninitialised` means no tile has been examined yet in this column.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
enum Opaque {
    Uninitialised,
    Transparent,
    Opaque,
}

/// "Diamond walls" recursive-shadowcasting field of view. Walls are treated
/// as diamonds inscribed in their tile for visibility purposes. The
/// `callback` both queries the map (`IsBlocked`) and records results
/// (`SetVisible`); `cb_type` is the caller-supplied state threaded through.
#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
pub struct DiamondWalls<'a, T, Func>
where
    Func: FnMut(&mut T, Coord, FovCallbackEnum) -> bool,
{
    // When true, a tile sees the origin iff the origin sees it.
    pub symmetric: bool,
    pub area: Area,
    pub radius: usize,
    pub vision: VisionShape,
    pub cb_type: &'a mut T,
    pub callback: Func,
}

impl<'a, T, Func> DiamondWalls<'a, T, Func>
where
    Func: FnMut(&mut T, Coord, FovCallbackEnum) -> bool,
{
    /// Scans one octant recursively, starting at column `row`, limited to the
    /// sector between the `top` and `bottom` slopes (in octant-local
    /// coordinates; `octant.calc_point` maps back to world coordinates).
    fn compute(&mut self, src: Coord, octant: Octant, row: isize, mut top: Slope, mut bottom: Slope) {
        for x in row..=self.radius as isize {
            let mut was_opaque = Opaque::Uninitialised;

            // Topmost y in this column that the `top` slope still covers.
            // Doubled coordinates address tile corners/edges: tile (x, y)
            // spans (x*2-1, y*2-1)..(x*2+1, y*2+1).
            let top_y = if top.x == 1 {
                x
            } else {
                let top_y = ((x * 2 - 1) * top.y + top.x) / (top.x * 2);
                let ay = (top.y * 2 + 1) * top.x;
                // NOTE(review): the branch results below use `top.y + 1` /
                // `top.y`, while the reference algorithm adjusts the computed
                // column intercept (`top_y + 1` / `top_y`). Looks like a port
                // slip — confirm against the linked article before relying on
                // exact wall-top visibility.
                if (self.callback)(self.cb_type, (x, top_y).into(), FovCallbackEnum::IsBlocked) {
                    if top.greater_or_equal((x * 2, ay).into()) { top.y + 1 } else { top.y }
                } else if top.greater((x * 2 + 1, ay).into()) {
                    top.y + 1
                } else {
                    top.y
                }
            };

            // Bottommost y in this column covered by the `bottom` slope.
            let bottom_y = if bottom.y == 0 {
                0
            } else {
                ((x * 2 - 1) * bottom.y + bottom.x) / (bottom.x * 2)
            };

            // Walk the column top-down so shadows propagate correctly.
            for y in (bottom_y..=top_y).rev() {
                let point = octant.calc_point(src, (x, y).into());
                if !self.area.point_within(point) {
                    continue;
                } else if !self.vision.in_radius(x as usize, y as usize, self.radius) {
                    continue;
                }

                // NOTE: use the next line instead if you want the algorithm to be symmetrical
                if !self.symmetric {
                    (self.callback)(self.cb_type, point, FovCallbackEnum::SetVisible(true));
                } else if (y != top.y || top.greater_or_equal((x, y).into()))
                    && (y != bottom.y || bottom.less_or_equal((x, y).into()))
                {
                    (self.callback)(self.cb_type, point, FovCallbackEnum::SetVisible(true));
                }

                // if y == top.y or y == bottom.y, make sure the sector actually intersects the wall tile. if not, don't consider
                // it opaque to prevent the code below from moving the top vector up or the bottom vector down
                let is_opaque = (self.callback)(self.cb_type, point, FovCallbackEnum::IsBlocked);
                let is_opaque = if is_opaque {
                    if y == top.y
                        && top.less_or_equal((y * 2 - 1, x * 2).into())
                        && !(self.callback)(self.cb_type, (x, y - 1).into(), FovCallbackEnum::IsBlocked)
                        || y == bottom.y
                            && bottom.greater_or_equal((y * 2 + 1, x * 2).into())
                            && !(self.callback)(self.cb_type, (x, y + 1).into(), FovCallbackEnum::IsBlocked)
                    {
                        false
                    } else {
                        true
                    }
                } else {
                    false
                };

                if x != self.radius as isize {
                    if is_opaque {
                        if was_opaque == Opaque::Transparent {
                            // Transparent -> opaque transition: the tiles
                            // below this wall continue in a narrower sector.
                            let new_bottom = (x * 2 - 1, y * 2 + 1).into();
                            if y == bottom_y {
                                // Wall sits at the bottom of the column; just
                                // raise our own bottom bound and stop here.
                                bottom = new_bottom;
                                break;
                            } else {
                                self.compute(src, octant, x + 1, top, new_bottom);
                            }
                        }
                        was_opaque = Opaque::Opaque;
                    } else {
                        // adjust top vector downwards and continue if we found a transition from opaque to clear
                        // (x*2+1, y*2+1) is the top-right corner of the clear tile (i.e. the bottom-right of the opaque tile)
                        if was_opaque == Opaque::Opaque {
                            top = (x * 2 + 1, y * 2 + 1).into();
                        }
                        was_opaque = Opaque::Transparent;
                    }
                }
            }

            // If the column ended in a wall the rest of the octant is in
            // shadow; recursion (above) has already handled any open slice.
            if was_opaque != Opaque::Transparent {
                break;
            }
        }
    }
}

impl<'a, T, Func> FovConfig for DiamondWalls<'a, T, Func>
where
    Func: FnMut(&mut T, Coord, FovCallbackEnum) -> bool,
{
    fn with_area(mut self, area: Area) -> Self {
        self.area = area;
        self
    }

    fn with_radius(mut self, radius: usize) -> Self {
        self.radius = radius;
        self
    }

    fn with_vision_shape(mut self, vision: VisionShape) -> Self {
        self.vision = vision;
        self
    }
}

impl<'a, T, Func> Fov for DiamondWalls<'a, T, Func>
where
    Func: FnMut(&mut T, Coord, FovCallbackEnum) -> bool,
{
    /// Computes the full field of view around `src` by sweeping all eight
    /// octants from the widest possible sector (slope 1/1 down to 0/1).
    fn fov(&mut self, src: Coord) {
        for octant in Octant::iterator() {
            self.compute(src, *octant, 1, Slope::new(1, 1), Slope::new(1, 0));
        }
    }
}
/// A SQL statement paired with an identifier used to tag it (empty by default).
#[derive(Clone, Debug)]
pub struct Query {
    sql: String,
    id: String,
}

impl Query {
    /// Builds a query from any string-like SQL source; the id starts empty.
    pub fn new(sql: impl AsRef<str>) -> Self {
        let sql = sql.as_ref().to_string();
        Self { sql, id: String::new() }
    }

    /// Consumes the query and returns it tagged with the given id.
    pub fn id(self, id: impl AsRef<str>) -> Self {
        let mut tagged = self;
        tagged.id = id.as_ref().to_string();
        tagged
    }

    /// Borrows the SQL text.
    pub(crate) fn get_sql(&self) -> &str {
        self.sql.as_str()
    }

    /// Borrows the id (empty if never set).
    pub(crate) fn get_id(&self) -> &str {
        self.id.as_str()
    }

    /// Returns this query with its SQL rewritten by `f`; the id is kept.
    pub(crate) fn map_sql<F>(self, f: F) -> Self
    where
        F: Fn(&str) -> String,
    {
        let mut mapped = self;
        mapped.sql = f(&mapped.sql);
        mapped
    }
}

impl<T> From<T> for Query
where
    T: AsRef<str>,
{
    fn from(source: T) -> Self {
        Self::new(source)
    }
}
use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; use serde::{Deserialize, Serialize}; use crate::SqlValue; /// `Eq + Hash` hash key used for hash algorithms. #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] pub struct SqlValueHashKey(u64); impl From<&SqlValue> for SqlValueHashKey { fn from(sql_value: &SqlValue) -> Self { let mut hasher = DefaultHasher::new(); sql_value.hash(&mut hasher); Self(hasher.finish()) } }
/// Deletion state of a [`Record`].
#[derive(Debug, PartialEq)]
pub enum Deleted {
    No,
    Soft,
    Hard,
}

/// A key/value-store record: the volumes holding the blob, its deletion
/// state, and an optional 32-character content hash.
#[derive(Debug)]
pub struct Record {
    pub rvolumes: Vec<String>,
    pub deleted: Deleted, // TODO: handle pub later
    pub hash: String,     // TODO: handle pub later
}

impl Record {
    /// Creates an empty record marked `Hard`-deleted (the parse default).
    pub fn new() -> Self {
        Self {
            rvolumes: vec![],
            deleted: Deleted::Hard,
            hash: String::new(),
        }
    }
}

impl Default for Record {
    fn default() -> Self {
        Self::new()
    }
}

impl From<String> for Record {
    /// Parses the wire format: optional `DELETED` prefix, optional
    /// `HASH<32 hex chars>` section, then a comma-separated volume list.
    ///
    /// # Panics
    /// Panics (on slicing) if a `HASH` section is shorter than 32 characters.
    fn from(mut string: String) -> Self {
        let mut rec = Record::new();
        if string.starts_with("DELETED") {
            rec.deleted = Deleted::Soft;
            string = string[7..].to_string();
        }
        if string.starts_with("HASH") {
            rec.hash = string[4..36].to_string();
            string = string[36..].to_string();
        }
        rec.rvolumes = string.split(',').map(|x| x.to_string()).collect();
        rec
    }
}

// Implementing `From<Record> for String` (rather than the original
// hand-written `Into<String> for Record`) is the idiomatic direction; the
// blanket impl still gives callers `rec.into()` for free.
impl From<Record> for String {
    /// Serializes the record back to the wire format.
    ///
    /// # Panics
    /// Panics on `Hard`-deleted records, which must never be persisted.
    fn from(rec: Record) -> String {
        let mut cc = String::new();
        if rec.deleted == Deleted::Hard {
            panic!("Cannot put HARD delete in the database");
        }
        if rec.deleted == Deleted::Soft {
            cc.push_str("DELETED");
        }
        // Only well-formed (32-char) hashes are written out.
        if rec.hash.len() == 32 {
            cc.push_str("HASH");
            cc.push_str(&rec.hash);
        }
        cc.push_str(&rec.rvolumes.join(","));
        cc
    }
}
use itertools::Itertools;

use std::{env, fs::File, iter::FromIterator, path::PathBuf};

// Space Image Format dimensions for AoC 2019 day 8.
const IMG_WIDTH: usize = 25;
const IMG_HEIGHT: usize = 6;
const IMG_SIZE: usize = IMG_WIDTH * IMG_HEIGHT;

/// Per-layer tally of how many 0, 1 and 2 digits it contains.
#[derive(Default, Debug)]
struct Numbers {
    zeros: usize,
    ones: usize,
    twos: usize,
}

/// Counts the 0/1/2 digits in one layer; any other digit is ignored.
fn count<I: Iterator<Item = u8>>(i: I) -> Numbers {
    let (z, o, t) = i.fold((0, 0, 0), |(z, o, t), n| match n {
        0 => (z + 1, o, t),
        1 => (z, o + 1, t),
        2 => (z, o, t + 1),
        _ => (z, o, t),
    });
    Numbers {
        zeros: z,
        ones: o,
        twos: t,
    }
}

/// Part 1: on the layer with the fewest zeros, multiply the number of ones
/// by the number of twos.
///
/// NOTE(review): `to_digit(10).unwrap()` panics on any non-digit character,
/// including a trailing newline — assumes pre-trimmed input.
#[aoc(day08, part1)]
pub fn day08_part1(input: &str) -> usize {
    let numbers = input
        .chars()
        .map(|d| d.to_digit(10).unwrap() as u8)
        .chunks(IMG_SIZE)
        .into_iter()
        .fold(
            // Seed with MAX zeros so the first real layer always wins.
            Numbers {
                zeros: usize::max_value(),
                ones: usize::max_value(),
                twos: usize::max_value(),
            },
            |numbers, layer| {
                let n = count(layer);
                if n.zeros < numbers.zeros {
                    n
                } else {
                    numbers
                }
            },
        );
    numbers.ones * numbers.twos
}

/// Part 2: stack the layers (0 = black, 1 = white, 2 = transparent; first
/// non-transparent pixel wins) and render the image as block characters.
#[aoc(day08, part2)]
pub fn day08_part2(input: &str) -> String {
    let layers: Vec<Vec<u8>> = input
        .chars()
        .map(|d| d.to_digit(10).unwrap() as u8)
        .chunks(IMG_SIZE)
        .into_iter()
        .map(FromIterator::from_iter)
        .collect();

    (0..IMG_SIZE)
        .map(|i| {
            // Front-to-back: keep looking while the pixel is transparent (2).
            layers
                .iter()
                .fold(2, |pixel, layer| if pixel == 2 { layer[i] } else { pixel })
        })
        .chunks(IMG_WIDTH)
        .into_iter()
        .fold(String::with_capacity(IMG_SIZE), |mut res, row| {
            res.push('\n');
            row.for_each(|pixel| {
                res.push(match pixel {
                    0 => '░',
                    1 => '█',
                    _ => '?',
                })
            });
            res
        })
}
use serde::{Deserialize, Serialize};

/// Represents a customer
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct Customer {
    /// A unique identifier for a customer record
    // NOTE(review): presumably a UUID string — confirm with whatever
    // generates these records.
    pub guid: String,
    /// First name
    pub first_name: String,
    /// Last name
    pub last_name: String,
    /// Email address
    pub email: String,
    /// Physical address
    pub address: String,
}
/*
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT license.
 */
use vector::Metric;

use crate::index::InmemIndex;
use crate::model::configuration::index_write_parameters::IndexWriteParametersBuilder;
use crate::model::{IndexConfiguration};
use crate::model::vertex::DIM_128;
use crate::utils::{file_exists, load_metadata_from_file};

use super::get_test_file_path;

// f32, 128 DIM and 256 points source data
const TEST_DATA_FILE: &str = "tests/data/siftsmall_learn_256pts.fbin";
const NUM_POINTS_TO_LOAD: usize = 256;

/// Builds an in-memory index over the 256-point SIFT test file and sets its
/// search entry point to the dataset medoid.
///
/// # Panics
/// Panics if the data file is missing or mismatched, or if index
/// construction fails.
pub fn create_index_with_test_data() -> InmemIndex<f32, DIM_128> {
    let index_write_parameters = IndexWriteParametersBuilder::new(50, 4).with_alpha(1.2).build();
    // NOTE(review): positional arguments — presumed to be metric, dim,
    // aligned dim, max points, followed by PQ/filter-related flags; confirm
    // against `IndexConfiguration::new`'s signature.
    let config = IndexConfiguration::new(
        Metric::L2,
        128,
        128,
        256,
        false,
        0,
        false,
        0,
        1.0f32,
        index_write_parameters);
    let mut index: InmemIndex<f32, DIM_128> = InmemIndex::new(config).unwrap();

    build_test_index(&mut index, get_test_file_path(TEST_DATA_FILE).as_str(), NUM_POINTS_TO_LOAD);
    // Use the medoid as the entry point for greedy search.
    index.start = index.dataset.calculate_medoid_point_id().unwrap();

    index
}

/// Loads `num_points_to_load` vectors from `filename` into `index`, after
/// validating the file's point count and dimensionality against the index
/// configuration.
///
/// # Panics
/// Panics when the file is missing, contains more points than the index can
/// hold, fewer points than requested, or the wrong dimensionality.
fn build_test_index(index: &mut InmemIndex<f32, DIM_128>, filename: &str, num_points_to_load: usize) {
    if !file_exists(filename) {
        panic!("ERROR: Data file {} does not exist.", filename);
    }

    let (file_num_points, file_dim) = load_metadata_from_file(filename).unwrap();
    if file_num_points > index.configuration.max_points {
        panic!(
            "ERROR: Driver requests loading {} points and file has {} points, but index can support only {} points as specified in configuration.",
            num_points_to_load, file_num_points, index.configuration.max_points
        );
    }

    if num_points_to_load > file_num_points {
        panic!(
            "ERROR: Driver requests loading {} points and file has only {} points.",
            num_points_to_load, file_num_points
        );
    }

    if file_dim != index.configuration.dim {
        panic!(
            "ERROR: Driver requests loading {} dimension, but file has {} dimension.",
            index.configuration.dim, file_dim
        );
    }

    index.dataset.build_from_file(filename, num_points_to_load).unwrap();
    println!("Using only first {} from file.", num_points_to_load);
    index.num_active_pts = num_points_to_load;
}
use diesel::{self, prelude::*};

mod schema {
    table! {
        questbooks {
            id -> Nullable<Integer>,
            uid -> Text,
            message -> Text,
        }
    }
}

use self::schema::questbooks;
use self::schema::questbooks::dsl::{questbooks as all_questbooks};

/// A guestbook entry; `id` is `None` until the row has been inserted.
// `#[table_name]` moved after the derive list, matching diesel's
// documented attribute placement.
#[derive(Serialize, Queryable, Insertable, Debug, Clone)]
#[table_name="questbooks"]
pub struct Questbook {
    pub id: Option<i32>,
    pub uid: String,
    pub message: String,
}

/// Form payload for creating an entry.
#[derive(FromForm)]
pub struct NewQuest {
    pub uid: String,
    pub message: String,
}

/// Form payload for deleting an entry by id.
#[derive(FromForm)]
pub struct DeleteQuest {
    pub id: String,
}

/// Form payload for updating an existing entry.
#[derive(FromForm)]
pub struct UpdateQuest {
    pub id: String,
    pub uid: String,
    pub message: String,
}

impl Questbook {
    /// Returns all entries ordered by descending id (newest first).
    ///
    /// NOTE(review): still panics if the query itself fails (pre-existing
    /// behavior); consider returning a `Result`.
    pub fn all(conn: &SqliteConnection) -> Vec<Questbook> {
        all_questbooks.order(questbooks::id.desc()).load::<Questbook>(conn).unwrap()
    }

    /// Inserts a new entry; returns whether the insert succeeded.
    pub fn insert(questbook: NewQuest, conn: &SqliteConnection) -> bool {
        let t = Questbook { id: None, uid: questbook.uid, message: questbook.message };
        diesel::insert_into(questbooks::table).values(&t).execute(conn).is_ok()
    }

    /// Deletes the entry with the given id; returns `false` for an
    /// unparsable id or a failed delete.
    pub fn delete(questbook: DeleteQuest, conn: &SqliteConnection) -> bool {
        // BUG FIX: `id` comes straight from a user form — reject bad input
        // instead of panicking via `.unwrap()`.
        let my_int_id: i32 = match questbook.id.parse() {
            Ok(id) => id,
            Err(_) => return false,
        };
        diesel::delete(questbooks::table).filter(questbooks::id.eq(my_int_id)).execute(conn).is_ok()
    }

    /// Updates uid/message of the entry with the given id; returns `false`
    /// for an unparsable id or a failed update.
    pub fn update(questbook: UpdateQuest, conn: &SqliteConnection) -> bool {
        // BUG FIX: same user-input hardening as `delete`.
        let int_id: i32 = match questbook.id.parse() {
            Ok(id) => id,
            Err(_) => return false,
        };
        diesel::update(questbooks::table).filter(questbooks::id.eq(int_id))
            .set(
                (questbooks::uid.eq(questbook.uid),
                 questbooks::message.eq(questbook.message))
            )
            .execute(conn)
            .is_ok()
    }
}
use simdjson_rust::dom; use std::fmt; use flate2::read; use flate2::write; use flate2::Compression; use std::error::Error; use std::ffi::OsStr; use std::fs::File; use std::io::{self, BufRead, BufReader, BufWriter, Write}; use std::path::Path; use std::time::Instant; /// Read normal or compressed files seamlessly /// Uses the presence of a `.gz` extension to decide pub fn reader(filename: &str) -> Box<dyn BufRead> { let path = Path::new(filename); let file = match File::open(&path) { Err(why) => panic!("couldn't open {}: {}", path.display(), why.description()), Ok(file) => file, }; if path.extension() == Some(OsStr::new("gz")) { Box::new(BufReader::with_capacity( 128 * 1024 *100 * 10, read::GzDecoder::new(file), )) } else { Box::new(BufReader::with_capacity(128 * 1024, file)) } } /// Write normal or compressed files seamlessly /// Uses the presence of a `.gz` extension to decide pub fn writer(filename: &str) -> Box<dyn Write> { let path = Path::new(filename); let file = match File::create(&path) { Err(why) => panic!("couldn't open {}: {}", path.display(), why.description()), Ok(file) => file, }; if path.extension() == Some(OsStr::new("gz")) { // Error is here: Created file isn't gzip-compressed Box::new(BufWriter::with_capacity( 128 * 1024, write::GzEncoder::new(file, Compression::default()), )) } else { Box::new(BufWriter::with_capacity(128 * 1024, file)) } } fn main() -> Result<(), Box<dyn std::error::Error>> { let start = Instant::now(); let mut parser = dom::Parser::default(); // let filename = "oct1.json"; // println!("writer with regular file: '{}'", filename); // let mut writer = writer(filename); // writer.write_all(b"[")?; // // // let filename = "mq_oct_1.csv.gz"; // let mut count = 0; // println!("reader with compressed file: '{}'", filename); // let reader_file_gz = reader(filename); // for line in reader_file_gz.lines() { // //println!("{}", line?); // for tab in line.unwrap().split('\t') { // //let _tp1 = parser.parse(&tab); // 
writer.write_all(tab.as_bytes()); // writer.write_all(b",\n"); // //println!("Display: {}", tp1.unwrap()); // break; // } // // count +=1 ; // // if count >2 { // // break; // // } // if count % 10000 == 0 { // println!("thousands {:10?}", count); // } // } // // writer.write_all(b"{}]"); // println!("count {:10?}", count); let tweets = parser.load("oct1.json")?; //let tweets = parser.parse(&buffer)?; let elapsed = start.elapsed(); println!("Elapsed: {:.2?}", elapsed); println!("What a relieve!!!"); // // //print!("Display: {}", tweets); // // let mut index = 0; // for tweet in tweets.at_index(3).iter() { // print!("Display: {} is {}", index, tweet); // index +=1; // } Ok(()) }
use std::ffi::CString;

use criterion::measurement::{Measurement, ValueFormatter};
use libpapi_sys::*;

mod formatter;
use formatter::InsFormatter;

/// A criterion [`Measurement`] that counts a hardware event (e.g. total
/// instructions) through the PAPI library instead of wall-clock time.
pub struct PapiMeasurement {
    // Handle of the PAPI event set created in `new`.
    event_set: std::os::raw::c_int,
}

impl PapiMeasurement {
    /// Initializes the PAPI library and builds an event set containing
    /// `event` (a PAPI event name such as "PAPI_TOT_INS").
    ///
    /// # Panics
    /// Panics if PAPI initialization fails, the event name is unknown, or
    /// the event set cannot be created/populated.
    pub fn new(event: &str) -> PapiMeasurement {
        let mut event_set = PAPI_NULL;
        let mut event_code: std::os::raw::c_int = 0;
        let event_name = CString::new(event).expect("CString::new failed");

        unsafe {
            let retval = PAPI_library_init(PAPI_VER_CURRENT);
            if retval != PAPI_VER_CURRENT {
                panic!("PAPI_library_init failed {}", retval)
            }

            let raw_name = event_name.into_raw();
            let retval = PAPI_event_name_to_code(raw_name, &mut event_code);
            // SAFETY: `raw_name` was produced by `CString::into_raw` above
            // and has not been freed. Retaking ownership here fixes the leak
            // in the original, which handed the pointer to PAPI and never
            // reclaimed the allocation.
            drop(CString::from_raw(raw_name));
            if retval != PAPI_OK as i32 {
                panic!("PAPI_event_name_to_code failed {}", retval)
            }

            let retval = PAPI_create_eventset(&mut event_set);
            if retval != PAPI_OK as i32 {
                panic!("PAPI_create_eventset error {}", retval)
            }

            // `c_int` is `Copy`; the original's `.clone()` calls were noise.
            let retval = PAPI_add_event(event_set, event_code as i32);
            if retval != PAPI_OK as i32 {
                panic!("PAPI_add_event failed {}", retval)
            }
        }

        PapiMeasurement { event_set }
    }
}

impl Measurement for PapiMeasurement {
    type Intermediate = i64;
    type Value = i64;

    /// Starts the counter and returns its value at the start point.
    fn start(&self) -> Self::Intermediate {
        let mut values = [0i64];
        unsafe {
            let retval = PAPI_start(self.event_set);
            if retval != PAPI_OK as i32 {
                panic!("PAPI_start failed {}", retval)
            }
            let retval = PAPI_read(self.event_set, values.as_mut_ptr());
            if retval != PAPI_OK as i32 {
                panic!("PAPI_read failed {}", retval)
            }
        }
        values[0]
    }

    /// Stops the counter and returns the delta since [`Measurement::start`].
    fn end(&self, i: Self::Intermediate) -> Self::Value {
        let mut values = [0i64];
        unsafe {
            let retval = PAPI_stop(self.event_set, values.as_mut_ptr());
            if retval != PAPI_OK as i32 {
                // BUG FIX: this failure path previously reported
                // "PAPI_read failed".
                panic!("PAPI_stop failed {}", retval)
            }
        }
        values[0] - i
    }

    fn add(&self, v1: &Self::Value, v2: &Self::Value) -> Self::Value {
        v1 + v2
    }

    fn zero(&self) -> Self::Value {
        0
    }

    fn to_f64(&self, value: &Self::Value) -> f64 {
        *value as f64
    }

    fn formatter(&self) -> &dyn ValueFormatter {
        &InsFormatter
    }
}
/// A photo object as deserialized from the API response.
///
/// NOTE(review): field semantics are inferred from the names — the
/// `photo_75`/`photo_130`/`photo_604` strings look like URLs to renditions
/// with those maximum side lengths. Confirm against the upstream API docs.
#[derive(Debug, PartialEq, Eq, Deserialize)]
pub struct Photo {
    /// Photo id.
    pub id: Id,
    /// Id of the album containing the photo.
    pub album_id: Id,
    /// Id of the photo's owner.
    pub owner_id: OwnerId,
    /// Id of the user associated with the photo (presumably the uploader).
    pub user_id: Id,
    /// 75px rendition — presumably a URL.
    pub photo_75: String,
    /// 130px rendition — presumably a URL.
    pub photo_130: String,
    /// 604px rendition — presumably a URL.
    pub photo_604: String,
    /// Width of the original photo in pixels.
    pub width: u16,
    /// Height of the original photo in pixels.
    pub height: u16,
    /// Caption/description text.
    pub text: String,
    /// Upload time.
    pub date: Timestamp,
}
use super::*;
use crate::graphics::*;

/// A horizontal progress bar with a text label drawn to its left.
pub struct ProgressBar {
    /// Label text drawn before the bar.
    text: &'static str,
    /// Lower bound of the range (currently always 0).
    min: usize,
    /// Upper bound of the range; `set_value(max)` fills the bar completely.
    max: usize,
    /// Filled fraction, normally in `0.0..=1.0`.
    fill: f32,
    /// Total bar width in drawing units.
    scale: usize,
    empty_color: Color,
    filled_color: Color,
}

impl Renderable for ProgressBar {
    /// Draws the label, the full-width background bar, then the filled
    /// portion on top of it.
    fn draw(&self, x: usize, y: usize) {
        let lbl = label::Label::new(self.text, Color::White, Color::Blue);
        lbl.draw(x, y);
        let (pb_ox, pb_oy) = lbl.dimensions();
        // Background (empty) bar across the whole scale width.
        // BUG FIX: the original cast `self.scale` through `f32` for no
        // reason, which is lossy for large values; cast directly instead.
        draw_filled_rect(
            ((pb_ox + x) as isize, y as isize),
            (self.scale as isize, pb_oy as isize),
            self.empty_color,
        );
        // Filled portion, proportional to `fill`.
        draw_filled_rect(
            ((pb_ox + x) as isize, y as isize),
            ((self.scale as f32 * self.fill) as isize, pb_oy as isize),
            self.filled_color,
        );
    }
}

impl ProgressBar {
    /// Creates an empty bar spanning `scale` units that fills with `fill`
    /// color over a `base`-colored background.
    pub fn new(text: &'static str, max: usize, fill: Color, base: Color, scale: usize) -> Self {
        Self {
            filled_color: fill,
            empty_color: base,
            max,
            scale,
            text,
            min: 0,
            fill: 0.0,
        }
    }

    /// Sets the current value; the bar shows `value / max` of its width.
    /// Values above `max` are not clamped (pre-existing behavior), so the
    /// fill can overshoot 100%.
    pub fn set_value(&mut self, value: usize) {
        // Redundant trailing `as f32` from the original removed; the
        // division is already an f32.
        self.fill = value as f32 / self.max as f32
    }

    /// Returns the lower bound of the bar's range.
    pub fn min(&self) -> usize {
        self.min
    }
}
/// Helps write correct plurals.
///
/// Appends an `s` to `word` unless `count` is exactly 1.
///
/// # Examples
///
/// ```
/// assert_eq!(plural(0, "coin"), "0 coins");
/// assert_eq!(plural(1, "coin"), "1 coin");
/// assert_eq!(plural(2, "coin"), "2 coins");
/// ```
pub fn plural(count: i32, word: &str) -> String {
    let suffix = if count == 1 { "" } else { "s" };
    format!("{} {}{}", count, word, suffix)
}
extern crate cbindgen;

/// Crate version baked into the generated header banner.
const VERSION: &str = env!("CARGO_PKG_VERSION");

/// Generates `libhasher.h` from this crate's public API via cbindgen,
/// exiting with status 1 if binding generation fails.
fn main() {
    let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
    let config = cbindgen::Config::from_root_or_default(std::path::Path::new(&crate_dir));

    let generated = cbindgen::Builder::new()
        .with_config(config)
        .with_crate(crate_dir)
        .with_header(format!("/* libhasher header version {} */", VERSION))
        .with_language(cbindgen::Language::C)
        .generate();

    // On success, write the header; on failure, report and abort the build.
    if let Err(err) = generated.map(|bindings| {
        bindings.write_to_file("libhasher.h");
    }) {
        eprintln!("unable to generate bindings: {:?}", err);
        std::process::exit(1);
    }
}
struct Solution;

/// https://leetcode.com/problems/majority-element/
impl Solution {
    /// Boyer–Moore majority vote: keep a candidate and a counter; matching
    /// elements increment it, non-matching ones decrement it, and when the
    /// counter hits zero the current element becomes the new candidate.
    /// Assumes a majority element (> n/2 occurrences) exists, per the
    /// problem statement.
    ///
    /// 0 ms 2.3 MB
    pub fn majority_element(nums: Vec<i32>) -> i32 {
        let mut major = nums[0];
        let mut count = 1;
        for &num in &nums[1..] {
            if major == num {
                count += 1;
            } else {
                // FIX: the original used `else if major != num`, a redundant
                // re-test of the already-negated condition.
                count -= 1;
            }
            if count == 0 {
                major = num;
                count = 1;
            }
        }
        major
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test() {
        fn assert(nums: Vec<i32>, expected: i32) {
            assert_eq!(Solution::majority_element(nums), expected);
        }

        assert(vec![3], 3);
        assert(vec![3, 2, 3], 3);
        assert(vec![10, 9, 9, 9, 10], 9);
        assert(vec![2, 2, 1, 1, 1, 2, 2], 2);
    }
}
// Generated by the capnpc-rust plugin to the Cap'n Proto schema compiler. // DO NOT EDIT. // source: protos/src/bronx.capnp pub mod set { #![allow(unused_imports)] use capnp::capability::{FromClientHook, FromTypelessPipeline}; use capnp::{text, data, Result}; use capnp::private::layout; use capnp::traits::{FromStructBuilder, FromStructReader}; use capnp::{primitive_list, enum_list, struct_list, text_list, data_list, list_list}; #[derive(Clone, Copy)] pub struct Reader<'a> { reader : layout::StructReader<'a> } impl <'a> ::capnp::traits::HasTypeId for Reader<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> ::capnp::traits::FromStructReader<'a> for Reader<'a> { fn new(reader: ::capnp::private::layout::StructReader<'a>) -> Reader<'a> { Reader { reader : reader } } } impl <'a> ::capnp::traits::FromPointerReader<'a> for Reader<'a> { fn get_from_pointer(reader: &::capnp::private::layout::PointerReader<'a>) -> Result<Reader<'a>> { ::std::result::Result::Ok(::capnp::traits::FromStructReader::new(try!(reader.get_struct(::std::ptr::null())))) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Reader<'a>> for Reader<'b> { fn cast(self) -> Reader<'a> { Reader { reader : self.reader } } } impl <'a> Reader<'a> { pub fn borrow<'b>(&'b self) -> Reader<'b> { Reader { reader : self.reader} } pub fn total_size(&self) -> Result<::capnp::MessageSize> { self.reader.total_size() } #[inline] pub fn get_items(self) -> Result<text_list::Reader<'a>> { ::capnp::traits::FromPointerReader::get_from_pointer(&self.reader.get_pointer_field(0)) } pub fn has_items(&self) -> bool { !self.reader.get_pointer_field(0).is_null() } } pub struct Builder<'a> { builder : ::capnp::private::layout::StructBuilder<'a> } impl <'a> ::capnp::traits::HasStructSize for Builder<'a> { #[inline] fn struct_size() -> layout::StructSize { _private::STRUCT_SIZE } } impl <'a> ::capnp::traits::HasTypeId for Builder<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> 
::capnp::traits::FromStructBuilder<'a> for Builder<'a> { fn new(builder : ::capnp::private::layout::StructBuilder<'a>) -> Builder<'a> { Builder { builder : builder } } } impl <'a> ::capnp::traits::FromPointerBuilder<'a> for Builder<'a> { fn init_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>, _size : u32) -> Builder<'a> { ::capnp::traits::FromStructBuilder::new(builder.init_struct(_private::STRUCT_SIZE)) } fn get_from_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>) -> Result<Builder<'a>> { ::std::result::Result::Ok(::capnp::traits::FromStructBuilder::new(try!(builder.get_struct(_private::STRUCT_SIZE, ::std::ptr::null())))) } } impl <'a> ::capnp::traits::SetPointerBuilder<Builder<'a>> for Reader<'a> { fn set_pointer_builder<'b>(pointer : ::capnp::private::layout::PointerBuilder<'b>, value : Reader<'a>) -> Result<()> { pointer.set_struct(&value.reader) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Builder<'a>> for Builder<'b> { fn cast(self) -> Builder<'a> { Builder { builder : self.builder } } } impl <'a> Builder<'a> { pub fn as_reader(self) -> Reader<'a> { ::capnp::traits::FromStructReader::new(self.builder.as_reader()) } pub fn borrow<'b>(&'b mut self) -> Builder<'b> { Builder { builder : self.builder} } pub fn total_size(&self) -> Result<::capnp::MessageSize> { self.builder.as_reader().total_size() } #[inline] pub fn get_items(self) -> Result<text_list::Builder<'a>> { ::capnp::traits::FromPointerBuilder::get_from_pointer(self.builder.get_pointer_field(0)) } #[inline] pub fn set_items(&mut self, value : text_list::Reader) -> Result<()> { ::capnp::traits::SetPointerBuilder::set_pointer_builder(self.builder.get_pointer_field(0), value) } #[inline] pub fn init_items(self, size : u32) -> text_list::Builder<'a> { ::capnp::traits::FromPointerBuilder::init_pointer(self.builder.get_pointer_field(0), size) } pub fn has_items(&self) -> bool { !self.builder.get_pointer_field(0).is_null() } } pub struct Pipeline { _typeless : 
::capnp::any_pointer::Pipeline } impl FromTypelessPipeline for Pipeline { fn new(typeless : ::capnp::any_pointer::Pipeline) -> Pipeline { Pipeline { _typeless : typeless } } } impl Pipeline { } mod _private { use capnp::private::layout; pub const STRUCT_SIZE : layout::StructSize = layout::StructSize { data : 0, pointers : 1 }; pub const TYPE_ID: u64 = 0xdd792cf86f1581a3; } } pub mod set_service_response { #![allow(unused_imports)] use capnp::capability::{FromClientHook, FromTypelessPipeline}; use capnp::{text, data, Result}; use capnp::private::layout; use capnp::traits::{FromStructBuilder, FromStructReader}; use capnp::{primitive_list, enum_list, struct_list, text_list, data_list, list_list}; #[derive(Clone, Copy)] pub struct Reader<'a> { reader : layout::StructReader<'a> } impl <'a> ::capnp::traits::HasTypeId for Reader<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> ::capnp::traits::FromStructReader<'a> for Reader<'a> { fn new(reader: ::capnp::private::layout::StructReader<'a>) -> Reader<'a> { Reader { reader : reader } } } impl <'a> ::capnp::traits::FromPointerReader<'a> for Reader<'a> { fn get_from_pointer(reader: &::capnp::private::layout::PointerReader<'a>) -> Result<Reader<'a>> { ::std::result::Result::Ok(::capnp::traits::FromStructReader::new(try!(reader.get_struct(::std::ptr::null())))) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Reader<'a>> for Reader<'b> { fn cast(self) -> Reader<'a> { Reader { reader : self.reader } } } impl <'a> Reader<'a> { pub fn borrow<'b>(&'b self) -> Reader<'b> { Reader { reader : self.reader} } pub fn total_size(&self) -> Result<::capnp::MessageSize> { self.reader.total_size() } #[inline] pub fn get_status(self) -> ::std::result::Result<::bronx_capnp::set_service_response::Status, ::capnp::NotInSchema> { ::capnp::traits::FromU16::from_u16(self.reader.get_data_field::<u16>(0)) } } pub struct Builder<'a> { builder : ::capnp::private::layout::StructBuilder<'a> } impl <'a> ::capnp::traits::HasStructSize 
for Builder<'a> { #[inline] fn struct_size() -> layout::StructSize { _private::STRUCT_SIZE } } impl <'a> ::capnp::traits::HasTypeId for Builder<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> ::capnp::traits::FromStructBuilder<'a> for Builder<'a> { fn new(builder : ::capnp::private::layout::StructBuilder<'a>) -> Builder<'a> { Builder { builder : builder } } } impl <'a> ::capnp::traits::FromPointerBuilder<'a> for Builder<'a> { fn init_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>, _size : u32) -> Builder<'a> { ::capnp::traits::FromStructBuilder::new(builder.init_struct(_private::STRUCT_SIZE)) } fn get_from_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>) -> Result<Builder<'a>> { ::std::result::Result::Ok(::capnp::traits::FromStructBuilder::new(try!(builder.get_struct(_private::STRUCT_SIZE, ::std::ptr::null())))) } } impl <'a> ::capnp::traits::SetPointerBuilder<Builder<'a>> for Reader<'a> { fn set_pointer_builder<'b>(pointer : ::capnp::private::layout::PointerBuilder<'b>, value : Reader<'a>) -> Result<()> { pointer.set_struct(&value.reader) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Builder<'a>> for Builder<'b> { fn cast(self) -> Builder<'a> { Builder { builder : self.builder } } } impl <'a> Builder<'a> { pub fn as_reader(self) -> Reader<'a> { ::capnp::traits::FromStructReader::new(self.builder.as_reader()) } pub fn borrow<'b>(&'b mut self) -> Builder<'b> { Builder { builder : self.builder} } pub fn total_size(&self) -> Result<::capnp::MessageSize> { self.builder.as_reader().total_size() } #[inline] pub fn get_status(self) -> ::std::result::Result<::bronx_capnp::set_service_response::Status, ::capnp::NotInSchema> { ::capnp::traits::FromU16::from_u16(self.builder.get_data_field::<u16>(0)) } #[inline] pub fn set_status(&mut self, value : ::bronx_capnp::set_service_response::Status) { self.builder.set_data_field::<u16>(0, value as u16) } } pub struct Pipeline { _typeless : ::capnp::any_pointer::Pipeline } impl 
FromTypelessPipeline for Pipeline { fn new(typeless : ::capnp::any_pointer::Pipeline) -> Pipeline { Pipeline { _typeless : typeless } } } impl Pipeline { } mod _private { use capnp::private::layout; pub const STRUCT_SIZE : layout::StructSize = layout::StructSize { data : 1, pointers : 0 }; pub const TYPE_ID: u64 = 0xc60da1026ecb3646; } #[repr(u16)] #[derive(Clone, Copy, PartialEq)] pub enum Status { Success = 0, Failure = 1, } impl ::capnp::traits::FromU16 for Status { #[inline] fn from_u16(value : u16) -> ::std::result::Result<Status, ::capnp::NotInSchema> { match value { 0 => ::std::result::Result::Ok(Status::Success), 1 => ::std::result::Result::Ok(Status::Failure), n => ::std::result::Result::Err(::capnp::NotInSchema(n)), } } } impl ::capnp::traits::ToU16 for Status { #[inline] fn to_u16(self) -> u16 { self as u16 } } impl ::capnp::traits::HasTypeId for Status { #[inline] fn type_id() -> u64 { 0xca040e0fcfd29d80u64 } } } pub mod set_service { #![allow(unused_variables)] #![allow(unused_imports)] use capnp::capability::{FromClientHook, Request, FromServer}; use capnp::private::capability::{ClientHook, ServerHook}; use capnp::capability; pub type GetSetContext<'a> = capability::CallContext<get_set_params::Reader<'a>, get_set_results::Builder<'a>>; pub type AddItemContext<'a> = capability::CallContext<add_item_params::Reader<'a>, add_item_results::Builder<'a>>; pub struct Client{ pub client : ::capnp::private::capability::Client } impl FromClientHook for Client { fn new(hook : Box<ClientHook+Send>) -> Client { Client { client : ::capnp::private::capability::Client::new(hook) } } } pub struct ToClient<U>(pub U); impl <T:ServerHook, U : Server + Send + 'static> FromServer<T, Client> for ToClient<U> { fn from_server(self, _hook : Option<T>) -> Client { Client { client : ServerHook::new_client(None::<T>, ::std::boxed::Box::new(ServerDispatch { server : ::std::boxed::Box::new(self.0)}))} } } impl ::capnp::traits::HasTypeId for Client { #[inline] fn type_id() -> u64 { 
// ---------------------------------------------------------------------------
// GENERATED CODE — capnproto-rust (`capnpc`) output for a Cap'n Proto
// interface with two methods: `getSet` (method id 0) and `addItem` (id 1).
// Do not hand-edit; regenerate from the `.capnp` schema instead.
//
// Layout of this region:
//   * `impl Clone for Client` / `impl Client` — client-side stubs that build
//     typed `Request`s via `new_call(TYPE_ID, method_id, None)`.
//   * `trait Server` + `ServerDispatch<T>` — server-side dispatch: matches on
//     `interface_id`/`method_id` and forwards to the trait methods (unknown
//     ids fall through to `_ => {}` silently, as the generator emits).
//   * `mod _private` — the interface's 64-bit type id.
//   * `mod get_set_params` / `get_set_results` / `add_item_params` — the
//     usual generated Reader/Builder/Pipeline triples over capnp structs.
// NOTE(review): this is very old codegen (uses `try!`, anonymous parameters
// in trait fns); it will not compile on modern editions — confirm the crate
// pins a correspondingly old toolchain/capnp version.
// ---------------------------------------------------------------------------
_private::TYPE_ID } } impl Clone for Client { fn clone(&self) -> Client { Client { client : ::capnp::private::capability::Client::new(self.client.hook.copy()) } } } impl Client { pub fn get_set_request<'a>(&self) -> Request<get_set_params::Builder<'a>,get_set_results::Reader<'a>,get_set_results::Pipeline> { self.client.new_call(_private::TYPE_ID, 0, None) } pub fn add_item_request<'a>(&self) -> Request<add_item_params::Builder<'a>,add_item_results::Reader<'a>,add_item_results::Pipeline> { self.client.new_call(_private::TYPE_ID, 1, None) } } pub trait Server { fn get_set<'a>(&mut self, GetSetContext<'a>); fn add_item<'a>(&mut self, AddItemContext<'a>); } pub struct ServerDispatch<T> { pub server : Box<T>, } impl <T : Server> ::capnp::capability::Server for ServerDispatch<T> { fn dispatch_call(&mut self, interface_id : u64, method_id : u16, context : capability::CallContext<::capnp::any_pointer::Reader, ::capnp::any_pointer::Builder>) { match interface_id { _private::TYPE_ID => ServerDispatch::<T>::dispatch_call_internal(&mut *self.server, method_id, context), _ => {} } } } impl <T : Server> ServerDispatch<T> { pub fn dispatch_call_internal(server :&mut T, method_id : u16, context : capability::CallContext<::capnp::any_pointer::Reader, ::capnp::any_pointer::Builder>) { match method_id { 0 => server.get_set(::capnp::private::capability::internal_get_typed_context(context)), 1 => server.add_item(::capnp::private::capability::internal_get_typed_context(context)), _ => {} } } } pub mod _private { pub const TYPE_ID: u64 = 0xef8dc9cdd2da8d74; } pub mod get_set_params { #![allow(unused_imports)] use capnp::capability::{FromClientHook, FromTypelessPipeline}; use capnp::{text, data, Result}; use capnp::private::layout; use capnp::traits::{FromStructBuilder, FromStructReader}; use capnp::{primitive_list, enum_list, struct_list, text_list, data_list, list_list}; #[derive(Clone, Copy)] pub struct Reader<'a> { reader : layout::StructReader<'a> } impl <'a> 
::capnp::traits::HasTypeId for Reader<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> ::capnp::traits::FromStructReader<'a> for Reader<'a> { fn new(reader: ::capnp::private::layout::StructReader<'a>) -> Reader<'a> { Reader { reader : reader } } } impl <'a> ::capnp::traits::FromPointerReader<'a> for Reader<'a> { fn get_from_pointer(reader: &::capnp::private::layout::PointerReader<'a>) -> Result<Reader<'a>> { ::std::result::Result::Ok(::capnp::traits::FromStructReader::new(try!(reader.get_struct(::std::ptr::null())))) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Reader<'a>> for Reader<'b> { fn cast(self) -> Reader<'a> { Reader { reader : self.reader } } } impl <'a> Reader<'a> { pub fn borrow<'b>(&'b self) -> Reader<'b> { Reader { reader : self.reader} } pub fn total_size(&self) -> Result<::capnp::MessageSize> { self.reader.total_size() } #[inline] pub fn get_key(self) -> Result<text::Reader<'a>> { self.reader.get_pointer_field(0).get_text(::std::ptr::null(), 0) } pub fn has_key(&self) -> bool { !self.reader.get_pointer_field(0).is_null() } } pub struct Builder<'a> { builder : ::capnp::private::layout::StructBuilder<'a> } impl <'a> ::capnp::traits::HasStructSize for Builder<'a> { #[inline] fn struct_size() -> layout::StructSize { _private::STRUCT_SIZE } } impl <'a> ::capnp::traits::HasTypeId for Builder<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> ::capnp::traits::FromStructBuilder<'a> for Builder<'a> { fn new(builder : ::capnp::private::layout::StructBuilder<'a>) -> Builder<'a> { Builder { builder : builder } } } impl <'a> ::capnp::traits::FromPointerBuilder<'a> for Builder<'a> { fn init_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>, _size : u32) -> Builder<'a> { ::capnp::traits::FromStructBuilder::new(builder.init_struct(_private::STRUCT_SIZE)) } fn get_from_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>) -> Result<Builder<'a>> { 
::std::result::Result::Ok(::capnp::traits::FromStructBuilder::new(try!(builder.get_struct(_private::STRUCT_SIZE, ::std::ptr::null())))) } } impl <'a> ::capnp::traits::SetPointerBuilder<Builder<'a>> for Reader<'a> { fn set_pointer_builder<'b>(pointer : ::capnp::private::layout::PointerBuilder<'b>, value : Reader<'a>) -> Result<()> { pointer.set_struct(&value.reader) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Builder<'a>> for Builder<'b> { fn cast(self) -> Builder<'a> { Builder { builder : self.builder } } } impl <'a> Builder<'a> { pub fn as_reader(self) -> Reader<'a> { ::capnp::traits::FromStructReader::new(self.builder.as_reader()) } pub fn borrow<'b>(&'b mut self) -> Builder<'b> { Builder { builder : self.builder} } pub fn total_size(&self) -> Result<::capnp::MessageSize> { self.builder.as_reader().total_size() } #[inline] pub fn get_key(self) -> Result<text::Builder<'a>> { self.builder.get_pointer_field(0).get_text(::std::ptr::null(), 0) } #[inline] pub fn set_key(&mut self, value : text::Reader) { self.builder.get_pointer_field(0).set_text(value); } #[inline] pub fn init_key(self, size : u32) -> text::Builder<'a> { self.builder.get_pointer_field(0).init_text(size) } pub fn has_key(&self) -> bool { !self.builder.get_pointer_field(0).is_null() } } pub struct Pipeline { _typeless : ::capnp::any_pointer::Pipeline } impl FromTypelessPipeline for Pipeline { fn new(typeless : ::capnp::any_pointer::Pipeline) -> Pipeline { Pipeline { _typeless : typeless } } } impl Pipeline { } mod _private { use capnp::private::layout; pub const STRUCT_SIZE : layout::StructSize = layout::StructSize { data : 0, pointers : 1 }; pub const TYPE_ID: u64 = 0x8940d948e7804be0; } } pub mod get_set_results { #![allow(unused_imports)] use capnp::capability::{FromClientHook, FromTypelessPipeline}; use capnp::{text, data, Result}; use capnp::private::layout; use capnp::traits::{FromStructBuilder, FromStructReader}; use capnp::{primitive_list, enum_list, struct_list, text_list, data_list, 
list_list}; #[derive(Clone, Copy)] pub struct Reader<'a> { reader : layout::StructReader<'a> } impl <'a> ::capnp::traits::HasTypeId for Reader<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> ::capnp::traits::FromStructReader<'a> for Reader<'a> { fn new(reader: ::capnp::private::layout::StructReader<'a>) -> Reader<'a> { Reader { reader : reader } } } impl <'a> ::capnp::traits::FromPointerReader<'a> for Reader<'a> { fn get_from_pointer(reader: &::capnp::private::layout::PointerReader<'a>) -> Result<Reader<'a>> { ::std::result::Result::Ok(::capnp::traits::FromStructReader::new(try!(reader.get_struct(::std::ptr::null())))) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Reader<'a>> for Reader<'b> { fn cast(self) -> Reader<'a> { Reader { reader : self.reader } } } impl <'a> Reader<'a> { pub fn borrow<'b>(&'b self) -> Reader<'b> { Reader { reader : self.reader} } pub fn total_size(&self) -> Result<::capnp::MessageSize> { self.reader.total_size() } #[inline] pub fn get_set(self) -> Result<::bronx_capnp::set::Reader<'a>> { ::capnp::traits::FromPointerReader::get_from_pointer(&self.reader.get_pointer_field(0)) } pub fn has_set(&self) -> bool { !self.reader.get_pointer_field(0).is_null() } } pub struct Builder<'a> { builder : ::capnp::private::layout::StructBuilder<'a> } impl <'a> ::capnp::traits::HasStructSize for Builder<'a> { #[inline] fn struct_size() -> layout::StructSize { _private::STRUCT_SIZE } } impl <'a> ::capnp::traits::HasTypeId for Builder<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> ::capnp::traits::FromStructBuilder<'a> for Builder<'a> { fn new(builder : ::capnp::private::layout::StructBuilder<'a>) -> Builder<'a> { Builder { builder : builder } } } impl <'a> ::capnp::traits::FromPointerBuilder<'a> for Builder<'a> { fn init_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>, _size : u32) -> Builder<'a> { ::capnp::traits::FromStructBuilder::new(builder.init_struct(_private::STRUCT_SIZE)) } fn 
get_from_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>) -> Result<Builder<'a>> { ::std::result::Result::Ok(::capnp::traits::FromStructBuilder::new(try!(builder.get_struct(_private::STRUCT_SIZE, ::std::ptr::null())))) } } impl <'a> ::capnp::traits::SetPointerBuilder<Builder<'a>> for Reader<'a> { fn set_pointer_builder<'b>(pointer : ::capnp::private::layout::PointerBuilder<'b>, value : Reader<'a>) -> Result<()> { pointer.set_struct(&value.reader) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Builder<'a>> for Builder<'b> { fn cast(self) -> Builder<'a> { Builder { builder : self.builder } } } impl <'a> Builder<'a> { pub fn as_reader(self) -> Reader<'a> { ::capnp::traits::FromStructReader::new(self.builder.as_reader()) } pub fn borrow<'b>(&'b mut self) -> Builder<'b> { Builder { builder : self.builder} } pub fn total_size(&self) -> Result<::capnp::MessageSize> { self.builder.as_reader().total_size() } #[inline] pub fn get_set(self) -> Result<::bronx_capnp::set::Builder<'a>> { ::capnp::traits::FromPointerBuilder::get_from_pointer(self.builder.get_pointer_field(0)) } #[inline] pub fn set_set(&mut self, value : ::bronx_capnp::set::Reader) -> Result<()> { ::capnp::traits::SetPointerBuilder::set_pointer_builder(self.builder.get_pointer_field(0), value) } #[inline] pub fn init_set(self, ) -> ::bronx_capnp::set::Builder<'a> { ::capnp::traits::FromPointerBuilder::init_pointer(self.builder.get_pointer_field(0), 0) } pub fn has_set(&self) -> bool { !self.builder.get_pointer_field(0).is_null() } } pub struct Pipeline { _typeless : ::capnp::any_pointer::Pipeline } impl FromTypelessPipeline for Pipeline { fn new(typeless : ::capnp::any_pointer::Pipeline) -> Pipeline { Pipeline { _typeless : typeless } } } impl Pipeline { pub fn get_set(&self) -> ::bronx_capnp::set::Pipeline { FromTypelessPipeline::new(self._typeless.get_pointer_field(0)) } } mod _private { use capnp::private::layout; pub const STRUCT_SIZE : layout::StructSize = layout::StructSize { data : 0, 
pointers : 1 }; pub const TYPE_ID: u64 = 0xa0f7a40c25cc25a7; } } pub mod add_item_params { #![allow(unused_imports)] use capnp::capability::{FromClientHook, FromTypelessPipeline}; use capnp::{text, data, Result}; use capnp::private::layout; use capnp::traits::{FromStructBuilder, FromStructReader}; use capnp::{primitive_list, enum_list, struct_list, text_list, data_list, list_list}; #[derive(Clone, Copy)] pub struct Reader<'a> { reader : layout::StructReader<'a> } impl <'a> ::capnp::traits::HasTypeId for Reader<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> ::capnp::traits::FromStructReader<'a> for Reader<'a> { fn new(reader: ::capnp::private::layout::StructReader<'a>) -> Reader<'a> { Reader { reader : reader } } } impl <'a> ::capnp::traits::FromPointerReader<'a> for Reader<'a> { fn get_from_pointer(reader: &::capnp::private::layout::PointerReader<'a>) -> Result<Reader<'a>> { ::std::result::Result::Ok(::capnp::traits::FromStructReader::new(try!(reader.get_struct(::std::ptr::null())))) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Reader<'a>> for Reader<'b> { fn cast(self) -> Reader<'a> { Reader { reader : self.reader } } } impl <'a> Reader<'a> { pub fn borrow<'b>(&'b self) -> Reader<'b> { Reader { reader : self.reader} } pub fn total_size(&self) -> Result<::capnp::MessageSize> { self.reader.total_size() } #[inline] pub fn get_key(self) -> Result<text::Reader<'a>> { self.reader.get_pointer_field(0).get_text(::std::ptr::null(), 0) } pub fn has_key(&self) -> bool { !self.reader.get_pointer_field(0).is_null() } #[inline] pub fn get_item(self) -> Result<text::Reader<'a>> { self.reader.get_pointer_field(1).get_text(::std::ptr::null(), 0) } pub fn has_item(&self) -> bool { !self.reader.get_pointer_field(1).is_null() } } pub struct Builder<'a> { builder : ::capnp::private::layout::StructBuilder<'a> } impl <'a> ::capnp::traits::HasStructSize for Builder<'a> { #[inline] fn struct_size() -> layout::StructSize { _private::STRUCT_SIZE } } impl <'a> 
::capnp::traits::HasTypeId for Builder<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> ::capnp::traits::FromStructBuilder<'a> for Builder<'a> { fn new(builder : ::capnp::private::layout::StructBuilder<'a>) -> Builder<'a> { Builder { builder : builder } } } impl <'a> ::capnp::traits::FromPointerBuilder<'a> for Builder<'a> { fn init_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>, _size : u32) -> Builder<'a> { ::capnp::traits::FromStructBuilder::new(builder.init_struct(_private::STRUCT_SIZE)) } fn get_from_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>) -> Result<Builder<'a>> { ::std::result::Result::Ok(::capnp::traits::FromStructBuilder::new(try!(builder.get_struct(_private::STRUCT_SIZE, ::std::ptr::null())))) } } impl <'a> ::capnp::traits::SetPointerBuilder<Builder<'a>> for Reader<'a> { fn set_pointer_builder<'b>(pointer : ::capnp::private::layout::PointerBuilder<'b>, value : Reader<'a>) -> Result<()> { pointer.set_struct(&value.reader) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Builder<'a>> for Builder<'b> { fn cast(self) -> Builder<'a> { Builder { builder : self.builder } } } impl <'a> Builder<'a> { pub fn as_reader(self) -> Reader<'a> { ::capnp::traits::FromStructReader::new(self.builder.as_reader()) } pub fn borrow<'b>(&'b mut self) -> Builder<'b> { Builder { builder : self.builder} } pub fn total_size(&self) -> Result<::capnp::MessageSize> { self.builder.as_reader().total_size() } #[inline] pub fn get_key(self) -> Result<text::Builder<'a>> { self.builder.get_pointer_field(0).get_text(::std::ptr::null(), 0) } #[inline] pub fn set_key(&mut self, value : text::Reader) { self.builder.get_pointer_field(0).set_text(value); } #[inline] pub fn init_key(self, size : u32) -> text::Builder<'a> { self.builder.get_pointer_field(0).init_text(size) } pub fn has_key(&self) -> bool { !self.builder.get_pointer_field(0).is_null() } #[inline] pub fn get_item(self) -> Result<text::Builder<'a>> { 
self.builder.get_pointer_field(1).get_text(::std::ptr::null(), 0) } #[inline] pub fn set_item(&mut self, value : text::Reader) { self.builder.get_pointer_field(1).set_text(value); } #[inline] pub fn init_item(self, size : u32) -> text::Builder<'a> { self.builder.get_pointer_field(1).init_text(size) } pub fn has_item(&self) -> bool { !self.builder.get_pointer_field(1).is_null() } } pub struct Pipeline { _typeless : ::capnp::any_pointer::Pipeline } impl FromTypelessPipeline for Pipeline { fn new(typeless : ::capnp::any_pointer::Pipeline) -> Pipeline { Pipeline { _typeless : typeless } } } impl Pipeline { } mod _private { use capnp::private::layout; pub const STRUCT_SIZE : layout::StructSize = layout::StructSize { data : 0, pointers : 2 }; pub const TYPE_ID: u64 = 0xf2b52b75f48c50ec; } } pub mod add_item_results { #![allow(unused_imports)] use capnp::capability::{FromClientHook, FromTypelessPipeline}; use capnp::{text, data, Result}; use capnp::private::layout; use capnp::traits::{FromStructBuilder, FromStructReader}; use capnp::{primitive_list, enum_list, struct_list, text_list, data_list, list_list}; #[derive(Clone, Copy)] pub struct Reader<'a> { reader : layout::StructReader<'a> } impl <'a> ::capnp::traits::HasTypeId for Reader<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> ::capnp::traits::FromStructReader<'a> for Reader<'a> { fn new(reader: ::capnp::private::layout::StructReader<'a>) -> Reader<'a> { Reader { reader : reader } } } impl <'a> ::capnp::traits::FromPointerReader<'a> for Reader<'a> { fn get_from_pointer(reader: &::capnp::private::layout::PointerReader<'a>) -> Result<Reader<'a>> { ::std::result::Result::Ok(::capnp::traits::FromStructReader::new(try!(reader.get_struct(::std::ptr::null())))) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Reader<'a>> for Reader<'b> { fn cast(self) -> Reader<'a> { Reader { reader : self.reader } } } impl <'a> Reader<'a> { pub fn borrow<'b>(&'b self) -> Reader<'b> { Reader { reader : self.reader} } 
pub fn total_size(&self) -> Result<::capnp::MessageSize> { self.reader.total_size() } #[inline] pub fn get_status(self) -> Result<::bronx_capnp::set_service_response::Reader<'a>> { ::capnp::traits::FromPointerReader::get_from_pointer(&self.reader.get_pointer_field(0)) } pub fn has_status(&self) -> bool { !self.reader.get_pointer_field(0).is_null() } } pub struct Builder<'a> { builder : ::capnp::private::layout::StructBuilder<'a> } impl <'a> ::capnp::traits::HasStructSize for Builder<'a> { #[inline] fn struct_size() -> layout::StructSize { _private::STRUCT_SIZE } } impl <'a> ::capnp::traits::HasTypeId for Builder<'a> { #[inline] fn type_id() -> u64 { _private::TYPE_ID } } impl <'a> ::capnp::traits::FromStructBuilder<'a> for Builder<'a> { fn new(builder : ::capnp::private::layout::StructBuilder<'a>) -> Builder<'a> { Builder { builder : builder } } } impl <'a> ::capnp::traits::FromPointerBuilder<'a> for Builder<'a> { fn init_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>, _size : u32) -> Builder<'a> { ::capnp::traits::FromStructBuilder::new(builder.init_struct(_private::STRUCT_SIZE)) } fn get_from_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>) -> Result<Builder<'a>> { ::std::result::Result::Ok(::capnp::traits::FromStructBuilder::new(try!(builder.get_struct(_private::STRUCT_SIZE, ::std::ptr::null())))) } } impl <'a> ::capnp::traits::SetPointerBuilder<Builder<'a>> for Reader<'a> { fn set_pointer_builder<'b>(pointer : ::capnp::private::layout::PointerBuilder<'b>, value : Reader<'a>) -> Result<()> { pointer.set_struct(&value.reader) } } impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Builder<'a>> for Builder<'b> { fn cast(self) -> Builder<'a> { Builder { builder : self.builder } } } impl <'a> Builder<'a> { pub fn as_reader(self) -> Reader<'a> { ::capnp::traits::FromStructReader::new(self.builder.as_reader()) } pub fn borrow<'b>(&'b mut self) -> Builder<'b> { Builder { builder : self.builder} } pub fn total_size(&self) -> 
Result<::capnp::MessageSize> { self.builder.as_reader().total_size() } #[inline] pub fn get_status(self) -> Result<::bronx_capnp::set_service_response::Builder<'a>> { ::capnp::traits::FromPointerBuilder::get_from_pointer(self.builder.get_pointer_field(0)) } #[inline] pub fn set_status(&mut self, value : ::bronx_capnp::set_service_response::Reader) -> Result<()> { ::capnp::traits::SetPointerBuilder::set_pointer_builder(self.builder.get_pointer_field(0), value) } #[inline] pub fn init_status(self, ) -> ::bronx_capnp::set_service_response::Builder<'a> { ::capnp::traits::FromPointerBuilder::init_pointer(self.builder.get_pointer_field(0), 0) } pub fn has_status(&self) -> bool { !self.builder.get_pointer_field(0).is_null() } } pub struct Pipeline { _typeless : ::capnp::any_pointer::Pipeline } impl FromTypelessPipeline for Pipeline { fn new(typeless : ::capnp::any_pointer::Pipeline) -> Pipeline { Pipeline { _typeless : typeless } } } impl Pipeline { pub fn get_status(&self) -> ::bronx_capnp::set_service_response::Pipeline { FromTypelessPipeline::new(self._typeless.get_pointer_field(0)) } } mod _private { use capnp::private::layout; pub const STRUCT_SIZE : layout::StructSize = layout::StructSize { data : 0, pointers : 1 }; pub const TYPE_ID: u64 = 0xc32944accf5981ec; } } }
// ---------------------------------------------------------------------------
// GENERATED CODE — svd2rust register accessor module for register `TEMP_OR`.
// Do not hand-edit; regenerate from the device's SVD file instead.
//
// Contents:
//   * `R` / `W` — 32-bit reader/writer aliases over `super::TEMP_OR`.
//   * `ResetValue` — register resets to 0.
//   * `TS_OPn_W` (n = 0..=31) — one write proxy per single-bit field
//     `TS_Opn`; each `bit()` masks out bit n (`!(0x01 << n)`) and ORs in the
//     new value at that position.
//   * `impl R` — per-bit readers `ts_opn()` extracting `(bits >> n) & 0x01`.
// NOTE(review): this chunk is truncated mid-way through `impl R`
// (at `ts_op20`); the remainder lives past the end of this view.
// ---------------------------------------------------------------------------
#[doc = "Reader of register TEMP_OR"] pub type R = crate::R<u32, super::TEMP_OR>; #[doc = "Writer for register TEMP_OR"] pub type W = crate::W<u32, super::TEMP_OR>; #[doc = "Register TEMP_OR `reset()`'s with value 0"] impl crate::ResetValue for super::TEMP_OR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `TS_Op0`"] pub type TS_OP0_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op0`"] pub struct TS_OP0_W<'a> { w: &'a mut W, } impl<'a> TS_OP0_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `TS_Op1`"] pub type TS_OP1_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op1`"] pub struct TS_OP1_W<'a> { w: &'a mut W, } impl<'a> TS_OP1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `TS_Op2`"] pub type TS_OP2_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op2`"] pub struct TS_OP2_W<'a> { w: &'a mut W, } impl<'a> TS_OP2_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { 
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Reader of field `TS_Op3`"] pub type TS_OP3_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op3`"] pub struct TS_OP3_W<'a> { w: &'a mut W, } impl<'a> TS_OP3_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Reader of field `TS_Op4`"] pub type TS_OP4_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op4`"] pub struct TS_OP4_W<'a> { w: &'a mut W, } impl<'a> TS_OP4_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Reader of field `TS_Op5`"] pub type TS_OP5_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op5`"] pub struct TS_OP5_W<'a> { w: &'a mut W, } impl<'a> TS_OP5_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Reader of field `TS_Op6`"] pub type TS_OP6_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op6`"] pub struct TS_OP6_W<'a> { w: 
&'a mut W, } impl<'a> TS_OP6_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc = "Reader of field `TS_Op7`"] pub type TS_OP7_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op7`"] pub struct TS_OP7_W<'a> { w: &'a mut W, } impl<'a> TS_OP7_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Reader of field `TS_Op8`"] pub type TS_OP8_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op8`"] pub struct TS_OP8_W<'a> { w: &'a mut W, } impl<'a> TS_OP8_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } #[doc = "Reader of field `TS_Op9`"] pub type TS_OP9_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op9`"] pub struct TS_OP9_W<'a> { w: &'a mut W, } impl<'a> TS_OP9_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { 
self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9); self.w } } #[doc = "Reader of field `TS_Op10`"] pub type TS_OP10_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op10`"] pub struct TS_OP10_W<'a> { w: &'a mut W, } impl<'a> TS_OP10_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10); self.w } } #[doc = "Reader of field `TS_Op11`"] pub type TS_OP11_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op11`"] pub struct TS_OP11_W<'a> { w: &'a mut W, } impl<'a> TS_OP11_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } #[doc = "Reader of field `TS_Op12`"] pub type TS_OP12_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op12`"] pub struct TS_OP12_W<'a> { w: &'a mut W, } impl<'a> TS_OP12_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12); self.w } } #[doc = 
"Reader of field `TS_Op13`"] pub type TS_OP13_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op13`"] pub struct TS_OP13_W<'a> { w: &'a mut W, } impl<'a> TS_OP13_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13); self.w } } #[doc = "Reader of field `TS_Op14`"] pub type TS_OP14_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op14`"] pub struct TS_OP14_W<'a> { w: &'a mut W, } impl<'a> TS_OP14_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14); self.w } } #[doc = "Reader of field `TS_Op15`"] pub type TS_OP15_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op15`"] pub struct TS_OP15_W<'a> { w: &'a mut W, } impl<'a> TS_OP15_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15); self.w } } #[doc = "Reader of field `TS_Op16`"] pub type TS_OP16_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op16`"] pub struct TS_OP16_W<'a> { w: &'a mut W, } impl<'a> TS_OP16_W<'a> { #[doc = r"Sets the field bit"] 
#[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "Reader of field `TS_Op17`"] pub type TS_OP17_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op17`"] pub struct TS_OP17_W<'a> { w: &'a mut W, } impl<'a> TS_OP17_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17); self.w } } #[doc = "Reader of field `TS_Op18`"] pub type TS_OP18_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op18`"] pub struct TS_OP18_W<'a> { w: &'a mut W, } impl<'a> TS_OP18_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18); self.w } } #[doc = "Reader of field `TS_Op19`"] pub type TS_OP19_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op19`"] pub struct TS_OP19_W<'a> { w: &'a mut W, } impl<'a> TS_OP19_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the 
field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19); self.w } } #[doc = "Reader of field `TS_Op20`"] pub type TS_OP20_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op20`"] pub struct TS_OP20_W<'a> { w: &'a mut W, } impl<'a> TS_OP20_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20); self.w } } #[doc = "Reader of field `TS_Op21`"] pub type TS_OP21_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op21`"] pub struct TS_OP21_W<'a> { w: &'a mut W, } impl<'a> TS_OP21_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21); self.w } } #[doc = "Reader of field `TS_Op22`"] pub type TS_OP22_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op22`"] pub struct TS_OP22_W<'a> { w: &'a mut W, } impl<'a> TS_OP22_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22); self.w } } #[doc = "Reader of field `TS_Op23`"] pub type TS_OP23_R 
= crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op23`"] pub struct TS_OP23_W<'a> { w: &'a mut W, } impl<'a> TS_OP23_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23); self.w } } #[doc = "Reader of field `TS_Op24`"] pub type TS_OP24_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op24`"] pub struct TS_OP24_W<'a> { w: &'a mut W, } impl<'a> TS_OP24_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24); self.w } } #[doc = "Reader of field `TS_Op25`"] pub type TS_OP25_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op25`"] pub struct TS_OP25_W<'a> { w: &'a mut W, } impl<'a> TS_OP25_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25); self.w } } #[doc = "Reader of field `TS_Op26`"] pub type TS_OP26_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op26`"] pub struct TS_OP26_W<'a> { w: &'a mut W, } impl<'a> TS_OP26_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W 
{ self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26); self.w } } #[doc = "Reader of field `TS_Op27`"] pub type TS_OP27_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op27`"] pub struct TS_OP27_W<'a> { w: &'a mut W, } impl<'a> TS_OP27_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 27)) | (((value as u32) & 0x01) << 27); self.w } } #[doc = "Reader of field `TS_Op28`"] pub type TS_OP28_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op28`"] pub struct TS_OP28_W<'a> { w: &'a mut W, } impl<'a> TS_OP28_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28); self.w } } #[doc = "Reader of field `TS_Op29`"] pub type TS_OP29_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op29`"] pub struct TS_OP29_W<'a> { w: &'a mut W, } impl<'a> TS_OP29_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> 
&'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "Reader of field `TS_Op30`"] pub type TS_OP30_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op30`"] pub struct TS_OP30_W<'a> { w: &'a mut W, } impl<'a> TS_OP30_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "Reader of field `TS_Op31`"] pub type TS_OP31_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TS_Op31`"] pub struct TS_OP31_W<'a> { w: &'a mut W, } impl<'a> TS_OP31_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bit 0 - TS_Op0"] #[inline(always)] pub fn ts_op0(&self) -> TS_OP0_R { TS_OP0_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - TS_Op1"] #[inline(always)] pub fn ts_op1(&self) -> TS_OP1_R { TS_OP1_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - TS_Op2"] #[inline(always)] pub fn ts_op2(&self) -> TS_OP2_R { TS_OP2_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - TS_Op3"] #[inline(always)] pub fn ts_op3(&self) -> TS_OP3_R { TS_OP3_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - TS_Op4"] #[inline(always)] pub fn ts_op4(&self) -> TS_OP4_R { TS_OP4_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 5 - TS_Op5"] #[inline(always)] pub fn ts_op5(&self) -> 
TS_OP5_R { TS_OP5_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bit 6 - TS_Op6"] #[inline(always)] pub fn ts_op6(&self) -> TS_OP6_R { TS_OP6_R::new(((self.bits >> 6) & 0x01) != 0) } #[doc = "Bit 7 - TS_Op7"] #[inline(always)] pub fn ts_op7(&self) -> TS_OP7_R { TS_OP7_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 8 - TS_Op8"] #[inline(always)] pub fn ts_op8(&self) -> TS_OP8_R { TS_OP8_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 9 - TS_Op9"] #[inline(always)] pub fn ts_op9(&self) -> TS_OP9_R { TS_OP9_R::new(((self.bits >> 9) & 0x01) != 0) } #[doc = "Bit 10 - TS_Op10"] #[inline(always)] pub fn ts_op10(&self) -> TS_OP10_R { TS_OP10_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bit 11 - TS_Op11"] #[inline(always)] pub fn ts_op11(&self) -> TS_OP11_R { TS_OP11_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bit 12 - TS_Op12"] #[inline(always)] pub fn ts_op12(&self) -> TS_OP12_R { TS_OP12_R::new(((self.bits >> 12) & 0x01) != 0) } #[doc = "Bit 13 - TS_Op13"] #[inline(always)] pub fn ts_op13(&self) -> TS_OP13_R { TS_OP13_R::new(((self.bits >> 13) & 0x01) != 0) } #[doc = "Bit 14 - TS_Op14"] #[inline(always)] pub fn ts_op14(&self) -> TS_OP14_R { TS_OP14_R::new(((self.bits >> 14) & 0x01) != 0) } #[doc = "Bit 15 - TS_Op15"] #[inline(always)] pub fn ts_op15(&self) -> TS_OP15_R { TS_OP15_R::new(((self.bits >> 15) & 0x01) != 0) } #[doc = "Bit 16 - TS_Op16"] #[inline(always)] pub fn ts_op16(&self) -> TS_OP16_R { TS_OP16_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 17 - TS_Op17"] #[inline(always)] pub fn ts_op17(&self) -> TS_OP17_R { TS_OP17_R::new(((self.bits >> 17) & 0x01) != 0) } #[doc = "Bit 18 - TS_Op18"] #[inline(always)] pub fn ts_op18(&self) -> TS_OP18_R { TS_OP18_R::new(((self.bits >> 18) & 0x01) != 0) } #[doc = "Bit 19 - TS_Op19"] #[inline(always)] pub fn ts_op19(&self) -> TS_OP19_R { TS_OP19_R::new(((self.bits >> 19) & 0x01) != 0) } #[doc = "Bit 20 - TS_Op20"] #[inline(always)] pub fn ts_op20(&self) -> TS_OP20_R { 
TS_OP20_R::new(((self.bits >> 20) & 0x01) != 0) } #[doc = "Bit 21 - TS_Op21"] #[inline(always)] pub fn ts_op21(&self) -> TS_OP21_R { TS_OP21_R::new(((self.bits >> 21) & 0x01) != 0) } #[doc = "Bit 22 - TS_Op22"] #[inline(always)] pub fn ts_op22(&self) -> TS_OP22_R { TS_OP22_R::new(((self.bits >> 22) & 0x01) != 0) } #[doc = "Bit 23 - TS_Op23"] #[inline(always)] pub fn ts_op23(&self) -> TS_OP23_R { TS_OP23_R::new(((self.bits >> 23) & 0x01) != 0) } #[doc = "Bit 24 - TS_Op24"] #[inline(always)] pub fn ts_op24(&self) -> TS_OP24_R { TS_OP24_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bit 25 - TS_Op25"] #[inline(always)] pub fn ts_op25(&self) -> TS_OP25_R { TS_OP25_R::new(((self.bits >> 25) & 0x01) != 0) } #[doc = "Bit 26 - TS_Op26"] #[inline(always)] pub fn ts_op26(&self) -> TS_OP26_R { TS_OP26_R::new(((self.bits >> 26) & 0x01) != 0) } #[doc = "Bit 27 - TS_Op27"] #[inline(always)] pub fn ts_op27(&self) -> TS_OP27_R { TS_OP27_R::new(((self.bits >> 27) & 0x01) != 0) } #[doc = "Bit 28 - TS_Op28"] #[inline(always)] pub fn ts_op28(&self) -> TS_OP28_R { TS_OP28_R::new(((self.bits >> 28) & 0x01) != 0) } #[doc = "Bit 29 - TS_Op29"] #[inline(always)] pub fn ts_op29(&self) -> TS_OP29_R { TS_OP29_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - TS_Op30"] #[inline(always)] pub fn ts_op30(&self) -> TS_OP30_R { TS_OP30_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - TS_Op31"] #[inline(always)] pub fn ts_op31(&self) -> TS_OP31_R { TS_OP31_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - TS_Op0"] #[inline(always)] pub fn ts_op0(&mut self) -> TS_OP0_W { TS_OP0_W { w: self } } #[doc = "Bit 1 - TS_Op1"] #[inline(always)] pub fn ts_op1(&mut self) -> TS_OP1_W { TS_OP1_W { w: self } } #[doc = "Bit 2 - TS_Op2"] #[inline(always)] pub fn ts_op2(&mut self) -> TS_OP2_W { TS_OP2_W { w: self } } #[doc = "Bit 3 - TS_Op3"] #[inline(always)] pub fn ts_op3(&mut self) -> TS_OP3_W { TS_OP3_W { w: self } } #[doc = "Bit 4 - TS_Op4"] #[inline(always)] pub fn 
ts_op4(&mut self) -> TS_OP4_W { TS_OP4_W { w: self } } #[doc = "Bit 5 - TS_Op5"] #[inline(always)] pub fn ts_op5(&mut self) -> TS_OP5_W { TS_OP5_W { w: self } } #[doc = "Bit 6 - TS_Op6"] #[inline(always)] pub fn ts_op6(&mut self) -> TS_OP6_W { TS_OP6_W { w: self } } #[doc = "Bit 7 - TS_Op7"] #[inline(always)] pub fn ts_op7(&mut self) -> TS_OP7_W { TS_OP7_W { w: self } } #[doc = "Bit 8 - TS_Op8"] #[inline(always)] pub fn ts_op8(&mut self) -> TS_OP8_W { TS_OP8_W { w: self } } #[doc = "Bit 9 - TS_Op9"] #[inline(always)] pub fn ts_op9(&mut self) -> TS_OP9_W { TS_OP9_W { w: self } } #[doc = "Bit 10 - TS_Op10"] #[inline(always)] pub fn ts_op10(&mut self) -> TS_OP10_W { TS_OP10_W { w: self } } #[doc = "Bit 11 - TS_Op11"] #[inline(always)] pub fn ts_op11(&mut self) -> TS_OP11_W { TS_OP11_W { w: self } } #[doc = "Bit 12 - TS_Op12"] #[inline(always)] pub fn ts_op12(&mut self) -> TS_OP12_W { TS_OP12_W { w: self } } #[doc = "Bit 13 - TS_Op13"] #[inline(always)] pub fn ts_op13(&mut self) -> TS_OP13_W { TS_OP13_W { w: self } } #[doc = "Bit 14 - TS_Op14"] #[inline(always)] pub fn ts_op14(&mut self) -> TS_OP14_W { TS_OP14_W { w: self } } #[doc = "Bit 15 - TS_Op15"] #[inline(always)] pub fn ts_op15(&mut self) -> TS_OP15_W { TS_OP15_W { w: self } } #[doc = "Bit 16 - TS_Op16"] #[inline(always)] pub fn ts_op16(&mut self) -> TS_OP16_W { TS_OP16_W { w: self } } #[doc = "Bit 17 - TS_Op17"] #[inline(always)] pub fn ts_op17(&mut self) -> TS_OP17_W { TS_OP17_W { w: self } } #[doc = "Bit 18 - TS_Op18"] #[inline(always)] pub fn ts_op18(&mut self) -> TS_OP18_W { TS_OP18_W { w: self } } #[doc = "Bit 19 - TS_Op19"] #[inline(always)] pub fn ts_op19(&mut self) -> TS_OP19_W { TS_OP19_W { w: self } } #[doc = "Bit 20 - TS_Op20"] #[inline(always)] pub fn ts_op20(&mut self) -> TS_OP20_W { TS_OP20_W { w: self } } #[doc = "Bit 21 - TS_Op21"] #[inline(always)] pub fn ts_op21(&mut self) -> TS_OP21_W { TS_OP21_W { w: self } } #[doc = "Bit 22 - TS_Op22"] #[inline(always)] pub fn ts_op22(&mut self) -> 
TS_OP22_W { TS_OP22_W { w: self } } #[doc = "Bit 23 - TS_Op23"] #[inline(always)] pub fn ts_op23(&mut self) -> TS_OP23_W { TS_OP23_W { w: self } } #[doc = "Bit 24 - TS_Op24"] #[inline(always)] pub fn ts_op24(&mut self) -> TS_OP24_W { TS_OP24_W { w: self } } #[doc = "Bit 25 - TS_Op25"] #[inline(always)] pub fn ts_op25(&mut self) -> TS_OP25_W { TS_OP25_W { w: self } } #[doc = "Bit 26 - TS_Op26"] #[inline(always)] pub fn ts_op26(&mut self) -> TS_OP26_W { TS_OP26_W { w: self } } #[doc = "Bit 27 - TS_Op27"] #[inline(always)] pub fn ts_op27(&mut self) -> TS_OP27_W { TS_OP27_W { w: self } } #[doc = "Bit 28 - TS_Op28"] #[inline(always)] pub fn ts_op28(&mut self) -> TS_OP28_W { TS_OP28_W { w: self } } #[doc = "Bit 29 - TS_Op29"] #[inline(always)] pub fn ts_op29(&mut self) -> TS_OP29_W { TS_OP29_W { w: self } } #[doc = "Bit 30 - TS_Op30"] #[inline(always)] pub fn ts_op30(&mut self) -> TS_OP30_W { TS_OP30_W { w: self } } #[doc = "Bit 31 - TS_Op31"] #[inline(always)] pub fn ts_op31(&mut self) -> TS_OP31_W { TS_OP31_W { w: self } } }
/// Clones the contents of `slice` into a freshly created fixed-size
/// collection `A` (in practice an array `[T; N]`).
///
/// # Panics
///
/// Panics if `slice.len()` differs from the length of `A::default()`
/// (this is `clone_from_slice`'s contract).
pub(crate) fn clone_into_array<A, T>(slice: &[T]) -> A
where
    A: Default + AsMut<[T]>,
    T: Clone,
{
    let mut a = Default::default();
    <A as AsMut<[T]>>::as_mut(&mut a).clone_from_slice(slice);
    a
}

/// Returns a new vector containing the bytes of `ctxt` immediately
/// followed by the bytes of `tag`.
pub(crate) fn concat(ctxt: &[u8], tag: &[u8]) -> Vec<u8> {
    // Pre-allocate the exact final size so neither `extend_from_slice`
    // call has to reallocate.
    let mut out = Vec::with_capacity(ctxt.len() + tag.len());
    out.extend_from_slice(ctxt);
    out.extend_from_slice(tag);
    out
}
// NOTE: the former `#![feature(slice_strip)]` gate was removed —
// `<[T]>::strip_prefix` has been stable since Rust 1.51, so this file
// now builds on stable.

use std::io::prelude::*;
use std::net::{TcpListener, TcpStream};

/// Accepts exactly three connections on localhost:9149, serving each
/// with `handle_connection`, then exits.
fn main() {
    let listener = TcpListener::bind("localhost:9149").unwrap();

    // `take(3)`: handle three requests and let the program terminate.
    for stream in listener.incoming().take(3) {
        let stream = stream.unwrap();
        handle_connection(stream);
    }
}

/// Reads one request from `stream` and answers it.
///
/// Requests of the form `GET /api/pingpong/<msg> ...` receive a
/// `200 OK` body of `<msg> pong`; anything else receives `404 NOT FOUND`.
fn handle_connection(mut stream: TcpStream) {
    let mut buffer = [0; 1024];
    // Only the first `n` bytes are request data; the rest of the buffer
    // is untouched zero padding and must not be processed.
    let n = stream.read(&mut buffer).unwrap();
    let request = &buffer[..n];
    println!("Request:\n{}", String::from_utf8_lossy(request));

    if let Some(message) = request.strip_prefix(b"GET /api/pingpong/") {
        // The message ends at the first space (the one before "HTTP/1.1").
        let idx = message.iter().position(|&b| b == b' ').unwrap();
        let content = format!("{} pong", String::from_utf8_lossy(&message[..idx]));
        let response = format!("HTTP/1.1 200 OK\r\n\r\n{}", content);
        // `write_all` retries on partial writes, unlike a bare `write`.
        stream.write_all(response.as_bytes()).unwrap();
        stream.flush().unwrap();
    } else {
        let response = "HTTP/1.1 404 NOT FOUND\r\n\r\n";
        stream.write_all(response.as_bytes()).unwrap();
        stream.flush().unwrap();
    }
}
use crate::{Timestamp, Uint64};
use serde::{Deserialize, Serialize};

/// Summary statistics of a node's transaction pool.
///
/// NOTE(review): the field meanings below are inferred from the field
/// names — confirm against the code that populates this struct.
#[derive(Clone, Default, Serialize, Deserialize, PartialEq, Eq, Hash, Debug)]
pub struct TxPoolInfo {
    /// Count of transactions in the pending set (presumably waiting to
    /// be proposed).
    pub pending: Uint64,
    /// Count of transactions in the proposed set.
    pub proposed: Uint64,
    /// Count of orphan transactions (presumably those whose inputs are
    /// not yet known).
    pub orphan: Uint64,
    /// Combined size of all pool transactions — units not evident here
    /// (likely bytes); TODO confirm.
    pub total_tx_size: Uint64,
    /// Combined cycle cost of all pool transactions.
    pub total_tx_cycles: Uint64,
    /// Timestamp of the last update to the pool's transaction set.
    pub last_txs_updated_at: Timestamp,
}
//! Iterators over a `Rope`'s data. //! //! All iterators here can also be used with `RopeSlice`'s. When used //! with a `RopeSlice`, they iterate over only the data that the //! `RopeSlice` refers to. For the line and chunk, iterators, the data //! of the first and last yielded item will be truncated to match the //! `RopeSlice`. use std::str; use std::sync::Arc; use slice::RopeSlice; use str_utils::{ char_to_byte_idx, char_to_line_idx, ends_with_line_break, line_to_byte_idx, line_to_char_idx, }; use tree::Node; //========================================================== /// An iterator over a `Rope`'s bytes. pub struct Bytes<'a> { chunk_iter: Chunks<'a>, cur_chunk: str::Bytes<'a>, } impl<'a> Bytes<'a> { pub(crate) fn new(node: &Arc<Node>) -> Bytes { Bytes { chunk_iter: Chunks::new(node), cur_chunk: "".bytes(), } } pub(crate) fn new_with_range(node: &Arc<Node>, start_char: usize, end_char: usize) -> Bytes { Bytes { chunk_iter: Chunks::new_with_range(node, start_char, end_char), cur_chunk: "".bytes(), } } pub(crate) fn from_str(text: &str) -> Bytes { Bytes { chunk_iter: Chunks::new_empty(), cur_chunk: text.bytes(), } } } impl<'a> Iterator for Bytes<'a> { type Item = u8; fn next(&mut self) -> Option<u8> { loop { if let Some(c) = self.cur_chunk.next() { return Some(c); } else if let Some(chunk) = self.chunk_iter.next() { self.cur_chunk = chunk.bytes(); continue; } else { return None; } } } } //========================================================== /// An iterator over a `Rope`'s chars. 
pub struct Chars<'a> { chunk_iter: Chunks<'a>, cur_chunk: str::Chars<'a>, } impl<'a> Chars<'a> { pub(crate) fn new(node: &Arc<Node>) -> Chars { Chars { chunk_iter: Chunks::new(node), cur_chunk: "".chars(), } } pub(crate) fn new_with_range(node: &Arc<Node>, start_char: usize, end_char: usize) -> Chars { Chars { chunk_iter: Chunks::new_with_range(node, start_char, end_char), cur_chunk: "".chars(), } } pub(crate) fn from_str(text: &str) -> Chars { Chars { chunk_iter: Chunks::new_empty(), cur_chunk: text.chars(), } } } impl<'a> Iterator for Chars<'a> { type Item = char; fn next(&mut self) -> Option<char> { loop { if let Some(c) = self.cur_chunk.next() { return Some(c); } else if let Some(chunk) = self.chunk_iter.next() { self.cur_chunk = chunk.chars(); continue; } else { return None; } } } } //========================================================== /// An iterator over a `Rope`'s lines. /// /// The returned lines include the line-break at the end. /// /// The last line is returned even if blank, in which case it /// is returned as an empty slice. 
pub struct Lines<'a>(LinesEnum<'a>);

enum LinesEnum<'a> {
    // Iterating over a real rope: walk the node tree line break by
    // line break, restricted to the char range `start_char..end_char`.
    Full {
        node: &'a Arc<Node>,
        start_char: usize,
        end_char: usize,
        // Index of the next line to yield.
        line_idx: usize,
    },
    // Iterating over a bare `&str`; no node tree involved.
    Light {
        // The not-yet-yielded tail of the text.
        text: &'a str,
        // Set once the final line has been yielded.
        done: bool,
    },
}

impl<'a> Lines<'a> {
    /// Line iterator over the whole of `node`'s text.
    pub(crate) fn new(node: &Arc<Node>) -> Lines {
        Lines(LinesEnum::Full {
            node: node,
            start_char: 0,
            end_char: node.text_info().chars as usize,
            line_idx: 0,
        })
    }

    /// Line iterator over the char range `start_char..end_char` of
    /// `node`'s text; the first and last yielded lines are truncated
    /// to that range.
    pub(crate) fn new_with_range(node: &Arc<Node>, start_char: usize, end_char: usize) -> Lines {
        Lines(LinesEnum::Full {
            node: node,
            start_char: start_char,
            end_char: end_char,
            line_idx: {
                // Convert `start_char` into the index of the line it
                // falls on. Tuple layout `(chunk, _, chunk_char_idx,
                // chunk_line_idx)` is inferred from usage — TODO confirm
                // against `get_chunk_at_char`'s definition.
                let (chunk, _, c, l) = node.get_chunk_at_char(start_char);
                l + char_to_line_idx(chunk, start_char - c)
            },
        })
    }

    /// Line iterator over a bare string slice, bypassing the node tree.
    pub(crate) fn from_str(text: &str) -> Lines {
        Lines(LinesEnum::Light {
            text: text,
            done: false,
        })
    }
}

impl<'a> Iterator for Lines<'a> {
    type Item = RopeSlice<'a>;

    fn next(&mut self) -> Option<RopeSlice<'a>> {
        match *self {
            Lines(LinesEnum::Full {
                ref mut node,
                start_char,
                end_char,
                ref mut line_idx,
            }) => {
                // There are `line_break_count() + 1` lines in total;
                // past that, the iterator is finished.
                if *line_idx > node.line_break_count() {
                    return None;
                } else {
                    let a = {
                        // Find the char that corresponds to the start of the line.
                        let (chunk, _, c, l) = node.get_chunk_at_line_break(*line_idx);
                        let a = (c + line_to_char_idx(chunk, *line_idx - l)).max(start_char);

                        // Early out if we're past the specified end char
                        if a > end_char {
                            // Park the index past the last line so all
                            // subsequent calls keep returning `None`.
                            *line_idx = node.line_break_count() + 1;
                            return None;
                        }

                        a
                    };

                    let b = if *line_idx < node.line_break_count() {
                        // Find the char that corresponds to the end of the line.
                        let (chunk, _, c, l) = node.get_chunk_at_line_break(*line_idx + 1);
                        c + line_to_char_idx(chunk, *line_idx + 1 - l)
                    } else {
                        // Last line: it runs to the end of the rope.
                        node.char_count()
                    }
                    // Truncate to the slice's end, per the module docs.
                    .min(end_char);

                    *line_idx += 1;

                    return Some(RopeSlice::new_with_range(node, a, b));
                }
            }
            Lines(LinesEnum::Light {
                ref mut text,
                ref mut done,
            }) => {
                if *done {
                    return None;
                } else {
                    // Split off the first remaining line, including its
                    // trailing line break (if any).
                    let split_idx = line_to_byte_idx(text, 1);
                    let t = &text[..split_idx];
                    *text = &text[split_idx..];
                    if text.is_empty() {
                        // If the text ended with a line break, one final
                        // empty line must still be yielded before we're
                        // done.
                        *done = !ends_with_line_break(t);
                    }
                    return Some(t.into());
                }
            }
        }
    }
}

//==========================================================

/// An iterator over a `Rope`'s contiguous `str` chunks.
///
/// Internally, each `Rope` stores text as a segmented collection of utf8
/// strings. This iterator iterates over those segments, returning a
/// `&str` slice for each one. It is useful for situations such as:
///
/// - Writing a rope's utf8 text data to disk (but see
///   [`Rope::write_to()`](../struct.Rope.html#method.write_to) for a
///   convenience function that does this).
/// - Streaming a rope's text data somewhere.
/// - Saving a rope to a non-utf8 encoding, doing the encoding conversion
///   incrementally as you go.
/// - Writing custom iterators over a rope's text data.
///
/// There are precisely two guarantees about the yielded chunks:
///
/// - All chunks are yielded, and they are yielded in order.
/// - CRLF pairs are never split across chunks.
///
/// There are no guarantees about the size of yielded chunks, and except for
/// CRLF pairs there are no guarantees about where the chunks are split. For
/// example, they may be zero-sized, they don't necessarily align with line
/// breaks, etc.
pub struct Chunks<'a>(ChunksEnum<'a>);

enum ChunksEnum<'a> {
    // Iterating over a real rope: an explicit DFS stack of tree nodes,
    // restricted to the byte range `start..end`.
    Full {
        // Nodes still to visit, in reverse order (top of stack next).
        node_stack: Vec<&'a Arc<Node>>,
        start: usize,
        end: usize,
        // Byte offset reached so far in the rope's full text.
        idx: usize,
    },
    // Iterating over a bare `&str`: yields it once, then is empty.
    Light {
        text: &'a str,
    },
}

impl<'a> Chunks<'a> {
    /// Chunk iterator over the whole of `node`'s text.
    pub(crate) fn new(node: &Arc<Node>) -> Chunks {
        Chunks(ChunksEnum::Full {
            node_stack: vec![node],
            start: 0,
            end: node.text_info().bytes as usize,
            idx: 0,
        })
    }

    /// A chunk iterator that yields nothing.
    pub(crate) fn new_empty() -> Chunks<'static> {
        Chunks(ChunksEnum::Light { text: "" })
    }

    /// Chunk iterator over the char range `start_char..end_char` of
    /// `node`'s text; the first and last chunks are truncated to the
    /// corresponding byte range.
    pub(crate) fn new_with_range(node: &Arc<Node>, start_char: usize, end_char: usize) -> Chunks {
        // Convert the char bounds to byte bounds. Tuple layout
        // `(chunk, chunk_byte_idx, chunk_char_idx, _)` is inferred from
        // usage — TODO confirm against `get_chunk_at_char`'s definition.
        let start_byte = {
            let (chunk, b, c, _) = node.get_chunk_at_char(start_char);
            b + char_to_byte_idx(chunk, start_char - c)
        };
        let end_byte = {
            let (chunk, b, c, _) = node.get_chunk_at_char(end_char);
            b + char_to_byte_idx(chunk, end_char - c)
        };
        Chunks(ChunksEnum::Full {
            node_stack: vec![node],
            start: start_byte,
            end: end_byte,
            idx: 0,
        })
    }

    /// Chunk iterator over a bare string slice (yields it as one chunk).
    pub(crate) fn from_str(text: &str) -> Chunks {
        Chunks(ChunksEnum::Light { text: text })
    }
}

impl<'a> Iterator for Chunks<'a> {
    type Item = &'a str;

    fn next(&mut self) -> Option<&'a str> {
        match *self {
            Chunks(ChunksEnum::Full {
                ref mut node_stack,
                start,
                end,
                ref mut idx,
            }) => {
                // Past the requested byte range: nothing left to yield.
                if *idx >= end {
                    return None;
                }
                loop {
                    if let Some(node) = node_stack.pop() {
                        match **node {
                            Node::Leaf(ref text) => {
                                // Clip the leaf's text to the requested
                                // byte range (only the first and last
                                // yielded chunks are actually clipped).
                                let start_byte = if start <= *idx { 0 } else { start - *idx };
                                let end_byte = if end >= (*idx + text.len()) {
                                    text.len()
                                } else {
                                    end - *idx
                                };
                                *idx += text.len();
                                return Some(&text[start_byte..end_byte]);
                            }
                            Node::Internal(ref children) => {
                                // Find the first child that isn't before `start`,
                                // updating `idx` as we go.
                                let mut child_i = 0;
                                for inf in children.info().iter() {
                                    if (*idx + inf.bytes as usize) > start {
                                        break;
                                    } else {
                                        *idx += inf.bytes as usize;
                                        child_i += 1;
                                    }
                                }
                                // Push relevant children to the stack.
                                // Reversed so the leftmost child is
                                // popped (and yielded) first.
                                for child in (&children.nodes()[child_i..]).iter().rev() {
                                    node_stack.push(child);
                                }
                            }
                        }
                    } else {
                        return None;
                    }
                }
            }
            Chunks(ChunksEnum::Light { ref mut text }) => {
                if text.is_empty() {
                    return None;
                } else {
                    // Yield the whole text once, then leave the iterator
                    // empty.
                    let t = *text;
                    *text = "";
                    return Some(t);
                }
            }
        }
    }
}

//===========================================================

#[cfg(test)]
mod tests {
    use Rope;

    // A multi-line mixed-script fixture: an initial blank line followed
    // by 16 repetitions of an English line and a Japanese line, each
    // terminated by CRLF (34 lines total including the trailing empty
    // line).
    const TEXT: &str = "\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        Hello there! How're you doing? It's a fine day, \
                        isn't it? Aren't you glad we're alive?\r\n\
                        こんにちは!元気ですか?日はいいですね。\
                        私たちが生きだって嬉しいではないか?\r\n\
                        ";

    // Bytes from the rope must match bytes from the source text.
    #[test]
    fn bytes_01() {
        let r = Rope::from_str(TEXT);
        for (br, bt) in r.bytes().zip(TEXT.bytes()) {
            assert_eq!(br, bt);
        }
    }

    // Chars from the rope must match chars from the source text.
    #[test]
    fn chars_01() {
        let r = Rope::from_str(TEXT);
        for (cr, ct) in r.chars().zip(TEXT.chars()) {
            assert_eq!(cr, ct);
        }
    }

    // Full line-by-line traversal of the fixture, via both the rope and
    // a full-range slice.
    #[test]
    fn lines_01() {
        let r = Rope::from_str(TEXT);
        let s = r.slice(..);

        assert_eq!(34, r.lines().count());
        assert_eq!(34, s.lines().count());

        // Rope
        let mut lines = r.lines();
        assert_eq!("\r\n", lines.next().unwrap());
        for _ in 0..16 {
            assert_eq!(
                "Hello there! How're you doing? It's a fine day, \
                 isn't it? Aren't you glad we're alive?\r\n",
                lines.next().unwrap()
            );
            assert_eq!(
                "こんにちは!元気ですか?日はいいですね。\
                 私たちが生きだって嬉しいではないか?\r\n",
                lines.next().unwrap()
            );
        }
        assert_eq!("", lines.next().unwrap());
        assert!(lines.next().is_none());

        // Slice
        let mut lines = s.lines();
        assert_eq!("\r\n", lines.next().unwrap());
        for _ in 0..16 {
            assert_eq!(
                "Hello there! How're you doing? It's a fine day, \
                 isn't it? Aren't you glad we're alive?\r\n",
                lines.next().unwrap()
            );
            assert_eq!(
                "こんにちは!元気ですか?日はいいですね。\
                 私たちが生きだって嬉しいではないか?\r\n",
                lines.next().unwrap()
            );
        }
        assert_eq!("", lines.next().unwrap());
        assert!(lines.next().is_none());
    }

    // Text without a trailing line break: no final empty line.
    #[test]
    fn lines_02() {
        let text = "Hello there!\nHow goes it?";
        let r = Rope::from_str(text);
        let s = r.slice(..);

        assert_eq!(2, r.lines().count());
        assert_eq!(2, s.lines().count());

        let mut lines = r.lines();
        assert_eq!("Hello there!\n", lines.next().unwrap());
        assert_eq!("How goes it?", lines.next().unwrap());
        assert!(lines.next().is_none());

        let mut lines = s.lines();
        assert_eq!("Hello there!\n", lines.next().unwrap());
        assert_eq!("How goes it?", lines.next().unwrap());
        assert!(lines.next().is_none());
    }

    // Text with a trailing line break: yields a final empty line.
    #[test]
    fn lines_03() {
        let text = "Hello there!\nHow goes it?\n";
        let r = Rope::from_str(text);
        let s = r.slice(..);

        assert_eq!(3, r.lines().count());
        assert_eq!(3, s.lines().count());

        let mut lines = r.lines();
        assert_eq!("Hello there!\n", lines.next().unwrap());
        assert_eq!("How goes it?\n", lines.next().unwrap());
        assert_eq!("", lines.next().unwrap());
        assert!(lines.next().is_none());

        let mut lines = s.lines();
        assert_eq!("Hello there!\n", lines.next().unwrap());
        assert_eq!("How goes it?\n", lines.next().unwrap());
        assert_eq!("", lines.next().unwrap());
        assert!(lines.next().is_none());
    }

    // Slices ending just before / just after a line break behave
    // differently: only the latter yields the trailing empty line.
    #[test]
    fn lines_04() {
        let text = "Hello there!\nHow goes it?\nYeah!";
        let r = Rope::from_str(text);
        let s1 = r.slice(..25);
        let s2 = r.slice(..26);

        assert_eq!(2, s1.lines().count());
        assert_eq!(3, s2.lines().count());

        let mut lines = s1.lines();
        assert_eq!("Hello there!\n", lines.next().unwrap());
        assert_eq!("How goes it?", lines.next().unwrap());
        assert!(lines.next().is_none());

        let mut lines = s2.lines();
        assert_eq!("Hello there!\n", lines.next().unwrap());
        assert_eq!("How goes it?\n", lines.next().unwrap());
        assert_eq!("", lines.next().unwrap());
        assert!(lines.next().is_none());
    }

    // Empty text still yields exactly one (empty) line.
    #[test]
    fn lines_05() {
        let text = "";
        let r = Rope::from_str(text);
        let s = r.slice(..);

        assert_eq!(1, r.lines().count());
        assert_eq!(1, s.lines().count());

        let mut lines = r.lines();
        assert_eq!("", lines.next().unwrap());
        assert!(lines.next().is_none());

        let mut lines = s.lines();
        assert_eq!("", lines.next().unwrap());
        assert!(lines.next().is_none());
    }

    // A single char with no line break: one line.
    #[test]
    fn lines_06() {
        let text = "a";
        let r = Rope::from_str(text);
        let s = r.slice(..);

        assert_eq!(1, r.lines().count());
        assert_eq!(1, s.lines().count());

        let mut lines = r.lines();
        assert_eq!("a", lines.next().unwrap());
        assert!(lines.next().is_none());

        let mut lines = s.lines();
        assert_eq!("a", lines.next().unwrap());
        assert!(lines.next().is_none());
    }

    // One interior line break: two lines, no trailing empty line.
    #[test]
    fn lines_07() {
        let text = "a\nb";
        let r = Rope::from_str(text);
        let s = r.slice(..);

        assert_eq!(2, r.lines().count());
        assert_eq!(2, s.lines().count());

        let mut lines = r.lines();
        assert_eq!("a\n", lines.next().unwrap());
        assert_eq!("b", lines.next().unwrap());
        assert!(lines.next().is_none());

        let mut lines = s.lines();
        assert_eq!("a\n", lines.next().unwrap());
        assert_eq!("b", lines.next().unwrap());
        assert!(lines.next().is_none());
    }

    // A lone line break: the break line plus a trailing empty line.
    #[test]
    fn lines_08() {
        let text = "\n";
        let r = Rope::from_str(text);
        let s = r.slice(..);

        assert_eq!(2, r.lines().count());
        assert_eq!(2, s.lines().count());

        let mut lines = r.lines();
        assert_eq!("\n", lines.next().unwrap());
        assert_eq!("", lines.next().unwrap());
        assert!(lines.next().is_none());

        let mut lines = s.lines();
        assert_eq!("\n", lines.next().unwrap());
        assert_eq!("", lines.next().unwrap());
        assert!(lines.next().is_none());
    }

    // Two terminated lines plus the trailing empty line.
    #[test]
    fn lines_09() {
        let text = "a\nb\n";
        let r = Rope::from_str(text);
        let s = r.slice(..);

        assert_eq!(3, r.lines().count());
        assert_eq!(3, s.lines().count());

        let mut lines = r.lines();
        assert_eq!("a\n", lines.next().unwrap());
        assert_eq!("b\n", lines.next().unwrap());
        assert_eq!("", lines.next().unwrap());
        assert!(lines.next().is_none());

        let mut lines = s.lines();
        assert_eq!("a\n", lines.next().unwrap());
        assert_eq!("b\n", lines.next().unwrap());
        assert_eq!("", lines.next().unwrap());
        assert!(lines.next().is_none());
    }

    // Chunks, concatenated in order, must reconstruct the source text.
    #[test]
    fn chunks_01() {
        let r = Rope::from_str(TEXT);

        let mut idx = 0;
        for chunk in r.chunks() {
            assert_eq!(chunk, &TEXT[idx..(idx + chunk.len())]);
            idx += chunk.len();
        }
    }

    // Byte iteration over a sub-slice matches the equivalent &str slice.
    #[test]
    fn bytes_sliced_01() {
        let r = Rope::from_str(TEXT);

        let s_start = 116;
        let s_end = 331;
        let s_start_byte = r.char_to_byte(s_start);
        let s_end_byte = r.char_to_byte(s_end);

        let s1 = r.slice(s_start..s_end);
        let s2 = &TEXT[s_start_byte..s_end_byte];

        for (br, bt) in s1.bytes().zip(s2.bytes()) {
            assert_eq!(br, bt);
        }
    }

    // Char iteration over a sub-slice matches the equivalent &str slice.
    #[test]
    fn chars_sliced_01() {
        let r = Rope::from_str(TEXT);

        let s_start = 116;
        let s_end = 331;
        let s_start_byte = r.char_to_byte(s_start);
        let s_end_byte = r.char_to_byte(s_end);

        let s1 = r.slice(s_start..s_end);
        let s2 = &TEXT[s_start_byte..s_end_byte];

        for (cr, ct) in s1.chars().zip(s2.chars()) {
            assert_eq!(cr, ct);
        }
    }

    // Line iteration over a sub-slice matches `str::lines` (which strips
    // trailing line breaks, hence the `trim_right` on the rope side).
    #[test]
    fn lines_sliced_01() {
        let r = Rope::from_str(TEXT);

        let s_start = 116;
        let s_end = 331;
        let s_start_byte = r.char_to_byte(s_start);
        let s_end_byte = r.char_to_byte(s_end);

        let s1 = r.slice(s_start..s_end);
        let s2 = &TEXT[s_start_byte..s_end_byte];

        for (liner, linet) in s1.lines().zip(s2.lines()) {
            assert_eq!(liner.to_string().trim_right(), linet);
        }
    }

    // Chunk iteration over a sub-slice reconstructs that sub-slice.
    #[test]
    fn chunks_sliced_01() {
        let r = Rope::from_str(TEXT);

        let s_start = 116;
        let s_end = 331;
        let s_start_byte = r.char_to_byte(s_start);
        let s_end_byte = r.char_to_byte(s_end);

        let s1 = r.slice(s_start..s_end);
        let s2 = &TEXT[s_start_byte..s_end_byte];

        let mut idx = 0;
        for chunk in s1.chunks() {
            assert_eq!(chunk, &s2[idx..(idx + chunk.len())]);
            idx += chunk.len();
        }
    }
}
//! A simple implementation of an in-memory files-sytem written in Rust using the BTreeMap //! data-structure. //! //! This code is inspired from https://github.com/bparli/bpfs and was modified to work //! with node-replication for benchmarking. use std::collections::BTreeMap; use std::ffi::OsStr; use std::iter; use fuse::{ Filesystem, ReplyAttr, ReplyCreate, ReplyData, ReplyDirectory, ReplyEmpty, ReplyEntry, ReplyOpen, ReplyWrite, Request, }; use libc::{c_int, EEXIST, EINVAL, ENOENT, ENOTEMPTY}; use time::Timespec; use log::{debug, error, info, trace}; const TTL: Timespec = Timespec { sec: 1, nsec: 0 }; pub type InodeId = u64; // Re-export reused structs from fuse: pub use fuse::FileAttr; pub use fuse::FileType; #[derive(Debug, Eq, PartialEq, Clone, Copy)] pub enum Error { NoEntry, NotEmpty, AlreadyExists, ParentNotFound, ChildNotFound, NewParentNotFound, InvalidInput, } /// Converts FS Errors to FUSE compatible libc errno types. impl Into<c_int> for Error { fn into(self) -> c_int { match self { Error::NoEntry => ENOENT, Error::NotEmpty => ENOTEMPTY, Error::AlreadyExists => EEXIST, Error::InvalidInput => EINVAL, Error::ParentNotFound => ENOENT, Error::NewParentNotFound => EINVAL, Error::ChildNotFound => ENOENT, } } } #[derive(Debug, Eq, PartialEq, Copy, Clone)] pub struct SetAttrRequest { pub mode: Option<u32>, pub uid: Option<u32>, pub gid: Option<u32>, pub size: Option<u64>, pub atime: Option<Timespec>, pub mtime: Option<Timespec>, pub fh: Option<u64>, pub crtime: Option<Timespec>, pub chgtime: Option<Timespec>, pub bkuptime: Option<Timespec>, pub flags: Option<u32>, } #[derive(Clone, Debug)] pub struct MemFile { bytes: Vec<u8>, } impl MemFile { pub fn new() -> MemFile { MemFile { bytes: Vec::new() } } fn size(&self) -> u64 { self.bytes.len() as u64 } fn update(&mut self, new_bytes: &[u8], offset: i64) -> u64 { let offset: usize = offset as usize; if offset >= self.bytes.len() { // extend with zeroes until we are at least at offset self.bytes 
.extend(iter::repeat(0).take(offset - self.bytes.len())); } if offset + new_bytes.len() > self.bytes.len() { self.bytes.splice(offset.., new_bytes.iter().cloned()); } else { self.bytes .splice(offset..offset + new_bytes.len(), new_bytes.iter().cloned()); } debug!( "update(): len of new bytes is {}, total len is {}, offset was {}", new_bytes.len(), self.size(), offset ); new_bytes.len() as u64 } fn truncate(&mut self, size: u64) { self.bytes.truncate(size as usize); } } #[derive(Debug, Clone)] pub struct Inode { name: String, children: BTreeMap<String, u64>, parent: u64, } impl Inode { fn new(name: String, parent: u64) -> Inode { Inode { name: name, children: BTreeMap::new(), parent: parent, } } } pub struct MemFilesystem { files: BTreeMap<u64, MemFile>, attrs: BTreeMap<u64, FileAttr>, inodes: BTreeMap<u64, Inode>, next_inode: u64, } impl MemFilesystem { pub fn new() -> MemFilesystem { let files = BTreeMap::new(); let root = Inode::new("/".to_string(), 1 as u64); let mut attrs = BTreeMap::new(); let mut inodes = BTreeMap::new(); let ts = time::now().to_timespec(); let attr = FileAttr { ino: 1, size: 0, blocks: 0, atime: ts, mtime: ts, ctime: ts, crtime: ts, kind: FileType::Directory, perm: 0o777, nlink: 0, uid: 0, gid: 0, rdev: 0, flags: 0, }; attrs.insert(1, attr); inodes.insert(1, root); MemFilesystem { files: files, attrs: attrs, inodes: inodes, next_inode: 2, } } /// Generates inode numbers. fn get_next_ino(&mut self) -> u64 { self.next_inode += 1; self.next_inode } pub fn getattr(&mut self, ino: u64) -> Result<&FileAttr, Error> { debug!("getattr(ino={})", ino); self.attrs.get(&ino).ok_or(Error::NoEntry) } /// Updates the attributes on an inode with values in `new_attrs`. 
pub fn setattr(&mut self, ino: u64, new_attrs: SetAttrRequest) -> Result<&FileAttr, Error> { debug!("setattr(ino={}, new_attrs={:?})", ino, new_attrs); let mut file_attrs = self.attrs.get_mut(&ino).ok_or(Error::NoEntry)?; // This should be first, so if we don't find the file don't update other attributes: match new_attrs.size { Some(new_size) => { let memfile = self.files.get_mut(&ino).ok_or(Error::NoEntry)?; memfile.truncate(new_size); file_attrs.size = new_size; } _ => (), } new_attrs.uid.map(|new_uid| file_attrs.uid = new_uid); new_attrs.gid.map(|new_gid| file_attrs.gid = new_gid); new_attrs .atime .map(|new_atime| file_attrs.atime = new_atime); new_attrs .mtime .map(|new_mtime| file_attrs.mtime = new_mtime); new_attrs .crtime .map(|new_crtime| file_attrs.crtime = new_crtime); Ok(file_attrs) } pub fn readdir( &mut self, ino: InodeId, _fh: u64, ) -> Result<Vec<(InodeId, FileType, String)>, Error> { debug!("readdir(ino={}, fh={})", ino, _fh); let mut entries: Vec<(u64, FileType, String)> = Vec::with_capacity(32); entries.push((ino, FileType::Directory, String::from("."))); self.inodes.get(&ino).map_or(Err(Error::NoEntry), |inode| { entries.push((inode.parent, FileType::Directory, String::from(".."))); for (child, child_ino) in inode.children.iter() { let child_attrs = &self.attrs.get(child_ino).unwrap(); trace!("\t inode={}, child={}", child_ino, child); entries.push((child_attrs.ino, child_attrs.kind, String::from(child))); } Ok(entries) }) } pub fn lookup(&mut self, parent: u64, name: &OsStr) -> Result<&FileAttr, Error> { let name_str = name.to_str().unwrap(); debug!("lookup(parent={}, name={})", parent, name_str); let parent_inode = self.inodes.get(&parent).ok_or(Error::NoEntry)?; let inode = parent_inode.children.get(name_str).ok_or(Error::NoEntry)?; self.attrs.get(inode).ok_or(Error::NoEntry) } pub fn rmdir(&mut self, parent: u64, name: &OsStr) -> Result<(), Error> { let name_str = name.to_str().unwrap(); debug!("rmdir(parent={}, name={})", parent, name_str); 
let parent_inode: &Inode = self.inodes.get(&parent).ok_or(Error::ParentNotFound)?; let rmdir_inode_id: InodeId = *parent_inode.children.get(name_str).ok_or(Error::NoEntry)?; let dir = self.inodes.get(&rmdir_inode_id).ok_or(Error::NoEntry)?; if dir.children.is_empty() { self.attrs.remove(&rmdir_inode_id); let parent_inode: &mut Inode = self.inodes.get_mut(&parent).ok_or(Error::ParentNotFound)?; parent_inode .children .remove(&name.to_str().unwrap().to_string()); self.inodes.remove(&rmdir_inode_id); Ok(()) } else { Err(Error::NotEmpty) } } pub fn mkdir(&mut self, parent: u64, name: &OsStr, _mode: u32) -> Result<&FileAttr, Error> { let name_str = name.to_str().unwrap(); debug!("mkdir(parent={}, name={})", parent, name_str); let new_inode_nr = self.get_next_ino(); let parent_ino = self.inodes.get_mut(&parent).ok_or(Error::ParentNotFound)?; if !parent_ino.children.contains_key(name_str) { let ts = time::now().to_timespec(); let attr = FileAttr { ino: new_inode_nr, size: 0, blocks: 0, atime: ts, mtime: ts, ctime: ts, crtime: ts, kind: FileType::Directory, perm: 0o644, nlink: 0, uid: 0, gid: 0, rdev: 0, flags: 0, }; parent_ino .children .insert(name_str.to_string(), new_inode_nr); self.attrs.insert(new_inode_nr, attr); self.inodes .insert(new_inode_nr, Inode::new(name_str.to_string(), parent)); let stored_attr = self .attrs .get(&new_inode_nr) .expect("Shouldn't fail we just inserted it"); Ok(stored_attr) } else { // A child with the given name already exists Err(Error::AlreadyExists) } } pub fn unlink(&mut self, parent: u64, name: &OsStr) -> Result<(), Error> { let name_str = name.to_str().unwrap(); debug!("unlink(parent={}, name={})", parent, name_str); let parent_ino = self.inodes.get_mut(&parent).ok_or(Error::ParentNotFound)?; trace!("parent is {} for name={}", parent_ino.name, name_str); let old_ino = parent_ino .children .remove(&name_str.to_string()) .ok_or(Error::NoEntry)?; let attr = self .attrs .remove(&old_ino) .expect("Inode needs to be in `attrs`."); if 
attr.kind == FileType::RegularFile { self.files .remove(&old_ino) .expect("Regular file inode needs to be in `files`."); } self.inodes .remove(&old_ino) .expect("Child inode (to be unlinked) needs to in `inodes`."); Ok(()) } pub fn create( &mut self, parent: u64, name: &OsStr, mode: u32, flags: u32, ) -> Result<&FileAttr, Error> { let name_str = name.to_str().unwrap(); debug!( "create(parent={}, name={}, mode={}, flags={})", parent, name_str, mode, flags, ); let new_inode_nr = self.get_next_ino(); // TODO: should only generate it when we're sure it'll succeed. let parent_ino = self.inodes.get_mut(&parent).ok_or(Error::ParentNotFound)?; match parent_ino.children.get_mut(&name_str.to_string()) { Some(child_ino) => { let attrs = self .attrs .get(&child_ino) .expect("Existing child inode needs to be in `attrs`."); Ok(attrs) } None => { trace!( "create file not found( parent={}, name={})", parent, name_str ); let ts = time::now().to_timespec(); self.attrs.insert( new_inode_nr, FileAttr { ino: new_inode_nr, size: 0, blocks: 0, atime: ts, mtime: ts, ctime: ts, crtime: ts, kind: FileType::RegularFile, perm: 0o644, nlink: 0, uid: 0, gid: 0, rdev: 0, flags: 0, }, ); self.files.insert(new_inode_nr, MemFile::new()); parent_ino .children .insert(name_str.to_string(), new_inode_nr); self.inodes .insert(new_inode_nr, Inode::new(name_str.to_string(), parent)); let stored_attr = self .attrs .get(&new_inode_nr) .expect("Shouldn't fail we just inserted it."); Ok(stored_attr) } } } pub fn write( &mut self, ino: u64, fh: u64, offset: i64, data: &[u8], _flags: u32, ) -> Result<u64, Error> { debug!("write(ino={}, fh={}, offset={})", ino, fh, offset); let ts = time::now().to_timespec(); let fp = self.files.get_mut(&ino).ok_or(Error::NoEntry)?; let attr = self .attrs .get_mut(&ino) .expect("Need to have attrs for file if it exists"); let size = fp.update(data, offset); // Update file attributes attr.atime = ts; attr.mtime = ts; attr.size = fp.size(); trace!( "write done(ino={}, wrote={}, 
offset={}, new size={})", ino, size, offset, fp.size() ); Ok(size) } pub fn read(&mut self, ino: u64, fh: u64, offset: i64, size: u32) -> Result<&[u8], Error> { debug!( "read(ino={}, fh={}, offset={}, size={})", ino, fh, offset, size ); let fp = self.files.get_mut(&ino).ok_or(Error::NoEntry)?; let attr = self .attrs .get_mut(&ino) .expect("Need to have attrs for file if it exists"); attr.atime = time::now().to_timespec(); Ok(&fp.bytes[offset as usize..]) } /// Rename a file. pub fn rename( &mut self, parent: u64, current_name: &OsStr, new_parent: u64, new_name: &OsStr, ) -> Result<(), Error> { let current_name_str = current_name.to_str().unwrap(); let new_name_str = new_name.to_str().unwrap(); debug!( "rename(parent={}, current_name={}, new_parent={}, new_name={})", parent, current_name_str, new_parent, new_name_str ); let child_ino = { let parent_ino = self.inodes.get_mut(&parent).ok_or(Error::ParentNotFound)?; parent_ino .children .remove(&current_name_str.to_string()) .ok_or(Error::ChildNotFound)? 
}; let new_parent_ino = self .inodes .get_mut(&new_parent) .ok_or(Error::NewParentNotFound)?; new_parent_ino .children .insert(new_name_str.to_string(), child_ino); Ok(()) } } impl Default for MemFilesystem { fn default() -> Self { MemFilesystem::new() } } impl Filesystem for MemFilesystem { fn getattr(&mut self, _req: &Request, ino: u64, reply: ReplyAttr) { match self.getattr(ino) { Ok(attr) => { info!("getattr reply with attrs = {:?}", attr); reply.attr(&TTL, attr) } Err(e) => { error!("getattr reply with errno = {:?}", e); reply.error(e.into()) } } } fn setattr( &mut self, _req: &Request, ino: u64, mode: Option<u32>, uid: Option<u32>, gid: Option<u32>, size: Option<u64>, atime: Option<Timespec>, mtime: Option<Timespec>, fh: Option<u64>, crtime: Option<Timespec>, chgtime: Option<Timespec>, bkuptime: Option<Timespec>, flags: Option<u32>, reply: ReplyAttr, ) { let new_attrs = SetAttrRequest { mode, uid, gid, size, atime, mtime, fh, crtime, chgtime, bkuptime, flags, }; let r = self.setattr(ino, new_attrs); match r { Ok(fattrs) => { reply.attr(&TTL, fattrs); } Err(e) => reply.error(e.into()), }; } fn readdir( &mut self, _req: &Request, ino: u64, fh: u64, offset: i64, mut reply: ReplyDirectory, ) { match self.readdir(ino, fh) { Ok(entries) => { // Offset of 0 means no offset. // Non-zero offset means the passed offset has already been seen, // and we should start after it. 
let to_skip = if offset == 0 { 0 } else { offset + 1 } as usize; for (i, entry) in entries.into_iter().enumerate().skip(to_skip) { reply.add(entry.0, i as i64, entry.1, entry.2); } reply.ok(); } Err(e) => reply.error(e.into()), }; } fn lookup(&mut self, _req: &Request, parent: u64, name: &OsStr, reply: ReplyEntry) { match self.lookup(parent, name) { Ok(attr) => { reply.entry(&TTL, attr, 0); } Err(e) => { reply.error(e.into()); } } } fn rmdir(&mut self, _req: &Request, parent: u64, name: &OsStr, reply: ReplyEmpty) { match self.rmdir(parent, name) { Ok(()) => reply.ok(), Err(e) => reply.error(e.into()), } } fn mkdir(&mut self, _req: &Request, parent: u64, name: &OsStr, mode: u32, reply: ReplyEntry) { match self.mkdir(parent, name, mode) { Ok(attr) => reply.entry(&TTL, &attr, 0), Err(e) => reply.error(e.into()), } } fn open(&mut self, _req: &Request, _ino: u64, flags: u32, reply: ReplyOpen) { trace!("open(ino={}, _flags={})", _ino, flags); reply.opened(0, 0); } fn unlink(&mut self, _req: &Request, parent: u64, name: &OsStr, reply: ReplyEmpty) { match self.unlink(parent, name) { Ok(()) => reply.ok(), Err(e) => reply.error(e.into()), } } fn create( &mut self, _req: &Request, parent: u64, name: &OsStr, mode: u32, flags: u32, reply: ReplyCreate, ) { match self.create(parent, name, mode, flags) { Ok(attr) => reply.created(&TTL, attr, 0, 0, 0), Err(e) => reply.error(e.into()), } } fn write( &mut self, _req: &Request, ino: u64, fh: u64, offset: i64, data: &[u8], flags: u32, reply: ReplyWrite, ) { match self.write(ino, fh, offset, data, flags) { Ok(bytes_written) => reply.written(bytes_written as u32), Err(e) => reply.error(e.into()), } } fn read( &mut self, _req: &Request, ino: u64, fh: u64, offset: i64, size: u32, reply: ReplyData, ) { match self.read(ino, fh, offset, size) { Ok(slice) => reply.data(slice), Err(e) => reply.error(e.into()), } } fn rename( &mut self, _req: &Request, parent: u64, name: &OsStr, newparent: u64, newname: &OsStr, reply: ReplyEmpty, ) { match 
self.rename(parent, name, newparent, newname) { Ok(()) => reply.ok(), Err(e) => reply.error(e.into()), } } } #[cfg(test)] mod test { use super::*; #[test] fn memfs_update() { let mut f = MemFile::new(); f.update(&[0, 1, 2, 3, 4, 5, 6, 7, 8], 0); assert_eq!(f.size(), 9); f.update(&[0, 0], 0); assert_eq!(f.size(), 9); assert_eq!(f.bytes, &[0, 0, 2, 3, 4, 5, 6, 7, 8]); f.update(&[1, 1], 8); assert_eq!(f.bytes, &[0, 0, 2, 3, 4, 5, 6, 7, 1, 1]); assert_eq!(f.size(), 10); f.update(&[2, 2], 13); assert_eq!(f.bytes, &[0, 0, 2, 3, 4, 5, 6, 7, 1, 1, 0, 0, 0, 2, 2]); assert_eq!(f.size(), 15); } }
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![no_std] #![feature(coerce_unsized)] #![feature(unsize)] use core::fmt; use core::option::Option; use core::marker::{PhantomData, Send, Sized, Sync}; use core::mem; use core::ptr::NonNull; /// A wrapper around a raw non-null `*mut T` that indicates that the possessor /// of this wrapper owns the referent. Useful for building abstractions like /// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`. /// /// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`. /// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies /// the kind of strong aliasing guarantees an instance of `T` can expect: /// the referent of the pointer should not be modified without a unique path to /// its owning Unique. /// /// If you're uncertain of whether it's correct to use `Unique` for your purposes, /// consider using `Shared`, which has weaker semantics. /// /// Unlike `*mut T`, the pointer must always be non-null, even if the pointer /// is never dereferenced. This is so that enums may use this forbidden value /// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`. /// However the pointer may still dangle if it isn't dereferenced. /// /// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct /// for any type which upholds Unique's aliasing requirements. 
#[allow(missing_debug_implementations)]
pub struct Unique<T: ?Sized> {
    // Non-null raw pointer to the (logically owned) referent.
    ptr: NonNull<T>,
    // NOTE: this marker has no consequences for variance, but is necessary
    // for dropck to understand that we logically own a `T`.
    //
    // For details, see:
    // https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
    _marker: PhantomData<T>,
}

// `Unique` is a plain pointer-plus-marker, so it is freely copyable;
// ownership semantics are a convention enforced by the abstraction using it.
impl<T: ?Sized> Clone for Unique<T> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

impl<T: ?Sized> Copy for Unique<T> {}

/// `Unique` pointers are `Send` if `T` is `Send` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
unsafe impl<T: Send + ?Sized> Send for Unique<T> { }

/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
unsafe impl<T: Sync + ?Sized> Sync for Unique<T> { }

impl<T: Sized> Unique<T> {
    /// Creates a new `Unique` that is dangling, but well-aligned.
    ///
    /// This is useful for initializing types which lazily allocate, like
    /// `Vec::new` does.
    ///
    /// The resulting pointer must never be dereferenced; `align_of::<T>()`
    /// is non-zero, which satisfies `new_unchecked`'s non-null requirement.
    pub const fn empty() -> Self {
        unsafe {
            Self::new_unchecked(mem::align_of::<T>() as *mut T)
        }
    }
}

impl<T: ?Sized> Unique<T> {
    /// Creates a new `Unique`.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
        Unique { ptr: NonNull::new_unchecked(ptr), _marker: PhantomData }
    }

    /// Creates a new `Unique` if `ptr` is non-null.
    pub fn new(ptr: *mut T) -> Option<Self> {
        NonNull::new(ptr).map(Self::from)
    }

    /// Acquires the underlying pointer.
    ///
    /// NOTE: despite the conventional `as_ptr` name this returns the wrapped
    /// `NonNull<T>`, not a raw `*mut T`; call `.as_ptr()` on the result for
    /// the raw pointer.
    pub const fn as_ptr(self) -> NonNull<T> {
        self.ptr
    }

    /// Dereferences the content.
    ///
    /// The resulting lifetime is bound to self so this behaves "as if"
    /// it were actually an instance of T that is getting borrowed. If a longer
    /// (unbound) lifetime is needed, use `&*my_ptr.ptr()`.
    ///
    /// # Safety
    ///
    /// The pointer must be valid for reads and properly aligned (in
    /// particular, not the dangling pointer produced by `empty`).
    pub unsafe fn as_ref(&self) -> &T {
        self.ptr.as_ref()
    }

    /// Mutably dereferences the content.
    ///
    /// The resulting lifetime is bound to self so this behaves "as if"
    /// it were actually an instance of T that is getting borrowed. If a longer
    /// (unbound) lifetime is needed, use `&mut *my_ptr.ptr()`.
    ///
    /// # Safety
    ///
    /// The pointer must be valid for reads and writes, properly aligned,
    /// and not aliased while the returned borrow lives.
    pub unsafe fn as_mut(&mut self) -> &mut T {
        self.ptr.as_mut()
    }
}

// Formats the wrapped address, not the pointee.
impl<T: ?Sized> fmt::Pointer for Unique<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&self.ptr, f)
    }
}

impl<'a, T: ?Sized> From<&'a mut T> for Unique<T> {
    fn from(reference: &'a mut T) -> Self {
        Unique { ptr: reference.into(), _marker: PhantomData }
    }
}

impl<'a, T: ?Sized> From<&'a T> for Unique<T> {
    fn from(reference: &'a T) -> Self {
        Unique { ptr: reference.into(), _marker: PhantomData }
    }
}

impl<T: ?Sized> From<NonNull<T>> for Unique<T> {
    fn from(ptr: NonNull<T>) -> Self {
        Self { ptr, _marker: PhantomData }
    }
}

/// `*mut T` but non-zero and covariant.
///
/// This is often the correct thing to use when building data structures using
/// raw pointers, but is ultimately more dangerous to use because of its additional
/// properties. If you're not sure if you should use `Shared<T>`, just use `*mut T`!
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<Shared<T>>` has the same size as `Shared<T>`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `Shared<T>` is covariant over `T`. If this is incorrect
/// for your use case, you should include some PhantomData in your type to
/// provide invariance, such as `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
/// Usually this won't be necessary; covariance is correct for most safe abstractions,
/// such as Box, Rc, Arc, Vec, and LinkedList. This is the case because they
/// provide a public API that follows the normal shared XOR mutable rules of Rust.
#[allow(missing_debug_implementations)]
pub struct Shared<T: ?Sized> {
    // Unlike `Unique`, no `PhantomData<T>`: `Shared` does not claim
    // ownership of the pointee for dropck purposes.
    ptr: NonNull<T>,
}

impl<T: Sized> Shared<T> {
    /// Creates a new `Shared` that is dangling, but well-aligned.
    ///
    /// This is useful for initializing types which lazily allocate, like
    /// `Vec::new` does.
    pub const fn empty() -> Self {
        unsafe {
            Self::new_unchecked(mem::align_of::<T>() as *mut T)
        }
    }
}

impl<T: ?Sized> Shared<T> {
    /// Creates a new `Shared`.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
        Self { ptr: NonNull::new_unchecked(ptr) }
    }

    /// Creates a new `Shared` if `ptr` is non-null.
    pub fn new(ptr: *mut T) -> Option<Self> {
        NonNull::new(ptr).map(Self::from)
    }

    /// Acquires the underlying pointer.
    ///
    /// NOTE: as with `Unique::as_ptr`, this returns the wrapped
    /// `NonNull<T>` rather than a raw `*mut T`.
    pub const fn as_ptr(self) -> NonNull<T> {
        self.ptr
    }

    /// Dereferences the content.
    ///
    /// The resulting lifetime is bound to self so this behaves "as if"
    /// it were actually an instance of T that is getting borrowed. If a longer
    /// (unbound) lifetime is needed, use `&*my_ptr.ptr()`.
    ///
    /// # Safety
    ///
    /// The pointer must be valid for reads and properly aligned.
    pub unsafe fn as_ref(&self) -> &T {
        self.ptr.as_ref()
    }

    /// Mutably dereferences the content.
    ///
    /// The resulting lifetime is bound to self so this behaves "as if"
    /// it were actually an instance of T that is getting borrowed. If a longer
    /// (unbound) lifetime is needed, use `&mut *my_ptr.ptr_mut()`.
    ///
    /// # Safety
    ///
    /// The pointer must be valid for reads and writes, properly aligned,
    /// and not aliased while the returned borrow lives.
    pub unsafe fn as_mut(&mut self) -> &mut T {
        self.ptr.as_mut()
    }
}

impl<T: ?Sized> Clone for Shared<T> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

impl<T: ?Sized> Copy for Shared<T> {}

// Formats the wrapped address, not the pointee.
impl<T: ?Sized> fmt::Pointer for Shared<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

// Weakening a `Unique` into a `Shared` is always allowed; the marker is
// simply dropped.
impl<T: ?Sized> From<Unique<T>> for Shared<T> {
    fn from(unique: Unique<T>) -> Self {
        Shared { ptr: unique.ptr }
    }
}

impl<'a, T: ?Sized> From<&'a mut T> for Shared<T> {
    fn from(reference: &'a mut T) -> Self {
        Shared { ptr: reference.into() }
    }
}

impl<'a, T: ?Sized> From<&'a T> for Shared<T> {
    fn from(reference: &'a T) -> Self {
        Shared { ptr: reference.into() }
    }
}

impl<T: ?Sized> From<NonNull<T>> for Shared<T> {
    fn from(ptr: NonNull<T>) -> Self {
        Self { ptr }
    }
}

// Allow `Unique<[T; N]> -> Unique<[T]>`, `Unique<T> -> Unique<dyn Trait>`
// etc., mirroring the unsizing coercions of raw pointers (nightly-only:
// `coerce_unsized` / `unsize` features, enabled at the top of this file).
impl<S: ?Sized + core::marker::Unsize<T>, T: ?Sized> core::ops::CoerceUnsized<Unique<T>> for Unique<S> {}

impl<S: ?Sized + core::marker::Unsize<T>, T: ?Sized> core::ops::CoerceUnsized<Shared<T>> for Shared<S> {}
// Auto-generated (svd2rust-style) read accessor for the UARTPCELLID1
// peripheral-identification register. Only the low 8 bits carry data.
#[doc = "Reader of register UARTPCELLID1"]
pub type R = crate::R<u32, super::UARTPCELLID1>;
#[doc = "Reader of field `UARTPCELLID1`"]
pub type UARTPCELLID1_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bits 0:7 - These bits read back as 0xF0"]
    #[inline(always)]
    pub fn uartpcellid1(&self) -> UARTPCELLID1_R {
        // Mask to the 8-bit field before narrowing; after `& 0xff` the
        // `as u8` cast cannot truncate meaningful bits.
        UARTPCELLID1_R::new((self.bits & 0xff) as u8)
    }
}
//! Bridges the VP SDK's C callback/event mechanism to Rust closures stored
//! in the process-wide `GLOBALS` table, keyed by instance pointer.

use std::sync::{Arc, Mutex, MutexGuard};
use raw::vp::{self, VPInstance};
use globals::{GLOBALS, Globals};
use instance::Instance;
use std::os::raw::{c_int, c_void};

// Expands, per callback id, to: "if this is the id being (de)activated,
// define a monomorphic `extern "C"` trampoline for it, register/unregister
// it with the SDK, and return". The trampoline looks the closure up in
// GLOBALS at call time, so closures can be swapped without re-registering.
macro_rules! generate_callback {
    ($this_callback_name:expr, $current_callback_name:expr, $current_instance:expr, $activate:expr) => {{
        if $this_callback_name == $current_callback_name {
            debug!("generate_callback({:?}, {:?}, ..., {:?})", $this_callback_name, $current_callback_name, $activate);
            extern "C" fn callback(instance: VPInstance, arg1: c_int, arg2: c_int) {
                debug!("Inside native vp callback, this_callback_name: {:?}", $this_callback_name);
                // NOTE(review): `11` is a magic callback id (extra world-name
                // logging for it) — presumably CALLBACK_LOGIN or similar;
                // confirm against `vp::callback_t` constants.
                if $this_callback_name == 11 {
                    debug!("WORLD: {:?}", unsafe { ::std::ffi::CStr::from_ptr(vp::string(instance, vp::WORLD_NAME)) });
                }
                let globals = GLOBALS.lock().unwrap();
                // Clone the Arc so the global lock can be released before
                // invoking user code (which may re-enter and re-lock GLOBALS).
                let maybe_closure = globals.instances.get(&(instance as usize)).and_then(|i| i.vp_callback_closures.get(&$this_callback_name)).map(|callback| callback.clone());
                match maybe_closure {
                    Some(closure) => {
                        // Drop the guard BEFORE calling out — avoids deadlock
                        // if the closure touches GLOBALS.
                        drop(globals);
                        closure(instance, arg1, arg2);
                    },
                    None => {
                        debug!("Attempted to call closure not present!")
                    }
                }
                debug!("Done calling native vp callback");
            }
            unsafe {
                if $activate {
                    debug!("Calling vp_callback_set!");
                    vp::callback_set($current_instance, $current_callback_name, Some(callback));
                } else {
                    vp::callback_set($current_instance, $current_callback_name, None);
                }
            }
            // Matched — nothing else to check in the caller.
            return;
        }
    }}
}

// Same trampoline scheme as `generate_callback!`, for the single-argument
// event handlers.
macro_rules! generate_event {
    ($this_event_name:expr, $current_event_name:expr, $current_instance:expr, $activate:expr) => {{
        if $this_event_name == $current_event_name {
            extern "C" fn event_handler(instance: VPInstance) {
                let globals = GLOBALS.lock().unwrap();
                let maybe_closure = globals.instances.get(&(instance as usize)).and_then(|i| i.vp_event_closures.get(&$this_event_name)).map(|handler| handler.clone());
                match maybe_closure {
                    Some(closure) => {
                        // Release the global lock before running user code.
                        drop(globals);
                        closure(instance)
                    },
                    None => ()
                }
            }
            unsafe {
                if $activate {
                    vp::event_set($current_instance, $current_event_name, Some(event_handler));
                } else {
                    vp::event_set($current_instance, $current_event_name, None);
                }
            }
            return;
        }
    }}
}

/// Registers (or unregisters, per `activate`) the native trampoline for
/// `callback_name` on `vp`. Exactly one `generate_callback!` arm matches and
/// returns; an unknown id falls through and is silently ignored.
pub fn activate_callback(vp: VPInstance, callback_name: vp::callback_t, activate: bool) {
    generate_callback!(vp::CALLBACK_OBJECT_ADD, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_OBJECT_CHANGE, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_OBJECT_DELETE, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_GET_FRIENDS, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_FRIEND_ADD, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_FRIEND_DELETE, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_TERRAIN_QUERY, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_TERRAIN_NODE_SET, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_OBJECT_GET, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_OBJECT_LOAD, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_LOGIN, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_ENTER, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_JOIN, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_CONNECT_UNIVERSE, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_WORLD_PERMISSION_USER_SET, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_WORLD_PERMISSION_SESSION_SET, callback_name, vp, activate);
    generate_callback!(vp::CALLBACK_WORLD_SETTING_SET, callback_name, vp, activate);
}

/// Registers (or unregisters) the native event trampoline for `event_name`
/// on `vp`. Unknown ids are silently ignored (no arm matches).
pub fn activate_event(vp: VPInstance, event_name: vp::event_t, activate:bool) {
    generate_event!(vp::EVENT_CHAT, event_name, vp, activate);
    generate_event!(vp::EVENT_AVATAR_ADD, event_name, vp, activate);
    generate_event!(vp::EVENT_AVATAR_CHANGE, event_name, vp, activate);
    generate_event!(vp::EVENT_AVATAR_DELETE, event_name, vp, activate);
    generate_event!(vp::EVENT_OBJECT, event_name, vp, activate);
    generate_event!(vp::EVENT_OBJECT_CHANGE, event_name, vp, activate);
    generate_event!(vp::EVENT_OBJECT_DELETE, event_name, vp, activate);
    generate_event!(vp::EVENT_OBJECT_CLICK, event_name, vp, activate);
    generate_event!(vp::EVENT_WORLD_LIST, event_name, vp, activate);
    generate_event!(vp::EVENT_WORLD_SETTING, event_name, vp, activate);
    generate_event!(vp::EVENT_WORLD_SETTINGS_CHANGED, event_name, vp, activate);
    generate_event!(vp::EVENT_FRIEND, event_name, vp, activate);
    generate_event!(vp::EVENT_WORLD_DISCONNECT, event_name, vp, activate);
    generate_event!(vp::EVENT_UNIVERSE_DISCONNECT, event_name, vp, activate);
    generate_event!(vp::EVENT_USER_ATTRIBUTES, event_name, vp, activate);
    generate_event!(vp::EVENT_CELL_END, event_name, vp, activate);
    generate_event!(vp::EVENT_TERRAIN_NODE, event_name, vp, activate);
    generate_event!(vp::EVENT_AVATAR_CLICK, event_name, vp, activate);
    generate_event!(vp::EVENT_TELEPORT, event_name, vp, activate);
    generate_event!(vp::EVENT_URL, event_name, vp, activate);
    generate_event!(vp::EVENT_OBJECT_BUMP_BEGIN, event_name, vp, activate);
    generate_event!(vp::EVENT_OBJECT_BUMP_END, event_name, vp, activate);
    generate_event!(vp::EVENT_TERRAIN_NODE_CHANGED, event_name, vp, activate);
    generate_event!(vp::EVENT_JOIN, event_name, vp, activate);
}

/// Installs (`Some`) or removes (`None`) the Rust closure for one callback on
/// one instance, keeping the native registration in sync. On removal the
/// native trampoline is torn down BEFORE the closure is dropped from the map,
/// so the trampoline never fires with no closure installed.
pub fn callback_closure_set(instance: &mut Instance, callback_name: vp::callback_t, closure: Option<Arc<Box<Fn(VPInstance, c_int, c_int)+'static>>>) {
    debug!("callback_closure_set({:?}, {:?}, {:?})", instance.vp, callback_name, closure.as_ref().map(|_| Some(())));
    if let Some(closure) = closure {
        instance.vp_callback_closures.insert(callback_name, closure);
        activate_callback(instance.vp, callback_name, true);
    } else {
        activate_callback(instance.vp, callback_name, false);
        instance.vp_callback_closures.remove(&callback_name);
    }
}

/// Event counterpart of `callback_closure_set`; same install/remove ordering.
pub fn event_closure_set(instance: &mut Instance, event_name: vp::event_t, closure: Option<Arc<Box<Fn(VPInstance)+'static>>>) {
    if let Some(closure) = closure {
        instance.vp_event_closures.insert(event_name, closure);
        activate_event(instance.vp, event_name, true);
    } else {
        activate_event(instance.vp, event_name, false);
        instance.vp_event_closures.remove(&event_name);
    }
}

/// Installs (or removes) one callback closure on EVERY known instance and
/// records it as the global default for instances created later.
pub fn callback_closure_set_all<F: Fn(VPInstance, c_int, c_int)+'static>(callback_name: vp::callback_t, closure: Option<F>) {
    debug!("callback_closure_set_all({:?}, {:?})", callback_name, closure.as_ref().map(|_| Some(())));
    // Box + Arc so one allocation of the closure is shared by all instances.
    let closure = closure.map(|c| Arc::new(Box::new(c) as Box<Fn(VPInstance, c_int, c_int)+'static>));
    let mut globals = GLOBALS.lock().unwrap();
    for instance in globals.instances.values_mut() {
        let closure_clone = closure.as_ref().map(|c| c.clone());
        callback_closure_set(instance, callback_name, closure_clone);
    }
    match closure {
        Some(closure) => { globals.vp_callback_closures.insert(callback_name, closure); },
        None => { globals.vp_callback_closures.remove(&callback_name); }
    }
}

/// Invokes the current instance's closure for `callback_name` with result
/// code `rc`, consuming (and releasing) the GLOBALS guard before the call.
/// Returns 0 when a closure handled it, otherwise passes `rc` through.
/// `try_rc!` (defined elsewhere in this crate) early-returns on a missing
/// current instance.
pub fn call_callback_closure<'a>(globals: MutexGuard<'a, Globals>, callback_name: vp::callback_t, rc: c_int) -> c_int {
    let maybe_closure = try_rc!(globals.current_instance()).vp_callback_closures.get(&callback_name).map(|closure| closure.clone());
    match maybe_closure {
        Some(closure) => {
            let instance = globals.current as *mut c_void;
            // Release the lock before running user code.
            drop(globals);
            closure(instance, rc, 0);
            0
        },
        None => { rc }
    }
}

/// Event counterpart of `callback_closure_set_all`.
/// NOTE(review): `event_name` is typed `vp::callback_t` here but is used as
/// an event id everywhere — presumably the two are the same underlying alias;
/// confirm against `raw::vp`.
pub fn event_closure_set_all<F: Fn(VPInstance)+'static>(event_name: vp::callback_t, closure: Option<F>) {
    let closure = closure.map(|c| Arc::new(Box::new(c) as Box<Fn(VPInstance)+'static>));
    let mut globals = GLOBALS.lock().unwrap();
    for instance in globals.instances.values_mut() {
        let closure_clone = closure.as_ref().map(|c| c.clone());
        event_closure_set(instance, event_name, closure_clone);
    }
    match closure {
        Some(closure) => { globals.vp_event_closures.insert(event_name, closure); },
        None => { globals.vp_event_closures.remove(&event_name); }
    }
}
use fluentbit::*;
use rmpv;
use serde_json;

/// Example Fluent Bit output plugin: decodes each flushed chunk from
/// MessagePack and pretty-prints it as JSON on stdout.
#[derive(Default)]
struct JsonExample {}

impl FLBPluginMethods for JsonExample {
    /// Advertises the plugin's name and description to Fluent Bit.
    fn plugin_register(&mut self, info: &mut PluginInfo) -> FLBResult {
        info.name = "rustout".into();
        info.description = "This is a default description".into();
        Ok(())
    }

    /// Reads the optional `params` configuration value and logs it.
    fn plugin_init(&mut self, plugin: &FLBPlugin) -> FLBResult {
        println!("default init");
        let param = plugin
            .config_param("params")
            .map_err(|_| FLBError::FLB_ERROR)?;
        if let Some(p) = param {
            println!("parameter {}", p);
        } else {
            println!("no params");
        }
        Ok(())
    }

    /// Decodes `data` (MessagePack) and prints it as pretty JSON.
    ///
    /// Fix: the original `data.clone()` cloned the `&[u8]` *reference*, not
    /// the bytes — a misleading no-op. A plain rebinding gives `read_value`
    /// the mutable slice cursor it needs (it advances the slice as it reads).
    fn plugin_flush(&mut self, data: &[u8], tag: &str) -> FLBResult {
        let mut bytes = data;
        // NOTE(review): `unwrap` here aborts the flush on malformed input;
        // acceptable for an example plugin, but real plugins should map the
        // decode error to an FLBError instead.
        let value: rmpv::Value = rmpv::decode::value::read_value(&mut bytes).unwrap();
        let json = serde_json::to_string_pretty(&value).unwrap();
        println!("tag: {} - data: {} \n", tag, json);
        Ok(())
    }

    /// Called on shutdown; nothing to clean up.
    fn plugin_exit(&mut self) -> FLBResult {
        println!("exiting");
        Ok(())
    }
}

create_boilerplate!(JsonExample::default());
//! Sparse slot container: a `Vec<T>` whose initialized slots are tracked by
//! a 64-bit occupancy `mask` instead of by the vector's length. Offsets must
//! therefore stay below 64. The `Vec`'s `len` covers *reserved* (possibly
//! uninitialized) slots, so all element access must be mask-guarded.

use std::ptr;

impl<T> Drop for Node<T> {
    #[inline]
    fn drop(&mut self) {
        // Drop exactly the occupied slots, lowest set bit first.
        while self.mask != 0 {
            let bit = self.mask.trailing_zeros();
            unsafe {
                // SAFETY: `bit` is set in `mask`, so `values[bit]` was
                // initialized by `insert`; `ptr::read` moves it out and the
                // temporary is dropped at the end of the statement.
                ptr::read(&mut self.values[bit as usize]);
            }
            self.mask &= !(1 << bit);
        }
        unsafe {
            // SAFETY: len may exceed the initialized region (see `require`);
            // zeroing it prevents Vec's own drop from dropping uninitialized
            // (or already-read) slots.
            self.values.set_len(0);
        }
    }
}

pub struct Node<T> {
    // Bit i set <=> `values[i]` holds an initialized T.
    mask: u64,
    // Backing storage; slots beyond the mask may be uninitialized.
    values: Vec<T>,
}

impl<T> Node<T> {
    /// Creates an empty node with no storage reserved.
    #[inline]
    pub fn new() -> Node<T> {
        Node {
            mask: 0,
            values: Vec::new(),
        }
    }

    /// Returns the raw occupancy bitmap.
    #[allow(unused)]
    #[inline]
    pub fn mask(&self) -> u64 {
        self.mask
    }

    /// Ensures `values[size]` is addressable (len > `size`).
    ///
    /// NOTE(review): this extends `len` over uninitialized memory — sound
    /// only because every read path is guarded by `mask`; exposing this as
    /// `pub` lets callers create such slots directly, so it should be used
    /// with care.
    #[inline]
    pub fn require(&mut self, size: usize) {
        let size = size + 1;
        if self.values.len() < size {
            unsafe {
                // `reserve` guarantees capacity; `set_len` only widens the
                // addressable range, it does not initialize.
                self.values.reserve(size);
                self.values.set_len(size);
            }
        }
    }

    /// True iff slot `off` is occupied (off must be < 64 for a meaningful
    /// answer; larger shifts would overflow the u64 mask).
    #[inline]
    fn contains(&self, off: u64) -> bool {
        let mask = 1 << off;
        self.mask & mask != 0
    }

    /// Borrow of slot `off`, or `None` if unoccupied.
    #[inline]
    pub fn get(&self, off: usize) -> Option<&T> {
        if self.contains(off as u64) {
            Some(&self.values[off])
        } else {
            None
        }
    }

    /// Mutable borrow of slot `off`, or `None` if unoccupied.
    #[inline]
    pub fn get_mut(&mut self, off: usize) -> Option<&mut T> {
        if self.contains(off as u64) {
            Some(&mut self.values[off])
        } else {
            None
        }
    }

    /// Stores `val` at slot `off`; returns the previous value if the slot
    /// was occupied (map-style semantics).
    #[inline]
    pub fn insert(&mut self, off: usize, mut val: T) -> Option<T> {
        use std::mem::swap;
        use std::ptr::write;
        if self.contains(off as u64) {
            // Occupied: swap keeps the old value alive to hand back.
            swap(&mut self.values[off], &mut val);
            Some(val)
        } else {
            self.require(off);
            self.mask |= 1 << off as u64;
            unsafe {
                // SAFETY: slot was unoccupied (possibly uninitialized), so a
                // raw `write` is required — plain assignment would drop the
                // uninitialized previous contents.
                write(&mut self.values[off], val);
            }
            None
        }
    }

    /// Removes and returns the value at slot `off`, if occupied. The slot's
    /// storage stays reserved; only the mask bit is cleared.
    #[inline]
    pub fn delete(&mut self, off: usize) -> Option<T> {
        use std::ptr::read;
        if self.contains(off as u64) {
            self.mask &= !(1 << off as u64);
            unsafe {
                // SAFETY: the bit was set, so the slot is initialized;
                // clearing the bit first ensures no double-drop later.
                Some(read(&mut self.values[off]))
            }
        } else {
            None
        }
    }

    /// Mutable borrow of slot `off`, inserting `insert()`'s value first if
    /// the slot is unoccupied (entry-API style).
    #[inline]
    pub fn get_mut_or_else<F>(&mut self, off: usize, insert: F) -> &mut T
        where F: Fn() -> T
    {
        if self.contains(off as u64) {
            &mut self.values[off]
        } else {
            self.insert(off, insert());
            self.get_mut(off).unwrap()
        }
    }
}
use std::io::Write;
use std::str::FromStr;

// `main` returns no value, so no return type is written.
fn main () {
    gcd();
}

/// Parses the command-line arguments as `u64`s and prints the greatest
/// common divisor of all of them. Exits with status 1 (after printing a
/// usage message to stderr) when no arguments are given.
fn gcd () {
    // `mut` is needed to push into the vector; its element type is
    // inferred from the `u64` values pushed below.
    let mut numbers = Vec::new();

    // `std::env::args` is an iterator; skip(1) drops the program name.
    for arg in std::env::args().skip(1) {
        numbers.push(u64::from_str(&arg).expect("error parsing argument"));
    }

    // No arguments: report usage on the standard error stream.
    // (Typo fix: "Useage" -> "Usage".) The `unwrap` asserts that writing
    // the message itself succeeded.
    if numbers.is_empty() {
        writeln!(std::io::stderr(), "Usage: gcd Number ...").unwrap();
        std::process::exit(1);
    }

    // Fold the GCD across the list. `numbers` keeps ownership; the loop
    // only borrows (`&` borrows the slice, `*` dereferences each element).
    let mut d = numbers[0];
    for m in &numbers[1..] {
        d = simple_gcd(d, *m);
    }

    // Typo fixes: "commpn" -> "common", and a missing space before `{:?}`.
    println!("The greatest common divisor of {:?} is {}", numbers, d);
}

/// Euclid's algorithm: repeatedly replace `(n, m)` with `(m, n mod m)`
/// until the remainder is zero; the surviving value is the GCD.
///
/// # Panics
///
/// Panics if either argument is zero.
fn simple_gcd(mut n: u64, mut m: u64) -> u64 {
    assert!(n != 0 && m != 0);
    while m != 0 {
        let r = n % m;
        n = m;
        m = r;
    }
    n
}

#[test]
fn test_gcd() {
    assert_eq!(simple_gcd(14, 15), 1);
    assert_eq!(simple_gcd(2 * 3 * 5 * 11 * 17,
                          3 * 7 * 11 * 13 * 19),
               3 * 11);
}
extern crate proc_macro; //extern crate syn; //extern crate quote; use proc_macro::TokenStream; use std::str::FromStr; #[proc_macro] pub fn Dingirsu_Reflect(input: TokenStream) -> TokenStream { let mut original_code =input.to_string(); //original_code=original_code.replace(" fn\n", " fn"); let mut code:Vec<&str> =original_code.splitn(original_code.len()," ").collect(); let a=code.len(); let mut fn_list:Vec<&str>=Vec::new(); for index in 0..code.len() { let item=code[index]; if ((item=="fn" || code[index]=="fn\n" ) ) { let t_fn_name:Vec<&str>=code[index+1].split("(").collect(); fn_list.insert(0, Get_FunctionName(index,code.clone()).unwrap()); } } for item in &fn_list { println!("{0}",item) } let mut inject_str:String=String::new(); inject_str+="pub fn Reflect(name :&str)-> Option< fn(&solana_program::pubkey::Pubkey,&[solana_program::account_info::AccountInfo],&[u8])->solana_program::entrypoint::ProgramResult > { match name { "; for item in &fn_list { let mut t_str:String=format!( "\"{0}\"=>{{ return Some(Self::{1}); }} ",*item,*item); inject_str+=t_str.as_mut(); } inject_str+=" _=>{ return None; } } } "; let mut inject_str_mut=inject_str.as_mut(); for index in 0..code.len() { let item=code[index]; if (item=="impl" && index+3< code.len()) { code.insert(index+3, inject_str_mut); break; } } let mut res_str=String ::new(); for index in 0..code.len() { res_str+=code[index]; res_str+=" "; } println!("{0}",res_str); let res_ts:TokenStream =TokenStream::from_str(res_str.as_mut()).unwrap(); return res_ts; } fn Get_FunctionName(index:usize,code:Vec<&str>)-> Option<&str> { let mut i=index; if(code[index]=="fn" || code[index]=="fn\n") { loop { i=i+1; if (i==code.len()) { break; } let mut t_str:&str=code[i]; if( t_str.len()>0 && !(t_str.starts_with(" ")) ) { let t_fn_name:Vec<&str>=code[i].split("(").collect(); if(t_fn_name.len()>0) { let t=t_fn_name[0]; return Some(t); } } } } return None; }
//! Sx127x Integration testing //! //! Copyright 2019 Ryan Kurte use std::collections::HashMap; use std::thread; use std::time::Duration; extern crate embedded_spi; use embedded_spi::utils::{load_config, DeviceConfig}; extern crate radio_sx127x; use radio_sx127x::prelude::*; extern crate radio; use radio::{Receive, Transmit}; #[test] #[ignore] fn integration() { // Fetch configuration file name let config_file = match std::env::var("SX127x_TEST_CONFIG") { Ok(v) => v, Err(_e) => "configs/pi-ci-sx127x.toml".to_owned(), }; println!("Using configuration file: {}", config_file); // Load configurations from file let configs = load_config::<HashMap<String, DeviceConfig>>(&config_file); let config1 = configs.get("radio-0").expect("Missing radio-0 object"); let config2 = configs.get("radio-1").expect("Missing radio-1 object"); let (w1, w2) = (config1.load(), config2.load()); let mut radio1 = Sx127x::new(w1, &Config::default()).expect("error creating radio1"); let mut radio2 = Sx127x::new(w2, &Config::default()).expect("error creating radio1"); println!("Testing send/receive"); let data = &[0xaa, 0xbb, 0xcc]; // Configure receive radio1.start_receive().unwrap(); // Start transmit radio2.start_transmit(data).unwrap(); // Poll on tx and rx complete let mut sent = false; let mut received = false; let mut buff = [0u8; 1024]; let mut n = 0; let mut info = PacketInfo::default(); for _i in 0..10 { // Check TX state if radio2.check_transmit().unwrap() { println!("TX complete"); sent = true; } // Check RX state if radio1.check_receive(false).unwrap() { n = radio1.get_received(&mut info, &mut buff).unwrap(); received = true; println!("RX complete ({:?} {:?})", info, &buff[..n]); } thread::sleep(Duration::from_millis(100)); } assert!(sent, "Send not completed"); assert!(received, "Receive not completed"); assert_eq!(data, &buff[..n]); }
use std::fs;

use itertools::Itertools;

/// Day 1 entry point: reads one integer per line from the file named by the
/// single CLI argument and prints the puzzle answers.
///
/// Both the hand-rolled (`find1`/`find2`) and the generic (`find`) solutions
/// are printed so they can be compared.
///
/// Returns 0 on success, -1 when the input-file argument is missing.
pub fn day1(args: &[String]) -> i32 {
    println!("Day 1");

    if args.len() != 1 {
        println!("Missing input file");
        return -1;
    }

    let filename = &args[0];
    println!("In file {}", filename);

    let contents = fs::read_to_string(filename)
        .expect("Something went wrong reading the file");

    let entries: Vec<i32> = contents.lines().map(|l| l.parse::<i32>().unwrap()).collect();

    println!("Part 1: {:}", find1(&entries, 2020).unwrap());
    println!("Part 2: {:}", find2(&entries, 2020).unwrap());
    println!("Part 1: {:}", find(&entries, 2020, 2).unwrap());
    println!("Part 2: {:}", find(&entries, 2020, 3).unwrap());
    0
}

/// Finds two distinct entries summing to `sum` and returns their product
/// (the last matching pair wins, preserving the original behavior).
/// Returns `None` when no such pair exists.
pub fn find1(entries: &Vec<i32>, sum: i32) -> Option<i32> {
    let mut r = None;
    // saturating_sub guards empty/one-element input: `len() - 1` on a usize
    // would underflow and panic.
    for i in 0..entries.len().saturating_sub(1) {
        for j in i + 1..entries.len() {
            if entries[i] + entries[j] == sum {
                r = Some(entries[i] * entries[j]);
            }
        }
    }
    r
}

/// Finds three distinct entries summing to `sum` and returns their product
/// (last matching triple wins). Returns `None` when no triple exists.
pub fn find2(entries: &Vec<i32>, sum: i32) -> Option<i32> {
    let mut r = None;
    // saturating_sub guards inputs with fewer than three entries, which
    // previously underflowed `len() - 2` and panicked.
    for i in 0..entries.len().saturating_sub(2) {
        for j in i + 1..entries.len() - 1 {
            for k in j + 1..entries.len() {
                if entries[i] + entries[j] + entries[k] == sum {
                    r = Some(entries[i] * entries[j] * entries[k]);
                }
            }
        }
    }
    r
}

/// Generic variant: finds `count` distinct entries summing to `sum` and
/// returns their product, or `None` when no combination matches.
pub fn find(entries: &Vec<i32>, sum: i32, count: usize) -> Option<i32> {
    let mut r = None;
    entries.iter().cloned().combinations(count).for_each(|c| {
        if c.iter().sum::<i32>() == sum {
            r = Some(c.iter().product::<i32>());
        }
    });
    r
}
#![feature(async_closure)]
use chrono::{Utc};
use lobby_manager::*;
use serde::{Deserialize, Serialize};

mod planetwars;
mod pw_maps;
mod websocket;
mod game_manager;
mod lobby_manager;

use mozaic_core::{Token};
use uuid::Uuid;
use warp::{Rejection, reply::{Reply, Response, json}};
use warp::Filter;
use std::{convert::Infallible, path::Path, sync::{Arc, Mutex}};
use std::collections::HashSet;
use hex::FromHex;
use game_manager::GameManager;

// Directory scanned at startup for planetwars map files.
const MAPS_DIRECTORY: &'static str = "maps";

/// Warp filter that injects a clone of the shared game manager into handlers.
fn with_game_manager(
    game_manager: Arc<Mutex<GameManager>>,
) -> impl Clone + Filter<Extract = (Arc<Mutex<GameManager>>,), Error = Infallible> {
    warp::any().map(move || game_manager.clone())
}

/// Warp filter that injects a clone of the shared lobby manager into handlers.
fn with_lobby_manager(
    lobby_manager: Arc<Mutex<LobbyManager>>,
) -> impl Clone + Filter<Extract = (Arc<Mutex<LobbyManager>>,), Error = Infallible> {
    warp::any().map(move || lobby_manager.clone())
}

/// Request body for creating a lobby.
#[derive(Serialize, Deserialize, Debug)]
pub struct LobbyConfig {
    pub name: String,
    pub public: bool,
}

/// POST /lobbies — creates a lobby and returns its public data as JSON.
fn create_lobby(
    mgr: Arc<Mutex<LobbyManager>>,
    lobby_config: LobbyConfig,
) -> impl Reply {
    let mut manager = mgr.lock().unwrap();
    let lobby = manager.create_lobby(lobby_config);
    json(&LobbyData::from(lobby.clone()))
}

/// GET /matches — lists all known matches as JSON.
fn list_matches(
    mgr: Arc<Mutex<GameManager>>
) -> impl Reply {
    let manager = mgr.lock().unwrap();
    json(&manager.list_matches())
}

/// GET /matches/<id> — returns the match log as a JSON list of strings,
/// or 404 when the match is unknown.
fn get_match_log(
    match_id: String,
    mgr: Arc<Mutex<GameManager>>,
) -> warp::reply::Response {
    let manager = mgr.lock().unwrap();
    match manager.get_match_data(&match_id) {
        None => warp::http::StatusCode::NOT_FOUND.into_response(),
        Some(m) => {
            let log = m.log.to_vec().into_iter().map(|e| {
                e.as_ref().to_string()
            }).collect::<Vec<_>>();
            json(&log).into_response()
        }
    }
}

/// Error cases of the lobby HTTP API; mapped to status codes in
/// `result_to_response`.
enum LobbyApiError {
    LobbyNotFound,
    NotAuthenticated,
    NotAuthorized,
    NameTaken,
    InvalidProposalParams(String),
    ProposalNotFound,
    ProposalExpired,
    ProposalNotReady,
}

type LobbyApiResult<T> = Result<T, LobbyApiError>;

/// Converts an API result into an HTTP response, mapping each error variant
/// to an appropriate status code (and optional plain-text body).
fn result_to_response<T>(result: LobbyApiResult<T>) -> Response
    where T: warp::Reply
{
    match result {
        Ok(reply) => reply.into_response(),
        Err(LobbyApiError::LobbyNotFound) => warp::http::StatusCode::NOT_FOUND.into_response(),
        Err(LobbyApiError::NotAuthenticated) => warp::http::StatusCode::BAD_REQUEST.into_response(),
        Err(LobbyApiError::NotAuthorized) => warp::http::StatusCode::UNAUTHORIZED.into_response(),
        Err(LobbyApiError::NameTaken) => warp::reply::with_status(
            "name is not available",
            warp::http::StatusCode::BAD_REQUEST,
        ).into_response(),
        Err(LobbyApiError::InvalidProposalParams(msg)) => warp::reply::with_status(
            msg,
            warp::http::StatusCode::BAD_REQUEST
        ).into_response(),
        Err(LobbyApiError::ProposalNotFound) => warp::http::StatusCode::NOT_FOUND.into_response(),
        Err(LobbyApiError::ProposalExpired) => warp::reply::with_status(
            "proposal is no longer valid",
            warp::http::StatusCode::BAD_REQUEST,
        ).into_response(),
        Err(LobbyApiError::ProposalNotReady) => warp::reply::with_status(
            "not all players have accepted",
            warp::http::StatusCode::BAD_REQUEST,
        ).into_response(),
    }
}

/// Like `result_to_response`, but serializes the success value as JSON.
fn json_response<T>(result: LobbyApiResult<T>) -> Response
    where T: Serialize
{
    let res = result.map(|value| json(&value));
    return result_to_response(res);
}

/// Per-request context for lobby-scoped routes: the shared manager, the
/// lobby id from the URL, and the raw Authorization header (if any).
struct LobbyRequestCtx {
    lobby_mgr: Arc<Mutex<LobbyManager>>,
    lobby_id: String,
    auth_header: Option<String>,
}

impl LobbyRequestCtx {
    /// Runs `fun` with the referenced lobby locked and mutably borrowed.
    /// Lobby ids are matched case-insensitively (lowercased).
    fn with_lobby<F, T>(&self, fun: F) -> Result<T, LobbyApiError>
        where F: FnOnce(&mut Lobby) -> Result<T, LobbyApiError>
    {
        let lobby_id = self.lobby_id.to_lowercase();
        let mut mgr = self.lobby_mgr.lock().unwrap();
        let res = mgr.lobbies.get_mut(&lobby_id)
            .ok_or(LobbyApiError::LobbyNotFound)
            .and_then(fun);
        return res;
    }

    /// Pushes an event to every subscriber of this lobby.
    fn broadcast_event(&self, ev: LobbyEvent) {
        let lobby_id = self.lobby_id.to_lowercase();
        let mut mgr = self.lobby_mgr.lock().unwrap();
        mgr.send_update(&lobby_id, ev);
    }
}

/// Parses a hex-encoded player token; returns `None` on malformed hex.
fn parse_auth(hex_token: &str) -> Option<Token> {
    Token::from_hex(hex_token).ok()
}

/// Resolves a `Bearer <hex-token>` Authorization header to the player id it
/// belongs to within `lobby`, if any.
fn auth_player(auth: &Option<String>, lobby: &Lobby) -> Option<usize> {
    auth.as_ref()
        .and_then(|val| val.strip_prefix("Bearer "))
        .and_then(parse_auth)
        .and_then(|t| lobby.token_player.get(&t).cloned())
}

/// Wraps a route that extracts a lobby id so that handlers receive a full
/// `LobbyRequestCtx` (manager + lobby id + auth header) instead.
fn lobby_context<MountPoint>(
    base: MountPoint,
    lobby_mgr: Arc<Mutex<LobbyManager>>)
    -> impl Filter<Extract=(LobbyRequestCtx, ), Error=Rejection> + Clone
    where MountPoint: Filter<Extract=(String, ), Error=Rejection> + Clone
{
    base
        .and(with_lobby_manager(lobby_mgr))
        .and(warp::header::optional("authorization"))
        .map(|lobby_id, lobby_mgr, auth_header| LobbyRequestCtx {
            lobby_mgr,
            lobby_id,
            auth_header,
        })
}

/// GET /lobbies/<id> — returns the lobby's public data, or 404.
fn get_lobby_by_id(req: LobbyRequestCtx) -> Response {
    let res = req.with_lobby(|lobby| {
        Ok(json(&LobbyData::from(lobby.clone())))
    });
    return result_to_response(res)
}

/// Request body for joining a lobby.
#[derive(Serialize, Deserialize, Debug)]
struct PlayerParams {
    name: String,
    #[serde(with = "hex")]
    token: Token,
}

/// POST /lobbies/<id>/join — registers (or re-registers) a player in the
/// lobby under the given name/token and broadcasts the new player data.
fn join_lobby(req: LobbyRequestCtx, player_params: PlayerParams) -> Response {
    let player_token = player_params.token;
    // TODO: check for uniqueness of name and token
    let game_manager = req.lobby_mgr.lock().unwrap().game_manager.clone();
    let res = req.with_lobby(|lobby| {
        // whether the player sending the request already has this name
        let owns_name = lobby.token_player
            .get(&player_params.token)
            .map(|player_id| lobby.players[player_id].name == player_params.name)
            .unwrap_or(false);
        // is the requested name available?
        let taken_names: HashSet<&str> = lobby.players.values().map(|player| player.name.as_str()).collect();
        if taken_names.contains(player_params.name.as_str()) && !owns_name {
            return Err(LobbyApiError::NameTaken);
        }
        // Reuse the player's existing id for this token, or allocate a new one.
        let player_id = lobby.token_player.get(&player_params.token).cloned().unwrap_or_else(|| {
            let id = lobby.next_player_id;
            lobby.next_player_id += 1;
            id
        });
        let player = Player {
            id: player_id,
            name: player_params.name,
            token: player_params.token.clone(),
            // TODO?
            connection_count: 0,
            client_connected: game_manager
                .lock()
                .unwrap()
                .client_connected(&player_params.token)
        };
        lobby.token_player.insert(player.token.clone(), player_id);
        lobby.players.insert(player_id, player.clone());
        Ok(StrippedPlayer::from(player))
    });
    if let Ok(player_data) = &res {
        req.broadcast_event(LobbyEvent::PlayerData(player_data.clone()));
        // Track which (lobby, player id) pairs this token participates in.
        req.lobby_mgr
            .lock()
            .unwrap()
            .token_player
            .entry(player_token)
            .or_insert_with(|| HashSet::new())
            .insert((req.lobby_id.to_lowercase(), player_data.id));
    }
    return json_response(res);
}

// Upper bound enforced on a proposal's max_turns setting.
const MAX_TURNS_ALLOWED: usize = 500;

/// Request body for creating a match proposal.
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all="camelCase")]
struct ProposalParams {
    config: planetwars::MatchConfig,
    players: Vec<usize>
}

/// Rejects proposals with too many turns, an unknown map, or more players
/// than the map supports.
fn validate_proposal_params(maps: &PwMaps, params: &ProposalParams) -> LobbyApiResult<()> {
    if params.config.max_turns > MAX_TURNS_ALLOWED {
        return Err(LobbyApiError::InvalidProposalParams(
            format!("max allowed turns is {}", MAX_TURNS_ALLOWED))
        );
    }
    let map_data = maps.get(&params.config.map_name)
        .ok_or_else(|| LobbyApiError::InvalidProposalParams(
            "map does not exist".to_string()
        ))?;
    if params.players.len() > map_data.max_players {
        return Err(LobbyApiError::InvalidProposalParams(
            format!("too many players")
        ));
    }
    Ok(())
}

/// POST /lobbies/<id>/proposals — creates a pending match proposal owned by
/// the authenticated player (who auto-accepts) and broadcasts it.
fn create_proposal(req: LobbyRequestCtx, params: ProposalParams) -> Response {
    let auth = &req.auth_header;
    let maps = req.lobby_mgr.lock().unwrap().maps.clone();
    let res = req.with_lobby(|lobby| {
        validate_proposal_params(maps.as_ref(), &params)?;
        let creator_id = auth_player(auth, lobby)
            .ok_or(LobbyApiError::NotAuthenticated)?;
        let proposal = Proposal {
            owner_id: creator_id,
            config: params.config,
            players: params.players.iter().map(|&player_id| {
                // The creator implicitly accepts their own proposal.
                let status = if player_id == creator_id {
                    AcceptedState::Accepted
                } else {
                    AcceptedState::Unanswered
                };
                ProposalPlayer {
                    player_id,
                    status,
                }
            }).collect(),
            id: Uuid::new_v4().to_hyphenated().to_string(),
            status: ProposalStatus::Pending,
        };
        lobby.proposals.insert(proposal.id.clone(), proposal.clone());
        Ok(proposal)
    });
    if let Ok(proposal) = &res {
        req.broadcast_event(LobbyEvent::ProposalData(proposal.clone()));
    }
    return json_response(res);
}

/// POST /lobbies/<lobby_id>/proposals/<proposal_id>/start — starts the match
/// for a fully-accepted proposal. Only the proposal owner may start it; every
/// listed player must have accepted and be connected. On completion the
/// registered callback marks the match Done and broadcasts the update.
fn start_proposal(req: LobbyRequestCtx, proposal_id: String) -> Response {
    let manager = req.lobby_mgr.clone();
    let game_manager = req.lobby_mgr.lock().unwrap().game_manager.clone();
    let auth = &req.auth_header;
    let res = req.with_lobby(|lobby| {
        let player_id = auth_player(auth, lobby)
            .ok_or(LobbyApiError::NotAuthenticated)?;
        let proposal = lobby.proposals.get_mut(&proposal_id)
            .ok_or(LobbyApiError::ProposalNotFound)?;
        if player_id != proposal.owner_id {
            return Err(LobbyApiError::NotAuthorized);
        }
        // Only pending proposals can be started.
        match proposal.status {
            ProposalStatus::Pending => (),
            _ => return Err(LobbyApiError::ProposalExpired)
        }
        let mut tokens = vec![];
        for accepting_player in proposal.players.iter() {
            // player should exist. TODO: maybe make this more safe?
            let player = lobby.players.get(&accepting_player.player_id).unwrap();
            if accepting_player.status != AcceptedState::Accepted || !player.client_connected {
                return Err(LobbyApiError::ProposalNotReady);
            }
            tokens.push(player.token);
        }
        let match_config = proposal.config.clone();
        let cb_mgr = manager.clone();
        let cb_lobby_id = lobby.id.clone();
        // The closure runs when the match finishes: mark it Done and notify.
        let match_id = game_manager.lock().unwrap().create_match(tokens, match_config.clone(), move |match_id| {
            println!("completed match {}", &match_id);
            let mut mgr = cb_mgr.lock().unwrap();
            mgr.lobbies.get_mut(&cb_lobby_id).and_then(|lobby| {
                if let Some(match_meta) = lobby.matches.get_mut(&match_id) {
                    match_meta.status = MatchStatus::Done;
                    Some(match_meta.clone())
                } else {
                    None
                }
            }).map(|match_meta| {
                mgr.send_update(&cb_lobby_id, LobbyEvent::MatchData(match_meta));
            });
        });
        let match_meta = MatchMeta {
            id: match_id.clone(),
            timestamp: Utc::now(),
            status: MatchStatus::Playing,
            config: proposal.config.clone(),
            players: proposal.players.iter().map(|p| p.player_id.clone()).collect(),
        };
        lobby.matches.insert(match_meta.id.clone(), match_meta.clone());
        proposal.status = ProposalStatus::Accepted { match_id };
        Ok((proposal.clone(), match_meta))
    });
    if let Ok((proposal, match_meta)) = &res {
        // TODO: spurious clones
        req.broadcast_event(LobbyEvent::ProposalData(proposal.clone()));
        req.broadcast_event(LobbyEvent::MatchData(match_meta.clone()));
    }
    return json_response(res);
}

/// Request body for answering a proposal.
#[derive(Serialize, Deserialize, Debug, Clone)]
struct AcceptParams {
    status: AcceptedState,
}

/// POST /lobbies/<lobby_id>/proposals/<proposal_id>/accept — records the
/// authenticated player's accept/reject answer; any rejection denies the
/// whole proposal. Broadcasts the updated proposal.
fn accept_proposal(
    req: LobbyRequestCtx,
    proposal_id: String,
    params: AcceptParams,
) -> Response {
    let res = req.with_lobby(|lobby| {
        let player_id = auth_player(&req.auth_header, lobby)
            .ok_or(LobbyApiError::NotAuthenticated)?;
        let proposal = lobby.proposals.get_mut(&proposal_id)
            .ok_or(LobbyApiError::ProposalNotFound)?;
        for player in proposal.players.iter_mut() {
            if player.player_id == player_id {
                player.status = params.status.clone();
            }
        }
        if proposal.players.iter().any(|p| p.status == AcceptedState::Rejected) {
            proposal.status = ProposalStatus::Denied;
        }
        Ok(proposal.clone())
    });
    if let Ok(proposal) = &res {
        req.broadcast_event(LobbyEvent::ProposalData(proposal.clone()));
    }
    return json_response(res);
}

/// GET /maps — lists all available map descriptors as JSON.
fn get_maps(mgr: Arc<Mutex<LobbyManager>>) -> impl Reply {
    let mgr = mgr.lock().unwrap();
    let maps: Vec<&PwMapData> = mgr.maps.values().collect();
    return json(&maps);
}

/// Wires up the managers and all HTTP/websocket routes, then serves on :7412.
#[tokio::main]
async fn main() {
    let maps = pw_maps::build_map_index(Path::new(MAPS_DIRECTORY))
        .expect("failed to read maps");
    let game_manager = GameManager::init("0.0.0.0:8080".to_string());
    let lobby_manager = LobbyManager::create(game_manager.clone(), maps);

    // POST /lobbies
    let post_lobbies_route = warp::path!("lobbies")
        .and(warp::post())
        .and(with_lobby_manager(lobby_manager.clone()))
        .and(warp::body::json())
        .map(create_lobby);

    let m = lobby_manager.clone();
    let lobby_scope = move || lobby_context(
        warp::path!("lobbies" / String / ..),
        m.clone(),
    );

    // GET /lobbies/<id>
    let get_lobbies_id_route = lobby_scope()
        .and(warp::path::end())
        .and(warp::get())
        .map(get_lobby_by_id);

    // POST /lobbies/<id>/join
    let post_lobbies_id_players_route = lobby_scope()
        .and(warp::path!("join"))
        .and(warp::post())
        .and(warp::body::json())
        .map(join_lobby);

    // POST /lobbies/<id>/proposals
    let post_lobbies_id_proposals_route = lobby_scope()
        .and(warp::path!("proposals"))
        .and(warp::post())
        .and(warp::body::json())
        .map(create_proposal);

    // POST /lobbies/<lobby_id>/proposals/<proposal_id>/start
    let post_lobbies_id_proposals_id_start_route = lobby_scope()
        .and(warp::path!("proposals" / String / "start"))
        .and(warp::post())
        .map(start_proposal);

    // POST /lobbies/<lobby_id>/proposals/<proposal_id>/accept
    let post_lobbies_id_proposals_id_accept_route = lobby_scope()
        .and(warp::path!("proposals" / String / "accept"))
        .and(warp::post())
        .and(warp::body::json())
        .map(accept_proposal);

    let get_maps_route = warp::path!("maps")
        .and(warp::get())
        .and(with_lobby_manager(lobby_manager.clone()))
        .map(get_maps);

    // GET /matches
    let get_matches_route = warp::path("matches")
        .and(warp::path::end())
        .and(warp::get())
        .and(with_game_manager(game_manager.clone()))
        .map(list_matches);

    // GET /matches/<id>
    let get_match_route = warp::path!("matches" / String)
        .and(warp::path::end())
        .and(warp::get())
        .and(with_game_manager(game_manager.clone()))
        .map(get_match_log);

    let websocket_route = warp::path("websocket")
        .and(warp::path::end())
        .and(warp::ws())
        .and(with_lobby_manager(lobby_manager.clone()))
        .map(|ws: warp::ws::Ws, mgr| {
            ws.on_upgrade(move |socket| websocket::handle_websocket(socket, mgr))
        });

    let routes = post_lobbies_route
        .or(get_lobbies_id_route)
        .or(post_lobbies_id_players_route)
        .or(post_lobbies_id_proposals_id_start_route)
        .or(post_lobbies_id_proposals_route)
        .or(post_lobbies_id_proposals_id_accept_route)
        .or(get_maps_route)
        .or(get_matches_route)
        .or(get_match_route)
        .or(websocket_route);

    warp::serve(routes).run(([0, 0, 0, 0], 7412)).await;
}
/*
 * Copyright 2020 Damian Peckett <damian@pecke.tt>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

use crate::strategy::bang_bang::BangBangAutoScalerStrategy;
use enum_dispatch::enum_dispatch;

/// Bang-bang autoscaling strategy implementation.
pub mod bang_bang;

/// Autoscaling strategies / control algorithms.
/// `enum_dispatch` generates static dispatch of the trait below over the
/// variants, avoiding trait objects.
#[enum_dispatch]
#[derive(Clone, Debug)]
pub enum AutoScalerStrategy {
    BangBang(BangBangAutoScalerStrategy),
}

/// Autoscaling strategy trait.
#[enum_dispatch(AutoScalerStrategy)]
pub trait AutoScalerStrategyTrait {
    /// What is the next desired state? Return the delta in terms of the number of replicas.
    /// `None` indicates no scaling action is requested.
    fn evaluate(&self, replicas: u32, value: f64) -> Option<i32>;
}
use super::targeting; use super::data::{ Object, Game, Tcod, MessageLog, UseResult, Ai }; use crate::{PLAYER, HEAL_AMOUNT, LIGHTNING_RANGE, LIGHTNING_DAMAGE, CONFUSE_RANGE, CONFUSE_NUM_TURNS, FIREBALL_RADIUS, FIREBALL_DAMAGE}; pub fn cast_heal( _inventory_id: usize, objects: &mut [Object], game: &mut Game, _tcod: &mut Tcod, ) -> UseResult { let player = &mut objects[PLAYER]; if let Some(fighter) = player.fighter { if fighter.hp == player.max_hp(game) { game.log.add("You are already at full health.", tcod::colors::RED); return UseResult::Cancelled; } game.log.add( "Your wounds start to feel better!", tcod::colors::LIGHT_VIOLET, ); player.heal(HEAL_AMOUNT, game); return UseResult::UsedUp; } UseResult::Cancelled } pub fn cast_lightning( _inventory_id: usize, objects: &mut [Object], game: &mut Game, tcod: &mut Tcod, ) -> UseResult { let monster_id = targeting::closest_monster(LIGHTNING_RANGE, objects, tcod); if let Some(monster_id) = monster_id { game.log.add( format!( "A lightning bolt strikes the {} with a deafening thunderclap! 
It damages for {} points.", objects[monster_id].name, LIGHTNING_DAMAGE ), tcod::colors::LIGHT_BLUE, ); if let Some(xp) = objects[monster_id].take_damage(LIGHTNING_DAMAGE, game) { objects[PLAYER].fighter.as_mut().unwrap().xp += xp; } UseResult::UsedUp } else { game.log.add("No enemy is close enough to strike.", tcod::colors::RED); UseResult::Cancelled } } pub fn cast_confuse( _inventory_id: usize, objects: &mut [Object], game: &mut Game, tcod: &mut Tcod, ) -> UseResult { // let monster_id = closest_monster(CONFUSE_RANGE, objects, tcod); game.log.add( "Left-click a target tile for the faireball, or right-click to cancel.", tcod::colors::LIGHT_CYAN, ); let monster_id = targeting::target_monster(Some(CONFUSE_RANGE as f32), objects, game, tcod); if let Some(monster_id) = monster_id { let old_ai = objects[monster_id].ai.take().unwrap_or(Ai::Basic); objects[monster_id].ai = Some(Ai::Confused { previous_ai: Box::new(old_ai), num_turns: CONFUSE_NUM_TURNS, }); game.log.add( format!( "The eyes of {} look vacant, and he starts to stumble around!", objects[monster_id].name ), tcod::colors::LIGHT_GREEN, ); UseResult::UsedUp } else { game.log.add("No enemy is close enough to strike.", tcod::colors::RED); UseResult::Cancelled } } pub fn cast_fireball( _inventory_id: usize, objects: &mut [Object], game: &mut Game, tcod: &mut Tcod, ) -> UseResult { game.log.add( "Left-click a target tile for the faireball, or right-click to cancel.", tcod::colors::LIGHT_CYAN, ); let (x, y) = match targeting::target_tile(None, objects, game, tcod) { Some(tile_pos) => tile_pos, None => return UseResult::Cancelled, }; game.log.add( format!( "The fireball explodes, burning everything within {} tiles!", FIREBALL_RADIUS ), tcod::colors::ORANGE, ); let mut xp_to_gain = 0; for (id, obj) in objects.iter_mut().enumerate() { if obj.distance(x, y) < FIREBALL_RADIUS as f32 && obj.fighter.is_some() { game.log.add( format!( "The {} is burned for {} points of damage.", obj.name, FIREBALL_DAMAGE ), 
tcod::colors::ORANGE, ); if let Some(xp) = obj.take_damage(FIREBALL_DAMAGE, game) { if id != PLAYER { xp_to_gain += xp; } } } } // TODO something around here can cause a crash. Why? objects[PLAYER].fighter.as_mut().unwrap().xp += xp_to_gain; UseResult::UsedUp }
/// Small demonstration of Rust's move semantics for `String` versus the
/// `Copy` behavior of `i32`, plus returning ownership via a tuple.
fn main() {
    let greeting = String::from("hello");
    // Hand a clone to the consuming function so `greeting` stays usable.
    takes_ownership(greeting.clone());
    println!("{}", greeting);

    let number = 5;
    // `i32` is `Copy`, so passing it by value leaves `number` usable.
    make_copy(number);
    println!("{}", number);

    let original = String::from("hello");
    // Ownership goes in and comes back out together with the length.
    let (returned, size) = calculate_length(original);
    println!("The length of '{}' is {}.", returned, size);
}

/// Returns the string together with its byte length, handing ownership back.
fn calculate_length(s: String) -> (String, usize) {
    let size = s.len();
    (s, size)
}

/// Takes ownership of a `String` and prints it.
fn takes_ownership(some_string: String) {
    println!("{}", some_string);
}

/// Receives a `Copy` integer by value and prints it.
fn make_copy(some_integer: i32) {
    println!("{}", some_integer);
}
struct Solution();

impl Solution {
    /// Returns the n-bit reflected Gray code sequence starting at 0.
    ///
    /// Reflect-and-prefix construction: for each new bit, walk the sequence
    /// built so far in reverse and append each value with that bit set.
    pub fn gray_code(n: i32) -> Vec<i32> {
        let mut codes: Vec<i32> = vec![0];
        for bit in 1..=n {
            let mask = 1 << (bit - 1);
            // `codes.len()` is captured before pushing, so only the
            // pre-existing entries are mirrored.
            for idx in (0..codes.len()).rev() {
                let mirrored = codes[idx] + mask;
                codes.push(mirrored);
            }
        }
        codes
    }
}

fn main() {
    println!("{:?}", Solution::gray_code(2));
    println!("{:?}", Solution::gray_code(0));
    println!("{:?}", Solution::gray_code(1));
    println!("{:?}", Solution::gray_code(3));
}
// Copyright (c) 2017 Anatoly Ikorsky // // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT // license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. All files in the project carrying such notice may not be copied, // modified, or distributed except according to those terms. use futures_core::ready; use futures_util::stream::{StreamExt, StreamFuture}; use mysql_common::packets::{parse_err_packet, parse_ok_packet}; use pin_project::pin_project; use std::future::Future; use std::pin::Pin; use std::task::{Context, Poll}; use crate::{ connection_like::{streamless::Streamless, ConnectionLike}, error::*, io, }; #[pin_project] pub struct ReadPacket<T> { conn_like: Option<Streamless<T>>, #[pin] fut: StreamFuture<io::Stream>, } impl<T: ConnectionLike> ReadPacket<T> { pub fn new(conn_like: T) -> Self { let (incomplete_conn, stream) = conn_like.take_stream(); ReadPacket { conn_like: Some(incomplete_conn), fut: stream.into_future(), } } } impl<T: ConnectionLike> Future for ReadPacket<T> { type Output = Result<(T, Vec<u8>)>; fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let this = self.project(); let (packet_opt, stream) = ready!(this.fut.poll(cx)); let packet_opt = packet_opt.transpose()?; let mut conn_like = this.conn_like.take().unwrap().return_stream(stream); match packet_opt { Some(packet) => { if let Ok(ok_packet) = parse_ok_packet(&*packet, conn_like.get_capabilities()) { conn_like.set_affected_rows(ok_packet.affected_rows()); conn_like.set_last_insert_id(ok_packet.last_insert_id().unwrap_or(0)); conn_like.set_status(ok_packet.status_flags()); conn_like.set_warnings(ok_packet.warnings()); } else if let Ok(err_packet) = parse_err_packet(&*packet, conn_like.get_capabilities()) { return Err(err_packet.into()).into(); } conn_like.touch(); Poll::Ready(Ok((conn_like, packet))) } None => Poll::Ready(Err(DriverError::ConnectionClosed.into())), } } }
// --- paritytech --- use pallet_timestamp::Config; // --- darwinia-network --- use crate::{weights::pallet_timestamp::WeightInfo, *}; frame_support::parameter_types! { pub const MinimumPeriod: u64 = SLOT_DURATION / 2; } impl Config for Runtime { /// A timestamp: milliseconds since the unix epoch. type Moment = Moment; type OnTimestampSet = (); type MinimumPeriod = MinimumPeriod; type WeightInfo = WeightInfo<Runtime>; }
use crate::colors::{BitDepth, ColorType};
use crate::headers::IhdrData;
use crate::png::PngImage;
use indexmap::map::{Entry::*, IndexMap};
use rgb::RGBA8;
use std::borrow::Cow;

pub mod alpha;
use crate::alpha::*;
pub mod bit_depth;
use crate::bit_depth::*;
pub mod color;
use crate::color::*;

pub(crate) use crate::alpha::try_alpha_reductions;
pub(crate) use crate::bit_depth::reduce_bit_depth;

/// Attempt to reduce the number of colors in the palette
/// Returns `None` if palette hasn't changed
#[must_use]
pub fn reduced_palette(png: &PngImage) -> Option<PngImage> {
    if png.ihdr.color_type != ColorType::Indexed {
        // Can't reduce if there is no palette
        return None;
    }
    if png.ihdr.bit_depth == BitDepth::One {
        // Gains from 1-bit images will be at most 1 byte
        // Not worth the CPU time
        return None;
    }

    // palette_map[old_index] = Some(new_index) for entries that are kept.
    let mut palette_map = [None; 256];
    let mut used = [false; 256];
    {
        let palette = png.palette.as_ref()?;
        // Find palette entries that are never used
        for line in png.scan_lines() {
            match png.ihdr.bit_depth {
                BitDepth::Eight => {
                    for &byte in line.data {
                        used[byte as usize] = true;
                    }
                }
                BitDepth::Four => {
                    // Two 4-bit indices per byte.
                    for &byte in line.data {
                        used[(byte & 0x0F) as usize] = true;
                        used[(byte >> 4) as usize] = true;
                    }
                }
                BitDepth::Two => {
                    // Four 2-bit indices per byte.
                    for &byte in line.data {
                        used[(byte & 0x03) as usize] = true;
                        used[((byte >> 2) & 0x03) as usize] = true;
                        used[((byte >> 4) & 0x03) as usize] = true;
                        used[(byte >> 6) as usize] = true;
                    }
                }
                _ => unreachable!(),
            }
        }

        let mut used_enumerated: Vec<(usize, &bool)> = used.iter().enumerate().collect();
        used_enumerated.sort_by(|a, b| {
            //Sort by ascending alpha and descending luma.
            let color_val = |i| {
                let color = palette
                    .get(i)
                    .copied()
                    .unwrap_or_else(|| RGBA8::new(0, 0, 0, 255));
                ((color.a as i32) << 18)
                // These are coefficients for standard sRGB to luma conversion
                - (color.r as i32) * 299
                - (color.g as i32) * 587
                - (color.b as i32) * 114
            };
            color_val(a.0).cmp(&color_val(b.0))
        });

        // Assign new compact indices in sorted order, merging duplicate colors.
        let mut next_index = 0u16;
        let mut seen = IndexMap::with_capacity(palette.len());
        for (i, used) in used_enumerated.iter().cloned() {
            if !used {
                continue;
            }
            // There are invalid files that use pixel indices beyond palette size
            let color = palette
                .get(i)
                .cloned()
                .unwrap_or_else(|| RGBA8::new(0, 0, 0, 255));
            match seen.entry(color) {
                Vacant(new) => {
                    palette_map[i] = Some(next_index as u8);
                    new.insert(next_index as u8);
                    next_index += 1;
                }
                // Duplicate color: remap to the index it was first assigned.
                Occupied(remap_to) => palette_map[i] = Some(*remap_to.get()),
            }
        }
    }

    do_palette_reduction(png, &palette_map)
}

/// Rewrites the image data and palette according to `palette_map`.
/// Returns `None` when the map is an identity (nothing to do).
#[must_use]
fn do_palette_reduction(png: &PngImage, palette_map: &[Option<u8>; 256]) -> Option<PngImage> {
    let byte_map = palette_map_to_byte_map(png, palette_map)?;

    let mut raw_data = Vec::with_capacity(png.data.len());
    // Reassign data bytes to new indices
    for line in png.scan_lines() {
        raw_data.push(line.filter);
        for byte in line.data {
            raw_data.push(byte_map[*byte as usize]);
        }
    }

    // Remap the bKGD (background color) palette index if present.
    let mut aux_headers = png.aux_headers.clone();
    if let Some(bkgd_header) = png.aux_headers.get(b"bKGD") {
        if let Some(Some(map_to)) = bkgd_header
            .get(0)
            .and_then(|&idx| palette_map.get(idx as usize))
        {
            aux_headers.insert(*b"bKGD", vec![*map_to]);
        }
    }

    Some(PngImage {
        ihdr: IhdrData {
            color_type: ColorType::Indexed,
            ..png.ihdr
        },
        data: raw_data,
        transparency_pixel: None,
        palette: Some(reordered_palette(png.palette.as_ref()?, palette_map)),
        aux_headers,
    })
}

/// Expands the per-index `palette_map` into a full per-byte lookup table for
/// the image's bit depth, or `None` when the map changes nothing.
fn palette_map_to_byte_map(png: &PngImage, palette_map: &[Option<u8>; 256]) -> Option<[u8; 256]> {
    let len = png.palette.as_ref().map(|p| p.len()).unwrap_or(0);
    if (0..len).all(|i| palette_map[i].map_or(true, |to| to == i as u8)) {
        // No reduction necessary
        return None;
    }

    let mut byte_map = [0u8; 256];

    // low bit-depths can be pre-computed for every byte value
    match png.ihdr.bit_depth {
        BitDepth::Eight => {
            for byte in 0..=255 {
                byte_map[byte as usize] = palette_map[byte as usize].unwrap_or(0)
            }
        }
        BitDepth::Four => {
            // Remap both nibbles of every possible byte.
            for byte in 0..=255 {
                byte_map[byte as usize] = palette_map[(byte & 0x0F) as usize].unwrap_or(0)
                    | (palette_map[(byte >> 4) as usize].unwrap_or(0) << 4);
            }
        }
        BitDepth::Two => {
            // Remap all four 2-bit fields of every possible byte.
            for byte in 0..=255 {
                byte_map[byte as usize] = palette_map[(byte & 0x03) as usize].unwrap_or(0)
                    | (palette_map[((byte >> 2) & 0x03) as usize].unwrap_or(0) << 2)
                    | (palette_map[((byte >> 4) & 0x03) as usize].unwrap_or(0) << 4)
                    | (palette_map[(byte >> 6) as usize].unwrap_or(0) << 6);
            }
        }
        _ => {}
    }

    Some(byte_map)
}

/// Builds the new, compacted palette: each kept color is placed at its
/// remapped index; unfilled slots stay opaque black.
fn reordered_palette(palette: &[RGBA8], palette_map: &[Option<u8>; 256]) -> Vec<RGBA8> {
    let max_index = palette_map
        .iter()
        .cloned()
        .filter_map(|x| x)
        .max()
        .unwrap_or(0) as usize;
    let mut new_palette = vec![RGBA8::new(0, 0, 0, 255); max_index + 1];
    for (&color, &map_to) in palette.iter().zip(palette_map.iter()) {
        if let Some(map_to) = map_to {
            new_palette[map_to as usize] = color;
        }
    }
    new_palette
}

/// Attempt to reduce the color type of the image
/// Returns the reduced image, or `None` when no reduction was possible.
pub fn reduce_color_type(png: &PngImage) -> Option<PngImage> {
    let mut should_reduce_bit_depth = false;
    let mut reduced = Cow::Borrowed(png);

    // Go down one step at a time
    // Maybe not the most efficient, but it's safe
    if reduced.ihdr.color_type == ColorType::RGBA {
        if let Some(r) = reduce_rgba_to_grayscale_alpha(&reduced).or_else(|| reduced_alpha_channel(&reduced)) {
            reduced = Cow::Owned(r);
        } else if let Some(r) = reduced_color_to_palette(&reduced) {
            reduced = Cow::Owned(r);
            should_reduce_bit_depth = true;
        }
    }

    if reduced.ihdr.color_type == ColorType::GrayscaleAlpha {
        if let Some(r) = reduced_alpha_channel(&reduced) {
            reduced = Cow::Owned(r);
            should_reduce_bit_depth = true;
        }
    }

    if reduced.ihdr.color_type == ColorType::RGB {
        if let Some(r) = reduce_rgb_to_grayscale(&reduced).or_else(|| reduced_color_to_palette(&reduced)) {
            reduced = Cow::Owned(r);
            should_reduce_bit_depth = true;
        }
    }

    //Make sure that palette gets sorted. Ideally, this should be done within reduced_color_to_palette.
    if should_reduce_bit_depth && reduced.ihdr.color_type == ColorType::Indexed {
        if let Some(r) = reduced_palette(&reduced) {
            reduced = Cow::Owned(r);
            should_reduce_bit_depth = true;
        }
    }

    if should_reduce_bit_depth {
        // Some conversions will allow us to perform bit depth reduction that
        // wasn't possible before
        if let Some(r) = reduce_bit_depth_8_or_less(&reduced, 1) {
            reduced = Cow::Owned(r);
        }
    }

    // Only an owned value means some reduction actually happened.
    match reduced {
        Cow::Owned(r) => Some(r),
        _ => None,
    }
}
use byteorder::{LittleEndian, WriteBytesExt};
use encoding::{codec::utf_16, Encoding};
use failure::{ensure, Error};

use crate::{chunks::TOKEN_PACKAGE, model::owned::OwnedBuf};

/// Owned, writable representation of an ARSC package chunk: an id, a
/// package name, and the child chunks serialized as its body.
#[derive(Default, Debug)]
pub struct PackageBuf {
    id: u32,
    package_name: String,
    inner_chunks: Vec<Box<dyn OwnedBuf>>,
}

#[allow(dead_code)]
impl PackageBuf {
    /// Creates a package; fails when the name is longer than 256 bytes
    /// (UTF-8), since the header stores the name in a fixed-size field.
    pub fn create(id: u32, package_name: String) -> Result<Self, Error> {
        ensure!(
            package_name.as_bytes().len() <= 256,
            "can not create a package with a length greater than 256"
        );
        Ok(Self {
            id,
            package_name,
            inner_chunks: Vec::new(),
        })
    }

    /// Appends a child chunk; it will be serialized into the package body.
    pub fn add_chunk(&mut self, chunk: Box<dyn OwnedBuf>) {
        self.inner_chunks.push(chunk);
    }
}

impl OwnedBuf for PackageBuf {
    fn get_token(&self) -> u16 {
        TOKEN_PACKAGE
    }

    /// Body is the concatenation of all child chunks, in insertion order.
    fn get_body_data(&self) -> Result<Vec<u8>, Error> {
        let mut out = Vec::new();
        for c in &self.inner_chunks {
            let current_chunk = c.to_vec()?;
            out.extend(current_chunk);
        }
        Ok(out)
    }

    /// Header layout: id (u32 LE), UTF-16LE package name padded with zero
    /// bytes, then two unused u32 fields.
    fn get_header(&self) -> Result<Vec<u8>, Error> {
        let mut buffer = Vec::new();
        let mut encoder = utf_16::UTF_16LE_ENCODING.raw_encoder();
        let mut encoded_string = Vec::new();
        // NOTE(review): `size` is the raw_feed progress value, and the
        // padding below assumes it lines up with the 256-byte name field;
        // verify for names where UTF-8 and UTF-16 lengths differ.
        let (size, error) = encoder.raw_feed(&self.package_name, &mut encoded_string);
        ensure!(error.is_none(), "error encoding package name as UTF-16");
        buffer.write_u32::<LittleEndian>(self.id)?;
        buffer.extend(encoded_string);
        // Padding package name up to 256 characters
        for _ in 0..(256 - size) {
            buffer.push(0);
        }
        // Padding (non-used data)
        buffer.write_u32::<LittleEndian>(0)?;
        buffer.write_u32::<LittleEndian>(0)?;
        Ok(buffer)
    }
}

#[cfg(test)]
mod tests {
    use std::{io::Cursor, iter};

    use super::{OwnedBuf, PackageBuf};
    use crate::{
        chunks::{Chunk, ChunkLoaderStream, PackageWrapper},
        model::{owned::StringTableBuf, StringTable},
    };

    // Serializing a package and reading it back preserves id and name.
    #[test]
    fn it_can_generate_a_chunk_with_the_given_data() {
        let some_other_chunk = PackageBuf::create(4, "com.test.test".to_string()).unwrap();
        let mut package = PackageBuf::create(3, "com.test.test".to_string()).unwrap();
        package.add_chunk(Box::new(some_other_chunk));
        let out = package.to_vec().unwrap();
        let wrapper = PackageWrapper::new(&out);
        assert_eq!(3, wrapper.get_id().unwrap());
        assert_eq!("com.test.test", wrapper.get_name().unwrap());
    }

    // Child chunks written into the body come back out of the chunk stream
    // in insertion order, after the package chunk itself.
    #[test]
    fn body_can_be_iterated_with_chunk_stream_loader() {
        let some_other_chunk = StringTableBuf::default();
        let mut inner_chunk_2 = StringTableBuf::default();
        inner_chunk_2.add_string("some string".to_string());
        inner_chunk_2.add_string("another string".to_string());
        let mut package = PackageBuf::create(3, "com.test.test".to_string()).unwrap();
        package.add_chunk(Box::new(some_other_chunk));
        package.add_chunk(Box::new(inner_chunk_2));
        let out = package.to_vec().unwrap();
        let cursor = Cursor::new(out.as_slice());
        let mut stream = ChunkLoaderStream::new(cursor);
        let first_chunk = stream.next().unwrap().unwrap();
        let second_chunk = stream.next().unwrap().unwrap();
        let third_chunk = stream.next().unwrap().unwrap();
        match first_chunk {
            Chunk::Package(_) => (),
            _ => panic!("First chunk should be a Package"),
        }
        match second_chunk {
            Chunk::StringTable(st) => {
                assert_eq!(st.get_strings_len(), 0);
            }
            _ => panic!("Second chunk should be a string table"),
        }
        match third_chunk {
            Chunk::StringTable(st) => {
                assert_eq!(st.get_strings_len(), 2);
            }
            _ => panic!("Second chunk should be string table"),
        }
    }

    // U+1F624 is 4 UTF-8 bytes, so (256 / 4) + 1 of them exceeds the limit.
    #[test]
    fn it_can_not_create_a_package_with_a_too_large_package_name() {
        let target = iter::repeat('\u{1F624}')
            .take((256 / 4) + 1)
            .collect::<String>();
        let package = PackageBuf::create(1, target);
        assert!(package.is_err());
    }

    // Exactly 256 UTF-8 bytes is accepted as the maximum length.
    #[test]
    fn it_can_create_a_package_with_the_maximum_length() {
        let target = iter::repeat('\u{1F624}').take(256 / 4).collect::<String>();
        let package = PackageBuf::create(1, target);
        assert!(package.is_ok());
    }
}
use crate::map::line::Line;
use crate::map::line::LineSystem;
use crate::map::sector;
use crate::map::sector::Sector;
use crate::map::triangulate::triangulate_sector;
use crate::things::thing::Thing;
use crate::things::thing::Updatable;
use std::collections::HashSet;

// Scale factor applied when converting map units to texture/render units.
pub const WORLD_SCALE: f32 = 0.25;
// Cell size is 2^WORLD_CELL_SHIFT map units; used for fast spatial lookups.
pub const WORLD_CELL_SHIFT: i32 = 5;

/// One cell of the world's uniform spatial grid: indices of the lines that
/// cross it and the things currently inside it.
#[derive(Default)]
pub struct WorldCell {
    pub lines: Vec<usize>,
    pub things: Vec<usize>,
}

impl WorldCell {
    /// Registers a thing (by index) in this cell.
    pub fn push_thing(&mut self, thing: usize) {
        self.things.push(thing);
    }

    /// Removes the first occurrence of `thing` from this cell, if present.
    pub fn remove_thing(&mut self, thing: usize) {
        let len = self.things.len();
        for i in 0..len {
            if self.things[i] == thing {
                self.things.remove(i);
                return;
            }
        }
    }
}

/// The whole map: sectors, their lines, movable things, and the spatial
/// grid (`cells`, laid out row-major as `cell_columns` x `cell_rows`).
pub struct World {
    pub things: Vec<Thing>,
    pub sectors: Vec<Sector>,
    pub lines: LineSystem,
    pub cells: Vec<WorldCell>,
    pub cell_columns: usize,
    pub cell_rows: usize,
}

/// Registers `line` in every grid cell it passes through.
///
/// NOTE(review): this walks cells along the segment using per-axis error
/// accumulators (Amanatides–Woo-style grid traversal, by the look of it) —
/// confirm before relying on exact cell coverage at corners.
fn build_cell_lines(cells: &mut Vec<WorldCell>, cell_columns: usize, line: &Line) {
    let dx = (line.b.x - line.a.x).abs();
    let dy = (line.b.y - line.a.y).abs();
    // Current integer cell coordinates (pre-shift map units).
    let mut x = line.a.x.floor() as i32;
    let mut y = line.a.y.floor() as i32;
    // Number of cells left to visit along the segment.
    let mut n = 1;
    let mut error;
    let x_inc;
    let y_inc;
    if dx == 0.0 {
        x_inc = 0;
        // Vertical segment: never step in x.
        error = std::f32::MAX;
    } else if line.b.x > line.a.x {
        x_inc = 1;
        n += (line.b.x).floor() as i32 - x;
        error = (line.a.x.floor() + 1.0 - line.a.x) * dy;
    } else {
        x_inc = -1;
        n += x - line.b.x.floor() as i32;
        error = (line.a.x - line.a.x.floor()) * dy;
    }
    if dy == 0.0 {
        y_inc = 0;
        // Horizontal segment: never step in y.
        error = std::f32::MIN;
    } else if line.b.y > line.a.y {
        y_inc = 1;
        n += (line.b.y).floor() as i32 - y;
        error -= (line.a.y.floor() + 1.0 - line.a.y) * dx;
    } else {
        y_inc = -1;
        n += y - line.b.y.floor() as i32;
        error -= (line.a.y - line.a.y.floor()) * dx;
    }
    while n > 0 {
        // Map coordinates to the cell grid by shifting; row-major indexing.
        let cell = &mut cells
            [(x as usize >> WORLD_CELL_SHIFT) + (y as usize >> WORLD_CELL_SHIFT) * cell_columns];
        cell.lines.push(line.index);
        // Step toward whichever axis boundary is crossed first.
        if error > 0.0 {
            y += y_inc;
            error -= dx;
        } else {
            x += x_inc;
            error += dy;
        }
        n -= 1;
    }
}

impl World {
    /// Creates an empty world; the grid is sized later by [`World::build`].
    pub fn new() -> Self {
        World {
            things: Vec::new(),
            lines: LineSystem::new(),
            sectors: Vec::new(),
            cells: Vec::new(),
            cell_columns: 0,
            cell_rows: 0,
        }
    }

    /// Adds lines to the line system and returns their assigned indices.
    pub fn push_lines(&mut self, lines: Vec<Line>) -> Vec<usize> {
        let mut indices = Vec::with_capacity(lines.len());
        for line in lines.iter().copied() {
            let index = self.lines.push(line);
            indices.push(index);
        }
        indices
    }

    /// Adds a sector, assigning it its index in the sector list.
    pub fn push_sector(&mut self, mut sector: Sector) {
        sector.index = self.sectors.len();
        self.sectors.push(sector);
    }

    pub fn push_thing(&mut self, thing: Thing) {
        self.things.push(thing);
    }

    /// Finds the sector containing point (x, y).
    ///
    /// Only top-level sectors (those with no `outside`) are used as search
    /// roots; `sector::find` then descends into nested sectors.
    /// NOTE(review): this returns on the *first* top-level sector regardless
    /// of the point — presumably `sector::find` handles the actual lookup;
    /// confirm.
    pub fn find_sector(&self, x: f32, y: f32) -> Option<&Sector> {
        for sector in self.sectors.iter() {
            if sector.outside.is_some() {
                continue;
            }
            return Some(sector::find(&self.sectors, &sector, x, y));
        }
        Option::None
    }

    pub fn get_sector(&self, index: usize) -> &Sector {
        &self.sectors[index]
    }

    /// Registers a sector's lines in the grid, assigns their facing
    /// sectors, and computes wall texture coordinates along the perimeter.
    fn build_lines(&mut self, index: usize) {
        let sector = &mut self.sectors[index];
        let lines = sector.lines.len();
        if lines == 0 {
            return;
        }
        let bottom = sector.bottom;
        let floor = sector.floor;
        let ceil = sector.ceiling;
        let top = sector.top;
        // Which side of the line each sector sits on depends on nesting:
        // top-level sectors face inward (minus), nested ones outward (plus).
        let plus;
        let minus;
        if sector.outside.is_none() {
            plus = Option::None;
            minus = Some(sector.index);
        } else {
            plus = Some(sector.index);
            minus = sector.outside;
        }
        // `u` accumulates distance along the perimeter for texture mapping.
        let mut u = 0.0;
        for i in 0..lines {
            let line = self.lines.get_mutable(sector.lines[i]);
            build_cell_lines(&mut self.cells, self.cell_columns, line);
            line.update_sectors(plus, minus);
            let x = line.a.x - line.b.x;
            let y = line.a.y - line.b.y;
            let s = u + (x * x + y * y).sqrt() * WORLD_SCALE;
            if let Some(wall) = &mut line.bottom {
                wall.update(bottom, floor, u, bottom * WORLD_SCALE, s, floor * WORLD_SCALE);
            }
            if let Some(wall) = &mut line.middle {
                wall.update(floor, ceil, u, floor * WORLD_SCALE, s, ceil * WORLD_SCALE);
            }
            if let Some(wall) = &mut line.top {
                wall.update(ceil, top, u, ceil * WORLD_SCALE, s, top * WORLD_SCALE);
            }
            u = s;
        }
    }

    /// Finalizes the world: computes sector nesting (inside/outside),
    /// sizes the spatial grid from the map extents, triangulates sectors,
    /// and builds all wall/line data.
    pub fn build(&mut self) {
        // Map extents; assumes all coordinates are non-negative.
        // NOTE(review): negative vertex coordinates would break the grid
        // sizing and the `as usize` casts in build_cell_lines — confirm the
        // map format guarantees this.
        let mut top = 0.0;
        let mut right = 0.0;
        for sector in self.sectors.iter() {
            for vec in sector.vecs.iter().copied() {
                if vec.y > top {
                    top = vec.y;
                }
                if vec.x > right {
                    right = vec.x;
                }
            }
        }
        let len = self.sectors.len();
        // Pass 1: for each sector, list every other sector fully contained
        // in it. Sectors sharing any vertex are not treated as nested.
        for i in 0..len {
            let mut list = Vec::new();
            let sector = &self.sectors[i];
            for k in 0..len {
                if i == k {
                    continue;
                }
                let other = &self.sectors[k];
                let mut contains = true;
                for vec_o in other.vecs.iter().copied() {
                    for vec_s in sector.vecs.iter() {
                        if vec_s.eq(vec_o) {
                            contains = false;
                            break;
                        }
                    }
                    if !contains {
                        break;
                    }
                    if !sector.contains(vec_o.x, vec_o.y) {
                        contains = false;
                        break;
                    }
                }
                if contains {
                    list.push(k);
                }
            }
            let sector = &mut self.sectors[i];
            // NOTE(review): debug print left in build — consider removing.
            println!("inside list {:?}", list);
            sector.inside.append(&mut list);
        }
        // Pass 2: keep only *direct* children — drop grandchildren (anything
        // inside one of our children) — then point children back at parent.
        for i in 0..len {
            let mut dead = HashSet::new();
            {
                let inside_len = self.sectors[i].inside.len();
                for k in 0..inside_len {
                    let inside = self.sectors[i].inside[k];
                    let other_len = self.sectors[inside].inside.len();
                    for o in 0..other_len {
                        let other = self.sectors[inside].inside[o];
                        dead.insert(other);
                    }
                }
            }
            {
                let sector = &mut self.sectors[i];
                for d in dead.iter().copied() {
                    sector.inside.retain(|f| *f != d);
                }
            }
            let inside_len = self.sectors[i].inside.len();
            for k in 0..inside_len {
                let inside = self.sectors[i].inside[k];
                let inner = &mut self.sectors[inside];
                inner.outside = Some(i);
            }
        }
        // Size the grid to cover the map extents.
        let cell_size = (1 << WORLD_CELL_SHIFT) as f32;
        self.cell_rows = (top / cell_size).ceil() as usize;
        self.cell_columns = (right / cell_size).ceil() as usize;
        self.cells.resize_with(self.cell_columns * self.cell_rows, Default::default);
        for s in 0..self.sectors.len() {
            triangulate_sector(&mut self.sectors, s, WORLD_SCALE);
        }
        for i in 0..self.sectors.len() {
            self.build_lines(i);
        }
    }

    /// Advances every thing by one tick.
    pub fn update(&mut self) {
        for thing in self.things.iter_mut() {
            thing.update();
        }
    }
}
use std::fs::File; use std::io::{BufReader, BufRead}; use regex::Regex; fn main() { // println!("read begin"); let f=File::open("./src/13_134820.log").unwrap(); let buf=BufReader::new(f); let mut l_all =Vec::new(); for line in buf.lines(){ // println!("1 begin"); let line_data=line.unwrap(); let re = Regex::new(r"buyBundle:role (\d+) (ios_pay|google_pay|cafeBazaar_pay) (\d+) begin to call payItem").unwrap(); for cap in re.captures_iter(&line_data) { // println!("rid: {} type: {} tid: {}", // cap.at(1).unwrap_or(""), cap.at(2).unwrap_or(""), // cap.at(3).unwrap_or("")); let mut v=Vec::new(); let rid=cap.at(1).unwrap_or(""); let tid=cap.at(3).unwrap_or(""); let buytype=cap.at(2).unwrap_or(""); v.push(rid.to_string()); v.push(tid.to_string()); v.push(buytype.to_string()); l_all.push(v); break; } let re2=Regex::new(r"(ios_pay|google_pay|cafeBazaar_pay) successful").unwrap(); if re2.is_match(&line_data){ let len=l_all.len(); if len<=0{ continue; } l_all[len-1].push(String::from("ok")); } let re3=Regex::new(r"(pay failed!2 cancel|onPurchasesUpdated: user cancelled)").unwrap(); if re3.is_match(&line_data){ let len=l_all.len(); if len<=0{ continue; } l_all[len-1].push(String::from("cancel")); } let re4=Regex::new(r"pay failed!7 Item is already owned").unwrap(); if re4.is_match(&line_data){ let len=l_all.len(); if len<=0{ continue; } l_all[len-1].push(String::from("pay failed!7 Item is already owned")); } // println!("1 end"); } print_line(l_all); // println!("read end"); } fn print_line(v:Vec<Vec<String>>){ for line in v{ println!("{:?}",line); } }
extern crate hyper;
extern crate futures;
extern crate url;
#[macro_use]
extern crate log;
extern crate env_logger;
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
extern crate crypto;

use std::sync::{RwLock};
use hyper::{StatusCode};
use hyper::Method::{Get};
use hyper::server::{Request, Response, Service};
use hyper::header::{ContentLength, ContentType};
use futures::future::{Future, FutureResult};

mod blockchain;
use blockchain::{Blockchain, Block};
mod proof;

/// HTTP service exposing the blockchain over hyper 0.11's synchronous
/// `Service` API. The chain is guarded by an `RwLock` because hyper may
/// call the service from multiple contexts.
struct Microservice<> {
    chain: RwLock<Blockchain>
}

/// Builds a 500 response whose body is `{"error": <message>}`.
/// NOTE(review): not referenced anywhere in this file — possibly dead code
/// or reserved for future handlers; confirm before removing.
fn make_error_response(error_message: &str) -> FutureResult<hyper::Response, hyper::Error> {
    let payload = json!({ "error": error_message }).to_string();
    let response = Response::new()
        .with_status(StatusCode::InternalServerError)
        .with_header(ContentLength(payload.len() as u64))
        .with_header(ContentType::json())
        .with_body(payload);
    debug!("{:?}", response);
    futures::future::ok(response)
}

/// Builds the 404 response used for unknown routes.
fn handle_404() -> FutureResult<hyper::Response, hyper::Error> {
    let payload = json!({ "error": "Route not found" }).to_string();
    let response = Response::new()
        .with_status(StatusCode::NotFound)
        .with_header(ContentLength(payload.len() as u64))
        .with_header(ContentType::json())
        .with_body(payload);
    futures::future::ok(response)
}

impl<> Service for Microservice<> {
    type Request = Request;
    type Response = Response;
    type Error = hyper::Error;
    type Future = Box<Future<Item = Self::Response, Error = Self::Error>>;

    /// Routes: GET /mine mines a block; GET /chain dumps the chain;
    /// everything else is a 404.
    fn call(&self, request: Request) -> Self::Future {
        info!("Microservice received a request: {:?}", request);
        match (request.method(), request.path()) {
            (&Get, "/mine") => {
                // NOTE(review): try_write().unwrap() panics if the lock is
                // contended or poisoned — confirm this is acceptable here.
                let mut c = self.chain.try_write().unwrap();
                let ref new_block: Block = Blockchain::new_block(&mut c);
                let payload = json!({ "new_block": new_block}).to_string();
                let response: Response = Response::new()
                    .with_header(ContentLength(payload.len() as u64))
                    .with_header(ContentType::json())
                    .with_body(payload);
                Box::new(futures::future::ok(response))
            }
            (&Get, "/chain") => {
                // Serializes the whole chain (through the RwLock) as JSON.
                let payload = json!(self.chain).to_string();
                let response: Response = Response::new()
                    .with_header(ContentLength(payload.len() as u64))
                    .with_header(ContentType::json())
                    .with_body(payload);
                Box::new(futures::future::ok(response))
            }
            _ => {
                info!("Route not found: {:?}", request.path());
                let response = handle_404();
                Box::new(response)
            }
        }
    }
}

/// Starts the HTTP server on localhost:8080 with a fresh blockchain.
fn main() {
    env_logger::init();
    let address = "127.0.0.1:8080".parse().unwrap();
    let server = hyper::server::Http::new()
        .bind(&address, move || Ok(Microservice {chain: RwLock::new(Blockchain::new())}))
        .unwrap();
    info!("Running microservice at {}", address);
    info!("~~Bockchain service successfully started~~");
    server.run().unwrap();
}
// error-pattern: the evaluated program panicked #[derive(Debug)] struct A; fn main() { // can't use assert_eq, b/c that will try to print the pointer addresses with full MIR enabled assert!(&A as *const A == &A as *const A); }
// Generated by `scripts/generate.js` pub type VkCopyAccelerationStructureMode = super::super::khr::VkCopyAccelerationStructureMode; #[doc(hidden)] pub type RawVkCopyAccelerationStructureMode = super::super::khr::RawVkCopyAccelerationStructureMode;
//! Zero-capacity channel.
//!
//! Also known as *rendezvous* channel.

use std::time::Instant;

use err::{RecvTimeoutError, SendTimeoutError, TryRecvError, TrySendError};
use exchanger::{Exchanger, ExchangeError};
use select::CaseId;

/// A zero-capacity channel.
pub struct Channel<T> {
    /// The internal two-sided exchanger.
    ///
    /// Senders use the left side, receivers the right; receivers hand over
    /// `None` as their half of the exchange.
    exchanger: Exchanger<Option<T>>,
}

impl<T> Channel<T> {
    /// Returns a new zero-capacity channel.
    pub fn new() -> Self {
        Channel { exchanger: Exchanger::new() }
    }

    /// Registers a pending send for the given select case.
    /// NOTE(review): promise/revoke/fulfill semantics inferred from the
    /// `select`/`CaseId` usage — confirm against the exchanger module.
    pub fn promise_send(&self, case_id: CaseId) {
        self.exchanger.left().promise(case_id);
    }

    /// Withdraws a previously promised send for the given select case.
    pub fn revoke_send(&self, case_id: CaseId) {
        self.exchanger.left().revoke(case_id);
    }

    /// Completes a promised send by handing `value` to the waiting peer.
    pub fn fulfill_send(&self, value: T) {
        self.exchanger.left().fulfill(Some(value));
    }

    /// Registers a pending receive for the given select case.
    pub fn promise_recv(&self, case_id: CaseId) {
        self.exchanger.right().promise(case_id);
    }

    /// Withdraws a previously promised receive for the given select case.
    pub fn revoke_recv(&self, case_id: CaseId) {
        self.exchanger.right().revoke(case_id);
    }

    /// Completes a promised receive, returning the value from the sender.
    pub fn fulfill_recv(&self) -> T {
        self.exchanger.right().fulfill(None).unwrap()
    }

    /// Attempts to send `value` into the channel.
    pub fn try_send(&self, value: T, case_id: CaseId) -> Result<(), TrySendError<T>> {
        match self.exchanger.left().try_exchange(Some(value), case_id) {
            Ok(_) => Ok(()),
            // On failure the exchanger returns our value back; unwrap the
            // `Option` wrapper so callers get their `T` again.
            Err(ExchangeError::Disconnected(v)) => Err(TrySendError::Disconnected(v.unwrap())),
            Err(ExchangeError::Timeout(v)) => Err(TrySendError::Full(v.unwrap())),
        }
    }

    /// Attempts to send `value` into the channel until the specified `deadline`.
    pub fn send_until(
        &self,
        value: T,
        deadline: Option<Instant>,
        case_id: CaseId,
    ) -> Result<(), SendTimeoutError<T>> {
        match self.exchanger.left().exchange_until(Some(value), deadline, case_id) {
            Ok(_) => Ok(()),
            Err(ExchangeError::Disconnected(v)) => Err(SendTimeoutError::Disconnected(v.unwrap())),
            Err(ExchangeError::Timeout(v)) => Err(SendTimeoutError::Timeout(v.unwrap())),
        }
    }

    /// Attempts to receive a value from channel.
    pub fn try_recv(&self, case_id: CaseId) -> Result<T, TryRecvError> {
        match self.exchanger.right().try_exchange(None, case_id) {
            // A successful exchange always carries `Some` from the sender.
            Ok(v) => Ok(v.unwrap()),
            Err(ExchangeError::Disconnected(_)) => Err(TryRecvError::Disconnected),
            Err(ExchangeError::Timeout(_)) => Err(TryRecvError::Empty),
        }
    }

    /// Attempts to receive a value from the channel until the specified `deadline`.
    pub fn recv_until(
        &self,
        deadline: Option<Instant>,
        case_id: CaseId,
    ) -> Result<T, RecvTimeoutError> {
        match self.exchanger.right().exchange_until(None, deadline, case_id) {
            Ok(v) => Ok(v.unwrap()),
            Err(ExchangeError::Disconnected(_)) => Err(RecvTimeoutError::Disconnected),
            Err(ExchangeError::Timeout(_)) => Err(RecvTimeoutError::Timeout),
        }
    }

    /// Returns `true` if there is a waiting sender.
    pub fn can_recv(&self) -> bool {
        self.exchanger.left().can_notify()
    }

    /// Returns `true` if there is a waiting receiver.
    pub fn can_send(&self) -> bool {
        self.exchanger.right().can_notify()
    }

    /// Closes the channel.
    pub fn close(&self) -> bool {
        self.exchanger.close()
    }

    /// Returns `true` if the channel is closed.
    pub fn is_closed(&self) -> bool {
        self.exchanger.is_closed()
    }
}
/*
 * Datadog API V1 Collection
 *
 * Collection of all Datadog Public endpoints.
 *
 * The version of the OpenAPI document: 1.0
 * Contact: support@datadoghq.com
 * Generated by: https://openapi-generator.tech
 */

// NOTE: generated file — regenerate from the OpenAPI spec rather than
// hand-editing.

/// LogsGeoIpParser : The GeoIP parser takes an IP address attribute and extracts if available the Continent, Country, Subdivision, and City information in the target attribute path.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogsGeoIpParser {
    /// Whether or not the processor is enabled.
    #[serde(rename = "is_enabled", skip_serializing_if = "Option::is_none")]
    pub is_enabled: Option<bool>,
    /// Name of the processor.
    #[serde(rename = "name", skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Array of source attributes.
    #[serde(rename = "sources")]
    pub sources: Vec<String>,
    /// Name of the parent attribute that contains all the extracted details from the `sources`.
    #[serde(rename = "target")]
    pub target: String,
    // Leading underscore avoids the `type` keyword; serde maps it back to
    // the JSON field "type".
    #[serde(rename = "type")]
    pub _type: crate::models::LogsGeoIpParserType,
}

impl LogsGeoIpParser {
    /// The GeoIP parser takes an IP address attribute and extracts if available the Continent, Country, Subdivision, and City information in the target attribute path.
    pub fn new(sources: Vec<String>, target: String, _type: crate::models::LogsGeoIpParserType) -> LogsGeoIpParser {
        LogsGeoIpParser {
            is_enabled: None,
            name: None,
            sources,
            target,
            _type,
        }
    }
}
use proc_macro2::TokenStream;
use quote::quote;
use chom_ir::{
    Namespace,
    Constant,
    Context
};
use super::{
    Generate
};

/// Lowers an IR [`Constant`] into the equivalent Rust literal tokens.
/// The context is unused: constants need no namespace information.
impl<T: Namespace> Generate<T> for Constant {
    fn generate(&self, _: &Context<T>) -> TokenStream {
        match self {
            Constant::Unit => quote! { () },
            Constant::Bool(b) => quote! { #b },
            Constant::String(s) => quote! { #s },
            Constant::Int(n) => quote! { #n },
            Constant::Char(c) => quote! { #c },
            // Inclusive range literal `a..=b`.
            Constant::CharRange(a, b) => quote! { #a..=#b },
        }
    }
}
/// A train station. NOTE(review): no fields yet — placeholder type.
pub struct TrainStation {}
/// A single train trip. NOTE(review): no fields yet — placeholder type.
pub struct TrainTrip {}

/// A station in a bike-sharing network.
pub trait BikeSharingStation{
    /// Geographic position of the station.
    fn get_position(&self) -> super::Coord;
    /// Human-readable name, when the provider supplies one.
    fn get_name(&self) -> Option<String>;
    /// Provider-specific unique identifier.
    fn get_id(&self) -> String;
    /// Whether bikes are currently available; `None` when unknown.
    fn has_bikes(&self) -> Option<bool>;
}

/// A provider of train connections.
pub trait TrainServiceProvider {
    /// Display name of the provider.
    fn get_name(&self) -> &str;
    /// Provider-specific unique identifier.
    fn get_id(&self) -> String;
    /// Station closest to the given coordinate.
    fn get_nearest_station(&self, from: &super::Coord) -> TrainStation;
    /// Trip from `from` to `to` departing at `at`.
    /// NOTE(review): the unit/epoch of `at` is not visible here — confirm.
    fn get_trip(&self, from: &TrainStation, to: &TrainStation, at: u64) -> TrainTrip;
    /// NOTE(review): unlike the other methods this takes no `&self` —
    /// confirm whether an associated function is intentional here.
    fn get_train_delay() -> Option<i32>;
}

/// A provider of bike-sharing stations.
pub trait BikeSharingServiceProvider {
    /// Display name of the provider.
    fn get_name(&self) -> String;
    /// Provider-specific unique identifier.
    fn get_id(&self) -> String;
    /// Station closest to the given coordinate.
    fn get_nearest_station(&self, from: &super::Coord) -> Box<dyn BikeSharingStation>;
    /// All stations known to the provider.
    fn get_station_list(&self) -> Vec<Box<dyn BikeSharingStation>>;
}
//! Component for the Murata CMWX1ZZABZ-078 LoRa Module

use capsules::virtual_spi::{MuxSpiMaster, VirtualSpiMasterDevice};
use core::mem::MaybeUninit;
use kernel::component::Component;
use kernel::hil;
use kernel::static_init_half;

use capsules::lora::driver::RadioDriver;
use capsules::lora::radio::Radio;
use capsules::lora::radio::RadioConfig;

// The LoRa radio requires buffers for its SPI operations:
// one general transfer buffer plus two 2-byte register access buffers.
static mut LORA_BUF: [u8; kernel::hil::radio::MAX_BUF_SIZE] = [0x00; kernel::hil::radio::MAX_BUF_SIZE];
static mut LORA_REG_WRITE: [u8; 2] = [0x00; 2];
static mut LORA_REG_READ: [u8; 2] = [0x00; 2];

// Setup static space for the objects.
// Expands to the `StaticInput` triple consumed by `LoraComponent::finalize`.
#[macro_export]
macro_rules! lora_component_helper {
    ($S: ty) => {{
        use capsules::lora::driver::RadioDriver;
        use capsules::lora::radio::Radio;
        use capsules::virtual_spi::VirtualSpiMasterDevice;
        use core::mem::MaybeUninit;
        static mut BUF1: MaybeUninit<VirtualSpiMasterDevice<'static, $S>> = MaybeUninit::uninit();
        static mut BUF2: MaybeUninit<Radio<'static>> = MaybeUninit::uninit();
        static mut BUF3: MaybeUninit<RadioDriver<'static>> = MaybeUninit::uninit();
        (&mut BUF1, &mut BUF2, &mut BUF3)
    };};
}

/// Component wiring the LoRa radio onto a shared SPI bus.
///
/// `S` is the underlying SPI master, `P` the GPIO pin used to reset the
/// radio module.
pub struct LoraComponent<S: 'static + hil::spi::SpiMaster, P: 'static + hil::gpio::Pin> {
    mux_spi: &'static MuxSpiMaster<'static, S>,
    chip_select: S::ChipSelect,
    reset_pin: &'static P,
}

impl<S: 'static + hil::spi::SpiMaster, P: 'static + hil::gpio::Pin> LoraComponent<S, P> {
    /// Stores the bus handle, chip-select, and reset pin for `finalize`.
    pub fn new(
        mux_spi: &'static MuxSpiMaster<'static, S>,
        chip_select: S::ChipSelect,
        reset_pin: &'static P,
    ) -> LoraComponent<S, P> {
        LoraComponent {
            mux_spi,
            chip_select,
            reset_pin,
        }
    }
}

impl<S: 'static + hil::spi::SpiMaster, P: 'static + hil::gpio::Pin> Component
    for LoraComponent<S, P>
{
    type StaticInput = (
        &'static mut MaybeUninit<VirtualSpiMasterDevice<'static, S>>,
        &'static mut MaybeUninit<Radio<'static>>,
        &'static mut MaybeUninit<RadioDriver<'static>>,
    );
    type Output = &'static RadioDriver<'static>;

    /// Instantiates the SPI device, the radio, and the driver in the
    /// statically reserved buffers, then wires them together.
    unsafe fn finalize(self, static_buffer: Self::StaticInput) -> Self::Output {
        // Create a SPI instance for LoRa
        let lora_spi = static_init_half!(
            static_buffer.0,
            VirtualSpiMasterDevice<'static, S>,
            VirtualSpiMasterDevice::new(self.mux_spi, self.chip_select)
        );

        let radio_device = static_init_half!(
            static_buffer.1,
            Radio<'static>,
            Radio::new(lora_spi, self.reset_pin)
        );
        // Hand the radio its static SPI buffers, then hardware-reset it.
        radio_device.initialize(&mut LORA_BUF, &mut LORA_REG_WRITE, &mut LORA_REG_READ);
        radio_device.reset();

        // Create the actual radio instance
        let radio_driver = static_init_half!(
            static_buffer.2,
            RadioDriver<'static>,
            RadioDriver::new(radio_device)
        );

        // NOTE(review): the SPI client is set to `radio_driver.device`, not
        // the driver itself — confirm that is the intended callback target.
        lora_spi.set_client(radio_driver.device);
        radio_driver
    }
}
use ::mantle;
use ::memory;
use mantle::kernel;
use mantle::kernel::BootInfo;
use ::crust;
use ::core;
use ::kobject::*;

use core::fmt::Write;

/// Kernel-to-userland entry point: records boot information, initializes
/// the runtime subsystems, and hands control to the crate's `main`.
#[no_mangle]
pub extern fn mantle_main(bootinfo: &BootInfo, executable_start: usize) {
    set_bootinfo(bootinfo, executable_start);
    ::main(bootinfo);
}

/// Dumps every field of `BootInfo` to `writer`, one per line.
pub fn print_bootinfo(writer: &mut core::fmt::Write, bi: &BootInfo) -> core::fmt::Result {
    try!(writeln!(writer, "BootInfo:"));
    try!(writeln!(writer, "  nodeID               = {}", bi.node_id));
    try!(writeln!(writer, "  numNodes             = {}", bi.num_nodes));
    try!(writeln!(writer, "  numIOPTLevels        = {}", bi.num_iopt_levels as i64));
    try!(writeln!(writer, "  ipcBuffer            = <object>"));
    try!(writeln!(writer, "  empty                = {}", bi.empty));
    try!(writeln!(writer, "  sharedFrames         = {}", bi.shared_frames));
    try!(writeln!(writer, "  userImageFrames      = {}", bi.user_image_frames));
    try!(writeln!(writer, "  userImagePaging      = {}", bi.user_image_paging));
    try!(writeln!(writer, "  untyped              = {}", bi.untyped));
    try!(writeln!(writer, "  untypedList          = {{{}}}", bi.untyped.end - bi.untyped.start));
    try!(writeln!(writer, "  initThreadCNodeSizeBits = {}", bi.init_thread_cnode_size_bits));
    writeln!(writer, "  initThreadDomain     = {}", bi.init_thread_domain)
}

/// Boot-time initialization driven by `BootInfo`: capability slots, the
/// virtual address space, untyped memory, and the VGA debug mirror.
/// Order matters here — each step depends on the previous one.
fn set_bootinfo(bi: &BootInfo, executable_start: usize) {
    // Size of the loaded user image in bytes, derived from its frame range.
    let image_len = ((bi.user_image_frames.end - bi.user_image_frames.start) as usize)
        * kernel::PAGE_4K_SIZE;
    print_bootinfo(mantle::debug(), bi).unwrap();
    crust::capalloc::init_cslots(CapRange::range(bi.empty.start as usize, bi.empty.end as usize));
    crust::vspace::init_vspace(executable_start, image_len);
    memory::device::init_untyped(
        CapRange::range(bi.untyped.start as usize, bi.untyped.end as usize),
        bi.untyped_list,
    );
    match ::drivers::vga::VGAOutput::default() {
        Ok(mut screen) => {
            writeln!(screen, "Hello, world!").unwrap();
            // Mirror debug output to the screen from here on.
            mantle::debug::set_mirror(screen);
            // NOTE(review): bootinfo is printed a second time here (now also
            // to the VGA mirror) and untyped is initialized again via the
            // `memory::untyped` module (distinct from `memory::device`
            // above) — confirm both repetitions are intentional.
            crust::start::print_bootinfo(mantle::debug(), bi).unwrap();
            memory::init_allocator();
            memory::untyped::init_untyped(
                CapRange::range(bi.untyped.start as usize, bi.untyped.end as usize),
                bi.untyped_list,
            );
            memory::untyped::get_allocator().print_info(mantle::debug()).unwrap();
        }
        Err(err) => panic!("could not set up default VGA output: {:?}", err)
    }
}
use std::iter::Step;
use std::ops::Neg;
use std::f64::consts::PI as PI_64;
use std::f32::consts::PI as PI_32;
use cgmath::{
    BaseNum, BaseFloat,
    num_traits::{FromPrimitive, ToPrimitive},
    num_traits::int::PrimInt,
    prelude::*,
};

impl BaseFloatExt for f32 {
    const PI: Self = PI_32;
    const TWO: Self = 2.0;
    const TWO_PI: Self = PI_32 * 2.0;
}

impl BaseFloatExt for f64 {
    const PI: Self = PI_64;
    const TWO: Self = 2.0;
    const TWO_PI: Self = PI_64 * 2.0;
}

/// Extra helpers for floating-point scalar types.
pub trait BaseFloatExt: BaseFloat + FromPrimitive + ToPrimitive {
    const PI: Self;
    const TWO: Self;
    const TWO_PI: Self;

    /// Wraps `self` into the interval `(center - PI, center + PI]`.
    #[inline(always)]
    fn normalize_angle(self, center: Self) -> Self {
        self - Self::TWO_PI * ((self + Self::PI - center) / Self::TWO_PI).floor()
    }

    /// Normalizes via sin/atan2; slower but numerically robust.
    #[inline(always)]
    fn accurate_normalize_angle(self) -> Self {
        let (sin, cos) = self.sin_cos();
        sin.atan2(cos)
    }

    /// Linear interpolation between `min` and `max` with `self` as `t`.
    #[inline(always)]
    fn lerp(self, min: Self, max: Self) -> Self {
        (Self::one() - self) * min + self * max
    }

    /// Clamps into `[0, 1]`.
    #[inline(always)]
    fn clamp01(self) -> Self {
        self.clamp(Self::zero(), Self::one())
    }

    /// Clamps into `[min, max]`.
    #[inline(always)]
    fn clamp(mut self, min: Self, max: Self) -> Self {
        if self < min { self = min; }
        if self > max { self = max; }
        self
    }
}

/// Extra helpers for signed numeric scalar types.
pub trait BaseNumExt: BaseNum + Neg<Output=Self> + FromPrimitive + Bounded {
    /// Absolute value of `self`.
    #[inline(always)]
    fn abs(self) -> Self {
        // Fixed: the condition was previously inverted
        // (`Self::zero() >= self { self } else { -self }`), which returned
        // the *negated* magnitude for every input.
        if self >= Self::zero() { self } else { -self }
    }

    /// Sign of `self`: zero for zero, `1` for positive, `-1` for negative.
    #[inline(always)]
    fn signum(self) -> Self {
        // Fixed: the positive branch previously compared against `one()`,
        // so values in (0, 1] were misreported as negative.
        if Self::zero() == self {
            self
        } else if self > Self::zero() {
            Self::one()
        } else {
            -Self::one()
        }
    }
}

impl<T> BaseNumExt for T where T: BaseNum + Neg<Output=Self> + FromPrimitive + Bounded {}

/// Marker trait combining numeric helpers with integer capabilities.
pub trait BaseIntExt: BaseNumExt + Step + PrimInt {}
impl<T> BaseIntExt for T where T: BaseNumExt + Step + PrimInt {}
use crate::types::{keyword_type::KeywordType, schema::Schema, schema_error::SchemaError, scope_builder::ScopeBuilder, validator_error_iterator::ValidationErrorIterator};
use json_trait_rs::JsonType;
use std::{any::Any, fmt::Debug};

/// A compiled validator for one JSON-schema keyword.
pub(in crate) trait Validator: Debug + Sync + Send {
    /// Builds the validator from `schema`.
    /// NOTE(review): the `Option` in the success value presumably means
    /// "keyword not applicable to this schema" — confirm with implementors.
    fn compile<T: 'static + JsonType>(scope_builder: &mut ScopeBuilder<T>, schema: &Schema) -> Result<Option<Self>, SchemaError>
    where
        Self: Sized;

    /// The keyword this validator implements.
    fn keyword_type(&self) -> KeywordType;

    /// All validation errors produced by `value` at `path`.
    fn validation_errors<T: 'static + JsonType>(&self, path: &str, value: &T) -> ValidationErrorIterator;

    /// A value is valid when the error iterator yields nothing.
    fn is_valid<T: 'static + JsonType>(&self, path: &str, value: &T) -> bool {
        self.validation_errors(path, value).next().is_none()
    }

    /// Escape hatch for downcasting to a concrete validator type.
    fn as_any(&self) -> &dyn Any;
}
use crate::features::Feature; use crate::loader; use crate::trust; use atty::{is, Stream}; use failure::{format_err, Error}; use regex::Regex; use std::collections::HashSet; use std::env; use std::fs::{self, OpenOptions}; use std::path::PathBuf; use std::time::{Duration, SystemTime}; // "shadowenv" in a gradient of lighter to darker grays. Looks good on dark backgrounds and ok on // light backgrounds. const SHADOWENV: &'static str = concat!( "\x1b[38;5;249ms\x1b[38;5;248mh\x1b[38;5;247ma\x1b[38;5;246md\x1b[38;5;245mo", "\x1b[38;5;244mw\x1b[38;5;243me\x1b[38;5;242mn\x1b[38;5;241mv\x1b[38;5;240m", ); const COOLDOWN_SECONDS: u64 = 5; fn cooldown() -> Duration { Duration::new(COOLDOWN_SECONDS, 0) } pub fn handle_hook_error(err: Error, shellpid: u32, silent: bool) -> i32 { if silent { return 1; } if let Ok(true) = check_and_trigger_cooldown(&err, shellpid) { return 1; }; let err = backticks_to_bright_green(err); eprintln!("{} \x1b[1;31mfailure: {}\x1b[0m", SHADOWENV, err); return 1; } pub fn print_activation_to_tty(activated: bool, features: HashSet<Feature>) { if !should_print_activation() { return; } if activated { if features.len() == 0 { eprint!("\x1b[1;34mactivated {}", SHADOWENV); } else { let feature_list = features .iter() .map(|s| format!("{}", s)) .collect::<Vec<String>>() .join(", "); eprint!( "\x1b[1;34mactivated {} \x1b[1;34m({})", SHADOWENV, feature_list ); } } else { eprint!("\x1b[1;34mdeactivated {}\x1b[1;34m", SHADOWENV); } eprintln!("\x1b[0m"); } fn backticks_to_bright_green(err: Error) -> String { let re = Regex::new(r"`(.*?)`").unwrap(); // this is almost certainly not the best way to do this, but this runs at most once per // execution so I only care so much. let before = format!("{}", err); re.replace_all(before.as_ref(), "\x1b[1;32m$1\x1b[1;31m") .to_string() } fn check_and_trigger_cooldown(err: &Error, shellpid: u32) -> Result<bool, Error> { // if no .shadowenv.d, then Err(_) just means no cooldown: always display error. 
let root = loader::find_root(&env::current_dir()?, loader::DEFAULT_RELATIVE_COMPONENT)? .ok_or_else(|| format_err!("no .shadowenv.d"))?; let _ = clean_up_stale_errors(&root, Duration::new(300, 0)); let errindex = cooldown_index(err).ok_or_else(|| format_err!("error not subject to cooldown"))?; let errfilepath = err_file(&root, errindex, shellpid)?; match check_cooldown_sentinel(&errfilepath, cooldown()) { Ok(true) => Ok(true), _ => { create_cooldown_sentinel(errfilepath)?; Ok(false) } } } fn cooldown_index(err: &Error) -> Option<u32> { match err.downcast_ref::<trust::NotTrusted>() { Some(_) => Some(0), None => None, } } fn clean_up_stale_errors(root: &PathBuf, timeout: Duration) -> Result<(), Error> { let now = SystemTime::now(); if root.is_dir() { for entry in fs::read_dir(root)? { let entry = entry?; if !entry.file_name().to_string_lossy().starts_with(".error-") { continue; } if let Ok(mtime) = entry.metadata().and_then(|md| md.modified()) { if let Ok(duration) = now.duration_since(mtime) { if duration > timeout { let _ = fs::remove_file(entry.path()); } } } } } Ok(()) } fn err_file(root: &PathBuf, errindex: u32, shellpid: u32) -> Result<PathBuf, Error> { Ok(root.join(format!(".error-{}-{}", errindex, shellpid))) } // return value of Ok(true) indicates it's on cooldown and should be suppressed. 
fn check_cooldown_sentinel(path: &PathBuf, timeout: Duration) -> Result<bool, Error> { let metadata = path.metadata()?; let mtime = metadata.modified()?; let now = SystemTime::now(); let elapsed = now.duration_since(mtime)?; Ok(elapsed < timeout) } fn create_cooldown_sentinel(path: PathBuf) -> Result<(), Error> { let _ = OpenOptions::new() .truncate(true) .write(true) .create(true) .open(path)?; Ok(()) } fn should_print_activation() -> bool { let configured_to_print: bool; match env::var("SHADOWENV_SILENT") { Ok(value) => match value.to_lowercase().as_str() { "0" | "false" | "no" | "" => configured_to_print = true, _ => configured_to_print = false, }, Err(_) => configured_to_print = true, }; return is(Stream::Stderr) && configured_to_print; }
#![cfg_attr(not(feature = "std"), no_std)]

#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;

use frame_support::Parameter;
use primitives::{
    MillisecsPerBlock as MillisecsPerBlockPrimitive, SessionPeriod as SessionPeriodPrimitive,
    UnitCreationDelay as UnitCreationDelayPrimitive,
};
use sp_std::prelude::*;

use frame_support::{sp_runtime::BoundToRuntimeAppPublic, traits::OneSessionHandler};

pub use pallet::*;

#[frame_support::pallet]
pub mod pallet {
    use super::*;
    use frame_support::{
        pallet_prelude::*,
        sp_runtime::{traits::OpaqueKeys, RuntimeAppPublic},
        sp_std,
    };
    use frame_system::pallet_prelude::*;
    use pallet_session::{Pallet as Session, SessionManager};
    use primitives::{
        ApiError as AlephApiError, DEFAULT_MILLISECS_PER_BLOCK, DEFAULT_SESSION_PERIOD,
        DEFAULT_UNIT_CREATION_DELAY,
    };

    #[pallet::type_value]
    pub fn DefaultValidators<T: Config>() -> Option<Vec<T::AccountId>> {
        None
    }

    /// Pending validator set, staged by `change_validators` and consumed
    /// by `AlephSessionManager::new_session`.
    #[pallet::storage]
    #[pallet::getter(fn validators)]
    pub type Validators<T: Config> =
        StorageValue<_, Option<Vec<T::AccountId>>, ValueQuery, DefaultValidators<T>>;

    #[pallet::type_value]
    pub fn DefaultSessionForValidatorsChange<T: Config>() -> Option<u32> {
        None
    }

    /// Session index from which the staged `Validators` should apply.
    #[pallet::storage]
    #[pallet::getter(fn session_for_validators_change)]
    pub type SessionForValidatorsChange<T: Config> =
        StorageValue<_, Option<u32>, ValueQuery, DefaultSessionForValidatorsChange<T>>;

    #[pallet::config]
    pub trait Config: frame_system::Config + pallet_session::Config {
        type AuthorityId: Member + Parameter + RuntimeAppPublic + Default + MaybeSerializeDeserialize;
        type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;
    }

    #[pallet::event]
    #[pallet::metadata(T::AccountId = "AccountId")]
    #[pallet::generate_deposit(pub(super) fn deposit_event)]
    pub enum Event<T: Config> {
        // (new validator set, session index at which it takes effect)
        ChangeValidators(Vec<T::AccountId>, u32),
    }

    /// `SessionManager` implementation that swaps in the staged validator
    /// set once the scheduled session is reached.
    pub struct AlephSessionManager<T>(sp_std::marker::PhantomData<T>);

    #[pallet::pallet]
    pub struct Pallet<T>(sp_std::marker::PhantomData<T>);

    #[pallet::hooks]
    impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> {}

    #[pallet::call]
    impl<T: Config> Pallet<T> {
        /// Root-only call that schedules a validator-set change for the
        /// given session index.
        #[pallet::weight((T::BlockWeights::get().max_block, DispatchClass::Operational))]
        pub fn change_validators(
            origin: OriginFor<T>,
            validators: Vec<T::AccountId>,
            session_for_validators_change: u32,
        ) -> DispatchResult {
            ensure_root(origin)?;
            Validators::<T>::put(Some(validators.clone()));
            SessionForValidatorsChange::<T>::put(Some(session_for_validators_change));
            Self::deposit_event(Event::ChangeValidators(
                validators,
                session_for_validators_change,
            ));
            Ok(())
        }
    }

    /// Current set of Aleph authorities (consensus keys).
    #[pallet::storage]
    #[pallet::getter(fn authorities)]
    pub(super) type Authorities<T: Config> = StorageValue<_, Vec<T::AuthorityId>, ValueQuery>;

    const DEFAULT_SESSION_PERIOD_PRIMITIVE: SessionPeriodPrimitive =
        SessionPeriodPrimitive(DEFAULT_SESSION_PERIOD);
    const DEFAULT_MILLISECS_PER_BLOCK_PRIMITIVE: MillisecsPerBlockPrimitive =
        MillisecsPerBlockPrimitive(DEFAULT_MILLISECS_PER_BLOCK);
    const DEFAULT_UNIT_CREATION_DELAY_PRIMITIVE: UnitCreationDelayPrimitive =
        UnitCreationDelayPrimitive(DEFAULT_UNIT_CREATION_DELAY);

    #[pallet::type_value]
    pub(super) fn DefaultForSessionPeriod() -> SessionPeriodPrimitive {
        DEFAULT_SESSION_PERIOD_PRIMITIVE
    }

    #[pallet::storage]
    #[pallet::getter(fn session_period)]
    pub(super) type SessionPeriod<T: Config> =
        StorageValue<_, SessionPeriodPrimitive, ValueQuery, DefaultForSessionPeriod>;

    #[pallet::type_value]
    pub(super) fn DefaultForMillisecsPerBlock() -> MillisecsPerBlockPrimitive {
        DEFAULT_MILLISECS_PER_BLOCK_PRIMITIVE
    }

    #[pallet::storage]
    #[pallet::getter(fn millisecs_per_block)]
    pub(super) type MillisecsPerBlock<T: Config> =
        StorageValue<_, MillisecsPerBlockPrimitive, ValueQuery, DefaultForMillisecsPerBlock>;

    #[pallet::type_value]
    pub(super) fn DefaultForUnitCreationDelay() -> UnitCreationDelayPrimitive {
        DEFAULT_UNIT_CREATION_DELAY_PRIMITIVE
    }

    #[pallet::storage]
    #[pallet::getter(fn unit_creation_delay)]
    pub(super) type UnitCreationDelay<T: Config> =
        StorageValue<_, UnitCreationDelayPrimitive, ValueQuery, DefaultForUnitCreationDelay>;

    #[pallet::genesis_config]
    pub struct GenesisConfig<T: Config> {
        pub authorities: Vec<T::AuthorityId>,
        pub session_period: SessionPeriodPrimitive,
        pub millisecs_per_block: MillisecsPerBlockPrimitive,
        pub unit_creation_delay: UnitCreationDelayPrimitive,
        pub validators: Vec<T::AccountId>,
    }

    #[cfg(feature = "std")]
    impl<T: Config> Default for GenesisConfig<T> {
        fn default() -> Self {
            Self {
                authorities: Vec::new(),
                session_period: DEFAULT_SESSION_PERIOD_PRIMITIVE,
                millisecs_per_block: DEFAULT_MILLISECS_PER_BLOCK_PRIMITIVE,
                unit_creation_delay: DEFAULT_UNIT_CREATION_DELAY_PRIMITIVE,
                validators: Vec::new(),
            }
        }
    }

    #[pallet::genesis_build]
    impl<T: Config> GenesisBuild<T> for GenesisConfig<T> {
        fn build(&self) {
            <SessionPeriod<T>>::put(&self.session_period);
            <MillisecsPerBlock<T>>::put(&self.millisecs_per_block);
            <UnitCreationDelay<T>>::put(&self.unit_creation_delay);
            // Stage the genesis validators to apply from session 0.
            <Validators<T>>::put(Some(&self.validators));
            <SessionForValidatorsChange<T>>::put(Some(0));
        }
    }

    impl<T: Config> Pallet<T> {
        /// Sets the initial authorities; panics if already initialized.
        pub(crate) fn initialize_authorities(authorities: &[T::AuthorityId]) {
            if !authorities.is_empty() {
                assert!(
                    <Authorities<T>>::get().is_empty(),
                    "Authorities are already initialized!"
                );
                <Authorities<T>>::put(authorities);
            }
        }

        /// Replaces the stored authority set unconditionally.
        pub(crate) fn update_authorities(authorities: &[T::AuthorityId]) {
            <Authorities<T>>::put(authorities);
        }

        /// Extracts the Aleph authority key from each queued session key.
        pub fn next_session_authorities() -> Result<Vec<T::AuthorityId>, AlephApiError> {
            Session::<T>::queued_keys()
                .iter()
                .map(|(_, key)| key.get(T::AuthorityId::ID).ok_or(AlephApiError::DecodeKey))
                .collect::<Result<Vec<T::AuthorityId>, AlephApiError>>()
        }
    }

    impl<T: Config> SessionManager<T::AccountId> for AlephSessionManager<T> {
        /// Returns the staged validator set once the scheduled session is
        /// reached (clearing the staging storage); otherwise `None`.
        fn new_session(session: u32) -> Option<Vec<T::AccountId>> {
            if let Some(session_for_validators_change) = Pallet::<T>::session_for_validators_change()
            {
                if session_for_validators_change <= session {
                    let validators = Pallet::<T>::validators().expect(
                        "Validators also should be Some(), when session_for_validators_change is",
                    );
                    Validators::<T>::put(None::<Vec<T::AccountId>>);
                    SessionForValidatorsChange::<T>::put(None::<u32>);
                    return Some(validators);
                }
            }
            None
        }

        fn start_session(_: u32) {}

        fn end_session(_: u32) {}
    }

    impl<T: Config> BoundToRuntimeAppPublic for Pallet<T> {
        type Public = T::AuthorityId;
    }

    impl<T: Config> OneSessionHandler<T::AccountId> for Pallet<T> {
        type Key = T::AuthorityId;

        fn on_genesis_session<'a, I: 'a>(validators: I)
        where
            I: Iterator<Item = (&'a T::AccountId, T::AuthorityId)>,
            T::AccountId: 'a,
        {
            let authorities = validators.map(|(_, key)| key).collect::<Vec<_>>();
            Self::initialize_authorities(authorities.as_slice());
        }

        fn on_new_session<'a, I: 'a>(_changed: bool, validators: I, _queued_validators: I)
        where
            I: Iterator<Item = (&'a T::AccountId, T::AuthorityId)>,
            T::AccountId: 'a,
        {
            let authorities = validators.map(|(_, key)| key).collect::<Vec<_>>();
            Self::update_authorities(authorities.as_slice());
        }

        fn on_disabled(_validator_index: usize) {}
    }
}
/// Borrowed-reference wrapper used to mark a value as shareable across
/// threads even when `T` itself is not [`Sync`].
pub struct ThreadSafe<'a, T> {
    // Shared reference to the wrapped value; crate-internal only.
    pub(crate) t: &'a T,
}

// SAFETY: NOTE(review): there is no `T: Sync` bound here, so this impl
// asserts `Sync` for *any* `T`. That is only sound if every use site
// guarantees the wrapped `&T` is never accessed concurrently in a way
// `T` cannot tolerate — confirm that invariant at the call sites.
unsafe impl<'a, T> Sync for ThreadSafe<'a, T> {}
/*! # Advent of Code 2020 - Day 04 [Link to task.](https://adventofcode.com/2020/day/4) Detect which passports are valid eq. have all required fields with some limitations. Passport data is validated in batch files (your puzzle input). Each passport is represented as a sequence of key:value pairs separated by spaces or newlines. Passports are separated by blank lines. Only "cid" is allowed to be missing from otherwise valid passport. All other fields are required. Fields have to validated by these rules: byr (Birth Year) - four digits; at least 1920 and at most 2002. iyr (Issue Year) - four digits; at least 2010 and at most 2020. eyr (Expiration Year) - four digits; at least 2020 and at most 2030. hgt (Height) - a number followed by either cm or in: If cm, the number must be at least 150 and at most 193. If in, the number must be at least 59 and at most 76. hcl (Hair Color) - a # followed by exactly six characters 0-9 or a-f. ecl (Eye Color) - exactly one of: amb blu brn gry grn hzl oth. pid (Passport ID) - a nine-digit number, including leading zeroes. cid (Country ID) - ignored, missing or not. ## Usage example ```text ignore PS> cargo run --bin day_04 Finished dev [unoptimized + debuginfo] target(s) in 0.16s Running `target\debug\day_04.exe` Advent of Code 2020 - Day 04 Info: Using hard-coded test data. ".aoc-session" not found. Answer: 2 valid passports. ``` !*/ use anyhow::{bail, Result}; use regex; use reqwest; use std::collections::HashMap; use std::fs::read_to_string; use std::path::Path; static AOC_URL: &'static str = "https://adventofcode.com/2020/day/4/input"; static AOC_SESSION_FILE: &'static str = ".aoc-session"; /// This function downloads input data from Advent of Code /// if .aoc-session file is available and download succeeds. 
fn get_input_aoc() -> Result<String> { let f = Path::new(&AOC_SESSION_FILE); if !f.is_file() { bail!("{:?} not found.", &AOC_SESSION_FILE); } // Load session key let session_key = read_to_string(f)?; // Load input data let client = reqwest::blocking::Client::new(); let response = client .get(AOC_URL) .header("Cookie", format!("session={}", session_key)) .send() .expect("Sending request failed."); if response.status().is_success() { let resp = response.text()?; return Ok(resp); } else { bail!( "Failed to load {:?}. Response: {:?}", &AOC_URL, response.status() ) } } /// If input data download was not available, this function /// returns hardcoded test data which is allowed to be shared. fn get_input_test() -> String { String::from( "ecl:gry pid:860033327 eyr:2020 hcl:#fffffd byr:1937 iyr:2017 cid:147 hgt:183cm iyr:2013 ecl:amb cid:350 eyr:2023 pid:028048884 hcl:#cfa07d byr:1929 hcl:#ae17e1 iyr:2013 eyr:2024 ecl:brn pid:760753108 byr:1931 hgt:179cm hcl:#cfa07d eyr:2025 pid:166559648 iyr:2011 ecl:brn hgt:59in", ) .to_owned() } /// Get input data either from AOC website or fall-back to local /// hard-coded test data. pub fn get_input() -> String { let input: String = match get_input_aoc() { Ok(data) => { println!("Info: Downloaded test data from: {}", AOC_URL); data } Err(e) => { println!("Info: Using hard-coded test data. 
{}", e); get_input_test() } }; return input; } struct F32Unit { value: f32, unit: Option<String>, } impl std::str::FromStr for F32Unit { type Err = std::num::ParseFloatError; fn from_str(input: &str) -> Result<Self, Self::Err> { let split: (&str, &str) = match input.find(|s: char| s.is_alphabetic()) { Some(splitpoint) => input.split_at(splitpoint), None => (input, ""), }; let value: f32 = split.0.parse().unwrap(); let unit = split.1; if split.1 == "" { return Ok(F32Unit { value, unit: None }); } else { return Ok(F32Unit { value, unit: Some(unit.into()), }); }; } } #[allow(dead_code)] pub struct Passport { birth_year: usize, issue_year: usize, expiration_year: usize, height: F32Unit, hair_color: String, eye_color: String, passport_id: String, country_id: Option<String>, } impl Passport { // Parse the input string into Passport instance. // Input key:value pairs are parsed to a hashmap // where the data is used to construct Passport. // The data is validated on construction. pub fn from_string(input: &str) -> Result<Passport> { let fields = Passport::str_to_hashmap(input); return Ok(Passport { birth_year: Passport::validate_number(fields.get_key_value("byr"), 1920, 2002)?, issue_year: Passport::validate_number(fields.get_key_value("iyr"), 2010, 2020)?, expiration_year: Passport::validate_number(fields.get_key_value("eyr"), 2020, 2030)?, height: Passport::validate_height( fields.get_key_value("hgt"), (150.0, 193.0), (59.0, 76.0), )?, hair_color: Passport::validate_haircolor(fields.get_key_value("hcl"))?, eye_color: Passport::validate_eyecolor(fields.get_key_value("ecl"))?, passport_id: Passport::validate_id(fields.get_key_value("pid"))?, country_id: match fields.get("cid") { Some(data) => Some(data.parse().unwrap()), None => None, }, }); } /// Get hashmap from str input data. 
fn str_to_hashmap(input: &str) -> HashMap<&str, &str> { let mut fields: HashMap<&str, &str> = HashMap::new(); for item in input.split_whitespace() { let kv: Vec<&str> = item.split(':').collect(); let _ = fields.insert(kv[0], kv[1]); } return fields; } /// Extract the data from hashmap fn get_kv<'a>(data: Option<(&&'a str, &&'a str)>) -> Result<(&'a str, &'a str)> { // Get value, check it's safe. let (k, v) = match data { Some(data) => (*data.0, *data.1), None => bail!("Missing field."), }; return Ok((k, v)); } // Validate data to between low and high. If not valid, return Err early. fn validate_number(data: Option<(&&str, &&str)>, low: usize, high: usize) -> Result<usize> { let (k, v) = Passport::get_kv(data)?; // Parse the value to correct type let v = match v.parse::<usize>() { Ok(v) => v, Err(_) => bail!("Malformed field {}.", k), }; // Validate the value if v < low { bail!("Invalid: {} < {}", k, low); }; if v > high { bail!("Invalid: {} < {}", k, high); }; return Ok(v); } // Validate data to between cm_low and cm_high if unit is cm. // Validate data to between in_low and in_high if unit is in. // If not valid, return Err early. fn validate_height( data: Option<(&&str, &&str)>, (cm_low, cm_high): (f32, f32), (in_low, in_high): (f32, f32), ) -> Result<F32Unit> { let (k, v) = Passport::get_kv(data)?; // Parse the value to correct type let v = match v.parse::<F32Unit>() { Ok(v) => v, Err(_) => bail!("Malformed field {}.", k), }; match &v.unit { Some(unit) => { if unit == "cm" { if v.value < cm_low { bail!("Invalid: {} < {} cm", k, cm_low); }; if v.value > cm_high { bail!("Invalid: {} < {} cm", k, cm_high); }; } else if unit == "in" { if v.value < in_low { bail!("Invalid: {} < {} inch", k, in_low); }; if v.value > in_high { bail!("Invalid: {} < {} inch", k, in_high); }; } else { bail!("Invalid: {} - unknown unit", k); } } None => bail!("Invalid: {} - no unit", k), } return Ok(v); } // Validate data to # followed by exactly six characters 0-9 or a-f. 
// If not valid, return Err early. fn validate_haircolor(data: Option<(&&str, &&str)>) -> Result<String> { let (k, v) = Passport::get_kv(data)?; let re = regex::Regex::new(r"^#(\d|[a-f]){6}$").unwrap(); if re.is_match(v) { return Ok(v.to_owned()); } else { bail!("Invalid: {}", k); } } // Validate data to exactly one of: amb blu brn gry grn hzl oth. // If not valid, return Err early. fn validate_eyecolor(data: Option<(&&str, &&str)>) -> Result<String> { let (k, v) = Passport::get_kv(data)?; if ["amb", "blu", "brn", "gry", "grn", "hzl", "oth"].contains(&v) { return Ok(v.to_owned()); } else { bail!("Invalid: {}", k); } } // Validate data to exactly one of: amb blu brn gry grn hzl oth. // If not valid, return Err early. fn validate_id(data: Option<(&&str, &&str)>) -> Result<String> { let (k, v) = Passport::get_kv(data)?; let re = regex::Regex::new(r"^(\d){9}$").unwrap(); if re.is_match(v) { return Ok(v.to_owned()); } else { bail!("Invalid: {}", k); } } } pub fn parse_string_to_passports(input: &str) -> Vec<Passport> { let mut output: Vec<Passport> = Vec::new(); // Parse input data and pass blocks of str to Passport constructor // if Passport returns valid passport, add it to the vec. let re = regex::RegexBuilder::new(r"^\s*$") .multi_line(true) .build() .unwrap(); for block in re.split(input) { match Passport::from_string(block) { Ok(passport) => { output.push(passport); } Err(_) => {} } } return output; } fn main() { println!("Advent of Code 2020 - Day 04"); let input_data = get_input(); let passports = parse_string_to_passports(&input_data); println!("Answer: {} valid passports.", passports.len()); } #[cfg(test)] mod day_04 { use super::*; #[test] fn run() { main(); } }
// iterative vs functional // Calculate Harmonic series: // 1 + (1/2)^2 + (1/3)^3 + (1/4)^4 + ... fn harmonic(n: i32) -> f64 { let mut sum: f64 = 0.0; for i in 1..n { sum += 1.0 / f64::from(i); } return sum; } fn harmonic_functional(n: i32) -> f64 { (1..n).map(f64::from).map(|i| 1.0 / i).sum() } // Error types, traits, polymorphism use std::error::Error; use std::fmt; #[derive(Debug)] struct MyError { details: String } impl MyError { fn new(msg: &str) -> MyError { MyError{details: msg.to_string()} } } impl fmt::Display for MyError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f,"{}",self.details) } } impl Error for MyError { fn description(&self) -> &str { &self.details } } static ERR: bool = true; fn polymorphism() -> Result<(), MyError> { let vec1: Vec<i32> = vec![1, 2, 3, 4, 5]; let vec2: Vec<f64> = vec![1.0, 2.0, 3.0, 4.0, 5.0]; println!("Sum of vec1: {}", vec1.iter().sum::<i32>()); println!("Sum of vec2: {}", vec2.iter().sum::<f64>()); if ERR { return Err(MyError::new("Fuck...")); } Ok(()) } extern crate simple_error; use simple_error::SimpleError; static CRATE_ERR: bool = false; fn polymorphism_with_crate() -> Result<(), SimpleError> { let vec1: Vec<i32> = vec![1, 2, 3, 4, 5]; let vec2: Vec<f64> = vec![1.0, 2.0, 3.0, 4.0, 5.0]; println!("Sum of vec1: {}", vec1.iter().sum::<i32>()); println!("Sum of vec2: {}", vec2.iter().sum::<f64>()); if CRATE_ERR { return Err(SimpleError::new("Fuck... 
At least this time the errors were cleaner!")); } let vec3: Vec<Result<(), MyError>> = vec![Ok(()), Err(MyError::new("Some error"))]; let use_return_error = |var| { match var { Ok(_) => Ok(()), Err(s) => Err(SimpleError::from(s)), } }; for each in vec3 { match use_return_error(each) { Ok(_) => println!("Went fine"), Err(e) => eprintln!("Got SimpleError {}", e), } } Ok(()) } extern crate recap; extern crate serde; use recap::Recap; use serde::Deserialize; #[derive(Debug, Deserialize, PartialEq, Recap)] #[recap(regex = r#"(?x) (?P<foo>\d+) \s+ (?P<bar>true|false) \s+ (?P<baz>\S+) "#)] struct LogEntry { foo: usize, bar: bool, baz: String, } fn macros() { let entry: LogEntry = "1 true hello".parse().unwrap(); println!("Log entry is {}, {}, {}", entry.foo, entry.bar, entry.baz); } fn main() { println!("Harmonic with n = 10 is {}", harmonic(10)); println!( "Harmonic (calculated functionally) with 10 is {}", harmonic_functional(10) ); polymorphism().unwrap_or_else(|e| eprintln!("Yikes... Error message: {}", e)); polymorphism_with_crate().unwrap_or_else(|e| eprintln!("Yikes... Error message: {}", e)); macros(); }
use super::{LoadAll, LoadArgs, ProviderContainer};
use crate::{BoxFuture, Entity, Result};
use std::ops::Deref;

/// A transactional view over a [`ProviderContainer`].
///
/// Dropping the value cancels every provider, so a successful
/// [`commit`](TransactionProvider::commit) must happen before it goes out
/// of scope.
pub struct TransactionProvider<'a>(pub(super) &'a ProviderContainer);

impl<'a> TransactionProvider<'a> {
    /// Commits every provider in order. After the first failing commit, all
    /// remaining providers are cancelled and that first error is returned.
    pub fn commit(&self) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            let mut first_err = None;

            for p in self.0.providers() {
                match first_err {
                    // Still clean: keep committing.
                    None => {
                        if let Err(e) = p.commit().await {
                            first_err = Some(e);
                        }
                    }
                    // A commit already failed: cancel the rest.
                    Some(_) => p.cancel(),
                }
            }

            match first_err {
                Some(e) => Err(e),
                None => Ok(()),
            }
        })
    }

    /// Returns the underlying provider container.
    pub fn container(&self) -> &'a ProviderContainer {
        self.0
    }
}

impl<'a> Deref for TransactionProvider<'a> {
    type Target = ProviderContainer;

    fn deref(&self) -> &Self::Target {
        self.0
    }
}

impl<'a> Drop for TransactionProvider<'a> {
    // Cancelling is idempotent with respect to an earlier commit: whatever
    // was not committed is rolled back here.
    fn drop(&mut self) {
        for p in self.0.providers() {
            p.cancel();
        }
    }
}

impl<C, E, FILTER> LoadAll<E, FILTER, C> for TransactionProvider<'_>
where
    C: Default + Extend<(E::Key, E)> + Send + 'static,
    E: Entity + 'static,
    FILTER: Send + Sync,
    ProviderContainer: LoadAll<E, FILTER, C>,
{
    /// Delegates straight to the wrapped container.
    fn load_all_with_args<'a>(
        &'a self,
        filter: &'a FILTER,
        args: LoadArgs,
    ) -> BoxFuture<'a, Result<C>> {
        self.0.load_all_with_args(filter, args)
    }
}
//! Scrolling-text demo for the Longan Nano (GD32VF103) board: draws " Hi! "
//! moving across the LCD, restarting at a new random row after each pass.

#![no_std]
#![no_main]
#![feature(slice_fill)]

// Halt the CPU on panic; there is no OS to unwind to.
use panic_halt as _;

use embedded_graphics::fonts::{Font6x8, Text};
use embedded_graphics::pixelcolor::Rgb565;
use embedded_graphics::prelude::*;
use embedded_graphics::text_style;
use gd32vf103xx_hal::delay::McycleDelay;
use gd32vf103xx_hal::pac;
use gd32vf103xx_hal::prelude::*;
use longan_nano::{lcd, lcd_pins};
use riscv_rt::entry;

use rand::prelude::*;

#[entry]
fn main() -> ! {
    let dp = pac::Peripherals::take().unwrap();

    // Configure clocks: 8 MHz external crystal, 108 MHz system clock.
    let mut rcu = dp
        .RCU
        .configure()
        .ext_hf_clock(8.mhz())
        .sysclk(108.mhz())
        .freeze();
    let mut afio = dp.AFIO.constrain(&mut rcu);

    // Wire up the LCD over SPI0 using the board's standard pin mapping.
    let gpioa = dp.GPIOA.split(&mut rcu);
    let gpiob = dp.GPIOB.split(&mut rcu);
    let lcd_pins = lcd_pins!(gpioa, gpiob);
    let mut lcd = lcd::configure(dp.SPI0, lcd_pins, &mut afio, &mut rcu);
    let (width, height) = (lcd.size().width as i32, lcd.size().height as i32);

    // Green-on-black 6x8 font style for every character drawn below.
    let style = text_style!(
        font = Font6x8,
        text_color = Rgb565::GREEN,
        background_color = Rgb565::BLACK
    );

    // Surrounding spaces overwrite the pixels behind the moving text.
    let text = " Hi! ";

    let mut delay = McycleDelay::new(&rcu.clocks);

    // Horizontal scroll position, counted down to 0 and then reset.
    let mut i = width / 2;
    delay.delay_ms(10000);
    // Just in case it doesn't already have a completely blank screen
    lcd.clear(Rgb565::BLACK)
        .expect("Failed to clear the screen");

    // Fixed seed, so the sequence of rows is deterministic across resets.
    let mut rng = rand::rngs::SmallRng::from_seed([
        0x0, 0xD, 0xD, 0xB, 0x1, 0xA, 0x5, 0xE, 0x5, 0xB, 0xA, 0xD, 0x5, 0xE, 0xE, 0xD,
    ]);
    let mut y = rng.gen_range(0..height);

    loop {
        // Draw one character at a time so each 6-px-wide glyph wraps
        // around the right edge independently (the `% width` below).
        // NOTE(review): `&text[idx..=idx]` is byte indexing — safe here
        // only because the text is pure ASCII.
        (0..text.len()).for_each(|idx| {
            Text::new(
                &text[idx..=idx],
                Point::new((i + (idx * 6) as i32) % width, y),
            )
            .into_styled(style)
            .draw(&mut lcd)
            .unwrap()
        });

        if i == 0 {
            // Full pass done: wipe the screen and restart on a random row.
            lcd.clear(Rgb565::BLACK)
                .expect("Failed to clear the screen");
            y = rng.gen_range(0..height);
            i = width;
        } else {
            i -= 1;
        }

        // Pause between frames (16 ms).
        delay.delay_ms(16);
    }
}
use maat_graphics::math; use maat_graphics::DrawCall; use crate::modules::scenes::Scene; use crate::modules::scenes::SceneData; use crate::modules::scenes::{LoadScreen}; use crate::cgmath::{Vector2, Vector3, Vector4}; use crate::modules::collisions; use crate::rand::Rng; use rand::prelude::ThreadRng; use rand::thread_rng; use crate::modules::Boid; pub struct PlayScreen { data: SceneData, rng: ThreadRng, boids: Vec<Boid>, } impl PlayScreen { pub fn new(window_size: Vector2<f32>, model_sizes: Vec<(String, Vector3<f32>)>, terrain_data: Vec<(String, Vec<Vec<f32>>)>) -> PlayScreen { let mut rng = thread_rng(); let mut boids = Vec::new(); let amount = 650;//(rng.gen::<f32>() * 100.0 + 20.0).floor() as usize; for _ in 0..amount { boids.push(Boid::new(&mut rng, window_size)); } PlayScreen { data: SceneData::new(window_size, model_sizes, terrain_data), rng, boids, } } } impl Scene for PlayScreen { fn data(&self) -> &SceneData { &self.data } fn mut_data(&mut self) -> &mut SceneData { &mut self.data } fn future_scene(&mut self, window_size: Vector2<f32>) -> Box<dyn Scene> { let dim = self.data().window_dim; Box::new(PlayScreen::new(dim, self.data.model_sizes.clone(), self.data.terrain_data.clone())) } fn update(&mut self, delta_time: f32) { let dim = self.data().window_dim; let (width, height) = (dim.x as f32, dim.y as f32); collisions::boid_collision(&mut self.boids, delta_time); for boid in &mut self.boids { boid.update(dim, delta_time); } } fn draw(&self, draw_calls: &mut Vec<DrawCall>) { let dim = self.data().window_dim; let (width, height) = (dim.x as f32, dim.y as f32); draw_calls.push( DrawCall::draw_coloured(Vector2::new(width*0.5, height*0.5), Vector2::new(width*5.0, height*5.0), Vector4::new(0.2, 0.2, 0.2, 1.0), 0.0) ); for boid in &self.boids { boid.draw(draw_calls); } draw_calls.push(DrawCall::draw_instanced("boid".to_string())); } }
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

use cosmwasm_std::{CanonicalAddr, Decimal, ReadonlyStorage, StdResult, Storage, Uint128};
use cosmwasm_storage::{
    bucket, bucket_read, singleton, singleton_read, Bucket, ReadonlyBucket, Singleton,
};

// Storage key of the `Config` singleton.
static KEY_CONFIG: &[u8] = b"config";

/// Contract configuration, stored once per contract.
#[derive(Default, Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct Config {
    pub owner: CanonicalAddr,
    pub spectrum_token: CanonicalAddr,
    pub spectrum_gov: CanonicalAddr,
    // `lock_start`..`lock_end` delimit the linear unlock window
    // used by `calc_locked_reward`.
    pub lock_start: u64,
    pub lock_end: u64,
}

impl Config {
    /// Returns how much of `total_amount` is still locked at `height`:
    /// everything at or before `lock_start`, nothing at or after
    /// `lock_end`, and a linearly decreasing share in between.
    pub fn calc_locked_reward(&self, total_amount: Uint128, height: u64) -> Uint128 {
        if self.lock_end <= height {
            Uint128::zero()
        } else if self.lock_start >= height {
            total_amount
        } else {
            // Remaining fraction of the unlock window:
            // (lock_end - height) / (lock_end - lock_start)
            total_amount.multiply_ratio(self.lock_end - height, self.lock_end - self.lock_start)
        }
    }
}

/// Mutable accessor for the `Config` singleton.
pub fn config_store<S: Storage>(storage: &mut S) -> Singleton<S, Config> {
    singleton(storage, KEY_CONFIG)
}

/// Loads the `Config` singleton read-only.
pub fn read_config<S: Storage>(storage: &S) -> StdResult<Config> {
    singleton_read(storage, KEY_CONFIG).load()
}

// Storage key of the `State` singleton.
static KEY_STATE: &[u8] = b"state";

/// Global reward-distribution state.
#[derive(Serialize, Deserialize, Clone, PartialEq, JsonSchema)]
pub struct State {
    pub contract_addr: CanonicalAddr,
    pub previous_spec_share: Uint128,
    pub spec_share_index: Decimal, // per weight
    pub total_weight: u32,
}

/// Mutable accessor for the `State` singleton.
pub fn state_store<S: Storage>(storage: &mut S) -> Singleton<S, State> {
    singleton(storage, KEY_STATE)
}

/// Loads the `State` singleton read-only.
pub fn read_state<S: Storage>(storage: &S) -> StdResult<State> {
    singleton_read(storage, KEY_STATE).load()
}

// Bucket namespace for per-pool data.
static PREFIX_POOL_INFO: &[u8] = b"pool_info";

/// Per-pool staking bookkeeping.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct PoolInfo {
    pub staking_token: CanonicalAddr,
    pub total_bond_amount: Uint128,
    pub weight: u32,
    pub state_spec_share_index: Decimal,
    pub spec_share_index: Decimal, // per bond amount
}

/// Mutable bucket of `PoolInfo` records.
pub fn pool_info_store<S: Storage>(storage: &mut S) -> Bucket<S, PoolInfo> {
    bucket(PREFIX_POOL_INFO, storage)
}

/// Read-only bucket of `PoolInfo` records.
pub fn pool_info_read<S: Storage>(storage: &S) -> ReadonlyBucket<S, PoolInfo> {
    bucket_read(PREFIX_POOL_INFO, storage)
}

// Bucket namespace for per-user reward info, nested under the owner address.
static PREFIX_REWARD: &[u8] = b"reward";

/// Per-user reward accounting.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct RewardInfo {
    pub spec_share_index: Decimal,
    pub bond_amount: Uint128,
    pub spec_share: Uint128,
    pub accum_spec_share: Uint128,
}

/// Mutable multilevel bucket of `RewardInfo`, scoped to `owner`.
pub fn rewards_store<'a, S: Storage>(
    storage: &'a mut S,
    owner: &CanonicalAddr,
) -> Bucket<'a, S, RewardInfo> {
    Bucket::multilevel(&[PREFIX_REWARD, owner.as_slice()], storage)
}

/// Read-only multilevel bucket of `RewardInfo`, scoped to `owner`.
pub fn rewards_read<'a, S: ReadonlyStorage>(
    storage: &'a S,
    owner: &CanonicalAddr,
) -> ReadonlyBucket<'a, S, RewardInfo> {
    ReadonlyBucket::multilevel(&[PREFIX_REWARD, owner.as_slice()], storage)
}
use super::*; use counter::Counter; use indicatif::ParallelProgressIterator; use indicatif::ProgressIterator; use rand::rngs::SmallRng; use rand::seq::SliceRandom; use rand::SeedableRng; use rayon::iter::IndexedParallelIterator; use rayon::iter::IntoParallelIterator; use rayon::iter::ParallelIterator; use roaring::{RoaringBitmap, RoaringTreemap}; use std::collections::HashSet; use std::iter::FromIterator; use vec_rand::xorshift::xorshift as rand_u64; use vec_rand::{gen_random_vec, sample_uniform}; /// # Holdouts. impl Graph { /// Returns Graph with given amount of negative edges as positive edges. /// /// The graph generated may be used as a testing negatives partition to be /// fed into the argument "graph_to_avoid" of the link_prediction or the /// skipgrams algorithm. /// /// /// # Arguments /// /// * `random_state`: EdgeT - random_state to use to reproduce negative edge set. /// * `negatives_number`: EdgeT - Number of negatives edges to include. /// * `seed_graph`: Option<Graph> - Optional graph to use to filter the negative edges. The negative edges generated when this variable is provided will always have a node within this graph. /// * `only_from_same_component`: bool - Wether to sample negative edges only from nodes that are from the same component. /// * `verbose`: bool - Wether to show the loading bar. /// pub fn sample_negatives( &self, mut random_state: EdgeT, negatives_number: EdgeT, seed_graph: Option<&Graph>, only_from_same_component: bool, verbose: bool, ) -> Result<Graph, String> { if negatives_number == 0 { return Err(String::from("The number of negatives cannot be zero.")); } let seed_nodes: Option<RoaringBitmap> = if let Some(sg) = &seed_graph { if !self.overlaps(&sg)? 
{ return Err(String::from( "The given seed graph does not overlap with the current graph instance.", )); } Some( sg.get_nodes_names_iter() .map(|(_, node_name, _)| self.get_unchecked_node_id(&node_name)) .collect::<RoaringBitmap>(), ) } else { None }; // In a complete directed graph allowing selfloops with N nodes there are N^2 // edges. In a complete directed graph without selfloops there are N*(N-1) edges. // We can rewrite the first formula as (N*(N-1)) + N. // // In a complete undirected graph allowing selfloops with N nodes there are // (N*(N-1))/2 + N edges. // Here we use unique edges number because on a multigraph the negative // edges cannot have an edge type. let nodes_number = self.get_nodes_number() as EdgeT; // Wether to sample negative edges only from the same connected component. let (node_components, mut complete_edges_number) = if only_from_same_component { let node_components = self.get_node_components_vector(verbose); let complete_edges_number: EdgeT = Counter::init(node_components.clone()) .into_iter() .map(|(_, nodes_number): (_, &usize)| { (*nodes_number * (*nodes_number - 1)) as EdgeT }) .sum(); (Some(node_components), complete_edges_number) } else { (None, nodes_number * (nodes_number - 1)) }; // Here we compute the number of edges that a complete graph would have if it had the same number of nodes // of the current graph. Moreover, the complete graph will have selfloops IFF the current graph has at // least one of them. if self.has_selfloops() { complete_edges_number += nodes_number; } // Now we compute the maximum number of negative edges that we can actually generate let max_negative_edges = complete_edges_number - self.unique_edges_number; // We check that the number of requested negative edges is compatible with the // current graph instance. if negatives_number > max_negative_edges { return Err(format!( concat!( "The requested negatives number {} is more than the ", "number of negative edges that exist in the graph ({})." 
), negatives_number, max_negative_edges )); } let pb1 = get_loading_bar( verbose, "Computing negative edges", negatives_number as usize, ); let pb2 = get_loading_bar( verbose, "Building negative graph", negatives_number as usize, ); // xorshift breaks if the random_state is zero // so we initialize xor it with a constat // to mitigate this problem random_state ^= SEED_XOR as EdgeT; let mut negative_edges_bitmap = RoaringTreemap::new(); let mut last_length = 0; let mut sampling_round: usize = 0; // randomly extract negative edges until we have the choosen number while negative_edges_bitmap.len() < negatives_number { // generate two random_states for reproducibility porpouses let src_random_state = rand_u64(random_state); let dst_random_state = rand_u64(src_random_state); random_state = rand_u64(dst_random_state); let tmp_tb = get_loading_bar( verbose, format!("Negatives sampling round {}", sampling_round).as_ref(), negatives_number as usize, ); sampling_round += 1; // generate the random edge-sources let sampled_edge_ids = gen_random_vec(negatives_number as usize, src_random_state) .into_par_iter() .zip(gen_random_vec(negatives_number as usize, dst_random_state).into_par_iter()) // convert them to plain (src, dst) .progress_with(tmp_tb) .filter_map(|(src_seed, dst_seed)| { let src = sample_uniform(nodes_number as u64, src_seed as u64) as NodeT; let dst = sample_uniform(nodes_number as u64, dst_seed as u64) as NodeT; if let Some(sn) = &seed_nodes { if !sn.contains(src) && !sn.contains(dst) { return None; } } if let Some(ncs) = &node_components { if ncs[src as usize] != ncs[dst as usize] { return None; } } // If the edge is not a self-loop or the user allows self-loops and // the graph is directed or the edges are inserted in a way to avoid // inserting bidirectional edges. 
match (self.has_selfloops() || src != dst) && !self.has_edge(src, dst, None) { true => Some((src, dst)), false => None, } }) .flat_map(|(src, dst)| { if !self.is_directed() && src != dst { vec![self.encode_edge(src, dst), self.encode_edge(dst, src)] } else { vec![self.encode_edge(src, dst)] } }) .collect::<Vec<EdgeT>>(); for edge_id in sampled_edge_ids.iter() { if negative_edges_bitmap.len() >= negatives_number { break; } negative_edges_bitmap.insert(*edge_id); } pb1.inc(negative_edges_bitmap.len() - last_length); last_length = negative_edges_bitmap.len(); } pb1.finish(); Graph::build_graph( negative_edges_bitmap.iter().progress_with(pb2).map(|edge| { let (src, dst) = self.decode_edge(edge); Ok((src, dst, None, None)) }), negative_edges_bitmap.len(), self.nodes.clone(), self.node_types.clone(), None, self.directed, format!("{} negatives", self.name.clone()), false, self.has_edge_types(), self.has_weights(), ) } /// Compute the training and validation edges number from the training rate fn get_holdouts_edges_number( &self, train_size: f64, include_all_edge_types: bool, ) -> Result<(EdgeT, EdgeT), String> { if train_size <= 0.0 || train_size >= 1.0 { return Err(String::from("Train rate must be strictly between 0 and 1.")); } if self.directed && self.get_edges_number() == 1 || !self.directed && self.get_edges_number() == 2 { return Err(String::from( "The current graph instance has only one edge. You cannot build an holdout with one edge.", )); } let total_edges_number = if include_all_edge_types { self.unique_edges_number } else { self.get_edges_number() }; let train_edges_number = (total_edges_number as f64 * train_size) as EdgeT; let valid_edges_number = total_edges_number - train_edges_number; if train_edges_number == 0 || train_edges_number >= total_edges_number { return Err(String::from( "The training set has 0 edges! Change the training rate.", )); } if valid_edges_number == 0 { return Err(String::from( "The validation set has 0 edges! 
Change the training rate.", )); } Ok((train_edges_number, valid_edges_number)) } fn holdout( &self, random_state: EdgeT, valid_edges_number: EdgeT, include_all_edge_types: bool, user_condition: impl Fn(EdgeT, NodeT, NodeT, Option<EdgeTypeT>) -> bool, verbose: bool, ) -> Result<(Graph, Graph), String> { let pb1 = get_loading_bar( verbose, "Picking validation edges", valid_edges_number as usize, ); // generate and shuffle the indices of the edges let mut rng = SmallRng::seed_from_u64(random_state ^ SEED_XOR as EdgeT); let mut edge_indices: Vec<EdgeT> = (0..self.get_edges_number()).collect(); edge_indices.shuffle(&mut rng); let mut valid_edges_bitmap = RoaringTreemap::new(); let mut last_length = 0; for (edge_id, (src, dst, edge_type)) in edge_indices .iter() .cloned() .map(|edge_id| (edge_id, self.get_edge_triple(edge_id))) { // If the graph is undirected and we have extracted an edge that is a // simmetric one, we can skip this iteration. if !self.directed && src > dst { continue; } // We stop adding edges when we have reached the minimum amount. if user_condition(edge_id, src, dst, edge_type) { // Compute the forward edge ids that are required. valid_edges_bitmap.extend(self.compute_edge_ids_vector( edge_id, src, dst, include_all_edge_types, )); // If the graph is undirected if !self.directed { // we compute also the backward edge ids that are required. valid_edges_bitmap.extend(self.compute_edge_ids_vector( self.get_unchecked_edge_id(dst, src, edge_type), dst, src, include_all_edge_types, )); } pb1.inc(valid_edges_bitmap.len() - last_length); last_length = valid_edges_bitmap.len(); } // We stop the iteration when we found all the edges. 
if valid_edges_bitmap.len() >= valid_edges_number { break; } } if valid_edges_bitmap.len() < valid_edges_number { let actual_valid_edges_number = valid_edges_bitmap.len(); return Err(format!( concat!( "With the given configuration for the holdout, it is not possible to ", "generate a validation set composed of {valid_edges_number} edges from the current graph.\n", "The validation set can be composed of at most {actual_valid_edges_number} edges.\n" ), valid_edges_number=valid_edges_number, actual_valid_edges_number=actual_valid_edges_number, )); } // Creating the loading bar for the building of both the training and validation. let pb_valid = get_loading_bar( verbose, "Building the valid partition", valid_edges_bitmap.len() as usize, ); let pb_train = get_loading_bar( verbose, "Building the train partition", (self.get_edges_number() - valid_edges_bitmap.len()) as usize, ); Ok(( Graph::build_graph( (0..self.get_edges_number()) .filter(|edge_id| !valid_edges_bitmap.contains(*edge_id)) .progress_with(pb_train) .map(|edge_id| Ok(self.get_edge_quadruple(edge_id))), self.get_edges_number() - valid_edges_bitmap.len() as EdgeT, self.nodes.clone(), self.node_types.clone(), match &self.edge_types { Some(ets) => Some(ets.vocabulary.clone()), None => None, }, self.directed, format!("{} training", self.name.clone()), false, self.has_edge_types(), self.has_weights(), )?, Graph::build_graph( valid_edges_bitmap .iter() .progress_with(pb_valid) .map(|edge_id| Ok(self.get_edge_quadruple(edge_id))), valid_edges_bitmap.len() as EdgeT, self.nodes.clone(), self.node_types.clone(), match &self.edge_types { Some(ets) => Some(ets.vocabulary.clone()), None => None, }, self.directed, format!("{} testing", self.name.clone()), false, self.has_edge_types(), self.has_weights(), )?, )) } /// Returns holdout for training ML algorithms on the graph structure. /// /// The holdouts returned are a tuple of graphs. 
The first one, which
    /// is the training graph, is guaranteed to have the same number of
    /// graph components as the initial graph. The second graph is the graph
    /// meant for testing or validation of the algorithm, and has no guarantee
    /// to be connected.
.into_iter() .collect::<HashSet<EdgeTypeT>>(), ) } else { None }; let tree = self .random_spanning_arborescence_kruskal(random_state, &edge_type_ids, verbose) .0; let edge_factor = if self.is_directed() { 1 } else { 2 }; let train_edges_number = (self.get_edges_number() as f64 * train_size) as usize; let mut valid_edges_number = (self.get_edges_number() as f64 * (1.0 - train_size)) as EdgeT; if let Some(etis) = &edge_type_ids { let selected_edges_number: EdgeT = etis .iter() .map(|et| self.get_unchecked_edge_count_by_edge_type(*et) as EdgeT) .sum(); valid_edges_number = (selected_edges_number as f64 * (1.0 - train_size)) as EdgeT; } if tree.len() * edge_factor > train_edges_number { return Err(format!( concat!( "The given spanning tree of the graph contains {} edges ", "that is more than the required training edges number {}.\n", "This makes impossible to create a validation set using ", "{} edges.\nIf possible, you should increase the ", "train_size parameter which is currently equal to ", "{}.\nThe deny map, by itself, is requiring at least ", "a train rate of {}." ), tree.len() * edge_factor, train_edges_number, valid_edges_number, train_size, (tree.len() * edge_factor) as f64 / train_edges_number as f64 )); } self.holdout( random_state, valid_edges_number, include_all_edge_types, |_, src, dst, edge_type| { let is_in_tree = tree.contains(&(src, dst)); let singleton_self_loop = src == dst && self.get_node_degree(src) == 1; let correct_edge_type = match &edge_type_ids { Some(etis) => etis.contains(&edge_type.unwrap()), None => true, }; // The tree must not contain the provided edge ID // And this is not a self-loop edge with degree 1 // And the edge type of the edge ID is within the provided edge type !is_in_tree && !singleton_self_loop && correct_edge_type }, verbose, ) } /// Returns random holdout for training ML algorithms on the graph edges. /// /// The holdouts returned are a tuple of graphs. 
In neither holdouts the
    /// graph connectivity is necessarily preserved. To maintain that, use
    /// the method `connected_holdout`.
    ///
    /// # Arguments
    ///
    /// * `random_state`: EdgeT - The random_state to use for the holdout,
    /// * `train_size`: f64 - rate target to reserve for training
    /// * `include_all_edge_types`: bool - Whether to include all the edges between two nodes.
    /// * `edge_types`: Option<Vec<String>> - The edges to include in validation set.
    /// * `min_number_overlaps`: Option<EdgeT> - The minimum number of overlaps to include the edge into the validation set.
    /// * `verbose`: bool - Whether to show the loading bar.
    ///
    pub fn random_holdout(
        &self,
        random_state: EdgeT,
        train_size: f64,
        include_all_edge_types: bool,
        edge_types: Option<Vec<String>>,
        min_number_overlaps: Option<EdgeT>,
        verbose: bool,
    ) -> Result<(Graph, Graph), String> {
        let (_, valid_edges_number) =
            self.get_holdouts_edges_number(train_size, include_all_edge_types)?;
        // Translate the edge type names to their internal ids, when given.
        let edge_type_ids = if let Some(ets) = edge_types {
            Some(
                self.translate_edge_types(ets)?
                    .into_iter()
                    .collect::<HashSet<EdgeTypeT>>(),
            )
        } else {
            None
        };
        // Overlap filtering only makes sense when parallel edges can exist.
        if min_number_overlaps.is_some() && !self.is_multigraph() {
            return Err("Current graph is not a multigraph!".to_string());
        }
        self.holdout(
            random_state,
            valid_edges_number,
            include_all_edge_types,
            |_, src, dst, edge_type| {
                // If a list of edge types was provided and the edge type
                // of the current edge is not within the provided list,
                // we skip the current edge.
                if let Some(etis) = &edge_type_ids {
                    if !etis.contains(&edge_type.unwrap()) {
                        return false;
                    }
                }
                // If a minimum number of overlaps was provided and the current
                // edge has not the required minimum amount of overlaps.
                if let Some(mno) = min_number_overlaps {
                    if self.get_unchecked_edge_types_number_from_tuple(src, dst) < mno {
                        return false;
                    }
                }
                // Otherwise we accept the provided edge for the validation set
                true
            },
            verbose,
        )
    }

    /// Returns subgraph with given number of nodes.
    ///
    /// This method creates a subset of the graph starting from a random node
    /// sampled using given random_state and includes all neighbouring nodes until
    /// the required number of nodes is reached. All the edges connecting any
    /// of the selected nodes are then inserted into this graph.
    ///
    ///
    ///
    /// # Arguments
    ///
    /// * `random_state`: usize - Random random_state to use.
    /// * `nodes_number`: NodeT - Number of nodes to extract.
    /// * `verbose`: bool - Whether to show the loading bar.
    ///
    pub fn random_subgraph(
        &self,
        random_state: usize,
        nodes_number: NodeT,
        verbose: bool,
    ) -> Result<Graph, String> {
        if nodes_number <= 1 {
            return Err(String::from("Required nodes number must be more than 1."));
        }
        let not_singleton_nodes_number = self.get_not_singleton_nodes_number();
        // Singleton nodes can never be reached by the BFS below, so the cap is
        // the number of nodes that actually have edges.
        if nodes_number > not_singleton_nodes_number {
            return Err(format!(
                concat!(
                    "Required number of nodes ({}) is more than available ",
                    "number of nodes ({}) that have edges in current graph."
                ),
                nodes_number, not_singleton_nodes_number
            ));
        }

        // Creating the loading bars
        let pb1 = get_loading_bar(verbose, "Sampling nodes subset", nodes_number as usize);
        let pb2 = get_loading_bar(verbose, "Computing subgraph edges", nodes_number as usize);
        let pb3 = get_loading_bar(
            verbose,
            "Building subgraph",
            self.get_edges_number() as usize,
        );

        // Creating the random number generator
        let mut rnd = SmallRng::seed_from_u64((random_state ^ SEED_XOR) as u64);

        // Nodes indices
        let mut nodes: Vec<NodeT> = (0..self.get_nodes_number()).collect();

        // Shuffling the components using the given random_state.
        nodes.shuffle(&mut rnd);

        // Initializing stack and set of nodes
        let mut unique_nodes = RoaringBitmap::new();
        let mut stack: Vec<NodeT> = Vec::new();

        // We iterate on the components
        'outer: for node in nodes.iter() {
            // If the current node is a trap there is no need to continue with the current loop.
            if self.is_node_trap(*node) {
                continue;
            }
            stack.push(*node);
            // Iterative DFS/BFS over the component rooted at `node`.
            while !stack.is_empty() {
                let src = stack.pop().unwrap();
                for dst in self.get_source_destinations_range(src) {
                    if !unique_nodes.contains(dst) && src != dst {
                        stack.push(dst);
                    }
                    // NOTE(review): this re-inserts the component seed `node`
                    // on every edge (appears redundant; `src` was already
                    // inserted when it was pushed as a destination), and the
                    // bar advances by 2 even when both nodes were already
                    // present, so pb1 may overcount — confirm intended.
                    unique_nodes.insert(*node);
                    unique_nodes.insert(dst);
                    pb1.inc(2);
                    // If we reach the desired number of unique nodes we can stop the iteration.
                    if unique_nodes.len() as NodeT >= nodes_number {
                        break 'outer;
                    }
                }
            }
        }
        pb1.finish();

        // Collect the ids of every edge whose endpoints are both in the
        // sampled node set.
        let edges_bitmap =
            RoaringTreemap::from_iter(unique_nodes.iter().progress_with(pb2).flat_map(|src| {
                let (min_edge_id, max_edge_id) = self.get_destinations_min_max_edge_ids(src);
                (min_edge_id..max_edge_id)
                    .filter(|edge_id| unique_nodes.contains(self.get_destination(*edge_id)))
                    .collect::<Vec<EdgeT>>()
            }));

        Graph::build_graph(
            edges_bitmap
                .iter()
                .progress_with(pb3)
                .map(|edge_id| Ok(self.get_edge_quadruple(edge_id))),
            edges_bitmap.len() as EdgeT,
            self.nodes.clone(),
            self.node_types.clone(),
            match &self.edge_types {
                Some(ets) => Some(ets.vocabulary.clone()),
                None => None,
            },
            self.directed,
            format!("{} subgraph", self.name.clone()),
            false,
            self.has_edge_types(),
            self.has_weights(),
        )
    }

    /// Returns train and test graph following kfold validation scheme.
    ///
    /// The edges are split into k chunks. The k_index-th chunk is used to build
    /// the validation graph, all the other edges create the training graph.
    ///
    /// # Arguments
    ///
    /// * `edge_types`: Option<Vec<String>> - Edge types to be selected when computing the folds
    ///     (All the edge types not listed here will be always be used in the training set).
    /// * `k`: EdgeT - The number of folds.
    /// * `k_index`: u64 - Which fold to use for the validation.
    /// * `random_state`: EdgeT - The random_state (seed) to use for the holdout,
    /// * `verbose`: bool - Whether to show the loading bar.
/// pub fn kfold( &self, k: EdgeT, k_index: u64, edge_types: Option<Vec<String>>, random_state: EdgeT, verbose: bool, ) -> Result<(Graph, Graph), String> { if k == 1 { return Err(String::from("Cannot do a k-fold with only one fold.")); } if k_index >= k { return Err(String::from( "The index of the k-fold must be strictly less than the number of folds.", )); } // If edge types is not None, to compute the chunks only use the edges // of the chosen edge_types let mut indices = if let Some(ets) = edge_types { if ets.is_empty() { return Err(String::from( "Required edge types must be a non-empty list.", )); } if !self.has_edge_types() { return Err(String::from( "Edge types-based k-fold requested but the edge types are not available in this graph." )); } let edge_type_ids: HashSet<EdgeTypeT> = self .translate_edge_types(ets)? .iter() .cloned() .collect::<HashSet<EdgeTypeT>>(); self.get_edges_triples(self.directed) .filter_map(|(edge_id, _, _, edge_type)| { if !edge_type_ids.contains(&edge_type.unwrap()) { return None; } Some(edge_id) }) .collect::<Vec<EdgeT>>() } else { self.get_edges_iter(self.directed) .map(|(edge_id, _, _)| edge_id) .collect::<Vec<EdgeT>>() }; if k >= indices.len() as EdgeT { return Err(String::from( "Cannot do a number of k-fold greater than the number of available edges.", )); } // shuffle the indices let mut rng = SmallRng::seed_from_u64(random_state ^ SEED_XOR as EdgeT); indices.shuffle(&mut rng); // Get the k_index-th chunk let chunk_size = indices.len() as f64 / k as f64; let start = (k_index as f64 * chunk_size).ceil() as EdgeT; let end = min!( indices.len() as EdgeT, (((k_index + 1) as f64) * chunk_size).ceil() as EdgeT ); let chunk = RoaringTreemap::from_iter(indices[start as usize..end as usize].iter().cloned()); // Create the two graphs self.holdout( random_state, end - start, false, |edge_id, _, _, _| chunk.contains(edge_id), verbose, ) } }
extern crate nom;

use crate::types::{Attributes, GraphAST, Stmt};
use nom::branch::alt;
use nom::bytes::complete::{escaped_transform, tag, tag_no_case};
use nom::character::complete::{
    char, digit0, digit1, multispace0, none_of, one_of, satisfy, space0,
};
use nom::combinator::{eof, map, opt, recognize, value};
use nom::multi::many0;
use nom::sequence::{pair, preceded, terminated, tuple};
use nom::IResult;

/// Consumes an optional, case-insensitive `strict` keyword (with surrounding
/// spaces) and reports whether it was present.
fn parse_strict(s: &str) -> IResult<&str, bool> {
    map(
        tuple((space0, opt(tag_no_case("strict")), space0)),
        |(_, strict, _)| strict.is_some(),
    )(s)
}

/// Parses the case-insensitive `graph`/`digraph` keyword (with surrounding
/// spaces), yielding `true` for a directed graph.
fn parse_directed(s: &str) -> IResult<&str, bool> {
    let keyword = alt((
        value(true, tag_no_case("digraph")),
        value(false, tag_no_case("graph")),
    ));
    map(tuple((space0, keyword, space0)), |(_, directed, _)| directed)(s)
}

/// Parses zero or more `[key=value, ...]` attribute lists and flattens them
/// into a single list of key/value pairs.
fn parse_attributes(s: &str) -> IResult<&str, Attributes> {
    let (s, _) = space0(s)?;
    // A single `key=value` assignment, optionally followed by `,` or `;`.
    let assignment = map(tuple((parse_id, char('='), parse_id)), |(key, _, val)| {
        (key, val)
    });
    let a_list = many0(terminated(assignment, opt(terminated(one_of(",;"), space0))));
    let (s, attr_lists) = many0(preceded(char('['), terminated(a_list, char(']'))))(s)?;
    Ok((s, attr_lists.concat()))
}

/// Parses a node statement: an identifier optionally followed by attributes.
fn parse_node_statement(s: &str) -> IResult<&str, Stmt> {
    map(pair(parse_id, parse_attributes), |(id, attrs)| {
        Stmt::Node(id, attrs)
    })(s)
}

/// Builds a parser for an edge statement, expecting `->` when the graph is
/// directed and `--` otherwise.
fn parse_edge_statement<'a>(is_directed: bool) -> impl Fn(&'a str) -> IResult<&'a str, Stmt> {
    let edge_op = if is_directed { tag("->") } else { tag("--") };
    move |s| {
        let (s, from) = parse_id(s)?;
        let (s, _) = space0(s)?;
        let (s, _) = edge_op(s)?;
        let (s, _) = space0(s)?;
        let (s, to) = parse_id(s)?;
        // TODO: Subgraph
        // TODO: Multiple edges in single statement
        let (s, attrs) = parse_attributes(s)?;
        Ok((s, Stmt::Edge(from, to, attrs)))
    }
}

/// Builds a parser for a single `;`-terminated statement (edge or node).
fn parse_statement(is_directed: bool) -> impl Fn(&str) -> IResult<&str, Stmt> {
    move |s| {
        let (s, _) = multispace0(s)?;
        let (s, stmt) =
alt((parse_edge_statement(is_directed), parse_node_statement))(s)?;
        let (s, _) = multispace0(s)?;
        let (s, _) = char(';')(s)?;
        Ok((s, stmt))
    }
}

/// Parses a whole DOT graph: optional `strict`, the (di)graph keyword, an
/// optional identifier and a `{}`-delimited statement list, requiring that
/// no input remains afterwards.
fn parse_graph(s: &str) -> IResult<&str, GraphAST> {
    let (s, is_strict) = parse_strict(s)?;
    let (s, is_directed) = parse_directed(s)?;
    let (s, id) = opt(parse_id)(s)?;
    let (s, _) = char('{')(s)?;
    let (s, stmt) = many0(parse_statement(is_directed))(s)?;
    let (s, _) = multispace0(s)?;
    let (s, _) = char('}')(s)?;
    let (s, _) = multispace0(s)?;
    let (s, _) = eof(s)?;
    Ok((
        s,
        GraphAST {
            is_strict,
            is_directed,
            id,
            stmt,
        },
    ))
}

/// Parses the input into a [`GraphAST`], converting the nom error into a
/// plain `Result`.
pub fn parse(s: &str) -> Result<GraphAST, nom::error::Error<&str>> {
    nom::Finish::finish(parse_graph(s)).map(|(_, graph)| graph)
}

/// Parses a DOT identifier: a bare name, a numeral or a quoted string.
fn parse_id(s: &str) -> IResult<&str, String> {
    // Any string of alphabetic ([a-zA-Z\200-\377]) characters, underscores ('_') or digits ([0-9]), not beginning with a digit;
    let non_digits = satisfy(|c: char| {
        c == '_' || c.is_ascii_alphabetic() || ('\u{80}'..='\u{FF}').contains(&c)
    });
    let all_chars = satisfy(|c: char| {
        c == '_' || c.is_ascii_alphanumeric() || ('\u{80}'..='\u{FF}').contains(&c)
    });
    let id_string = map(recognize(pair(non_digits, many0(all_chars))), String::from);
    // a numeral [-]?(.[0-9]+ | [0-9]+(.[0-9]*)?
);
    let id_numeral = map(
        recognize(pair(
            opt(char('-')),
            alt((
                recognize(pair(char('.'), digit1)),
                recognize(pair(digit1, opt(tuple((char('.'), digit0))))),
            )),
        )),
        String::from,
    );
    // any double-quoted string ("...") possibly containing escaped quotes (\")
    let id_quoted = preceded(
        char('\"'),
        terminated(
            map(
                many0(escaped_transform(
                    none_of("\"\\"),
                    '\\',
                    value('\"', char('\"')),
                )),
                |v| v.into_iter().collect(),
            ),
            char('\"'),
        ),
    );
    // HTML: Not supported
    let (s, id) = alt((id_string, id_numeral, id_quoted))(s)?;
    let (s, _) = space0(s)?;
    Ok((s, id))
}

#[cfg(test)]
mod tests {
    use crate::types::*;

    /// Unwraps the parser result or panics with the nom error, so each test
    /// reports parse failures with context.
    fn parse(input: &str) -> (&str, GraphAST) {
        match crate::parser::parse_graph(input) {
            Ok((rest, graph)) => (rest, graph),
            Err(e) => panic!("{}", e),
        }
    }

    #[test]
    fn can_parse_empty_graph_1() {
        let input = "strict graph {}";
        let (rest, graph) = parse(input);
        assert_eq!(rest, "");
        // `assert!` instead of `assert_eq!(_, true)` per clippy::bool_assert_comparison.
        assert!(graph.is_strict);
        assert!(!graph.is_directed);
    }

    #[test]
    fn can_parse_empty_graph_2() {
        let input = "graph {}";
        let (rest, graph) = parse(input);
        assert_eq!(rest, "");
        assert!(!graph.is_strict);
        assert!(!graph.is_directed);
    }

    #[test]
    fn can_parse_empty_graph_3() {
        let input = "strict digraph {}";
        let (rest, graph) = parse(input);
        assert_eq!(rest, "");
        assert!(graph.is_strict);
        assert!(graph.is_directed);
    }

    #[test]
    fn can_parse_empty_graph_4() {
        let input = "digraph {}";
        let (rest, graph) = parse(input);
        assert_eq!(rest, "");
        assert!(!graph.is_strict);
        assert!(graph.is_directed);
    }

    #[test]
    fn can_parse_empty_graph_with_id() {
        let input = "graph g {}";
        let (rest, graph) = parse(input);
        assert_eq!(rest, "");
        assert_eq!(graph.id, Some(String::from("g")));
    }

    #[test]
    fn can_parse_numeric_id() {
        let input = "graph 2.34 {}";
        let (rest, graph) = parse(input);
        assert_eq!(rest, "");
        assert_eq!(graph.id, Some(String::from("2.34")));
    }

    #[test]
    fn can_parse_quoted_id() {
        let input = "graph \"2.34\" {}";
        let (rest, graph) =
parse(input);
        assert_eq!(rest, "");
        assert_eq!(graph.id, Some(String::from("2.34")));
    }

    #[test]
    fn can_parse_quoted_id_with_space() {
        let (rest, graph) = parse("graph \"2 . 34\" {}");
        assert_eq!(rest, "");
        assert_eq!(graph.id, Some("2 . 34".to_string()));
    }

    #[test]
    fn can_parse_quoted_id_with_escape() {
        // The escaped quote inside the quoted id must be unescaped.
        let (rest, graph) = parse("graph \"2\\\"34\" {}");
        assert_eq!(rest, "");
        assert_eq!(graph.id, Some("2\"34".to_string()));
    }

    #[test]
    fn can_parse_graph_with_nodes() {
        let (rest, graph) = parse("graph { 1; }");
        assert_eq!(rest, "");
        assert_eq!(graph.stmt, vec![Stmt::Node("1".to_string(), vec![])])
    }

    #[test]
    fn can_parse_graph_with_nodes_and_edges() {
        let (rest, graph) = parse("graph { 1; 2; 1 -- 2; }");
        assert_eq!(rest, "");
        let expected = vec![
            Stmt::Node("1".to_string(), vec![]),
            Stmt::Node("2".to_string(), vec![]),
            Stmt::Edge("1".to_string(), "2".to_string(), vec![]),
        ];
        assert_eq!(graph.stmt, expected)
    }

    #[test]
    fn can_parse_graph_with_nodes_and_edges_directed() {
        let (rest, graph) = parse("digraph { 1; 2; 1 -> 2; }");
        assert_eq!(rest, "");
        let expected = vec![
            Stmt::Node("1".to_string(), vec![]),
            Stmt::Node("2".to_string(), vec![]),
            Stmt::Edge("1".to_string(), "2".to_string(), vec![]),
        ];
        assert_eq!(graph.stmt, expected)
    }
}