text stringlengths 8 4.13M |
|---|
use std::ffi::OsStr;
use std::io;
use anyhow::anyhow;
use anyhow::Context;
use anyhow::Result;
use positioned_io::ReadAt;
/// Marker trait for anything usable as a block device: random-access reads
/// (`ReadAt`) plus `Send + Sync` so the device can be shared across threads.
pub trait BlockDevice: ReadAt + Send + Sync {}
/// Blanket impl: any `ReadAt + Send + Sync` type automatically qualifies.
impl<T: ReadAt + Send + Sync> BlockDevice for T {}
/// Opens `target` as a block device.
///
/// If a partition table is found, returns a read-only view over the *first*
/// partition; otherwise (no table found, reported as `NotFound` by the
/// probe) returns the whole raw device.
///
/// # Errors
/// Fails when the file cannot be opened, or when partition probing fails
/// with anything other than `NotFound`.
pub fn open_raw_or_first_partition<S: AsRef<OsStr>>(target: S) -> Result<Box<dyn BlockDevice>> {
    // Was `.unwrap()`: an unreadable target panicked even though this
    // function already returns `Result`.
    let mut block_device = std::fs::File::open(target.as_ref())
        .with_context(|| anyhow!("opening {:?}", target.as_ref()))?;
    let partitions =
        match bootsector::list_partitions(&mut block_device, &bootsector::Options::default()) {
            Ok(parts) => parts,
            // No partition table at all: treat the device as raw.
            Err(e) if io::ErrorKind::NotFound == e.kind() => vec![],
            Err(e) => Err(e).with_context(|| anyhow!("searching for partitions"))?,
        };
    // `first()` replaces the `len() == 0` test plus the unreachable
    // `get(0).ok_or_else(..)` fallback in the original.
    Ok(match partitions.first() {
        None => Box::new(block_device),
        Some(part) => Box::new(positioned_io::Slice::new(
            block_device,
            part.first_byte,
            Some(part.len),
        )),
    })
}
/// Forward `ReadAt` through the box so a `Box<dyn BlockDevice>` itself
/// satisfies `ReadAt` (and therefore `BlockDevice`).
impl ReadAt for Box<dyn BlockDevice> {
    fn read_at(&self, pos: u64, buf: &mut [u8]) -> io::Result<usize> {
        // Plain delegation to the boxed device.
        (**self).read_at(pos, buf)
    }
}
|
pub mod bytecode;
pub mod convert;
mod frame;
pub mod opcode;
use crate::evaluator::builtin;
use crate::evaluator::objects;
use crate::vm::convert::Read;
mod preludes {
pub use super::super::preludes::*;
pub use crate::vm;
}
use preludes::*;
/// Maximum number of live operands on the VM stack.
pub const STACK_SIZE: usize = 2048;
/// Capacity of the global variable store.
// NOTE(review): 65536 = 2^16, presumably sized for a 16-bit global-index
// operand — confirm against the opcode encoding.
pub const GLOBALS_SIZE: usize = 65536;
// Shared immutable singleton objects; cloned cheaply wherever pushed.
const TRUE: objects::Boolean = objects::Boolean { value: true };
const FALSE: objects::Boolean = objects::Boolean { value: false };
const NULL: objects::Null = objects::Null {};
/// Fixed-capacity operand stack.
///
/// `pointer` always indexes the next free slot. `data` is pre-filled with
/// `NULL` so slots can be overwritten without reallocation. Pops only move
/// `pointer` back — the popped slot keeps its value until overwritten,
/// which is exactly what makes `last_popped` work.
///
/// NOTE(review): none of the popping methods guard against underflow;
/// `pointer - 1` on an empty stack panics. The compiler is trusted to emit
/// balanced bytecode.
#[derive(Debug, Default)]
struct Stack {
    data: Vec<objects::Object>,
    pointer: usize,
}
impl Stack {
    fn new() -> Self {
        Self {
            data: vec![NULL.into(); STACK_SIZE],
            ..Self::default()
        }
    }
    /// Current top of stack, or `None` when empty.
    fn top(&self) -> Option<&objects::Object> {
        if self.pointer == 0 {
            None
        } else {
            Some(&self.data[self.pointer - 1])
        }
    }
    /// The most recently popped value (the slot just above the stack top).
    /// Only valid until something is pushed again.
    fn last_popped(&self) -> &objects::Object {
        &self.data[self.pointer]
    }
    /// Pushes `o`, erroring on overflow instead of growing the buffer.
    fn push(&mut self, o: objects::Object) -> Result<()> {
        if self.pointer >= STACK_SIZE {
            return Err(anyhow::format_err!("stack overflow"));
        }
        self.data[self.pointer] = o;
        self.pointer += 1;
        Ok(())
    }
    /// Pops by moving the pointer back; the value stays in the buffer.
    fn pop(&mut self) -> &objects::Object {
        let o = &self.data[self.pointer - 1];
        self.pointer -= 1;
        o
    }
    /// Pops two values at once, returned as (deeper slot, top slot) —
    /// i.e. (first pushed, last pushed).
    fn pop_pair(&mut self) -> (&objects::Object, &objects::Object) {
        let o = (&self.data[self.pointer - 2], &self.data[self.pointer - 1]);
        self.pointer -= 2;
        o
    }
    /// Drains the top `num_elements` slots into an Array (clones them).
    fn extract_array(&mut self, num_elements: usize) -> objects::Array {
        let elements = self.data[(self.pointer - num_elements)..self.pointer].into();
        self.pointer -= num_elements;
        objects::Array { elements }
    }
    /// Drains the top `num_elements` slots (alternating key, value) into a
    /// Hash. Errors if a key is not hashable.
    fn extract_hash(&mut self, num_elements: usize) -> Result<objects::Hash> {
        let elements: Vec<objects::Object> =
            self.data[(self.pointer - num_elements)..self.pointer].into();
        debug_assert_eq!(elements.len() % 2, 0);
        let mut pairs = objects::HashPairs::new();
        for i in 0..(elements.len() / 2) {
            pairs.insert(
                elements[i * 2].clone().try_into()?,
                elements[i * 2 + 1].clone(),
            );
        }
        self.pointer -= num_elements;
        Ok(objects::Hash { pairs })
    }
}
/// Storage for global bindings; `None` marks a slot that was never assigned.
/// Held outside the VM so it can survive across REPL runs.
#[derive(Debug)]
pub struct GlobalSpace(Vec<Option<objects::Object>>);
impl Default for GlobalSpace {
    fn default() -> Self {
        // Pre-allocate every slot so SetGlobal can index directly.
        Self(vec![None; GLOBALS_SIZE])
    }
}
impl GlobalSpace {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Bytecode virtual machine.
///
/// Borrows the global store mutably for its whole lifetime so several VM
/// instances can share one `GlobalSpace` sequentially (REPL sessions).
#[derive(Debug)]
pub struct VM<'a> {
    constants: Vec<objects::Object>,
    globals: &'a mut GlobalSpace,
    stack: Stack,
    stack_frame: frame::StackFrame,
}
impl<'a> VM<'a> {
/// Builds a VM around compiled `bytecode`, reusing an external `globals`
/// store.
///
/// The whole program is wrapped in a synthetic zero-arg "main" closure and
/// pushed as the first frame, so `run` can treat top-level code and
/// function bodies uniformly.
pub fn new_with_globals_store(
    bytecode: bytecode::Bytecode,
    globals: &'a mut GlobalSpace,
) -> Self {
    let main_fn = objects::CompiledFunction {
        instructions: bytecode.instructions,
        num_locals: 0,
        num_parameters: 0,
    };
    let closure = objects::Closure {
        func: main_fn,
        ..Default::default()
    };
    // Base pointer 0: main's locals (none) start at the bottom of the stack.
    let main_frame = frame::Frame::new(closure, 0);
    let mut stack_frame = frame::StackFrame::new();
    stack_frame.push(main_frame);
    Self {
        constants: bytecode.constants,
        globals,
        stack: Stack::new(),
        stack_frame,
    }
}
/// Current top of the operand stack, if any.
pub fn stack_top(&self) -> Option<&objects::Object> {
    self.stack.top()
}
/// The value most recently popped — the result of the last expression
/// statement. Used by the REPL and the tests.
pub fn last_popped_stack_elem(&self) -> &objects::Object {
    self.stack.last_popped()
}
/// Executes the loaded bytecode until the current frame's instruction
/// pointer runs past the end of its instructions.
///
/// Dispatches one opcode per loop iteration. Every arm is responsible for
/// advancing the frame's `pointer` past the opcode and its operands
/// (usually `1 + readsize()`), which is why each arm ends with a pointer
/// bump.
///
/// # Errors
/// Stack overflow, undefined globals, calling a non-function, arity
/// mismatch, or type errors inside operators.
pub fn run(&mut self) -> Result<()> {
    while self.stack_frame.current().borrow().pointer
        < self.stack_frame.current().borrow().instructions().0.len()
    {
        // Decode the opcode at the current instruction pointer.
        let op = {
            let ip = self.stack_frame.current().borrow().pointer;
            opcode::Opcode::try_from(
                &self.stack_frame.current().borrow().instructions().0[ip..],
            )?
        };
        match &op {
            // Push a constant from the constant pool.
            opcode::Opcode::Constant(constant) => {
                let const_idx = constant.0;
                // TODO: Rc<object::Object> ?
                self.stack
                    .push(self.constants[usize::from(const_idx)].clone())?;
                self.stack_frame.current().borrow_mut().pointer += 1 + constant.readsize();
            }
            // Arithmetic: pop two operands, push the result.
            opcode::Opcode::Add(_)
            | opcode::Opcode::Sub(_)
            | opcode::Opcode::Mul(_)
            | opcode::Opcode::Div(_) => {
                self.execute_binary_operation(&op)?;
                self.stack_frame.current().borrow_mut().pointer += 1 + op.readsize();
            }
            // Discard the result of an expression statement.
            opcode::Opcode::Pop(pop) => {
                self.stack.pop();
                self.stack_frame.current().borrow_mut().pointer += 1 + pop.readsize();
            }
            opcode::Opcode::True(t) => {
                self.stack.push(TRUE.into())?;
                self.stack_frame.current().borrow_mut().pointer += 1 + t.readsize();
            }
            opcode::Opcode::False(f) => {
                self.stack.push(FALSE.into())?;
                self.stack_frame.current().borrow_mut().pointer += 1 + f.readsize();
            }
            opcode::Opcode::Equal(_)
            | opcode::Opcode::NotEqual(_)
            | opcode::Opcode::GreaterThan(_) => {
                self.execute_comparison(&op)?;
                self.stack_frame.current().borrow_mut().pointer += 1 + op.readsize();
            }
            opcode::Opcode::Bang(bang) => {
                self.execute_bang_oeprator()?;
                self.stack_frame.current().borrow_mut().pointer += 1 + bang.readsize();
            }
            opcode::Opcode::Minus(minus) => {
                self.execute_minus_operator()?;
                self.stack_frame.current().borrow_mut().pointer += 1 + minus.readsize();
            }
            // Conditional jump: skip the 2-byte operand first; if the
            // condition is falsy, land on `target - 1` so the shared `+= 1`
            // below ends up exactly on the jump target.
            opcode::Opcode::JumpNotTruthy(jump) => {
                self.stack_frame.current().borrow_mut().pointer += 2;
                let cond = self.stack.pop();
                if !Self::is_truthy(cond) {
                    self.stack_frame.current().borrow_mut().pointer = usize::from(jump.0) - 1;
                }
                self.stack_frame.current().borrow_mut().pointer += 1;
            }
            // Unconditional jump: same `target - 1` then `+ 1` dance.
            opcode::Opcode::Jump(jump) => {
                self.stack_frame.current().borrow_mut().pointer = usize::from(jump.0) - 1;
                self.stack_frame.current().borrow_mut().pointer += 1;
            }
            opcode::Opcode::Null(null) => {
                self.stack.push(NULL.into())?;
                self.stack_frame.current().borrow_mut().pointer += 1 + null.readsize();
            }
            // Read a global slot; a `None` slot means the compiler emitted
            // a read before any assignment — a bytecode-level bug.
            opcode::Opcode::GetGlobal(global) => {
                let obj = &self.globals.0[usize::from(global.0)];
                match obj {
                    Some(obj) => {
                        self.stack.push(obj.clone())?;
                    }
                    None => {
                        return Err(anyhow::format_err!(
                            "Bytecode error. Undefined global object. {}",
                            global
                        ))
                    }
                }
                self.stack_frame.current().borrow_mut().pointer += 1 + global.readsize();
            }
            opcode::Opcode::SetGlobal(global) => {
                let poped = self.stack.pop();
                self.globals.0[usize::from(global.0)] = Some(poped.clone());
                self.stack_frame.current().borrow_mut().pointer += 1 + global.readsize();
            }
            // Collapse the top N stack slots into an Array object.
            opcode::Opcode::Array(arr) => {
                let num_elements = usize::from(arr.0);
                let array_obj = self.stack.extract_array(num_elements);
                self.stack.push(array_obj.into())?;
                self.stack_frame.current().borrow_mut().pointer += 1 + arr.readsize();
            }
            // Collapse the top N stack slots (key/value pairs) into a Hash.
            opcode::Opcode::Hash(hash) => {
                let num_elements = usize::from(hash.0);
                let hash_obj = self.stack.extract_hash(num_elements)?;
                self.stack.push(hash_obj.into())?;
                self.stack_frame.current().borrow_mut().pointer += 1 + hash.readsize();
            }
            // `left[idx]`: index is on top, the collection beneath it.
            opcode::Opcode::Index(index) => {
                let idx = self.stack.pop().clone();
                let left = self.stack.pop().clone();
                self.execute_index_expressions(left, idx)?;
                self.stack_frame.current().borrow_mut().pointer += 1 + index.readsize();
            }
            // Function call. Stack layout: [..., callee, arg0, .. argN-1].
            // Note: only `readsize()` is added here — the final `+ 1` for
            // the Call opcode itself happens when the callee returns
            // (Return/ReturnValue arms), or inline for builtins.
            opcode::Opcode::Call(call) => {
                self.stack_frame.current().borrow_mut().pointer += call.readsize();
                let num_args = call.0;
                let obj = &self.stack.data[self.stack.pointer - 1 - usize::from(num_args)];
                match obj {
                    objects::Object::Closure(cl) => {
                        if cl.func.num_parameters != num_args {
                            return Err(anyhow::format_err!(
                                "wrong number of arguments: want={}, got={}",
                                cl.func.num_parameters,
                                num_args
                            ));
                        }
                        // New frame: its base pointer marks where the args
                        // (which double as the first locals) begin.
                        let frame = frame::Frame::new(
                            cl.clone(),
                            self.stack.pointer - usize::from(num_args),
                        );
                        let bp = frame.base_pointer;
                        self.stack_frame.push(frame);
                        // Reserve room for the callee's locals.
                        self.stack.pointer = bp + usize::from(cl.func.num_locals);
                    }
                    objects::Object::Builtin(builtin) => {
                        // Builtins don't push a frame; finish the Call
                        // opcode's pointer bump right here.
                        self.stack_frame.current().borrow_mut().pointer += 1;
                        let start_p = self.stack.pointer - usize::from(num_args);
                        let args = &self.stack.data[start_p..self.stack.pointer];
                        let result = builtin.call(args);
                        // Drop args and the callee slot before pushing the
                        // result (or NULL / an Error object on failure).
                        self.stack.pointer = self.stack.pointer - usize::from(num_args) - 1;
                        match result {
                            Ok(result) => {
                                if let Some(result) = result {
                                    self.stack.push(result)?;
                                } else {
                                    self.stack.push(NULL.into())?;
                                }
                            }
                            Err(e) => {
                                self.stack
                                    .push(objects::Error::Standard(e.to_string()).into())?;
                            }
                        }
                    }
                    other_obj => {
                        return Err(anyhow::format_err!(
                            "calling non-function. received {}",
                            other_obj
                        ))
                    }
                };
            }
            // Pop the frame; rewind the stack below the callee slot, push
            // the return value, then finish the caller's Call opcode bump.
            opcode::Opcode::ReturnValue(_) => {
                let return_value = self.stack.pop().clone();
                let frame = self.stack_frame.pop();
                self.stack.pointer = frame.borrow().base_pointer - 1;
                self.stack.push(return_value)?;
                self.stack_frame.current().borrow_mut().pointer += 1;
            }
            // Same as ReturnValue but the implicit result is NULL.
            opcode::Opcode::Return(_) => {
                let frame = self.stack_frame.pop();
                self.stack.pointer = frame.borrow().base_pointer - 1;
                self.stack.push(NULL.into())?;
                self.stack_frame.current().borrow_mut().pointer += 1;
            }
            // Locals live on the stack at base_pointer + slot index.
            opcode::Opcode::GetLocal(local) => {
                let frame = self.stack_frame.current();
                let p = frame.borrow().base_pointer + usize::from(local.0);
                self.stack.push(self.stack.data[p].clone())?;
                self.stack_frame.current().borrow_mut().pointer += 1 + local.readsize();
            }
            opcode::Opcode::SetLocal(local) => {
                let frame = self.stack_frame.current();
                let p = frame.borrow().base_pointer + usize::from(local.0);
                self.stack.data[p] = self.stack.pop().clone();
                self.stack_frame.current().borrow_mut().pointer += 1 + local.readsize();
            }
            opcode::Opcode::GetBuiltin(builtin) => {
                let definition = builtin::Function::by_index(usize::from(builtin.0));
                self.stack.push(definition.into())?;
                self.stack_frame.current().borrow_mut().pointer += 1 + builtin.readsize();
            }
            // Build a closure: constant index (closure.0) names the
            // compiled function; closure.1 free variables are taken off
            // the stack top.
            opcode::Opcode::Closure(closure) => {
                let constant = &self.constants[usize::from(closure.0)];
                match constant {
                    objects::Object::CompiledFunction(func) => {
                        let free = (0..usize::from(closure.1))
                            .map(|i| {
                                self.stack.data[self.stack.pointer - usize::from(closure.1) + i]
                                    .clone()
                            })
                            .collect::<Vec<_>>();
                        self.stack.pointer -= usize::from(closure.1);
                        let cl_obj = objects::Closure {
                            func: func.clone(),
                            free,
                        };
                        self.stack.push(cl_obj.into())?;
                    }
                    other => return Err(anyhow::format_err!("not a function: {}", other)),
                }
                self.stack_frame.current().borrow_mut().pointer += 1 + closure.readsize();
            }
            // Read a captured variable out of the executing closure.
            opcode::Opcode::GetFree(free) => {
                self.stack.push(
                    self.stack_frame.current().borrow().cl.free[usize::from(free.0)].clone(),
                )?;
                self.stack_frame.current().borrow_mut().pointer += 1 + free.readsize();
            }
            // Push the closure currently executing (self-recursion support).
            opcode::Opcode::CurrentClosure(curr_cl) => {
                let current_closure = self.stack_frame.current().borrow().cl.clone();
                self.stack.push(current_closure.into())?;
                self.stack_frame.current().borrow_mut().pointer += 1 + curr_cl.readsize();
            }
        }
    }
    Ok(())
}
/// Pops two operands and applies an arithmetic opcode.
///
/// `pop_pair` yields (deeper slot, top slot), i.e. (left operand, right
/// operand) in source order. Only Integer/Integer and String/String pairs
/// are supported; anything else is a runtime type error.
fn execute_binary_operation(&mut self, op: &opcode::Opcode) -> Result<()> {
    match self.stack.pop_pair() {
        (objects::Object::Integer(i1), objects::Object::Integer(i2)) => {
            let int = Self::execute_binary_integer_operation(op, i1.value, i2.value)?;
            self.stack.push(int.into())?;
        }
        (objects::Object::StringLit(s1), objects::Object::StringLit(s2)) => {
            let string = Self::execute_binary_string_operation(op, &s1.value, &s2.value)?;
            self.stack.push(string.into())?;
        }
        (unknown_obj1, unknown_obj2) => {
            return Err(anyhow::format_err!(
                "unsupported types for binary operation: {} {}",
                unknown_obj1,
                unknown_obj2
            ))
        }
    }
    Ok(())
}
fn execute_binary_integer_operation(
op: &opcode::Opcode,
left_val: i64,
right_val: i64,
) -> Result<objects::Integer> {
let value = match op {
opcode::Opcode::Add(_) => left_val + right_val,
opcode::Opcode::Sub(_) => left_val - right_val,
opcode::Opcode::Mul(_) => left_val * right_val,
opcode::Opcode::Div(_) => left_val / right_val,
_ => {
return Err(anyhow::format_err!(
"unknown integer operator. received {}",
op
))
}
};
Ok(objects::Integer { value })
}
/// Applies a binary opcode to two string operands.
///
/// Only `Add` (concatenation) is defined for strings; every other opcode
/// is a runtime error.
fn execute_binary_string_operation(
    op: &opcode::Opcode,
    left_val: &str,
    right_val: &str,
) -> Result<objects::StringLit> {
    if let opcode::Opcode::Add(_) = op {
        let value = format!("{}{}", left_val, right_val);
        Ok(objects::StringLit { value })
    } else {
        Err(anyhow::format_err!(
            "unknown string operator. received {}",
            op
        ))
    }
}
/// Pops two operands and applies a comparison opcode.
///
/// NOTE(review): `pop_pair` returns (deeper slot, top slot); binding it as
/// `(right, left)` means `right` is the *first pushed* operand. The integer
/// helper compensates by comparing `r` against `l`, so the net result is
/// `first_pushed OP last_pushed` — verify against the compiler's operand
/// ordering before renaming anything here.
fn execute_comparison(&mut self, op: &opcode::Opcode) -> Result<()> {
    let (right, left) = self.stack.pop_pair();
    match (left, right) {
        (objects::Object::Integer(l), objects::Object::Integer(r)) => {
            let compared =
                Self::execute_integer_comparison(op, &l.clone().into(), &r.clone().into())?;
            self.stack.push(compared.into())?;
        }
        // Non-integer pairs only support (in)equality, via Object's
        // PartialEq.
        (l, r) => match op {
            opcode::Opcode::Equal(_) => {
                let b = Self::native_bool_to_boolean_object(l == r);
                self.stack.push(b.into())?;
            }
            opcode::Opcode::NotEqual(_) => {
                let b = Self::native_bool_to_boolean_object(l != r);
                self.stack.push(b.into())?;
            }
            unknown_op => {
                return Err(anyhow::format_err!(
                    "unknown operator: {} ({} {})",
                    unknown_op,
                    l,
                    r
                ))
            }
        },
    };
    Ok(())
}
/// Compares two integer objects.
///
/// NOTE(review): every arm compares `r` against `l` (not `l` against `r`)
/// because the caller's operand names are inverted relative to push order;
/// combined, `GreaterThan` computes `first_pushed > last_pushed`. Keep the
/// two functions in sync.
fn execute_integer_comparison(
    op: &opcode::Opcode,
    left: &objects::Object,
    right: &objects::Object,
) -> Result<objects::Boolean> {
    match (left, right) {
        (objects::Object::Integer(l), objects::Object::Integer(r)) => match op {
            opcode::Opcode::Equal(_) => {
                Ok(Self::native_bool_to_boolean_object(r.value == l.value))
            }
            opcode::Opcode::NotEqual(_) => {
                Ok(Self::native_bool_to_boolean_object(r.value != l.value))
            }
            opcode::Opcode::GreaterThan(_) => {
                Ok(Self::native_bool_to_boolean_object(r.value > l.value))
            }
            unknown_op => Err(anyhow::format_err!(
                "unknown operator: {} ({} {})",
                unknown_op,
                l,
                r
            )),
        },
        (unknown_l, unknown_r) => Err(anyhow::format_err!(
            "expected (Integer, Integer). received ({} {})",
            unknown_l,
            unknown_r
        )),
    }
}
/// Maps a Rust `bool` onto the VM's shared TRUE/FALSE singletons.
fn native_bool_to_boolean_object(b: bool) -> objects::Boolean {
    match b {
        true => TRUE,
        false => FALSE,
    }
}
/// Pops the operand and pushes its logical negation.
///
/// Truthiness rules: `false` and `null` negate to `true`; every other
/// value (including any integer) negates to `false`.
fn execute_bang_oeprator(&mut self) -> Result<()> {
    let negated = match self.stack.pop() {
        objects::Object::Boolean(b) => !b.value,
        objects::Object::Null(_) => true,
        _ => false,
    };
    let result = if negated { TRUE } else { FALSE };
    self.stack.push(result.into())?;
    Ok(())
}
/// Pops the operand and pushes its arithmetic negation.
///
/// Only `Integer` operands are supported; anything else is a runtime
/// error. NOTE(review): `-i64::MIN` overflows — panics in debug builds,
/// wraps in release; acceptable for now but worth a checked_neg.
fn execute_minus_operator(&mut self) -> Result<()> {
    let operand = self.stack.pop();
    match operand {
        objects::Object::Integer(i) => {
            let int = objects::Integer { value: -i.value };
            self.stack.push(int.into())?
        }
        unknown => {
            // Fixed error-message typo: "fot" -> "for".
            return Err(anyhow::format_err!(
                "unsupported type for negation: {}",
                unknown
            ))
        }
    };
    Ok(())
}
/// Dispatches `left[index]` to the matching collection handler.
///
/// Arrays require an integer index; hashes accept any hashable key;
/// everything else is a runtime error.
fn execute_index_expressions(
    &mut self,
    left: objects::Object,
    index: objects::Object,
) -> Result<()> {
    match (left, index) {
        (objects::Object::Array(arr), objects::Object::Integer(idx)) => {
            self.execute_array_index(arr, idx)
        }
        (objects::Object::Hash(hs), idx) => self.execute_hash_index(hs, idx),
        (l, i) => Err(anyhow::format_err!(
            "index operator not supported: {:?}[{:?}]",
            l,
            i
        )),
    }
}
/// Pushes `array[index]`, or NULL when the index is negative or out of
/// bounds (no error — mirrors the language's lenient indexing).
fn execute_array_index(
    &mut self,
    array: objects::Array,
    index: objects::Integer,
) -> Result<()> {
    // A negative index fails the usize conversion and falls through to
    // NULL, matching the original 0..len range check.
    let hit = usize::try_from(index.value)
        .ok()
        .and_then(|i| array.elements.get(i));
    match hit {
        Some(ele) => self.stack.push(ele.clone()),
        None => self.stack.push(NULL.into()),
    }
}
/// Pushes `hash[index]`, or NULL for a missing key.
///
/// Errors only when `index` is not a hashable object type.
fn execute_hash_index(&mut self, hash: objects::Hash, index: objects::Object) -> Result<()> {
    let key = objects::HashableObject::try_from(index)?;
    let value = hash
        .pairs
        .get(&key)
        .cloned()
        .unwrap_or_else(|| NULL.into());
    self.stack.push(value)
}
/// Truthiness rule: `null` is falsy, booleans are themselves, every other
/// object is truthy.
fn is_truthy(obj: &objects::Object) -> bool {
    match obj {
        objects::Object::Null(_) => false,
        objects::Object::Boolean(flag) => flag.value,
        _ => true,
    }
}
}
#[cfg(test)]
mod tests {
use crate::compiler;
use crate::lexer;
use crate::parser;
use crate::parser::ast;
use super::*;
/// Expected outcome of running a VM test program.
#[derive(Clone)]
enum Expected {
    Int(i64),
    Bool(bool),
    Null,
    String(String),
    IntArray(Vec<i64>),
    IntHash(Vec<(i64, i64)>),
    // A runtime Error *object* left on the stack (not a vm.run() Err).
    Err(String),
}
/// A table of (source program, expected result) pairs.
struct Tests(Vec<(String, Expected)>);
// Conversions so test tables can be written with bare literals
// (ints, bools, &str, vecs) instead of explicit Expected variants.
impl From<i64> for Expected {
    fn from(value: i64) -> Self {
        Self::Int(value)
    }
}
impl From<bool> for Expected {
    fn from(value: bool) -> Self {
        Self::Bool(value)
    }
}
impl From<&str> for Expected {
    fn from(value: &str) -> Self {
        Self::String(value.to_string())
    }
}
impl From<Vec<i64>> for Expected {
    fn from(value: Vec<i64>) -> Self {
        Self::IntArray(value)
    }
}
impl From<Vec<(i64, i64)>> for Expected {
    fn from(value: Vec<(i64, i64)>) -> Self {
        Self::IntHash(value)
    }
}
/// Lifts a whole `(input, expected)` table into `Tests` in one call.
impl<T> From<Vec<(&str, T)>> for Tests
where
    T: Into<Expected>,
{
    fn from(value: Vec<(&str, T)>) -> Self {
        let tests = value
            .into_iter()
            .map(|(input, expected)| (input.to_string(), expected.into()))
            .collect::<Vec<_>>();
        Tests(tests)
    }
}
// Integer expressions: precedence, grouping and unary minus.
#[test]
fn test_integer_arithmetic() {
    let tests: Tests = vec![
        ("1", 1),
        ("2", 2),
        ("1 + 2", 3),
        ("1 - 2", -1),
        ("1 * 2", 2),
        ("4 / 2", 2),
        ("50 / 2 * 2 + 10 - 5", 55),
        ("5 + 5 + 5 + 5 - 10", 10),
        ("2 * 2 * 2 * 2 * 2", 32),
        ("5 * 2 + 10", 20),
        ("5 + 2 * 10", 25),
        ("5 * (2 + 10)", 60),
        ("-5", -5),
        ("-10", -10),
        ("-50 + 100 + -50", 0),
        ("(5 + 10 * 2 + 15 / 3) * 2 + -10", 50),
    ]
    .into();
    run_vm_tests(tests);
}
// Boolean literals, comparisons and the bang operator (incl. truthiness).
#[test]
fn test_boolean_expressions() {
    let tests: Tests = vec![
        ("true", true),
        ("false", false),
        ("1 < 2", true),
        ("1 > 2", false),
        ("1 < 1", false),
        ("1 > 1", false),
        ("1 == 1", true),
        ("1 != 1", false),
        ("1 == 2", false),
        ("1 != 2", true),
        ("true == true", true),
        ("false == false", true),
        ("true == false", false),
        ("true != false", true),
        ("false == true", false),
        ("false != true", true),
        ("(1 < 2) == true", true),
        ("(1 < 2) == false", false),
        ("(1 > 2) == true", false),
        ("(1 > 2) == false", true),
        ("!true", false),
        ("!false", true),
        ("!5", false),
        ("!!true", true),
        ("!!false", false),
        ("!!5", true),
        ("!(if (false) { 5; })", true),
    ]
    .into();
    run_vm_tests(tests);
}
// Conditionals, including ones whose condition is itself a conditional,
// and branch-less `if`s that evaluate to null.
#[test]
fn test_conditional() {
    let tests: Tests = vec![
        ("if (true) { 10 }", 10),
        ("if (true) { 10 } else { 20 }", 10),
        ("if (false) { 10 } else { 20 }", 20),
        ("if (1) { 10 }", 10),
        ("if (1 < 2) { 10 }", 10),
        ("if (1 < 2) { 10 } else { 20 }", 10),
        ("if (1 > 2) { 10 } else { 20 }", 20),
        ("if ((if (false) { 10 })) { 10 } else { 20 }", 20),
    ]
    .into();
    run_vm_tests(tests);
    let tests: Tests = vec![
        ("if (1 > 2) { 10 }", Expected::Null),
        ("if (false) { 10 }", Expected::Null),
    ]
    .into();
    run_vm_tests(tests);
}
// Global bindings resolved through the GlobalSpace store.
#[test]
fn test_global_let_statements() {
    let tests: Tests = vec![
        ("let one = 1; one", 1),
        ("let one = 1; let two = 2; one + two", 3),
        ("let one = 1; let two = one + one; one + two", 3),
    ]
    .into();
    run_vm_tests(tests);
}
// String literals and concatenation.
#[test]
fn test_string_expression() {
    let tests: Tests = vec![
        (r#""monkey""#, "monkey"),
        (r#""mon" + "key""#, "monkey"),
        (r#""mon" + "key" + "banana""#, "monkeybanana"),
    ]
    .into();
    run_vm_tests(tests);
}
// Array literals, including element expressions evaluated at runtime.
#[test]
fn test_array_literals() {
    let tests: Tests = vec![
        ("[]", vec![]),
        ("[1, 2, 3]", vec![1, 2, 3]),
        ("[1 + 2, 3 * 4, 5 + 6]", vec![3, 12, 11]),
    ]
    .into();
    run_vm_tests(tests);
}
// Hash literals with computed keys and values.
#[test]
fn test_hash_literals() {
    let tests: Tests = vec![
        ("{}", vec![]),
        ("{1: 2, 2: 3}", vec![(1, 2), (2, 3)]),
        ("{1 + 1: 2 * 2, 3 + 3: 4 * 4}", vec![(2, 4), (6, 16)]),
    ]
    .into();
    run_vm_tests(tests);
}
// Indexing arrays and hashes; misses produce null, never an error.
#[test]
fn test_index_expressions() {
    let tests: Tests = vec![
        ("[1, 2, 3][1]", 2),
        ("[1, 2, 3][0 + 2]", 3),
        ("[[1, 1, 1]][0][0]", 1),
        ("{1: 1, 2: 2}[1]", 1),
        ("{1: 1, 2: 2}[2]", 2),
    ]
    .into();
    run_vm_tests(tests);
    let tests: Tests = vec![
        ("[][0]", Expected::Null),
        ("[1][-1]", Expected::Null),
        ("{}[0]", Expected::Null),
        ("{1: 1}[0]", Expected::Null),
    ]
    .into();
    run_vm_tests(tests);
}
// Simplest zero-argument function call.
#[test]
fn test_calling_function_without_arguments() {
    let tests: Tests = vec![(
        "
        let five_plus_ten = fn() { 5 + 10; };
        five_plus_ten();
        ",
        15,
    )]
    .into();
    run_vm_tests(tests);
}
// Multiple zero-argument functions, including nested call chains.
#[test]
fn test_calling_functions_without_arguments() {
    let tests: Tests = vec![
        (
            "
            let one = fn() { 1; };
            let two = fn() { 2; };
            one() + two()
            ",
            3,
        ),
        (
            "
            let a = fn() { 1 };
            let b = fn() { a() + 1 };
            let c = fn() { b() + 1 };
            c();
            ",
            3,
        ),
    ]
    .into();
    run_vm_tests(tests);
}
// Explicit `return` stops execution; only the first return counts.
#[test]
fn test_functions_with_return_statement() {
    let tests: Tests = vec![
        (
            "
            let early_exit = fn() { return 99; 100; };
            early_exit();
            ",
            99,
        ),
        (
            "
            let early_exit = fn() { return 99; return 100; };
            early_exit();
            ",
            99,
        ),
        (
            "
            let early_exit = fn() { 99; return 100; };
            early_exit();
            ",
            100,
        ),
    ]
    .into();
    run_vm_tests(tests);
}
// Empty function bodies implicitly return null.
#[test]
fn test_functions_without_return_value() {
    let tests: Tests = vec![
        (
            "
            let no_return = fn() { };
            no_return();
            ",
            Expected::Null,
        ),
        (
            "
            let no_return = fn() { };
            let no_return_two = fn() { no_return(); };
            no_return();
            no_return_two();
            ",
            Expected::Null,
        ),
    ]
    .into();
    run_vm_tests(tests);
}
// Functions are first-class values: returned and immediately invoked.
#[test]
fn test_first_class_functions() {
    let tests: Tests = vec![
        (
            "
            let returns_one = fn() { 1; };
            let returns_one_returner = fn() { returns_one };
            returns_one_returner()();
            ",
            1,
        ),
        (
            "
            let returns_one_returner = fn() {
                let returns_one = fn() { 1; };
                returns_one;
            }
            returns_one_returner()();
            ",
            1,
        ),
    ]
    .into();
    run_vm_tests(tests);
}
// Local bindings: each call frame gets its own slots; same-named locals in
// different functions must not collide, and globals stay visible.
#[test]
fn test_calling_functions_with_bindings() {
    let tests: Tests = vec![
        (
            "
            let one = fn() { let one = 1; one };
            one();
            ",
            1,
        ),
        (
            "
            let one_and_two = fn() { let one = 1; let two = 2; one + two; };
            one_and_two();
            ",
            3,
        ),
        (
            "
            let one_and_two = fn() { let one = 1; let two = 2; one + two; };
            let three_and_four = fn() { let three = 3; let four = 4; three + four; };
            one_and_two() + three_and_four();
            ",
            10,
        ),
        (
            "
            let first_foobar = fn() { let foobar = 50; foobar; };
            let second_foobar = fn() { let foobar = 100; foobar; };
            first_foobar() + second_foobar();
            ",
            150,
        ),
        (
            "
            let global_seed = 50;
            let minus_one = fn() {
                let num = 1;
                global_seed - num;
            }
            let minus_two = fn() {
                let num = 2;
                global_seed - num;
            }
            minus_one() + minus_two();
            ",
            97,
        ),
    ]
    .into();
    run_vm_tests(tests);
}
// Arguments behave as pre-initialised locals; mixing args, locals and
// globals in one body.
#[test]
fn test_calling_functions_with_arguments_and_bindings() {
    let tests: Tests = vec![
        (
            "
            let identity = fn(a) { a; };
            identity(4);
            ",
            4,
        ),
        (
            "
            let sum = fn(a, b) { a + b; };
            sum(1, 2);
            ",
            3,
        ),
        (
            "
            let global_num = 10;
            let sum = fn(a, b) {
                let c = a + b;
                c + global_num;
            }
            let outer = fn() {
                sum(1, 2) + sum(3, 4) + global_num;
            }
            outer() + global_num;
            ",
            50,
        ),
    ]
    .into();
    run_vm_tests(tests);
}
// Arity mismatches must abort vm.run() with a descriptive error.
#[test]
fn test_calling_funcitons_with_wrong_arguments() {
    let tests = vec![
        (
            "fn() { 1; }(1);",
            "wrong number of arguments: want=0, got=1",
        ),
        (
            "fn(a) { a; }();",
            "wrong number of arguments: want=1, got=0",
        ),
        (
            "fn(a, b) { a + b; }(1);",
            "wrong number of arguments: want=2, got=1",
        ),
    ];
    run_vm_err_test(tests);
}
// Built-in functions; misuse leaves an Error *object* on the stack rather
// than failing vm.run().
#[test]
fn test_builtin_functions() {
    let tests: Tests = vec![
        (r#"len("")"#, 0.into()),
        (r#"len("four")"#, 4.into()),
        (r#"len("hello world")"#, 11.into()),
        ("len([1, 2, 3])", 3.into()),
        ("len([])", 0.into()),
        (
            "len(1)",
            Expected::Err("argument to 'len' not supported, got Integer".into()),
        ),
        (
            r#"len("one", "two")"#,
            Expected::Err("wrong number of arguments. got=2, want=1".into()),
        ),
        (r#"puts("hello", "world")"#, Expected::Null),
        ("first([1, 2, 3])", 1.into()),
        ("first([])", Expected::Null),
        (
            "first(1)",
            Expected::Err("argument to 'first' must be Array, got Integer".into()),
        ),
        ("last([1, 2, 3])", 3.into()),
        ("last([])", Expected::Null),
        (
            "last(1)",
            Expected::Err("argument to 'last' must be Array, got Integer".into()),
        ),
        ("rest([1, 2, 3])", vec![2, 3].into()),
        ("rest([])", Expected::Null),
        (
            "rest(1)",
            Expected::Err("argument to 'rest' must be Array, got Integer".into()),
        ),
        ("push([], 1)", vec![1].into()),
        (
            "push(1, 1)",
            Expected::Err("argument to 'push' must be Array, got Integer".into()),
        ),
    ]
    .into();
    run_vm_tests(tests);
}
// Closures capturing arguments, locals, globals, and nested free variables
// across several levels.
#[test]
fn test_closures() {
    let tests: Tests = vec![
        (
            "
            let new_closure = fn(a) {
                fn() { a; };
            };
            let closure = new_closure(99);
            closure();
            ",
            99,
        ),
        (
            "
            let new_adder = fn(a, b) {
                fn(c) { a + b + c };
            };
            let adder = new_adder(1, 2);
            adder(8);
            ",
            11,
        ),
        (
            "
            let new_adder = fn(a, b) {
                let c = a + b;
                fn(d) { c + d };
            };
            let adder = new_adder(1, 2);
            adder(8);
            ",
            11,
        ),
        (
            "
            let new_adder_outer = fn(a, b) {
                let c = a + b;
                fn(d) {
                    let e = d + c;
                    fn(f) { e + f; };
                };
            };
            let new_adder_inner = new_adder_outer(1, 2);
            let adder = new_adder_inner(3);
            adder(8);
            ",
            14,
        ),
        (
            "
            let a = 1;
            let new_adder_outer = fn(b) {
                fn(c) {
                    fn(d) { a + b + c + d; };
                };
            };
            let new_adder_inner = new_adder_outer(2);
            let adder = new_adder_inner(3);
            adder(8);
            ",
            14,
        ),
        (
            "
            let new_closure = fn(a, b) {
                let one = fn() { a; };
                let two = fn() { b; };
                fn() { one() + two(); };
            };
            let closure = new_closure(9, 90);
            closure();
            ",
            99,
        ),
    ]
    .into();
    run_vm_tests(tests);
}
// Self-recursion via the CurrentClosure opcode, both at global scope and
// defined inside another function.
#[test]
fn test_recursive_functions() {
    let tests: Tests = vec![
        (
            "
            let count_down = fn(x) {
                if (x == 0) {
                    return 0;
                } else {
                    count_down(x - 1);
                }
            };
            count_down(1);
            ",
            0,
        ),
        (
            "
            let count_down = fn(x) {
                if (x == 0) {
                    return 0;
                } else {
                    count_down(x - 1);
                }
            };
            let wrapper = fn() {
                count_down(1);
            };
            wrapper();
            ",
            0,
        ),
        (
            "
            let wrapper = fn() {
                let count_down = fn(x) {
                    if (x == 0) {
                        return 0;
                    } else {
                        count_down(x - 1);
                    }
                };
                count_down(1);
            };
            wrapper();
            ",
            0,
        ),
    ]
    .into();
    run_vm_tests(tests);
}
// Deeper recursion exercising frame push/pop under real load.
#[test]
fn test_recursive_fibonacci() {
    let tests: Tests = vec![(
        "
        let fibonacci = fn(x) {
            if (x == 0) {
                return 0;
            } else {
                if (x == 1) {
                    return 1;
                } else {
                    fibonacci(x - 1) + fibonacci(x - 2);
                }
            }
        };
        fibonacci(15);
        ",
        610,
    )]
    .into();
    run_vm_tests(tests);
}
/// Compiles and runs each input, asserting that `vm.run()` fails with
/// exactly the expected error message. Panics if the program compiles but
/// runs successfully.
fn run_vm_err_test(tests: Vec<(&'static str, &'static str)>) {
    tests.into_iter().for_each(|(input, expected)| {
        let program = parse(input);
        let sym_table = std::rc::Rc::new(std::cell::RefCell::new(compiler::SymbolTable::new()));
        let mut constants = Default::default();
        let mut comp = compiler::Compiler::new_with_state(sym_table, &mut constants);
        if let Err(e) = comp.compile(program.into()) {
            // Fixed format string: was "compiler error {}: " with a
            // dangling colon after the message.
            panic!("compiler error: {}", e);
        }
        let bytecode: bytecode::Bytecode = comp.into();
        let mut globals = Default::default();
        let mut vm = VM::new_with_globals_store(bytecode, &mut globals);
        if let Err(e) = vm.run() {
            assert_eq!(e.to_string(), expected);
        } else {
            panic!("expected VM error but resulted in none.")
        }
    });
}
/// Compiles and runs each input, then checks the value the program left
/// behind (`last_popped_stack_elem`) against the expected result.
fn run_vm_tests(tests: Tests) {
    tests.0.into_iter().for_each(|(input, expected)| {
        let program = parse(input.as_str());
        let sym_table = std::rc::Rc::new(std::cell::RefCell::new(
            compiler::SymbolTable::new_with_builtin(),
        ));
        let mut constants = Default::default();
        let mut comp = compiler::Compiler::new_with_state(sym_table, &mut constants);
        if let Err(e) = comp.compile(program.into()) {
            // Fixed format string: was "compiler error {}: " with a
            // dangling colon after the message.
            panic!("compiler error: {}", e);
        }
        let bytecode: bytecode::Bytecode = comp.into();
        // Keep a copy so a VM failure can dump the offending bytecode.
        let debug = bytecode.clone();
        let mut globals = Default::default();
        let mut vm = VM::new_with_globals_store(bytecode, &mut globals);
        if let Err(e) = vm.run() {
            panic!("vm error: {} by {:?}", e, debug);
        }
        let stack_elem = vm.last_popped_stack_elem();
        test_object(stack_elem, &expected);
    });
}
/// Dispatches an actual VM result to the matching per-type assertion.
fn test_object(actual: &objects::Object, expected: &Expected) {
    match expected {
        Expected::Int(expected_int) => {
            test_integer_object(actual, *expected_int);
        }
        Expected::Bool(expected_bool) => {
            test_bool_object(actual, *expected_bool);
        }
        Expected::Null => {
            test_null_object(actual);
        }
        Expected::String(expected_string) => {
            test_string_object(actual, expected_string);
        }
        Expected::IntArray(expected_array) => {
            test_int_array_object(actual, expected_array);
        }
        Expected::IntHash(expected_hash) => {
            test_int_hash_object(actual.clone(), expected_hash);
        }
        Expected::Err(expected_err) => {
            test_err_object(actual, expected_err);
        }
    }
}
/// Asserts that `actual` is an Integer with the expected value.
fn test_integer_object(actual: &objects::Object, expected: i64) {
    if let objects::Object::Integer(int) = actual {
        assert_eq!(int.value, expected);
    } else {
        panic!("expected Integer. received {}", actual);
    }
}
/// Asserts that `actual` is a Boolean with the expected value.
fn test_bool_object(actual: &objects::Object, expected: bool) {
    if let objects::Object::Boolean(b) = actual {
        assert_eq!(b.value, expected);
    } else {
        panic!("expected Boolean. received {}", actual);
    }
}
/// Asserts that `actual` is the Null object.
fn test_null_object(actual: &objects::Object) {
    if !matches!(actual, objects::Object::Null(_)) {
        panic!("expected Null. received {}", actual);
    }
}
/// Asserts that `actual` is a string literal with the expected contents.
fn test_string_object(actual: &objects::Object, expected: &str) {
    if let objects::Object::StringLit(s) = actual {
        assert_eq!(s.value, expected);
    } else {
        panic!("expected String. received {}", actual);
    }
}
/// Asserts element-wise that `actual` is an Array of the expected ints.
/// (Like the original, zipping means extra elements on either side are
/// silently ignored.)
fn test_int_array_object(actual: &objects::Object, expected: &[i64]) {
    let arr = match actual {
        objects::Object::Array(arr) => arr,
        obj => panic!("expected Array. received {}", obj),
    };
    for (want, obj) in expected.iter().zip(arr.elements.iter()) {
        test_integer_object(obj, *want);
    }
}
/// Asserts that `actual` is a Hash equal to the expected int->int pairs,
/// by building the expected `HashPairs` through the same conversions the
/// VM uses.
fn test_int_hash_object(actual: objects::Object, expected: &[(i64, i64)]) {
    match actual {
        objects::Object::Hash(hash) => {
            let mut expected_hash = objects::HashPairs::new();
            expected
                .iter()
                .try_for_each::<_, anyhow::Result<()>>(|(key, val)| {
                    // Keys must round-trip through HashableObject.
                    let key =
                        objects::Object::from(objects::Integer { value: *key }).try_into()?;
                    let val = objects::Integer { value: *val }.into();
                    expected_hash.insert(key, val);
                    Ok(())
                })
                .unwrap_or_else(|_| panic!("failed convert {:?} to HashPairs", expected));
            assert_eq!(hash.pairs, expected_hash);
        }
        obj => panic!("expected hash. received {}", obj),
    }
}
fn test_err_object(actual: &objects::Object, expected: &str) {
match actual {
objects::Object::Error(err) => match err {
objects::Error::Standard(msg) => assert_eq!(msg, expected),
},
obj => panic!("expected error. received {}", obj),
}
}
/// Lexes and parses a source snippet, panicking on any parse error.
fn parse(input: &str) -> ast::Program {
    let mut p = parser::Parser::new(lexer::Lexer::new(input.into()));
    p.parse_program().unwrap()
}
}
|
// --- paritytech ---
use pallet_authorship::Config;
use pallet_session::FindAccountFromAuthorIndex;
// --- darwinia-network ---
use crate::*;
frame_support::parameter_types! {
    // Keep no uncle blocks around.
    pub const UncleGenerations: u32 = 0;
}
/// Wires `pallet_authorship` into the runtime: the block author is looked
/// up through the session pallet from Aura's author index, and authorship
/// notifications are forwarded to the collator-selection pallet.
impl Config for Runtime {
    type FindAuthor = FindAccountFromAuthorIndex<Self, Aura>;
    type UncleGenerations = UncleGenerations;
    type FilterUncle = ();
    type EventHandler = (CollatorSelection,);
}
|
/// A compiled program: a flat sequence of instructions.
pub struct Bytecode {
    pub instructions: Vec<Instruction>,
}
/// A virtual register: either the n-th call argument or the n-th local slot.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub enum Register {
    Arg(usize),
    Local(usize),
}
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub enum Instruction {
    // FFI to Rust
    // NOTE(review): judging by the names, `RustCallMut` writes its result
    // to `out` while `RustCallRef` only reads `arg` — confirm against the
    // interpreter that executes these.
    RustCallMut { rust_fn: usize, arg: Register, out: Register},
    RustCallRef { rust_fn: usize, arg: Register},
}
/// A `usize` extension to use `usize`s as constrainable values
pub trait UsizeExt {
    /// Get the constrainable value
    fn _cv(&self) -> usize;
}
impl UsizeExt for usize {
    fn _cv(&self) -> usize {
        // A number is its own constrainable size.
        self.to_owned()
    }
}
/// A slice extension to use slice as constrainable values
pub trait SliceExt {
    /// Get the constrainable value
    fn _cv(&self) -> usize;
}
impl<T: AsRef<[u8]>> SliceExt for T {
    fn _cv(&self) -> usize {
        // The constrainable value of any byte-slice-like type is its length.
        let bytes: &[u8] = self.as_ref();
        bytes.len()
    }
}
/// Verifies that
/// - `$buf` can hold *exactly* `$size` bytes
macro_rules! vfy_keygen {
    ($size:expr => $buf:expr) => ({
        #[allow(unused_imports)]
        use $crate::verify_input::{ UsizeExt, SliceExt };
        // `match true` with guards is an `if`/`else if` chain in disguise:
        // the first failing check wins.
        let error = match true {
            _ if $buf._cv() != $size => Err("Invalid buffer size"),
            _ => Ok(())
        };
        error.map_err(|e| $crate::ChachaPolyError::ApiMisuse(e))?;
    });
}
/// Verifies that
/// - `$key` has the same size as `CHACHA20_KEY` (32 bytes)
/// - `$nonce` has the same size as `CHACHA20_NONCE` (12 bytes)
/// - `$plaintext` is not larger than the maximum plaintext limit
/// - `$buf` is large enough to hold the encrypted plaintext
macro_rules! vfy_enc {
    ($key:expr, $nonce:expr, $plaintext:expr => $buf:expr) => ({
        #[allow(unused_imports)]
        use $crate::verify_input::{ UsizeExt, SliceExt };
        // Checks run top-down; the first failure becomes the error.
        let error = match true {
            _ if $key._cv() != CHACHA20_KEY => Err("Invalid key length"),
            _ if $nonce._cv() != CHACHA20_NONCE => Err("Invalid nonce length"),
            _ if $plaintext._cv() > CHACHA20_MAX => Err("Too much data"),
            _ if $plaintext._cv() > $buf._cv() => Err("Buffer is too small"),
            _ => Ok(())
        };
        error.map_err(|e| $crate::ChachaPolyError::ApiMisuse(e))?;
    })
}
/// Verifies that
/// - `$key` has the same size as `CHACHA20_KEY` (32 bytes)
/// - `$nonce` has the same size as `CHACHA20_NONCE` (12 bytes)
/// - `$ciphertext` is not larger than the maximum plaintext limit
/// - `$buf` is large enough to hold the decrypted ciphertext
macro_rules! vfy_dec {
    ($key:expr, $nonce:expr, $ciphertext:expr => $buf:expr) => ({
        #[allow(unused_imports)]
        use $crate::verify_input::{ UsizeExt, SliceExt };
        // Checks run top-down; the first failure becomes the error.
        let error = match true {
            _ if $key._cv() != CHACHA20_KEY => Err("Invalid key length"),
            _ if $nonce._cv() != CHACHA20_NONCE => Err("Invalid nonce length"),
            _ if $ciphertext._cv() > CHACHA20_MAX => Err("Too much data"),
            _ if $ciphertext._cv() > $buf._cv() => Err("Buffer is too small"),
            _ => Ok(())
        };
        error.map_err(|e| $crate::ChachaPolyError::ApiMisuse(e))?;
    })
}
/// Verifies that
/// - `$key` has the same size as `POLY1305_KEY` (32 bytes)
/// - `$buf` is large enough to hold a `POLY1305_TAG` (16 bytes)
///
/// NOTE: the pattern keeps a comma before `=>`, so call sites must be
/// written `vfy_auth!(key, => buf)` to match the other `vfy_*` macros.
macro_rules! vfy_auth {
    ($key:expr, => $buf:expr) => ({
        #[allow(unused_imports)]
        use $crate::verify_input::{ UsizeExt, SliceExt };
        let error = match true {
            _ if $key._cv() != POLY1305_KEY => Err("Invalid key length"),
            _ if $buf._cv() < POLY1305_TAG => Err("Buffer is too small"),
            _ => Ok(())
        };
        error.map_err(|e| $crate::ChachaPolyError::ApiMisuse(e))?;
    })
}
/// Verifies that
/// - `$key` has the same size as `CHACHAPOLY_KEY` (32 bytes)
/// - `$nonce` has the same size as `CHACHAPOLY_NONCE` (12 bytes)
/// - `$plaintext` is not larger than the maximum plaintext limit
/// - `$buf` is large enough to hold the encrypted plaintext and the authentication tag
macro_rules! vfy_seal {
    ($key:expr, $nonce:expr, $plaintext:expr => $buf:expr) => ({
        #[allow(unused_imports)]
        use $crate::verify_input::{ UsizeExt, SliceExt };
        let error = match true {
            _ if $key._cv() != CHACHAPOLY_KEY => Err("Invalid key length"),
            _ if $nonce._cv() != CHACHAPOLY_NONCE => Err("Invalid nonce length"),
            _ if $plaintext._cv() > CHACHAPOLY_MAX => Err("Too much data"),
            // The output must fit ciphertext plus the appended tag.
            _ if $buf._cv() < $plaintext._cv() + CHACHAPOLY_TAG => Err("Buffer is too small"),
            _ => Ok(())
        };
        error.map_err(|e| $crate::ChachaPolyError::ApiMisuse(e))?;
    })
}
/// Verifies that
/// - `$key` has the same size as `CHACHAPOLY_KEY` (32 bytes)
/// - `$nonce` has the same size as `CHACHAPOLY_NONCE` (12 bytes)
/// - `$ciphertext` is not larger than the maximum plaintext limit and not smaller than an
/// authentication tag
/// - `$buf` is large enough to hold the **encrypted** ciphertext (copy and authenticate and
/// decrypt-in-place workflow)
macro_rules! vfy_open {
    ($key:expr, $nonce:expr, $ciphertext:expr => $buf:expr) => ({
        #[allow(unused_imports)]
        use $crate::verify_input::{ UsizeExt, SliceExt };
        let error = match true {
            _ if $key._cv() != CHACHAPOLY_KEY => Err("Invalid key length"),
            _ if $nonce._cv() != CHACHAPOLY_NONCE => Err("Invalid nonce length"),
            _ if $ciphertext._cv() > CHACHAPOLY_MAX => Err("Too much data"),
            // A ciphertext shorter than the tag cannot be authentic; this arm
            // returns `InvalidData` immediately via `?` instead of mapping
            // through `ApiMisuse` like the other checks.
            _ if $ciphertext._cv() < CHACHAPOLY_TAG => Err($crate::ChachaPolyError::InvalidData)?,
            _ if $buf._cv() < $ciphertext._cv() => Err("Buffer is too small"),
            _ => Ok(())
        };
        error.map_err(|e| $crate::ChachaPolyError::ApiMisuse(e))?;
    })
}
use core::alloc::{GlobalAlloc, Layout};
use core::cell::Cell;
use core::ptr::{NonNull, null_mut};
use core::mem::{align_of, size_of};
use core::slice::{from_raw_parts, from_raw_parts_mut};
use core::ops::{Deref, DerefMut};
pub(crate) use ALLOCATOR as Global;
// Symbol provided by the linker marking the end of the kernel image;
// only its *address* is meaningful, never its value.
extern {
    static kernel_phys_end: usize;
}
/// Physical address where allocatable memory begins: the address of the
/// `kernel_phys_end` linker symbol, truncated to the 32-bit address space.
fn main_mem_start() -> u32 {
    unsafe { &kernel_phys_end as *const usize as u32 }
}
/// Bytes available to the allocator: from the kernel's end up to
/// 0x2800_0000 (presumably the top of RAM on this board — confirm).
fn main_mem_size() -> usize {
    (0x28000000 - main_mem_start()) as usize
}
/// A simple bump allocator: a single monotonically increasing offset into
/// main memory. Deallocation is a no-op.
pub struct Allocator {
    // Current bump offset from `main_mem_start()`.
    offset: Cell<usize>
}
impl Allocator {
    /// Creates an allocator whose bump offset starts at zero.
    pub const fn new() -> Self {
        Self {
            offset: Cell::new(0)
        }
    }
    /// Allocates space for `n` values of `T` at the given alignment.
    ///
    /// # Safety
    /// `align` must be a valid (nonzero power-of-two) alignment and
    /// `size_of::<T>() * n` must not overflow — the layout is built
    /// unchecked. The returned memory is uninitialized.
    pub unsafe fn alloc_array<T>(&self, n: usize, align: usize) -> Result<NonNull<T>, ()> {
        let layout = Layout::from_size_align_unchecked(size_of::<T>() * n, align);
        let res = self.alloc(layout);
        if res == null_mut() {
            Err(())
        } else {
            Ok(NonNull::new_unchecked(res as *mut T))
        }
    }
    /// Releases an `alloc_array` allocation (a no-op: bump allocators
    /// never reclaim individual allocations).
    ///
    /// # Safety
    /// `ptr`, `n` and `align` must match the original allocation.
    pub unsafe fn dealloc_array<T>(&self, ptr: NonNull<T>, n: usize, align: usize) {
        let layout = Layout::from_size_align_unchecked(size_of::<T>() * n, align);
        self.dealloc(ptr.as_ptr() as *mut u8, layout);
    }
}
unsafe impl GlobalAlloc for Allocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let align_po2 = layout.align().trailing_zeros();
        let mut offset = self.offset.get();
        // Round the offset up to the requested alignment: shifting right
        // then left by log2(align) clears the low bits after the bump.
        offset = (offset + layout.align() - 1) >> align_po2 << align_po2;
        let addr = main_mem_start() + offset as u32;
        offset += layout.size();
        // NOTE(review): the new offset is committed even when the check
        // below fails, so a failed allocation still consumes address space.
        self.offset.set(offset);
        if offset > main_mem_size() {
            null_mut()
        } else {
            addr as *mut u8
        }
    }
    // Bump allocator: memory is never returned.
    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
}
// NOTE(review): `Cell` is not thread-safe, so this `Sync` impl is only
// sound while all allocations happen on a single core — confirm for this
// target before enabling SMP.
unsafe impl Sync for Allocator {}
/// A heap-allocated, fixed-size array backed by the global bump allocator.
pub struct Array<T> {
    // Pointer to the first element.
    ptr: NonNull<T>,
    // Number of elements.
    size: usize,
    // Alignment the allocation was made with (needed again on drop).
    alignment: usize,
}
impl<T> Array<T> {
    /// Allocates an array of `size` elements with `T`'s natural alignment.
    pub fn new(size: usize) -> Self {
        Self::aligned_new(size, align_of::<T>())
    }
    /// Allocates an array of `size` elements at an explicit `alignment`.
    ///
    /// Panics on allocation failure.
    ///
    /// NOTE(review): the backing memory is uninitialized; reading elements
    /// through `Deref` before writing them is undefined behavior — confirm
    /// that all callers initialize first.
    pub fn aligned_new(size: usize, alignment: usize) -> Self {
        Self {
            ptr: unsafe { Global.alloc_array(size, alignment) }
                .expect("Out of memory attempting to allocate array!"),
            // Field-init shorthand instead of the redundant `size: size` /
            // `alignment: alignment` forms.
            size,
            alignment,
        }
    }
    /// Consumes the array and returns a `'static` mutable slice. The
    /// allocation is effectively leaked (`dealloc` is a no-op anyway).
    pub fn leak(self) -> &'static mut [T] {
        unsafe { from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }
}
impl<T> Deref for Array<T> {
    type Target = [T];
    // Borrow the allocation as an immutable slice.
    fn deref(&self) -> &[T] {
        unsafe { from_raw_parts(self.ptr.as_ptr(), self.size) }
    }
}
impl<T> DerefMut for Array<T> {
    // Borrow the allocation as a mutable slice.
    fn deref_mut(&mut self) -> &mut [T] {
        unsafe { from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }
}
impl<T> Drop for Array<T> {
    // Hands the allocation back to the global allocator (currently a
    // no-op, since the bump allocator never frees).
    fn drop(&mut self) {
        unsafe { Global.dealloc_array(self.ptr, self.size, self.alignment) };
    }
}
|
extern crate bitintr;
use bitintr::*;
/// Parallel-bits-extract for `u32` via the `bitintr` crate.
/// `#[no_mangle]` keeps a stable symbol name for inspecting the codegen.
#[no_mangle]
pub fn pext_u32(x: u32, mask: u32) -> u32 {
    x.pext(mask)
}
/// Parallel-bits-extract for `u64` via the `bitintr` crate.
#[no_mangle]
pub fn pext_u64(x: u64, mask: u64) -> u64 {
    x.pext(mask)
}
|
use rocket::Request;
use rocket_contrib::Template;
/// Template context for the 404 page: the URI that was not found.
#[derive(Serialize)]
struct Context404 {
    uri: String
}
/// Rocket 404 catcher: renders the `errors/404` template with the
/// requested URI as context.
#[error(404)]
fn not_found(req: &Request) -> Template {
    let uri = req.uri().as_str().to_string();
    Template::render("errors/404", &Context404 { uri })
}
|
use crate::headers::*;
use crate::AddAsHeader;
use http::request::Builder;
/// Wrapper for the `x-ms-client-request-id` style header value.
#[derive(Debug, Clone, Copy)]
pub struct ClientRequestId<'a>(&'a str);
impl<'a> ClientRequestId<'a> {
    /// Wraps a borrowed client-request-id string.
    pub fn new(client_request_id: &'a str) -> Self {
        Self(client_request_id)
    }
}
// Allow plain `&str` wherever a `ClientRequestId` is expected.
impl<'a> From<&'a str> for ClientRequestId<'a> {
    fn from(client_request_id: &'a str) -> Self {
        Self::new(client_request_id)
    }
}
impl<'a> AddAsHeader for ClientRequestId<'a> {
    // Legacy builder-based API.
    fn add_as_header(&self, builder: Builder) -> Builder {
        builder.header(CLIENT_REQUEST_ID, self.0)
    }
    // Newer API: fails if the value contains bytes invalid in a header.
    fn add_as_header2(
        &self,
        request: &mut crate::Request,
    ) -> Result<(), crate::errors::HTTPHeaderError> {
        request
            .headers_mut()
            .append(CLIENT_REQUEST_ID, http::HeaderValue::from_str(self.0)?);
        Ok(())
    }
}
|
/// Returns the prime factorization of `input` in non-decreasing order.
///
/// `factors(0)` and `factors(1)` return an empty vector.
pub fn factors(input: u64) -> Vec<u64> {
    let mut factors = Vec::new();
    let mut target = input;
    let mut factor = 2;
    // Only trial-divide while factor <= sqrt(target): once past that point,
    // any remaining value > 1 is itself prime. This turns the worst case
    // (prime input) from O(n) into O(sqrt n). The condition is written as a
    // division (`factor <= target / factor`) so it cannot overflow for
    // inputs near u64::MAX, unlike `factor * factor <= target`.
    while factor <= target / factor {
        if target % factor == 0 {
            factors.push(factor);
            target /= factor;
        } else {
            factor += 1;
        }
    }
    // Whatever is left is a prime factor (or 1/0, which contributes nothing).
    if target > 1 {
        factors.push(target);
    }
    factors
}
|
use std::convert::TryInto;
use std::{cmp::Ordering::Greater, cmp::Ordering::Less, io, io::prelude::*, mem};
use vint32::iterator::VintArrayIterator;
use vint32::vint_array::VIntArray;
const FLUSH_THRESHOLD: usize = 16_384;
const VALUE_OFFSET: u32 = 1;
/// Read-side view over a serialized doc store: lz4-compressed blocks
/// followed by a block index and a trailing 4-byte index size.
#[derive(Debug)]
pub struct DocLoader<'a> {
    // The full serialized byte range (blocks + index + size suffix).
    blocks: &'a [u8],
    // The block index: packed little-endian (doc_id, offset) u32 pairs.
    block_index: &'a [u8],
}
impl<'a> DocLoader<'a> {
    /// Splits `data` into compressed blocks and the trailing block index.
    /// The final 4 bytes hold the index size (LE u32); the index itself
    /// sits immediately before them.
    pub fn open(data: &'a [u8]) -> Self {
        let index_size = u32::from_le_bytes(data[data.len() - 4..].try_into().unwrap());
        let block_index = &data[..data.len() - 4];
        Self {
            blocks: data,
            block_index: &block_index[block_index.len() - index_size as usize..],
        }
    }
    /// offsets are the offsets produced by the `DocWriter`
    pub fn get_doc(&self, doc_id: u32) -> Result<String, io::Error> {
        let offsets = self.block_index;
        let size = offsets.len() / mem::size_of::<(u32, u32)>();
        // binary search on the slice to find the correct block where the document resides
        // returns the start and end boundaries of the block
        let hit = binary_search_slice(size, doc_id, offsets);
        // Stored offsets are shifted by VALUE_OFFSET; undo that here.
        let start = hit.lower.1 - VALUE_OFFSET;
        let end = hit.upper.1 - VALUE_OFFSET;
        // load compressed block data into buffer
        let mut output = lz4_flex::decompress_size_prepended(&self.blocks[start as usize..end as usize]).unwrap();
        // Decompressed block layout: vint header length, then a vint array
        // [first doc id, per-doc offsets...], then the raw document bytes.
        let mut arr = VintArrayIterator::new(&output);
        let arr_size = arr.next().unwrap();
        let mut data_start = arr.pos;
        let mut arr = VintArrayIterator::new(&output[arr.pos..arr.pos + arr_size as usize]);
        let first_id_in_block = arr.next().unwrap();
        let mut doc_offsets_in_block: Vec<u32> = vec![];
        for off in arr.by_ref() {
            doc_offsets_in_block.push(off);
        }
        data_start += arr.pos;
        let pos_in_block = doc_id as usize - first_id_in_block as usize;
        // get the document from the decompressed data
        // NOTE(review): `document_start_pos` is actually the doc's *end*
        // boundary and `document_end_pos` its start — the names look
        // swapped relative to their use below; verify before refactoring.
        let document_start_pos = data_start + doc_offsets_in_block[pos_in_block + 1] as usize;
        let document_end_pos = data_start + doc_offsets_in_block[pos_in_block] as usize;
        // Truncate after the doc, then split off everything before it,
        // leaving exactly the doc's bytes in `doc`.
        output.resize(document_start_pos, 0);
        let doc = output.split_off(document_end_pos);
        let s = unsafe { String::from_utf8_unchecked(doc) };
        Ok(s)
    }
}
// Round-trips enough identical docs to force multiple block flushes
// (2640 docs × ~50 bytes well exceeds FLUSH_THRESHOLD).
#[test]
fn test_large_doc_store() {
    let mut writer = DocStoreWriter::new(0);
    let mut sink = vec![];
    let doc1 = r#"{"category": "superb", "tags": ["nice", "cool"] }"#;
    for _ in 0..2640 {
        writer.add_doc(doc1, &mut sink).unwrap();
    }
    writer.finish(&mut sink).unwrap();
    let doc_loader = DocLoader::open(&sink);
    for i in 0..2640 {
        assert_eq!(doc1.to_string(), doc_loader.get_doc(i as u32).unwrap());
    }
}
/// Write-side of the doc store: batches documents into blocks, compresses
/// each block with lz4, and records a block index for `DocLoader`.
#[derive(Debug)]
pub struct DocStoreWriter {
    // Id that will be assigned to the next added document.
    pub curr_id: u32,
    // Total uncompressed bytes indexed so far.
    pub bytes_indexed: u64,
    /// the offsets holds metadata for the block
    /// the tuple consists of (the first doc id in the block, the start byte of the block in the data)
    pub offsets: Vec<(u32, u32)>,
    // Byte position in the output stream where the next block will start.
    pub current_offset: u32,
    current_block: DocWriterBlock,
}
/// In-memory state of the block currently being filled.
#[derive(Debug, Default)]
struct DocWriterBlock {
    // Concatenated raw document bytes.
    data: Vec<u8>,
    // Block-relative start/end offsets of each doc (one more entry than docs).
    doc_offsets_in_cache: Vec<u32>,
    // Id of the first document in this block.
    first_id_in_block: u32,
}
impl DocStoreWriter {
    /// Creates a writer whose first block starts at byte `current_offset`
    /// of the output stream.
    pub fn new(current_offset: u32) -> Self {
        DocStoreWriter {
            curr_id: 0,
            bytes_indexed: 0,
            offsets: vec![],
            current_offset,
            current_block: Default::default(),
        }
    }
    /// add a document to the current block
    pub fn add_doc<W: Write>(&mut self, doc: &str, out: W) -> Result<(), io::Error> {
        self.bytes_indexed += doc.as_bytes().len() as u64;
        // An empty buffer means this doc opens a fresh block: record its id
        // and seed the offset list with the block-relative start (0).
        let new_block = self.current_block.data.is_empty();
        if new_block {
            self.current_block.first_id_in_block = self.curr_id;
            self.current_block.doc_offsets_in_cache.push(self.current_block.data.len() as u32);
        }
        self.current_block.data.extend(doc.as_bytes());
        // Push this doc's end offset (which is also the next doc's start).
        self.current_block.doc_offsets_in_cache.push(self.current_block.data.len() as u32);
        if self.current_block.data.len() > FLUSH_THRESHOLD {
            self.flush_block(out)?;
        }
        self.curr_id += 1;
        Ok(())
    }
    /// flushes the current block to out
    fn flush_block<W: Write>(&mut self, mut out: W) -> Result<(), io::Error> {
        // write first_id_in_block
        let mut arr = VIntArray::default();
        arr.encode(self.current_block.first_id_in_block);
        arr.encode_vals(&self.current_block.doc_offsets_in_cache);
        // Block layout: vint header (first id + offsets) followed by the
        // raw doc bytes, lz4-compressed with a length prefix.
        let mut data = arr.serialize();
        data.extend(self.current_block.data.as_slice());
        let output = lz4_flex::compress_prepend_size(&data);
        out.write_all(&output).unwrap();
        // Index entry: (first doc id, block start byte + VALUE_OFFSET).
        self.offsets.push((self.current_block.first_id_in_block, self.current_offset + VALUE_OFFSET));
        self.current_offset += output.len() as u32;
        self.current_block.data.clear();
        self.current_block.doc_offsets_in_cache.clear();
        out.flush()?;
        Ok(())
    }
    /// Flushes the final block, then appends the block index and its size.
    pub fn finish<W: Write>(&mut self, mut out: W) -> Result<(), io::Error> {
        self.flush_block(&mut out)?;
        // Sentinel entry so the last real block has an end boundary.
        self.offsets.push((self.curr_id + 1, self.current_offset + VALUE_OFFSET));
        for (id, current_offset) in &self.offsets {
            out.write_all(&id.to_le_bytes())?;
            out.write_all(&current_offset.to_le_bytes())?;
        }
        let index_size = self.offsets.len() * std::mem::size_of_val(&self.offsets[0]);
        out.write_all(&(index_size as u32).to_le_bytes())?;
        out.flush()?;
        Ok(())
    }
}
// Small round-trip: three distinct docs in a single block.
#[test]
fn test_doc_store() {
    let mut writer = DocStoreWriter::new(0);
    let mut sink = vec![];
    let doc1 = r#"{"test":"ok"}"#;
    let doc2 = r#"{"test2":"ok"}"#;
    let doc3 = r#"{"test3":"ok"}"#;
    writer.add_doc(doc1, &mut sink).unwrap();
    writer.add_doc(doc2, &mut sink).unwrap();
    writer.add_doc(doc3, &mut sink).unwrap();
    writer.finish(&mut sink).unwrap();
    let doc_loader = DocLoader::open(&sink);
    assert_eq!(doc1.to_string(), doc_loader.get_doc(0_u32).unwrap());
    assert_eq!(doc2.to_string(), doc_loader.get_doc(1_u32).unwrap());
    assert_eq!(doc3.to_string(), doc_loader.get_doc(2_u32).unwrap());
}
/// Decodes index entry `pos` from the packed block index: each entry is a
/// `(doc_id, byte_offset)` pair stored as two little-endian `u32`s.
#[inline]
fn decode_pos(pos: usize, slice: &[u8]) -> (u32, u32) {
    let entry = &slice[pos * 8..];
    (
        u32::from_le_bytes(entry[..4].try_into().unwrap()),
        u32::from_le_bytes(entry[4..8].try_into().unwrap()),
    )
}
/// Result of a block-index search: the index entries bounding the block
/// that contains (or would contain) the searched doc id.
#[derive(Debug)]
struct SearchHit {
    // True when the searched id equals `lower`'s id exactly.
    #[allow(dead_code)]
    found: bool,
    // (doc_id, offset) of the block's start entry.
    lower: (u32, u32),
    // (doc_id, offset) of the next entry — the block's end boundary.
    upper: (u32, u32),
}
/// Binary-searches the packed (id, offset) index for `id`, returning the
/// entries bounding the block that contains it. Assumes `slice` holds
/// `size` entries sorted by id and that `id` is within the indexed range.
#[inline]
fn binary_search_slice(mut size: usize, id: u32, slice: &[u8]) -> SearchHit {
    let mut left = 0;
    let mut right = size;
    while left < right {
        let mid = left + size / 2;
        let cmp = decode_pos(mid, slice).0.cmp(&id);
        if cmp == Less {
            left = mid + 1;
        } else if cmp == Greater {
            right = mid;
        } else {
            // Exact match: the block starts at this entry and ends at the
            // next one.
            left = mid;
            let hit = decode_pos(left, slice);
            let hit_next = decode_pos(left + 1, slice);
            return SearchHit {
                lower: hit,
                upper: hit_next,
                found: id == hit.0,
            };
        }
        size = right - left;
    }
    // No exact match: `left` is the first entry with an id greater than
    // `id`, so the containing block starts one entry earlier.
    let hit = decode_pos(left - 1, slice);
    let hit_next = decode_pos(left, slice);
    SearchHit {
        lower: hit,
        upper: hit_next,
        found: id == hit.0,
    }
}
|
mod login;
mod person;
mod persons_list;
pub use login::*;
pub use person::*;
pub use persons_list::*;
|
use target::Target;
use {ir, target, support, pass};
/// A compilation builder.
pub struct CompilationBuilder<'target>
{
    /// The target to build with.
    target: &'target Target,
    /// The target triple.
    triple: target::Triple,
    /// The target CPU.
    cpu: target::CPU,
    /// The current features, each prefixed with `+` (enabled) or `-`
    /// (disabled).
    features: Vec<String>,
}
impl<'target> CompilationBuilder<'target>
{
    /// Creates a new compilation builder for `target` with a default
    /// triple/CPU and no features.
    pub fn new(target: &'target Target) -> Self {
        CompilationBuilder {
            // Field-init shorthand instead of the redundant `target: target`.
            target,
            triple: target::Triple::default(),
            cpu: target::CPU::default(),
            features: Vec::new(),
        }
    }
    /// Sets the target triple.
    pub fn triple(&mut self, triple: target::Triple) -> &mut Self {
        self.triple = triple;
        self
    }
    /// Sets the target CPU.
    pub fn cpu(&mut self, cpu: target::CPU) -> &mut Self {
        self.cpu = cpu;
        self
    }
    /// Adds a feature (recorded as `+name`).
    pub fn add_feature(&mut self, name: &str) -> &mut Self {
        self.features.push(format!("+{}", name));
        self
    }
    /// Removes a feature (recorded as `-name`).
    pub fn remove_feature(&mut self, name: &str) -> &mut Self {
        self.features.push(format!("-{}", name));
        self
    }
    /// Builds the compilation.
    ///
    /// NOTE(review): the features are concatenated with no separator; LLVM
    /// target feature strings are normally comma-separated — confirm the
    /// `create_machine` contract before relying on multiple features.
    pub fn build(&mut self) -> Compilation {
        let feature_str: String = self.features.join("");
        let machine = self.target.create_machine(&self.triple.0,
                                                 &self.cpu.0,
                                                 &feature_str);
        Compilation { machine }
    }
}
/// A compilation.
pub struct Compilation
{
    // The configured target machine used to emit code.
    machine: target::Machine,
}
impl Compilation
{
    /// Creates a new target using the default configuration.
    ///
    /// Use `CompilationBuilder` for more flexibility.
    pub fn new(target: &Target) -> Self {
        CompilationBuilder::new(target).build()
    }
    /// Compiles a module.
    ///
    /// Builds a pass manager that emits `module` to `stream` in the
    /// requested `file_type`, then runs it.
    pub fn compile<S>(&self,
                      module: ir::Module,
                      stream: S,
                      file_type: target::FileType)
        where S: AsRef<support::OutputStream> {
        let mut pm = pass::Manager::new();
        self.machine.add_passes_to_emit_file(&pm, stream.as_ref(), file_type);
        pm.run(module);
    }
}
|
#[macro_use]
extern crate error_chain;
#[macro_use]
extern crate nom;
use nom::{le_u8, le_u16, le_u32};
use std::io::Read;
use std::fs::File;
use std::path::Path;
mod errors;
use errors::*;
/// A FIT file loaded fully into memory, with its parsed header.
pub struct FitFile {
    // The raw file contents (header + data records + trailing CRC).
    bytes: Vec<u8>,
    // Parsed copy of the file header.
    header: FitFileHeader,
}
impl FitFile {
    /// Reads a FIT file from disk, parses its header, and validates the
    /// declared data size against the actual file length.
    pub fn open<P: AsRef<Path>>(path: P) -> Result<FitFile> {
        let bytes = {
            let mut buf = vec![];
            let mut f = File::open(path)?;
            f.read_to_end(&mut buf)?;
            buf
        };
        let header = fitfile_header(&bytes)
            .to_result()
            .map_err(|e| -> ErrorKind { e.description().into() })?;
        let inst = FitFile {
            bytes: bytes,
            header: header,
        };
        inst.validate_data()?;
        Ok(inst)
    }
    /// Returns the parsed file header.
    pub fn get_header(&self) -> &FitFileHeader {
        &self.header
    }
    /// Checks that the actual data length (file minus header and the
    /// 2-byte trailing CRC) matches the header's declared `data_size`.
    fn validate_data(&self) -> Result<()> {
        let header = self.get_header();
        let data_size = self.bytes.len() - header.header_size as usize - 2;
        match data_size == header.data_size as usize {
            true => Ok(()),
            false => Err("Data looks to be corrupted".into()),
        }
    }
    /// Returns the data-record bytes (everything between the header and
    /// the trailing CRC).
    fn data_bytes(&self) -> &[u8] {
        // BUG FIX: `data_size` is a length, not an end offset. The previous
        // slice `header_size..data_size` cut the records short by
        // `header_size` bytes; the end must be `header_size + data_size`.
        let start = self.header.header_size as usize;
        &self.bytes[start..start + self.header.data_size as usize]
    }
}
/// The fixed FIT file header, as parsed by `fitfile_header`.
#[derive(Debug)]
pub struct FitFileHeader {
    // Total header length in bytes.
    header_size: u8,
    protocol_version: u8,
    profile_version: u16,
    // Length of the data-record section (excludes header and trailing CRC).
    data_size: u32,
    // Header CRC.
    crc: u16,
}
// Parses the FIT header: little-endian fields followed by the ".FIT" tag.
// NOTE(review): the header CRC is only present when `header_size` is 14;
// this parser reads it unconditionally — confirm 12-byte headers are not
// expected here.
named!(fitfile_header <FitFileHeader>, do_parse!(
    header_size: le_u8 >>
    protocol_version: le_u8 >>
    profile_version: le_u16 >>
    data_size: le_u32 >>
    tag!(".FIT") >>
    crc: le_u16 >>
    (
        FitFileHeader {
            header_size: header_size,
            protocol_version: protocol_version,
            profile_version: profile_version,
            data_size: data_size,
            crc: crc,
        }
    )
));
/// Whether a normal record carries a message definition or message data.
#[derive(Debug)]
enum NormalRecordMessageType {
    DefinitionMessage { developer_extensions_enabled: bool },
    DataMessage,
}
/// The one-byte record header, in either of its two encodings.
#[derive(Debug)]
enum RecordHeader {
    Normal {
        local_message_type: u8,
        message_type: NormalRecordMessageType,
    },
    CompressedTimestamp {
        local_message_type: u8,
        // Timestamp offset in seconds relative to the last full timestamp.
        time_offset_secs: u8,
    },
}
// Parses a "normal" record header byte.
// NOTE(review): nom's `bits!` consumes from the most-significant bit, so
// this reads the local message type from the *top* nibble. The FIT spec
// puts the flag bits (normal/definition/dev-data/reserved) at the top and
// the local message type in the low nibble — verify the field order.
named!(normal_record_header <RecordHeader>, bits! ( do_parse! (
    local_message_type: take_bits!(u8, 4) >>
    tag_bits!(u8, 1, 0) >>
    developer_extensions_enabled: take_bits!(u8, 1) >>
    is_definition_message: take_bits!(u8, 1) >>
    tag_bits!(u8, 1, 0) >>
    (
        RecordHeader::Normal {
            local_message_type: local_message_type,
            message_type: match is_definition_message {
                1 => NormalRecordMessageType::DefinitionMessage {
                    developer_extensions_enabled: developer_extensions_enabled == 1
                },
                0 => NormalRecordMessageType::DataMessage,
                // A 1-bit field can only be 0 or 1.
                _ => panic!()
            }
        }
    )
)));
// Parses a compressed-timestamp record header byte.
// NOTE(review): as with `normal_record_header`, `bits!` reads MSB-first;
// the FIT spec places the compressed-header marker bit at the top (bit 7),
// followed by the local type then the time offset — verify the order here.
named!(compressed_timestamp_header <RecordHeader>, bits! ( do_parse! (
    time_offset_secs: take_bits!(u8, 5) >>
    local_message_type: take_bits!(u8, 2) >>
    tag_bits!(u8, 1, 1) >>
    (
        RecordHeader::CompressedTimestamp {
            time_offset_secs: time_offset_secs,
            local_message_type: local_message_type,
        }
    )
)));
// Tries the normal encoding first, falling back to compressed-timestamp.
named!(record_header <RecordHeader>, alt!(normal_record_header | compressed_timestamp_header));
/// A FIT data record (currently just its header; payload parsing TBD).
#[derive(Debug)]
struct Record {
    header: RecordHeader,
}
|
// Extract the subset of boards that are strictly reachable from the initial
// board.
//
// Input: 2-analyze's output
//
// Output:
// board depth name
// idx: next-name...
// ...
// ...
//
// board: hex representation of bit-board (only white boards)
// depth: the depth of the board
// name: board name (id)
// idx: move index (the index of return value of Board#next)
// next-name: all possible white boards proceeded by the black board
#[macro_use]
extern crate precomp;
use std::cmp;
use precomp::{In, Out};
use precomp::board::{Board, Result, ELEPHANT, GIRAFFE, CHICK};
use precomp::board_collection::{BoardSet, BoardMap};
/// One legal move: the move index from `Board::next` and the resulting board.
struct Move {
    idx: u8,
    board: Board,
}
/// A node of the extracted game DAG: a board, its solved depth, and the
/// moves kept after pruning.
struct Node {
    board: Board,
    depth: i32,
    next_boards: Vec<Move>,
}
// load all possible boards
fn load() -> BoardMap {
fn log(msg: &str, boards: usize) {
log!("{} (boards: {})", msg, boards);
}
let mut oracle = BoardMap::new();
In::each(|b, depth, _| {
oracle[b] = depth;
if oracle.len() % 10000000 == 0 {
log("loading...", oracle.len());
}
});
log("loaded!", oracle.len());
oracle
}
// check if a given branch is hopeless
fn check_hopeless(b: Board, nb: Board) -> bool {
// we know the two branches are hopeless by some experiments
b.0 == 0x000a9030c41b002u64 && nb.0 == 0x400a00390c0b012u64 ||
b.0 == 0x000a0030c41b902u64 && nb.0 == 0x400a01390c0b002u64
}
// extract strictly reachable boards
fn extract(oracle: BoardMap) -> Vec<Node> {
// The definition of "strictly reachable"
// * The initial board is strictly reachable.
// * If a black board is strictly reachable, all white boards from the black
// one are strictly reachable.
// (i.e., a black player may choose any possible moves.)
// * If a white board is strictly reachable and the board is depth-N,
// all depth-{N-1} black boards from the black one are strictly reachable.
// (i.e., a white player chooses any best possible moves.)
let mut all_in_hands = 0;
let mut end_in_hands = 0;
let mut min_degree = 64;
let mut max_degree = 0;
let mut max_hands = 0;
let mut visited = BoardSet::new();
let mut boards = vec![Board::init().normalize()];
let mut nodes = vec![];
// straightforward DFS
while let Some(b) = boards.pop() {
if visited.contains(b) { continue }
visited.insert(b);
let depth = oracle[b];
if visited.len() % 1000000 == 0 {
log!("extracting... (visited: {}, remaining: {})",
visited.len(), boards.len());
}
// keep statistics
{
let h1 = b.hand(ELEPHANT) + b.hand(GIRAFFE) + b.hand(CHICK);
let b = b.reverse();
let h2 = b.hand(ELEPHANT) + b.hand(GIRAFFE) + b.hand(CHICK);
if h1 == 6 || h2 == 6 {
all_in_hands += 1;
if b.next() == Result::Win { end_in_hands += 1 }
}
max_hands = cmp::max(max_hands, cmp::max(h1, h2));
}
if let Result::Unknown(bs) = b.next() {
let mut node = Node {
board: b,
depth: depth as i32,
next_boards: vec![],
};
max_degree = cmp::max(max_degree, bs.len());
min_degree = cmp::min(min_degree, bs.len());
for (i, &nb) in bs.iter().enumerate() {
// record all black boards (even depth),
// and white boards (odd depth, only best move)
if depth % 2 == 0 || oracle[nb] == depth - 1 {
// ad-hoc heuristic: manually prune hopeless branches
if check_hopeless(b, nb) { continue }
node.next_boards.push(Move { idx: i as u8, board: nb });
boards.push(nb)
}
}
nodes.push(node);
}
}
log!("Step 3: result");
log!(" extracted dag size: {}", visited.len());
log!(" all in hands: {}", all_in_hands);
log!(" end in hands: {}", end_in_hands);
log!(" max hands: {}", max_hands);
log!(" min degree: {}", min_degree);
log!(" max degree: {}", max_degree);
nodes.sort_by_key(|node| -node.depth );
nodes
}
// Writes the extracted DAG in the documented "board depth name / idx: ..."
// format. Only odd-depth (white) boards get names and are printed.
fn output(nodes: Vec<Node>) {
    let mut out = Out::new();
    // board -> position in `nodes`.
    let mut map = BoardMap::new();
    // board -> sequential name (assigned to white boards only).
    let mut names = BoardMap::new();
    let mut name = 0;
    for (i, node) in nodes.iter().enumerate() {
        map[node.board] = i as i32;
        if node.depth % 2 == 1 {
            names[node.board] = name;
            name += 1;
        }
    }
    // First line: the names reachable directly from the initial board.
    for ref m in &nodes[map[Board::init().normalize()] as usize].next_boards {
        out!(out, " {}", names[m.board]);
    }
    out!(out, "\n");
    for node in &nodes {
        // black boards are omitted
        if node.depth % 2 == 0 { continue }
        // depth-3 (or less) boards are omitted
        if node.depth <= 3 { continue }
        // print this white board
        out!(out, "{:015x} {} {}\n", node.board.0, node.depth, names[node.board]);
        // print all next white boards for each best move
        for ref m in &node.next_boards {
            if !map.contains(m.board) { continue };
            let ref nnode = nodes[map[m.board] as usize];
            // The successor of a white (odd) board must be black (even).
            if nnode.depth % 2 != 0 { unreachable!() }
            out!(out, " {}:", m.idx);
            for ref m in &nnode.next_boards {
                if names.contains(m.board) { out!(out, " {}", names[m.board]) }
            }
            out!(out, "\n");
        }
        out!(out, "\n");
    }
}
// Pipeline: load the solved oracle, extract the reachable subset, emit it.
fn main() {
    log!("Step 3: extract an subset of needed boards");
    let oracle = load();
    let nodes = extract(oracle);
    output(nodes);
    log!("Step 3: done!");
}
|
// Generated by `scripts/generate.js`
// Re-exports of the KHR acceleration-structure aliases (generated code).
pub type VkAccelerationStructureType = super::super::khr::VkAccelerationStructureType;
#[doc(hidden)]
pub type RawVkAccelerationStructureType = super::super::khr::RawVkAccelerationStructureType;
#![cfg(all(test, feature = "test_e2e"))]
use azure_core::prelude::*;
use azure_storage::blob::container::PublicAccess;
use azure_storage::blob::prelude::*;
use azure_storage::core::prelude::*;
use std::sync::Arc;
use std::time::Duration;
// E2E: acquire a 30s container lease, renew it, release it, then delete
// the container. Requires live storage credentials (see `initialize`).
#[tokio::test]
async fn lease() {
    let container_name: &'static str = "azuresdkrustetoets2";
    let storage = initialize().as_storage_client();
    let container = storage.as_container_client(container_name);
    container
        .create()
        .public_access(PublicAccess::None)
        .execute()
        .await
        .unwrap();
    let res = container
        .acquire_lease(Duration::from_secs(30))
        .execute()
        .await
        .unwrap();
    let lease_id = res.lease_id;
    let lease = container.as_container_lease_client(lease_id);
    let _res = lease.renew().execute().await.unwrap();
    let _res = lease.release().execute().await.unwrap();
    container.delete().execute().await.unwrap();
}
// E2E: acquire and renew a lease, then break it with a zero break period
// and verify the service reports the lease as immediately broken.
#[tokio::test]
async fn break_lease() {
    let container_name: &'static str = "azuresdkrustetoets3";
    let storage = initialize().as_storage_client();
    let container = storage.as_container_client(container_name);
    container
        .create()
        .public_access(PublicAccess::None)
        .execute()
        .await
        .unwrap();
    let res = container
        .acquire_lease(Duration::from_secs(30))
        .execute()
        .await
        .unwrap();
    let lease = container.as_container_lease_client(res.lease_id);
    lease.renew().execute().await.unwrap();
    let res = container
        .break_lease()
        .lease_break_period(Duration::from_secs(0))
        .execute()
        .await
        .unwrap();
    assert!(res.lease_time == 0);
    container.delete().execute().await.unwrap();
}
/// Builds a `StorageAccountClient` from the `STORAGE_ACCOUNT` /
/// `STORAGE_MASTER_KEY` environment variables, panicking with a hint when
/// either is unset.
fn initialize() -> Arc<StorageAccountClient> {
    let account =
        std::env::var("STORAGE_ACCOUNT").expect("Set env variable STORAGE_ACCOUNT first!");
    let master_key =
        std::env::var("STORAGE_MASTER_KEY").expect("Set env variable STORAGE_MASTER_KEY first!");
    // `http_client` is not used again, so move it instead of the previous
    // redundant `.clone()`.
    let http_client = new_http_client();
    StorageAccountClient::new_access_key(http_client, &account, &master_key)
}
|
use crate::{
env::Env,
render::{RenderContext, VNode},
};
use futures::{
channel::mpsc, //
prelude::*,
stream::{FusedStream, Stream},
task::{self, Poll},
};
use siro::vdom::Nodes;
use std::pin::Pin;
use wasm_bindgen::JsCast as _;
/// The running application: owns the virtual DOM mounted at `mountpoint`
/// and a message channel driving updates.
pub struct App<'env, TMsg: 'static> {
    env: &'env Env,
    // DOM node the virtual nodes are rendered under.
    mountpoint: web::Node,
    // Current virtual DOM, diffed against on each render.
    vnodes: Vec<VNode>,
    // Sender handed to event handlers; `rx` is polled by the app loop.
    tx: mpsc::UnboundedSender<TMsg>,
    rx: mpsc::UnboundedReceiver<TMsg>,
}
impl<'env, TMsg: 'static> App<'env, TMsg> {
    /// Creates an app mounted at `mountpoint` with an empty virtual DOM.
    pub(crate) fn new(env: &'env Env, mountpoint: web::Node) -> Self {
        let (tx, rx) = mpsc::unbounded();
        Self {
            env,
            mountpoint,
            vnodes: vec![],
            tx,
            rx,
        }
    }
    /// Queues a message; errors (receiver gone) are deliberately ignored.
    pub fn send_message(&self, msg: TMsg) {
        let _ = self.tx.unbounded_send(msg);
    }
    /// Waits for the next queued message, or `None` once the channel closes.
    pub async fn next_message(&mut self) -> Option<TMsg> {
        self.next().await
    }
    /// Diffs `nodes` against the current virtual DOM and applies the
    /// changes under the mountpoint.
    pub fn render<N>(&mut self, nodes: N) -> crate::Result<()>
    where
        N: Nodes<TMsg>,
    {
        RenderContext {
            document: &self.env.document,
            parent: &self.mountpoint,
            tx: &self.tx,
        }
        .diff_nodes(nodes, &mut self.vnodes)?;
        Ok(())
    }
    /// Focuses the element with the given id, if it exists and is an
    /// `HtmlElement`; missing elements are silently ignored.
    pub fn focus(&self, id: &str) -> crate::Result<()> {
        if let Some(element) = self.env.document.get_element_by_id(id) {
            if let Ok(element) = element.dyn_into::<web::HtmlElement>() {
                element.focus().map_err(crate::Error::caught_from_js)?;
            }
        }
        Ok(())
    }
    /// Removes focus from the element with the given id, mirroring `focus`.
    pub fn blur(&self, id: &str) -> crate::Result<()> {
        if let Some(element) = self.env.document.get_element_by_id(id) {
            if let Ok(element) = element.dyn_into::<web::HtmlElement>() {
                element.blur().map_err(crate::Error::caught_from_js)?;
            }
        }
        Ok(())
    }
}
// The app is a stream of its own messages: polling delegates to the
// receiving half of the internal channel.
impl<TMsg: 'static> Stream for App<'_, TMsg> {
    type Item = TMsg;
    #[inline]
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut task::Context<'_>) -> Poll<Option<Self::Item>> {
        self.rx.poll_next_unpin(cx)
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.rx.size_hint()
    }
}
// Terminated exactly when the underlying message channel is.
impl<TMsg: 'static> FusedStream for App<'_, TMsg> {
    #[inline]
    fn is_terminated(&self) -> bool {
        self.rx.is_terminated()
    }
}
|
type Image = Vec<Vec<u32>>;
/// Rotates a square matrix 90° clockwise (equivalent to transposing and
/// then reversing each row).
///
/// Assumes `image` is square; non-square input is unsupported (as in the
/// original in-place version, which indexed past short rows).
fn rotate_matrix(image: Image) -> Image {
    let n = image.len();
    // Row r of the result is column r of the input read bottom-to-top.
    // Building the result directly avoids the previous full clone that was
    // then overwritten element by element.
    (0..n)
        .map(|r| (0..n).rev().map(|src_row| image[src_row][r]).collect())
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    // 4x4 clockwise rotation.
    #[test]
    fn test_1() {
        let image = vec![
            vec![1, 2, 3, 4],
            vec![5, 6, 7, 8],
            vec![9, 10, 11, 12],
            vec![13, 14, 15, 16],
        ];
        let rotated = rotate_matrix(image);
        let expected_rotated = vec![
            vec![13, 9, 5, 1],
            vec![14, 10, 6, 2],
            vec![15, 11, 7, 3],
            vec![16, 12, 8, 4],
        ];
        assert_eq!(rotated, expected_rotated);
    }
    // 2x2 clockwise rotation.
    #[test]
    fn test_2() {
        let image = vec![vec![1, 2], vec![3, 4]];
        let rotated = rotate_matrix(image);
        let expected_rotated = vec![vec![3, 1], vec![4, 2]];
        assert_eq!(rotated, expected_rotated);
    }
}
|
use actix_web::{App,HttpServer,web};
extern crate simple_logger;
mod executor{
pub mod validate_password;
pub mod execute;
}
// HTTP entry point: mounts the password-validation and command-execution
// handlers under `/serv/` and serves on localhost:8088.
#[actix_rt::main]
async fn main() ->std::io::Result<()> {
    simple_logger::SimpleLogger::new().init().unwrap();
    HttpServer::new(||{
        App::new()
            .service(
                web::scope("/serv/")
                    .service(executor::validate_password::validate_password)
                    .service(executor::execute::execute_command)
            )
    })
    // 10 worker threads; keep idle connections open for 15 seconds.
    .workers(10)
    .keep_alive(15)
    .bind("127.0.0.1:8088")?
    .run()
    .await
}
use std::collections::HashMap;
use amethyst::{
assets::Handle,
core::timing::Time,
ecs::prelude::*,
input::{is_close_requested, is_key_down},
prelude::*,
renderer::VirtualKeyCode,
ui::{UiEventType, UiPrefab},
};
use crate::{states::ToppaState, ToppaGameData};
/// The UI buttons available on the credits screen.
#[derive(PartialEq, Eq, Hash, Debug, PartialOrd, Ord)]
pub enum CreditsButtons {
    Back,
}
/// The credit menu stage, displaying the credits.
pub struct CreditsState<'d, 'e,> {
    // Time (seconds) this menu has been shown.
    menu_duration: f32,
    main_dispatcher: Option<Dispatcher<'d, 'e,>,>,
    // The displayed Ui Entity, if any.
    current_screen: Option<Entity,>,
    // The Handle of the Prefab for the displayed Ui Entity.
    current_screen_prefab: Option<Handle<UiPrefab,>,>,
    // Map of the Ui Button entities and the corresponding button type.
    ui_buttons: HashMap<Entity, CreditsButtons,>,
    // True once the button entities have been resolved from the UI prefab.
    b_buttons_found: bool,
}
impl<'d, 'e,> ToppaState<'d, 'e,> for CreditsState<'d, 'e,> {
    type StateButton = CreditsButtons;
    // NOTE(review): the dispatcher is deliberately disabled (set to None);
    // the commented-out code shows the intended dummy-system setup.
    fn enable_dispatcher(&mut self, world: &mut World,) {
        self.main_dispatcher = None; /*Some({
            let mut dispatcher = DispatcherBuilder::new()
                .with(DummySystem { counter: 0 }, "dummy_system", &[])
                .build();
            dispatcher.setup(&mut world.res);
            dispatcher
        });*/
    }
    // Fresh state with no screen entity and no resolved buttons yet.
    fn new(_world: &mut World, screen_opt: Option<Handle<UiPrefab,>,>,) -> Self {
        CreditsState {
            menu_duration: 0.0,
            current_screen: None,
            current_screen_prefab: screen_opt,
            ui_buttons: HashMap::new(),
            b_buttons_found: false,
            main_dispatcher: None,
        }
    }
    fn get_screen_entity(&self) -> Option<Entity,> {
        self.current_screen
    }
    fn set_screen_entity(&mut self, screen_entity: Option<Entity,>,) {
        self.current_screen = screen_entity;
    }
    fn get_screen_prefab(&self) -> Option<Handle<UiPrefab,>,> {
        self.current_screen_prefab.clone()
    }
    fn set_screen_prefab(&mut self, screen_prefab: Option<Handle<UiPrefab,>,>,) {
        self.current_screen_prefab = screen_prefab.clone();
    }
    fn get_main_dispatcher(&mut self) -> Option<&mut Option<Dispatcher<'d, 'e,>,>,> {
        Some(&mut self.main_dispatcher,)
    }
    // Forget resolved buttons so they are re-discovered on next update.
    fn reset_buttons(&mut self) {
        self.b_buttons_found = false;
        self.ui_buttons.clear();
    }
    fn get_buttons(&mut self) -> Option<&mut HashMap<Entity, Self::StateButton,>,> {
        Some(&mut self.ui_buttons,)
    }
}
impl<'a, 'b, 'd, 'e,> State<ToppaGameData<'a, 'b,>, StateEvent,> for CreditsState<'d, 'e,> {
    // Window events quit the game on close/Escape; UI clicks are routed
    // to the matching button handler.
    fn handle_event(
        &mut self,
        data: StateData<'_, ToppaGameData<'_, '_,>,>,
        event: StateEvent,
    ) -> Trans<ToppaGameData<'a, 'b,>, StateEvent,> {
        let StateData {
            mut world,
            data: _,
        } = data;
        match &event {
            StateEvent::Window(wnd_event,) => {
                if is_close_requested(&wnd_event,)
                    || is_key_down(&wnd_event, VirtualKeyCode::Escape,)
                {
                    Trans::Quit
                }
                else {
                    Trans::None
                }
            },
            StateEvent::Ui(ui_event,) => {
                use self::UiEventType::*;
                match ui_event.event_type {
                    Click => self.btn_click(&mut world, ui_event.target,),
                    _ => Trans::None,
                }
            },
        }
    }
    // Per-frame: run the dispatcher, tick the timer, and lazily resolve the
    // Back button entity until it is found.
    fn update(
        &mut self,
        data: StateData<'_, ToppaGameData<'_, '_,>,>,
    ) -> Trans<ToppaGameData<'a, 'b,>, StateEvent,> {
        let StateData {
            mut world,
            data,
        } = data;
        self.dispatch(&world,);
        data.update_menu(&world,);
        self.menu_duration += world.read_resource::<Time>().delta_seconds();
        if !self.b_buttons_found {
            self.b_buttons_found =
                self.insert_button(&mut world, CreditsButtons::Back, "menu_credits_back_button",);
            Trans::None
        }
        else {
            Trans::None
        }
    }
    // Executed when this game state runs for the first time.
    fn on_start(&mut self, data: StateData<'_, ToppaGameData<'_, '_,>,>,) {
        let StateData {
            mut world,
            data: _,
        } = data;
        self.enable_current_screen(&mut world,);
        self.enable_dispatcher(&mut world,);
    }
    // Executed when this game state gets popped.
    fn on_stop(&mut self, data: StateData<'_, ToppaGameData<'_, '_,>,>,) {
        let StateData {
            mut world,
            data: _,
        } = data;
        self.disable_dispatcher();
        self.disable_current_screen(&mut world,);
    }
}
impl<'a, 'b, 'd, 'e,> CreditsState<'d, 'e,> {
    /// Maps a clicked UI entity to its button action, if it is one of ours.
    fn btn_click(
        &self,
        _world: &mut World,
        target: Entity,
    ) -> Trans<ToppaGameData<'a, 'b,>, StateEvent,> {
        use self::CreditsButtons::*;
        if let Some(button,) = self.ui_buttons.get(&target,) {
            match button {
                Back => self.btn_back(),
            }
        }
        else {
            Trans::None
        }
    }
    /// Back button: pop this state to return to the previous menu.
    fn btn_back(&self) -> Trans<ToppaGameData<'a, 'b,>, StateEvent,> {
        #[cfg(feature = "debug")]
        debug!("Returning to CentreState.");
        Trans::Pop
    }
}
|
use crate::distribution::{Continuous, ContinuousCDF};
use crate::function::gamma;
use crate::statistics::*;
use crate::{Result, StatsError};
use rand::Rng;
use std::f64;
/// Implements the [Inverse
/// Gamma](https://en.wikipedia.org/wiki/Inverse-gamma_distribution)
/// distribution
///
/// # Examples
///
/// ```
/// use statrs::distribution::{InverseGamma, Continuous};
/// use statrs::statistics::Distribution;
/// use statrs::prec;
///
/// let n = InverseGamma::new(1.1, 0.1).unwrap();
/// assert!(prec::almost_eq(n.mean().unwrap(), 1.0, 1e-14));
/// assert_eq!(n.pdf(1.0), 0.07554920138253064);
/// ```
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct InverseGamma {
    /// Shape parameter α; `new` guarantees it is finite and > 0.
    shape: f64,
    /// Rate parameter β; `new` guarantees it is finite and > 0.
    rate: f64,
}
impl InverseGamma {
    /// Constructs a new inverse gamma distribution with a shape (α)
    /// of `shape` and a rate (β) of `rate`
    ///
    /// # Errors
    ///
    /// Returns an error if `shape` or `rate` is `NaN`, infinite, zero,
    /// or negative — both parameters must lie in `(0, +inf)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::InverseGamma;
    ///
    /// let mut result = InverseGamma::new(3.0, 1.0);
    /// assert!(result.is_ok());
    ///
    /// result = InverseGamma::new(0.0, 0.0);
    /// assert!(result.is_err());
    /// ```
    pub fn new(shape: f64, rate: f64) -> Result<InverseGamma> {
        // `is_finite()` is false for both NaN and ±inf, so one predicate
        // covers all three rejection cases of the original tuple match.
        if shape.is_finite() && rate.is_finite() && shape > 0.0 && rate > 0.0 {
            Ok(InverseGamma { shape, rate })
        } else {
            Err(StatsError::BadParams)
        }
    }
    /// Returns the shape (α) of the inverse gamma distribution
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::InverseGamma;
    ///
    /// let n = InverseGamma::new(3.0, 1.0).unwrap();
    /// assert_eq!(n.shape(), 3.0);
    /// ```
    pub fn shape(&self) -> f64 {
        self.shape
    }
    /// Returns the rate (β) of the inverse gamma distribution
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::InverseGamma;
    ///
    /// let n = InverseGamma::new(3.0, 1.0).unwrap();
    /// assert_eq!(n.rate(), 1.0);
    /// ```
    pub fn rate(&self) -> f64 {
        self.rate
    }
}
impl ::rand::distributions::Distribution<f64> for InverseGamma {
    /// Draws a sample by inverting a gamma draw: if X ~ Gamma(α, β)
    /// then 1/X ~ InverseGamma(α, β).
    fn sample<R: Rng + ?Sized>(&self, r: &mut R) -> f64 {
        1.0 / super::gamma::sample_unchecked(r, self.shape, self.rate)
    }
}
impl ContinuousCDF<f64, f64> for InverseGamma {
    /// Calculates the cumulative distribution function for the inverse gamma
    /// distribution at `x`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// Γ(α, β / x) / Γ(α)
    /// ```
    ///
    /// where the numerator is the upper incomplete gamma function,
    /// the denominator is the gamma function, `α` is the shape,
    /// and `β` is the rate
    fn cdf(&self, x: f64) -> f64 {
        if x <= 0.0 {
            // No mass at or below zero: the support is (0, ∞)
            0.0
        } else if x.is_infinite() {
            // The whole support lies below +inf
            1.0
        } else {
            // Regularized upper incomplete gamma evaluated at β / x
            gamma::gamma_ur(self.shape, self.rate / x)
        }
    }
}
impl Min<f64> for InverseGamma {
    /// Returns the minimum value in the domain of the
    /// inverse gamma distribution representable by a double precision
    /// float
    ///
    /// # Formula
    ///
    /// ```ignore
    /// 0
    /// ```
    fn min(&self) -> f64 {
        // Infimum of the support; the density itself is 0 at x = 0
        0.0
    }
}
impl Max<f64> for InverseGamma {
    /// Returns the maximum value in the domain of the
    /// inverse gamma distribution representable by a double precision
    /// float
    ///
    /// # Formula
    ///
    /// ```ignore
    /// INF
    /// ```
    fn max(&self) -> f64 {
        // The support is unbounded above
        f64::INFINITY
    }
}
impl Distribution<f64> for InverseGamma {
    /// Returns the mean of the inverse gamma distribution
    ///
    /// # None
    ///
    /// If `shape <= 1.0`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// β / (α - 1)
    /// ```
    ///
    /// where `α` is the shape and `β` is the rate
    fn mean(&self) -> Option<f64> {
        // The mean only exists (is finite) for α > 1
        if self.shape <= 1.0 {
            None
        } else {
            Some(self.rate / (self.shape - 1.0))
        }
    }
    /// Returns the variance of the inverse gamma distribution
    ///
    /// # None
    ///
    /// If `shape <= 2.0`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// β^2 / ((α - 1)^2 * (α - 2))
    /// ```
    ///
    /// where `α` is the shape and `β` is the rate
    fn variance(&self) -> Option<f64> {
        // The variance only exists for α > 2
        if self.shape <= 2.0 {
            None
        } else {
            let val = self.rate * self.rate
                / ((self.shape - 1.0) * (self.shape - 1.0) * (self.shape - 2.0));
            Some(val)
        }
    }
    /// Returns the entropy of the inverse gamma distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// α + ln(β * Γ(α)) - (1 + α) * ψ(α)
    /// ```
    ///
    /// where `α` is the shape, `β` is the rate, `Γ` is the gamma function,
    /// and `ψ` is the digamma function
    fn entropy(&self) -> Option<f64> {
        // ln(β * Γ(α)) is expanded as ln(β) + ln(Γ(α)) below; always `Some`
        let entr = self.shape + self.rate.ln() + gamma::ln_gamma(self.shape)
            - (1.0 + self.shape) * gamma::digamma(self.shape);
        Some(entr)
    }
    /// Returns the skewness of the inverse gamma distribution
    ///
    /// # None
    ///
    /// If `shape <= 3`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// 4 * sqrt(α - 2) / (α - 3)
    /// ```
    ///
    /// where `α` is the shape
    fn skewness(&self) -> Option<f64> {
        // The skewness only exists for α > 3
        if self.shape <= 3.0 {
            None
        } else {
            Some(4.0 * (self.shape - 2.0).sqrt() / (self.shape - 3.0))
        }
    }
}
impl Mode<Option<f64>> for InverseGamma {
    /// Returns the mode of the inverse gamma distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// β / (α + 1)
    /// ```
    ///
    /// where `α` is the shape and `β` is the rate
    fn mode(&self) -> Option<f64> {
        // Defined for every valid (α, β), hence always `Some`
        Some(self.rate / (self.shape + 1.0))
    }
}
impl Continuous<f64, f64> for InverseGamma {
    /// Calculates the probability density function for the
    /// inverse gamma distribution at `x`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// (β^α / Γ(α)) * x^(-α - 1) * e^(-β / x)
    /// ```
    ///
    /// where `α` is the shape, `β` is the rate, and `Γ` is the gamma function
    fn pdf(&self, x: f64) -> f64 {
        if x <= 0.0 || x.is_infinite() {
            // Zero density outside the open support (0, ∞)
            0.0
        } else if ulps_eq!(self.shape, 1.0) {
            // α ≈ 1 shortcut: β^1 = β, x^(-1-1) = x^-2, Γ(1) = 1,
            // avoiding the `powf`/`gamma` calls of the general branch
            self.rate / (x * x) * (-self.rate / x).exp()
        } else {
            self.rate.powf(self.shape) * x.powf(-self.shape - 1.0) * (-self.rate / x).exp()
                / gamma::gamma(self.shape)
        }
    }
    /// Calculates the log probability density function for the
    /// inverse gamma distribution at `x`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// ln((β^α / Γ(α)) * x^(-α - 1) * e^(-β / x))
    /// ```
    ///
    /// where `α` is the shape, `β` is the rate, and `Γ` is the gamma function
    fn ln_pdf(&self, x: f64) -> f64 {
        // Computed as ln(pdf(x)). NOTE(review): this underflows to -inf for
        // extreme x where a direct log-domain evaluation would not —
        // confirm the precision is acceptable for callers.
        self.pdf(x).ln()
    }
}
#[rustfmt::skip]
#[cfg(test)]
mod tests {
    use crate::statistics::*;
    use crate::distribution::{ContinuousCDF, Continuous, InverseGamma};
    use crate::distribution::internal::*;
    use crate::consts::ACC;
    // Construct a distribution, asserting that the parameters are accepted.
    fn try_create(shape: f64, rate: f64) -> InverseGamma {
        let n = InverseGamma::new(shape, rate);
        assert!(n.is_ok());
        n.unwrap()
    }
    // Valid-parameter round-trip: the constructor stores shape/rate unchanged.
    fn create_case(shape: f64, rate: f64) {
        let n = try_create(shape, rate);
        assert_eq!(shape, n.shape());
        assert_eq!(rate, n.rate());
    }
    // Invalid parameters must be rejected by `new`.
    fn bad_create_case(shape: f64, rate: f64) {
        let n = InverseGamma::new(shape, rate);
        assert!(n.is_err());
    }
    // Build a distribution and apply `eval` to it.
    fn get_value<F>(shape: f64, rate: f64, eval: F) -> f64
        where F: Fn(InverseGamma) -> f64
    {
        let n = try_create(shape, rate);
        eval(n)
    }
    // Assert `eval` produces exactly `expected` (bitwise f64 equality).
    fn test_case<F>(shape: f64, rate: f64, expected: f64, eval: F)
        where F: Fn(InverseGamma) -> f64
    {
        let x = get_value(shape, rate, eval);
        assert_eq!(expected, x);
    }
    // Assert `eval` is within `acc` of `expected`.
    fn test_almost<F>(shape: f64, rate: f64, expected: f64, acc: f64, eval: F)
        where F: Fn(InverseGamma) -> f64
    {
        let x = get_value(shape, rate, eval);
        assert_almost_eq!(expected, x, acc);
    }
    #[test]
    fn test_create() {
        create_case(0.1, 0.1);
        create_case(1.0, 1.0);
    }
    #[test]
    fn test_bad_create() {
        bad_create_case(0.0, 1.0);
        bad_create_case(-1.0, 1.0);
        bad_create_case(-100.0, 1.0);
        bad_create_case(f64::NEG_INFINITY, 1.0);
        bad_create_case(f64::NAN, 1.0);
        bad_create_case(1.0, 0.0);
        bad_create_case(1.0, -1.0);
        bad_create_case(1.0, -100.0);
        bad_create_case(1.0, f64::NEG_INFINITY);
        bad_create_case(1.0, f64::NAN);
        bad_create_case(f64::INFINITY, 1.0);
        bad_create_case(1.0, f64::INFINITY);
        bad_create_case(f64::INFINITY, f64::INFINITY);
    }
    #[test]
    fn test_mean() {
        let mean = |x: InverseGamma| x.mean().unwrap();
        test_almost(1.1, 0.1, 1.0, 1e-14, mean);
        test_almost(1.1, 1.0, 10.0, 1e-14, mean);
    }
    #[test]
    #[should_panic]
    fn test_mean_with_shape_lte_1() {
        let mean = |x: InverseGamma| x.mean().unwrap();
        get_value(0.1, 0.1, mean);
    }
    #[test]
    fn test_variance() {
        let variance = |x: InverseGamma| x.variance().unwrap();
        test_almost(2.1, 0.1, 0.08264462809917355371901, 1e-15, variance);
        test_almost(2.1, 1.0, 8.264462809917355371901, 1e-13, variance);
    }
    #[test]
    #[should_panic]
    fn test_variance_with_shape_lte_2() {
        let variance = |x: InverseGamma| x.variance().unwrap();
        get_value(0.1, 0.1, variance);
    }
    #[test]
    fn test_entropy() {
        let entropy = |x: InverseGamma| x.entropy().unwrap();
        test_almost(0.1, 0.1, 11.51625799319234475054, 1e-14, entropy);
        test_almost(1.0, 1.0, 2.154431329803065721213, 1e-14, entropy);
    }
    #[test]
    fn test_skewness() {
        let skewness = |x: InverseGamma| x.skewness().unwrap();
        test_almost(3.1, 0.1, 41.95235392680606187966, 1e-13, skewness);
        test_almost(3.1, 1.0, 41.95235392680606187966, 1e-13, skewness);
        test_case(5.0, 0.1, 3.464101615137754587055, skewness);
    }
    #[test]
    #[should_panic]
    fn test_skewness_with_shape_lte_3() {
        let skewness = |x: InverseGamma| x.skewness().unwrap();
        get_value(0.1, 0.1, skewness);
    }
    #[test]
    fn test_mode() {
        let mode = |x: InverseGamma| x.mode().unwrap();
        test_case(0.1, 0.1, 0.09090909090909090909091, mode);
        test_case(1.0, 1.0, 0.5, mode);
    }
    #[test]
    fn test_min_max() {
        let min = |x: InverseGamma| x.min();
        let max = |x: InverseGamma| x.max();
        test_case(1.0, 1.0, 0.0, min);
        test_case(1.0, 1.0, f64::INFINITY, max);
    }
    #[test]
    fn test_pdf() {
        let pdf = |arg: f64| move |x: InverseGamma| x.pdf(arg);
        test_almost(0.1, 0.1, 0.0628591853882328004197, 1e-15, pdf(1.2));
        test_almost(0.1, 1.0, 0.0297426109178248997426, 1e-15, pdf(2.0));
        test_case(1.0, 0.1, 0.04157808822362745501024, pdf(1.5));
        test_case(1.0, 1.0, 0.3018043114632487660842, pdf(1.2));
    }
    #[test]
    fn test_ln_pdf() {
        let ln_pdf = |arg: f64| move |x: InverseGamma| x.ln_pdf(arg);
        test_almost(0.1, 0.1, 0.0628591853882328004197f64.ln(), 1e-15, ln_pdf(1.2));
        test_almost(0.1, 1.0, 0.0297426109178248997426f64.ln(), 1e-15, ln_pdf(2.0));
        test_case(1.0, 0.1, 0.04157808822362745501024f64.ln(), ln_pdf(1.5));
        test_case(1.0, 1.0, 0.3018043114632487660842f64.ln(), ln_pdf(1.2));
    }
    #[test]
    fn test_cdf() {
        let cdf = |arg: f64| move |x: InverseGamma| x.cdf(arg);
        test_almost(0.1, 0.1, 0.1862151961946054271994, 1e-14, cdf(1.2));
        test_almost(0.1, 1.0, 0.05859755410986647796141, 1e-14, cdf(2.0));
        test_case(1.0, 0.1, 0.9355069850316177377304, cdf(1.5));
        test_almost(1.0, 1.0, 0.4345982085070782231613, 1e-14, cdf(1.2));
    }
    #[test]
    fn test_continuous() {
        test::check_continuous_distribution(&try_create(1.0, 0.5), 0.0, 100.0);
        test::check_continuous_distribution(&try_create(9.0, 2.0), 0.0, 100.0);
    }
}
|
use apllodb_shared_components::{ApllodbError, ApllodbResult};
use apllodb_storage_engine_interface::{
ColumnDataType, ColumnDefinition, ColumnName, TableConstraintKind,
};
use serde::{Deserialize, Serialize};
/// A constraint that the set of records as a whole (not each individual
/// record) must satisfy.
#[derive(Clone, Eq, PartialEq, Hash, Debug, Serialize, Deserialize)]
pub(super) enum TableWideConstraintKind {
    /// PRIMARY KEY ({column_name}, ...)
    PrimaryKey {
        /// Resolved data types of the PK columns, in declaration order.
        column_data_types: Vec<ColumnDataType>,
    },
    /// UNIQUE ({column_name}, ...)
    /// It does not hold data type because data type info are held by Version.
    Unique { column_names: Vec<ColumnName> },
}
impl TableWideConstraintKind {
    /// Builds a table-wide constraint from a parsed `TableConstraintKind`,
    /// resolving PRIMARY KEY column names against the table's column
    /// definitions.
    ///
    /// # Failures
    ///
    /// Returns a DDL error when a PRIMARY KEY column name has no matching
    /// entry in `column_definitions`.
    pub fn new(
        column_definitions: &[ColumnDefinition],
        tck: &TableConstraintKind,
    ) -> ApllodbResult<Self> {
        match tck {
            TableConstraintKind::PrimaryKey { column_names } => {
                // Resolve every PK column name to its declared data type.
                let mut column_data_types = Vec::with_capacity(column_names.len());
                for pk_cn in column_names {
                    let pk_cd = column_definitions
                        .iter()
                        .find(|cd| cd.column_data_type().column_name() == pk_cn)
                        .ok_or_else(|| {
                            ApllodbError::ddl_error(format!(
                                "column `{:?}` does not exist in ColumnDefinition while it is declared as PRIMARY KEY",
                                pk_cn
                            ))
                        })?;
                    column_data_types.push(pk_cd.column_data_type().clone());
                }
                Ok(Self::PrimaryKey { column_data_types })
            }
            TableConstraintKind::Unique { column_names } => Ok(Self::Unique {
                column_names: column_names.clone(),
            }),
        }
    }
}
|
use private::{SealedRequestLineParserState, SealedRequestParserState};
use std::collections::HashMap;
use thiserror::Error;
/// ASCII space — the token separator in the request line (RFC 2616 `SP`).
#[doc(hidden)]
const SPACE: u8 = b' ';
/// ASCII colon — separates header field names from values.
#[doc(hidden)]
const COLON: u8 = b':';
/// ASCII carriage return — first byte of the CRLF line terminator.
#[doc(hidden)]
const CR: u8 = b'\r';
/// ASCII line feed — second byte of the CRLF line terminator.
#[doc(hidden)]
const LF: u8 = b'\n';
/// ASCII horizontal tab — treated as whitespace when parsing headers.
#[doc(hidden)]
const TAB: u8 = b'\t';
/// Result alias for parser transitions that can fail with a `ParsingError`.
type Result<T> = std::result::Result<T, ParsingError>;
/// Short form for `HttpRequestParser<'a, RequestLine<S>>`.
type RequestLineParser<'a, S> = HttpRequestParser<'a, RequestLine<S>>;
// Sealed-trait pattern: the public state traits extend these private
// traits, so no type outside this module tree can implement them.
#[doc(hidden)]
mod private {
    pub trait SealedRequestParserState {}
    impl<S> SealedRequestParserState for super::RequestLine<S> where S: super::RequestLineParserState {}
    impl SealedRequestParserState for super::Header {}
    impl SealedRequestParserState for super::Body {}
    pub trait SealedRequestLineParserState {}
    impl SealedRequestLineParserState for super::Method {}
    impl SealedRequestLineParserState for super::Uri {}
    impl SealedRequestLineParserState for super::Version {}
}
/// The HTTP request structure.
///
/// This structure tries to follow RFC 2616 Section 5 <https://tools.ietf.org/html/rfc2616#section-5>.
/// Bellow you can see the expected request format.
/// ```text
/// Request = Request-Line
/// *(( general-header
/// | request-header
/// | entity-header ) CRLF)
/// CRLF
/// [ message-body ]
/// ```
/// *The implementation may not be complete as it is a work in progress.*
#[derive(Debug)]
pub struct Request<'a> {
    /// The method of the request, it can be one of: `OPTIONS`, `GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `TRACE`, `CONNECT`
    method: &'a str,
    /// The Request-URI as it appeared on the request line.
    request_uri: &'a str,
    /// The HTTP version token (e.g. `HTTP/1.1`).
    http_version: &'a str,
    /// Header fields, as name → value slices into the original packet.
    header: HashMap<&'a str, &'a str>,
    /// The message body: everything after the blank line ending the header.
    body: &'a str,
}
// #[derive(Debug)]
// pub enum RequestMethod {
// Options,
// Get,
// Head,
// Post,
// Put,
// Delete,
// Trace,
// Connect,
// }
impl<'a> Request<'a> {
    /// Create a new `Request` with every part empty; the parser states
    /// fill the fields in as they consume the packet.
    fn new() -> Self {
        Self {
            method: "",
            request_uri: "",
            http_version: "",
            header: HashMap::new(),
            body: "",
        }
    }
}
/// Provides the means of state transition for the parser.
/// It exposes a single function, `parse`,
/// which parses the stream until the completion of the current state.
pub trait Parse {
    /// `NextState` types are of kind `Parser<'a, State>`
    /// Sadly we can't do `type NextParser = Parser<'a, Self::NextState>`
    /// and allow the final user to simply define `type NextState`
    /// until <https://github.com/rust-lang/rust/issues/29661> is resolved.
    type NextState;
    /// Parse the existing content consuming it in the process,
    /// in the end, return the next parser state.
    fn parse(self) -> Self::NextState;
}
/// A trait for the request parser states.
///
/// *This trait is sealed.*
pub trait RequestParserState: SealedRequestParserState {}
/// The `Parser` structure.
#[derive(Debug)]
pub struct HttpRequestParser<'a, S>
where
    S: RequestParserState,
{
    /// The not-yet-consumed remainder of the packet being parsed.
    packet: &'a str,
    /// The request built so far; filled in as states complete.
    request: Request<'a>,
    /// Current type-state marker of the parser.
    state: S,
}
impl<'a, T> HttpRequestParser<'a, T>
where
    T: RequestParserState,
{
    /// Skip existing spaces (other whitespace is not considered).
    fn skip_spaces(&mut self) {
        let skipped = self
            .packet
            .as_bytes()
            .iter()
            .take_while(|&&b| b == SPACE)
            .count();
        self.packet = &self.packet[skipped..];
    }
    /// Consume a leading `\r\n` pair when present; leave the packet
    /// untouched otherwise. Errors when fewer than two bytes remain.
    fn skip_crlf(&mut self) -> Result<()> {
        let bytes = self.packet.as_bytes();
        match bytes {
            [a, b, ..] if is_crlf(&[*a, *b]) => {
                self.packet = &self.packet[2..];
                Ok(())
            }
            [_, _, ..] => Ok(()),
            _ => Err(ParsingError::UnexpectedEndOfPacket),
        }
    }
    /// Consume and return everything before the first occurrence of `chr`,
    /// or the whole remaining packet when `chr` is absent.
    fn parse_until_char(&mut self, chr: u8) -> &'a str {
        let cut = self
            .packet
            .as_bytes()
            .iter()
            .position(|&b| b == chr)
            .unwrap_or(self.packet.len());
        let consumed = &self.packet[..cut];
        self.packet = &self.packet[cut..];
        consumed
    }
}
/// A trait for request line parser states.
///
/// *This trait is sealed.*
pub trait RequestLineParserState: SealedRequestLineParserState {}
/// The `RequestLine`, the parser starting state.
///
/// It is defined in RFC 2616 as follows:
/// ```text
/// Request-Line = Method SP Request-URI SP HTTP-Version CRLF
/// ```
/// Where `SP` is defined as ASCII character 32 and
/// `CRLF` the combination of ASCII characters 13 and 10 (`\r\n`).
#[derive(Debug)]
pub struct RequestLine<S>
where
S: RequestLineParserState,
{
state: S,
}
impl<S> RequestParserState for RequestLine<S> where S: RequestLineParserState {}
impl<'a, S> HttpRequestParser<'a, S>
where
    S: RequestLineParserState,
{
    /// Create a new `HttpRequestParser` which starts in `RequestLine<Method>`.
    ///
    /// NOTE(review): this constructor is generic over `S` yet always returns
    /// the `Method` state, so `S` plays no role in the result — consider
    /// hosting `start` on a non-generic impl.
    pub fn start(packet: &'a str) -> HttpRequestParser<'a, RequestLine<Method>> {
        HttpRequestParser {
            packet,
            request: Request::new(),
            state: RequestLine { state: Method },
        }
    }
}
/// The method of the request line.
#[derive(Debug)]
pub struct Method;
impl RequestLineParserState for Method {}
impl<'a> Parse for RequestLineParser<'a, Method> {
    type NextState = Result<RequestLineParser<'a, Uri>>;
    /// Parse the request method (the token before the first space) and
    /// advance to the `Uri` state.
    ///
    /// # Errors
    ///
    /// * `UnexpectedEndOfPacket` when the packet contains no space at all
    ///   (the original unchecked loop panicked with an out-of-bounds index).
    /// * `InvalidMethod` when the token is not an RFC 2616 method.
    fn parse(mut self) -> Self::NextState {
        let bytes = self.packet.as_bytes();
        // Bounds-checked scan for the terminating space.
        let end = bytes
            .iter()
            .position(|&b| b == SPACE)
            .ok_or(ParsingError::UnexpectedEndOfPacket)?;
        let method = &self.packet[..end];
        if !is_valid_method(method) {
            return Err(ParsingError::InvalidMethod(method.to_string()));
        }
        self.request.method = method;
        self.packet = &self.packet[end + 1..];
        self.skip_spaces();
        Ok(HttpRequestParser {
            packet: self.packet,
            request: self.request,
            state: RequestLine { state: Uri },
        })
    }
}
/// The URI of the request line.
#[derive(Debug)]
pub struct Uri;
impl RequestLineParserState for Uri {}
impl<'a> Parse for RequestLineParser<'a, Uri> {
    type NextState = Result<RequestLineParser<'a, Version>>;
    /// Parse the Request-URI (everything up to the next space) and advance
    /// to the `Version` state. This transition itself never fails; the
    /// `Result` keeps the signature uniform with its siblings.
    fn parse(mut self) -> Self::NextState {
        self.request.request_uri = self.parse_until_char(SPACE);
        self.skip_spaces();
        Ok(HttpRequestParser {
            packet: self.packet,
            request: self.request,
            state: RequestLine { state: Version },
        })
    }
}
/// The HTTP version part of the request line.
#[derive(Debug)]
pub struct Version;
impl RequestLineParserState for Version {}
impl<'a> Parse for RequestLineParser<'a, Version> {
    type NextState = Result<HttpRequestParser<'a, Header>>;
    /// Parse the HTTP version token, which must be terminated by CRLF,
    /// and advance to the `Header` state.
    ///
    /// # Errors
    ///
    /// * `UnexpectedEndOfPacket` when no CRLF terminator exists (the
    ///   original unchecked loop panicked reading past the packet end).
    /// * `InvalidVersion` when the token is not a recognised version.
    fn parse(mut self) -> Self::NextState {
        let bytes = self.packet.as_bytes();
        let mut curr = 0;
        // Bounds-checked scan for the CRLF terminator.
        loop {
            if curr + 1 >= bytes.len() {
                return Err(ParsingError::UnexpectedEndOfPacket);
            }
            if is_crlf(&[bytes[curr], bytes[curr + 1]]) {
                break;
            }
            curr += 1;
        }
        let version = &self.packet[..curr];
        if !is_valid_version(version) {
            return Err(ParsingError::InvalidVersion(version.to_string()));
        }
        self.request.http_version = version;
        self.packet = &self.packet[curr + 2..];
        Ok(HttpRequestParser {
            packet: self.packet,
            request: self.request,
            state: Header,
        })
    }
}
/// The `Header` state.
///
/// *This state should be reached *after* the `RequestLine` state.*
#[derive(Debug)]
pub struct Header;
impl RequestParserState for Header {}
impl<'a> HttpRequestParser<'a, Header> {
    /// Parse a single `name: value` header line and record it in the request.
    ///
    /// The name ends at the first whitespace or colon; the separator matched
    /// between name and value is any run of whitespace/colons (`[\s:]*`);
    /// the value runs to the terminating CRLF (or to the packet end when a
    /// malformed line lacks one).
    fn parse_line(&mut self) {
        // Parse the line key.
        let bytes = self.packet.as_bytes();
        let mut curr = 0;
        while curr < bytes.len() && !is_whitespace(bytes[curr]) && bytes[curr] != COLON {
            curr += 1;
        }
        let key = &self.packet[..curr];
        self.packet = &self.packet[curr..];
        // Skip the separator which will match the regex `\s*:\s*`.
        let bytes = self.packet.as_bytes();
        let mut curr = 0;
        while curr < bytes.len() && (is_whitespace(bytes[curr]) || bytes[curr] == COLON) {
            curr += 1;
        }
        self.packet = &self.packet[curr..];
        // Parse the line value. Bug fix: `curr` is reset to 0 here — the
        // original reused the separator offset, so the CRLF search started
        // partway into the value (and could index out of bounds).
        let bytes = self.packet.as_bytes();
        let mut curr = 0;
        let mut found_crlf = false;
        while curr + 1 < bytes.len() {
            if is_crlf(&[bytes[curr], bytes[curr + 1]]) {
                found_crlf = true;
                break;
            }
            curr += 1;
        }
        let (value, rest) = if found_crlf {
            (&self.packet[..curr], &self.packet[curr + 2..])
        } else {
            // Malformed line without CRLF: take everything, leave nothing.
            (self.packet, &self.packet[self.packet.len()..])
        };
        self.packet = rest;
        self.request.header.insert(key, value);
    }
}
impl<'a> Parse for HttpRequestParser<'a, Header> {
    type NextState = Result<HttpRequestParser<'a, Body>>;
    /// Parse header lines until the empty line (bare CRLF) separating the
    /// header section from the body, then consume that CRLF and advance to
    /// the `Body` state.
    ///
    /// # Errors
    ///
    /// `UnexpectedEndOfPacket` if fewer than two bytes remain where the
    /// final CRLF is expected.
    fn parse(mut self) -> Self::NextState {
        let mut bytes = self.packet.as_bytes();
        while bytes.len() >= 2 && !is_crlf(&[bytes[0], bytes[1]]) {
            self.parse_line();
            // Re-borrow: parse_line advanced `self.packet`.
            bytes = self.packet.as_bytes();
        }
        self.skip_crlf()?;
        Ok(HttpRequestParser {
            packet: self.packet,
            request: self.request,
            state: Body,
        })
    }
}
/// The `Body` state.
///
/// *This state should be reached *after* the `Header` state.*
#[derive(Debug)]
pub struct Body;
impl RequestParserState for Body {}
impl<'a> Parse for HttpRequestParser<'a, Body> {
    /// Since the body is the last element of the request,
    /// the next possible state is final and thus returns the parsed request.
    type NextState = Request<'a>;
    /// Parse the body (which is composed of all remaining bytes)
    /// and return the next state.
    fn parse(mut self) -> Self::NextState {
        // The unconsumed remainder of the packet becomes the body verbatim.
        self.request.body = self.packet;
        self.request
    }
}
/// Checks if the given string slice is a valid HTTP method according to
/// IETF RFC 2616 [5.1.1](https://tools.ietf.org/html/rfc2616#section-5.1.1).
///
/// Supported valid methods are:
/// - `OPTIONS`
/// - `GET`
/// - `HEAD`
/// - `POST`
/// - `PUT`
/// - `DELETE`
/// - `TRACE`
/// - `CONNECT`
fn is_valid_method(method: &str) -> bool {
    // `matches!` replaces the `match … => true, _ => false` boilerplate.
    matches!(
        method,
        "OPTIONS" | "GET" | "HEAD" | "POST" | "PUT" | "DELETE" | "TRACE" | "CONNECT"
    )
}
/// Checks if the HTTP version is a valid version.
///
/// Versions considered valid are:
/// `HTTP/1`, `HTTP/1.0`, `HTTP/1.1`, `HTTP/2`
fn is_valid_version(version: &str) -> bool {
    // `matches!` replaces the `match … => true, _ => false` boilerplate.
    matches!(version, "HTTP/1" | "HTTP/1.0" | "HTTP/1.1" | "HTTP/2")
}
/// Error types for the parser.
#[derive(Debug, Error)]
pub enum ParsingError {
    /// An invalid request method was detected (e.g. `ADD`).
    #[error("invalid HTTP request method: {0}")]
    InvalidMethod(String),
    /// An invalid version was detected (e.g. `HTTP/0`).
    #[error("invalid HTTP version: {0}")]
    InvalidVersion(String),
    /// Unexpected reach to the end of the packet.
    #[error("unexpected end of packet")]
    UnexpectedEndOfPacket,
}
/// Check if a pair of bytes are CRLF (`\r` followed by `\n`).
#[inline(always)]
fn is_crlf(bytes: &[u8; 2]) -> bool {
    // Expression form instead of the needless trailing `return`.
    bytes[0] == CR && bytes[1] == LF
}
/// Check if a byte is whitespace.
#[inline(always)]
fn is_whitespace(byte: u8) -> bool {
return byte == SPACE || byte == LF || byte == CR || byte == TAB;
}
|
// vim: tw=80
#![cfg_attr(feature = "nightly-docs", feature(doc_cfg))]
//! Examples of mock objects and their generated methods.
//!
//! This crate only exists to document the autogenerated methods of the
//! [`Mockall`](https://docs.rs/mockall/latest/mockall)
//! crate. You should never depend on this crate.
//
#[cfg(all(feature = "nightly-docs", rustdoc))]
use mockall::*;
/// A basic trait with several kinds of method.
///
/// It is mocked by the [`MockFoo`](struct.MockFoo.html) struct.
// `#[automock]` generates the `MockFoo` struct this crate documents.
#[cfg(all(feature = "nightly-docs", rustdoc))]
#[automock]
pub trait Foo {
    /// A method with a `'static` return type
    fn foo(&self, x: i32, y: i16) -> i32;
    /// A method returning a reference
    fn bar(&self, x: i32) -> &i32;
    /// A method returning a mutable reference
    fn baz(&mut self, x: i32) -> &mut i32;
    /// A method returning a `'static` reference
    fn bean(&self) -> &'static i32;
    /// A static method
    fn bang(x: i32) -> i32;
}
// The generated mock functions are placed in the `mock_ffi` module, as
// named by the `#[automock(mod mock_ffi;)]` attribute.
#[cfg(all(feature = "nightly-docs", rustdoc))]
#[automock(mod mock_ffi;)]
extern "C" {
    /// A foreign "C" function
    pub fn ffi_func();
}
|
#![feature(rand, old_io, core, collections, hash)]
#![allow(unused_features)]
#![feature(test)]
extern crate url;
extern crate openssl;
extern crate "rustc-serialize" as rustc_serialize;
extern crate "sha1-hasher" as sha1;
extern crate rand;
#[macro_use] extern crate bitflags;
#[cfg(test)]
extern crate test;
pub use socket::WebSocket;
pub use message::{WSMessage, WSStatusCode};
pub mod nonce;
pub mod message;
pub mod stream;
pub mod socket;
|
/*!
```rudra-poc
[target]
crate = "concread"
version = "0.2.5"
indexed_version = "0.1.18"
[report]
issue_url = "https://github.com/kanidm/concread/issues/48"
issue_date = 2020-11-13
rustsec_url = "https://github.com/RustSec/advisory-db/pull/532"
rustsec_id = "RUSTSEC-2020-0092"
[[bugs]]
analyzer = "SendSyncVariance"
bug_class = "SendSyncVariance"
bug_count = 2
rudra_report_locations = [
"src/cache/arc/mod.rs:153:1: 153:88",
"src/cache/arc/mod.rs:154:1: 154:88",
]
```
!*/
#![forbid(unsafe_code)]
use concread::arcache::ARCache;
use std::rc::Rc;
use std::sync::Arc;
fn main() {
    // `Rc` is neither `Send` nor `Sync`, so it must never cross a thread
    // boundary — this PoC demonstrates that ARCache lets it do exactly that
    // (the SendSyncVariance issue referenced in the metadata above).
    let non_sync_item = Rc::new(0); // neither `Send` nor `Sync`
    assert_eq!(Rc::strong_count(&non_sync_item), 1);
    let cache = ARCache::<i32, Rc<u64>>::new_size(5, 5);
    let mut writer = cache.write();
    writer.insert(0, non_sync_item);
    writer.commit();
    // Sharing the cache across threads compiles only because ARCache's
    // Send/Sync bounds do not constrain the stored value type.
    let arc_parent = Arc::new(cache);
    let mut handles = vec![];
    for _ in 0..5 {
        let arc_child = arc_parent.clone();
        let child_handle = std::thread::spawn(move || {
            let reader = arc_child.read(); // new Reader of ARCache
            let smuggled_rc = reader.get(&0).unwrap();
            for _ in 0..1000 {
                let _dummy_clone = Rc::clone(&smuggled_rc); // Increment `strong_count` of `Rc`
                // When `_dummy_clone` is dropped, `strong_count` is decremented.
            }
        });
        handles.push(child_handle);
    }
    for handle in handles {
        handle.join().expect("failed to join child thread");
    }
    // The non-atomic refcount is mutated by 5 threads concurrently, so the
    // final count can differ from 1 — observing that is the bug.
    assert_eq!(Rc::strong_count(arc_parent.read().get(&0).unwrap()), 1);
}
|
use diesel::result::{DatabaseErrorKind, Error as DieselError};
use failure::Fail;
use hyper::StatusCode;
use serde_json;
use validator::{ValidationError, ValidationErrors};
use stq_http::errors::{Codeable, PayloadCarrier};
/// Service-level error type; `Codeable`/`PayloadCarrier` impls below map
/// these variants onto HTTP responses.
#[derive(Debug, Fail)]
pub enum Error {
    /// The requested entity does not exist.
    #[fail(display = "Not found")]
    NotFound,
    /// The payload could not be parsed.
    #[fail(display = "Parse error")]
    Parse,
    /// Input failed validation; carries the per-field errors.
    #[fail(display = "Validation error")]
    Validate(ValidationErrors),
    // Typo fix in the user-facing message: "fullfil" -> "fulfill".
    #[fail(display = "Server is refusing to fulfill the request")]
    Forbidden,
    /// Failed to obtain a connection from the r2d2 pool.
    #[fail(display = "R2D2 connection error")]
    Connection,
    /// An outgoing HTTP request to another service failed.
    #[fail(display = "Http client error")]
    HttpClient,
    /// Catch-all for unexpected internal failures.
    #[fail(display = "service error - internal")]
    Internal,
}
impl Codeable for Error {
    /// Maps each service error variant to the HTTP status code that is
    /// returned to the client.
    fn code(&self) -> StatusCode {
        match *self {
            Error::NotFound => StatusCode::NotFound,
            Error::Parse => StatusCode::UnprocessableEntity,
            Error::Validate(_) => StatusCode::BadRequest,
            Error::HttpClient | Error::Connection | Error::Internal => StatusCode::InternalServerError,
            Error::Forbidden => StatusCode::Forbidden,
        }
    }
}
impl PayloadCarrier for Error {
fn payload(&self) -> Option<serde_json::Value> {
match *self {
Error::Validate(ref e) => serde_json::to_value(e.clone()).ok(),
_ => None,
}
}
}
impl<'a> From<DieselError> for Error {
fn from(e: DieselError) -> Self {
match e {
DieselError::DatabaseError(DatabaseErrorKind::UniqueViolation, ref info) => {
let mut errors = ValidationErrors::new();
let mut error = ValidationError::new("not_unique");
let message: &str = info.message();
error.add_param("message".into(), &message);
errors.add("repo", error);
Error::Validate(errors)
}
DieselError::NotFound => Error::NotFound,
_ => Error::Internal,
}
}
}
|
use std::{collections::hash_map::Entry, fmt};
use ahash::AHashMap as HashMap;
#[cfg(feature = "parallel")]
use crate::dispatch::dispatcher::ThreadPoolWrapper;
use crate::{
dispatch::{
batch::BatchControllerSystem,
dispatcher::{SystemId, ThreadLocal},
stage::StagesBuilder,
BatchAccessor, BatchController, Dispatcher,
},
system::{RunNow, System, SystemData},
};
/// Builder for the [`Dispatcher`].
///
/// [`Dispatcher`]: struct.Dispatcher.html
///
/// ## Barriers
///
/// Barriers are a way of sequentializing parts of
/// the system execution. See `add_barrier()`/`with_barrier()`.
///
/// ## Examples
///
/// This is how you create a dispatcher with
/// a shared thread pool:
///
/// ```rust
/// # #![allow(unused)]
/// #
/// # extern crate shred;
/// # #[macro_use]
/// # extern crate shred_derive;
/// # use shred::{Dispatcher, DispatcherBuilder, Read, ResourceId, World, System, SystemData};
/// # #[derive(Debug, Default)] struct Res;
/// # #[derive(SystemData)] #[allow(unused)] struct Data<'a> { a: Read<'a, Res> }
/// # struct Dummy;
/// # impl<'a> System<'a> for Dummy {
/// # type SystemData = Data<'a>;
/// #
/// # fn run(&mut self, _: Data<'a>) {}
/// # }
/// #
/// # fn main() {
/// # let system_a = Dummy;
/// # let system_b = Dummy;
/// # let system_c = Dummy;
/// # let system_d = Dummy;
/// # let system_e = Dummy;
/// let dispatcher: Dispatcher = DispatcherBuilder::new()
/// .with(system_a, "a", &[])
/// .with(system_b, "b", &["a"]) // b depends on a
/// .with(system_c, "c", &["a"]) // c also depends on a
/// .with(system_d, "d", &[])
/// .with(system_e, "e", &["c", "d"]) // e executes after c and d are finished
/// .build();
/// # }
/// ```
///
/// Systems can be conditionally added by using the `add_` functions:
///
/// ```rust
/// # #![allow(unused)]
/// #
/// # extern crate shred;
/// # #[macro_use]
/// # extern crate shred_derive;
/// # use shred::{Dispatcher, DispatcherBuilder, Read, ResourceId, World, System, SystemData};
/// # #[derive(Debug, Default)] struct Res;
/// # #[derive(SystemData)] #[allow(unused)] struct Data<'a> { a: Read<'a, Res> }
/// # struct Dummy;
/// # impl<'a> System<'a> for Dummy {
/// # type SystemData = Data<'a>;
/// #
/// # fn run(&mut self, _: Data<'a>) {}
/// # }
/// #
/// # fn main() {
/// # let b_enabled = true;
/// # let system_a = Dummy;
/// # let system_b = Dummy;
/// let mut builder = DispatcherBuilder::new()
/// .with(system_a, "a", &[]);
///
/// if b_enabled {
/// builder.add(system_b, "b", &[]);
/// }
///
/// let dispatcher = builder.build();
/// # }
/// ```
#[derive(Default)]
pub struct DispatcherBuilder<'a, 'b> {
    // Counter consumed by `next_id()` when registering systems — presumably
    // incremented per system; confirm in the id-allocation code.
    current_id: usize,
    // Registered system name -> id; used to resolve dependency lists.
    map: HashMap<String, SystemId>,
    pub(crate) stages_builder: StagesBuilder<'a>,
    // Systems registered to run thread-locally (see `ThreadLocal`).
    thread_local: ThreadLocal<'b>,
    // Shared handle to the thread pool wrapper used for parallel dispatch.
    #[cfg(feature = "parallel")]
    thread_pool: ::std::sync::Arc<::std::sync::RwLock<ThreadPoolWrapper>>,
}
impl<'a, 'b> DispatcherBuilder<'a, 'b> {
    /// Creates a new `DispatcherBuilder` by using the `Default` implementation.
    /// Equivalent to calling `DispatcherBuilder::default()`.
    ///
    /// The default behaviour is to create a thread pool on `finish`.
    /// If you already have a rayon `ThreadPool`, it's highly recommended to
    /// configure this builder to use it with `with_pool` instead.
    pub fn new() -> Self {
        Default::default()
    }
    /// Returns whether or not any system has been added to the builder.
    ///
    /// NOTE(review): systems registered with the anonymous name `""` are not
    /// recorded in the name map (see `add`), so they do not affect this
    /// check — confirm whether that is intended.
    pub fn is_empty(&self) -> bool {
        self.map.is_empty()
    }
    /// Returns the number of systems added to the builder.
    ///
    /// NOTE(review): only *named* systems are counted — anonymous `""`
    /// registrations never enter the name map (see `add`).
    pub fn num_systems(&self) -> usize {
        self.map.len()
    }
    /// Returns whether or not a specific system has been added to the builder.
    /// This is useful as [`add()`](struct.DispatcherBuilder.html#method.add)
    /// will panic if a dependency does not exist, so you can use this
    /// function to check if dependencies are satisfied.
    pub fn has_system(&self, system: &str) -> bool {
        self.map.contains_key(system)
    }
    /// Adds a new system with a given name and a list of dependencies.
    /// Please note that the dependency should be added before
    /// you add the depending system.
    ///
    /// If you want to register systems which can not be specified as
    /// dependencies, you can use `""` as their name, which will not panic
    /// (using another name twice will).
    ///
    /// Same as [`add()`](struct.DispatcherBuilder.html#method.add), but
    /// consumes and returns `self` to enable method chaining.
    ///
    /// # Panics
    ///
    /// * if the specified dependency does not exist
    /// * if a system with the same name was already registered.
    pub fn with<T>(mut self, system: T, name: &str, dep: &[&str]) -> Self
    where
        T: for<'c> System<'c> + Send + 'a,
    {
        self.add(system, name, dep);
        self
    }
/// Registers a system under `name`, depending on the systems listed in
/// `dep`.
///
/// Dependencies must already be registered before the systems that
/// depend on them. Systems that should not be usable as dependencies
/// may be registered under the empty name `""` repeatedly; any other
/// name may be used only once.
///
/// # Panics
///
/// * if the specified dependency does not exist
/// * if a system with the same name was already registered.
pub fn add<T>(&mut self, system: T, name: &str, dep: &[&str])
where
    T: for<'c> System<'c> + Send + 'a,
{
    let id = self.next_id();
    // Resolve every dependency name to the id it was registered under.
    let dependencies = dep
        .iter()
        .map(|dependency| match self.map.get(*dependency) {
            Some(&dependency_id) => dependency_id,
            None => panic!("No such system registered (\"{}\")", *dependency),
        })
        .collect();
    // Only non-empty names are tracked; "" is the anonymous name.
    if !name.is_empty() {
        match self.map.entry(name.to_owned()) {
            Entry::Vacant(slot) => {
                slot.insert(id);
            }
            _ => panic!(
                "Cannot insert multiple systems with the same name (\"{}\")",
                name
            ),
        }
    }
    self.stages_builder.insert(dependencies, id, system);
}
/// Returns `true` if a system with the given name has been added to the
/// `DispatcherBuilder`, otherwise, returns false.
///
/// NOTE(review): behaves identically to `has_system`; presumably kept for
/// API compatibility — confirm before removing either.
pub fn contains(&self, name: &str) -> bool {
    self.map.contains_key(name)
}
/// Registers a `Batch`: a `System` that wraps an entire inner
/// `Dispatcher`, letting a whole group of systems be driven as one unit
/// without losing parallelism or conciseness.
///
/// `controller` is the `System` type that drives the inner dispatcher
/// built from `dispatcher_builder`.
///
/// Depending on the sub-systems' dependencies, the batch can run in
/// parallel with other systems, and the sub-systems can run in parallel
/// within the batch.
///
/// The inner `Dispatcher` is completely separate from the parent:
/// dependencies and system names declared inside it are invisible to the
/// parent, and cross dependencies are not allowed.
///
/// Chaining variant of `add_batch`.
pub fn with_batch<T>(
    mut self,
    controller: T,
    dispatcher_builder: DispatcherBuilder<'a, 'b>,
    name: &str,
    dep: &[&str],
) -> Self
where
    T: for<'c> BatchController<'a, 'b, 'c> + Send + 'a,
    'b: 'a,
{
    self.add_batch(controller, dispatcher_builder, name, dep);
    self
}
/// The `Batch` is a `System` which contains a `Dispatcher`.
/// By wrapping a `Dispatcher` inside a system, we can control the execution
/// of a whole group of system, without sacrificing parallelism or
/// conciseness.
///
/// This function accepts the `DispatcherBuilder` as parameter, and the type
/// of the `System` that will drive the execution of the internal
/// dispatcher.
///
/// Note that depending on the dependencies of the SubSystems the Batch
/// can run in parallel with other Systems.
/// In addition the Sub Systems can run in parallel within the Batch.
///
/// The `Dispatcher` created for this `Batch` is completely separate
/// from the parent `Dispatcher`.
/// This means that the dependencies, the `System` names, etc. specified on
/// the `Batch` `Dispatcher` are not visible on the parent, and it is not
/// allowed to specify cross dependencies.
pub fn add_batch<T>(
    &mut self,
    controller: T,
    mut dispatcher_builder: DispatcherBuilder<'a, 'b>,
    name: &str,
    dep: &[&str],
) where
    T: for<'c> BatchController<'a, 'b, 'c> + Send + 'a,
    'b: 'a,
{
    // Share the parent's thread pool with the inner dispatcher.
    #[cfg(feature = "parallel")]
    {
        dispatcher_builder.thread_pool = self.thread_pool.clone();
    }
    // The batch's declared resource accesses are the union of everything
    // the inner systems access plus the controller's own BatchSystemData.
    let mut reads = dispatcher_builder.stages_builder.fetch_all_reads();
    reads.extend(<T::BatchSystemData as SystemData>::reads());
    reads.sort();
    reads.dedup();
    let mut writes = dispatcher_builder.stages_builder.fetch_all_writes();
    writes.extend(<T::BatchSystemData as SystemData>::writes());
    writes.sort();
    writes.dedup();
    let accessor = BatchAccessor::new(reads, writes);
    let dispatcher: Dispatcher<'a, 'b> = dispatcher_builder.build();
    // NOTE(review): `create` is unsafe; its safety contract is defined
    // elsewhere — presumably the accessor must cover every resource the
    // inner dispatcher touches (it does, by construction above). Confirm.
    let batch_system =
        unsafe { BatchControllerSystem::<'a, 'b, T>::create(accessor, controller, dispatcher) };
    // Register the whole batch as a single system on the parent.
    self.add(batch_system, name, dep);
}
/// Adds a new thread local system.
///
/// Please only use this if your struct is not `Send` and `Sync`.
///
/// Thread-local systems are dispatched in-order.
///
/// Same as [DispatcherBuilder::add_thread_local], but returns `self` to
/// enable method chaining.
pub fn with_thread_local<T>(mut self, system: T) -> Self
where
T: for<'c> RunNow<'c> + 'b,
{
self.add_thread_local(system);
self
}
/// Registers a thread-local system.
///
/// Only use this for systems that are not `Send` and `Sync`; thread-local
/// systems are dispatched in registration order.
pub fn add_thread_local<T>(&mut self, system: T)
where
    T: for<'c> RunNow<'c> + 'b,
{
    // Box the system; the unsized coercion happens at the push call.
    let system = Box::new(system);
    self.thread_local.push(system);
}
/// Inserts a barrier which assures that all systems
/// added before the barrier are executed before the ones
/// after this barrier.
///
/// Does nothing if there were no systems added
/// since the last call to `add_barrier()`/`with_barrier()`.
///
/// Thread-local systems are not affected by barriers;
/// they're always executed at the end.
///
/// Same as [DispatcherBuilder::add_barrier], but returns `self` to enable
/// method chaining.
pub fn with_barrier(mut self) -> Self {
self.add_barrier();
self
}
/// Inserts a barrier: every system added before it is guaranteed to run
/// before any system added after it. No-op if nothing was added since the
/// previous barrier. Thread-local systems ignore barriers — they always
/// run at the end.
pub fn add_barrier(&mut self) {
    let stages = &mut self.stages_builder;
    stages.add_barrier();
}
/// Attaches an existing rayon thread pool instead of letting the builder
/// create one.
///
/// Chaining variant of
/// [`add_pool()`](struct.DispatcherBuilder.html#method.add_pool).
#[cfg(feature = "parallel")]
pub fn with_pool(mut self, pool: ::std::sync::Arc<::rayon::ThreadPool>) -> Self {
    Self::add_pool(&mut self, pool);
    self
}
/// Attaches an existing rayon thread pool instead of letting the builder
/// create one.
#[cfg(feature = "parallel")]
pub fn add_pool(&mut self, pool: ::std::sync::Arc<::rayon::ThreadPool>) {
    let mut slot = self.thread_pool.write().unwrap();
    *slot = Some(pool);
}
/// Prints the equivalent system graph, in a form usable with the `seq!`
/// and `par!` macros. Recommended for advanced users only.
pub fn print_par_seq(&self) {
    let graph = format!("{:#?}", self);
    println!("{}", graph);
}
/// Builds the `Dispatcher`.
///
/// In the future, this method will
/// precompute useful information in
/// order to speed up dispatching.
pub fn build(self) -> Dispatcher<'a, 'b> {
    use crate::dispatch::dispatcher::new_dispatcher;
    // Lazily create a default rayon pool if none was attached via
    // `add_pool`/`with_pool`.
    #[cfg(feature = "parallel")]
    self.thread_pool
        .write()
        .unwrap()
        .get_or_insert_with(Self::create_thread_pool);
    #[cfg(feature = "parallel")]
    let d = new_dispatcher(
        self.stages_builder.build(),
        self.thread_local,
        self.thread_pool,
    );
    // Without the "parallel" feature there is no pool to hand over.
    #[cfg(not(feature = "parallel"))]
    let d = new_dispatcher(self.stages_builder.build(), self.thread_local);
    d
}
/// Hands out the next monotonically increasing system id.
fn next_id(&mut self) -> SystemId {
    let assigned = self.current_id;
    self.current_id += 1;
    SystemId(assigned)
}
#[cfg(feature = "parallel")]
fn create_thread_pool() -> ::std::sync::Arc<::rayon::ThreadPool> {
use rayon::ThreadPoolBuilder;
use std::sync::Arc;
Arc::new(
ThreadPoolBuilder::new()
.build()
.expect("Invalid configuration"),
)
}
}
#[cfg(feature = "parallel")]
impl<'b> DispatcherBuilder<'static, 'b> {
/// Builds an async dispatcher.
///
/// It does not allow non-static types and accepts a `World` struct or a
/// value that can be borrowed as `World`.
pub fn build_async<R>(
self,
world: R,
) -> crate::dispatch::async_dispatcher::AsyncDispatcher<'b, R> {
use crate::dispatch::async_dispatcher::new_async;
self.thread_pool
.write()
.unwrap()
.get_or_insert_with(Self::create_thread_pool);
new_async(
world,
self.stages_builder.build(),
self.thread_local,
self.thread_pool,
)
}
}
impl<'a, 'b> fmt::Debug for DispatcherBuilder<'a, 'b> {
    /// Formats the builder as its par/seq system graph.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let names = &self.map;
        self.stages_builder.write_par_seq(f, names)
    }
}
|
// < begin copyright >
// Copyright Ryan Marcus 2020
//
// See root directory of this project for license terms.
//
// < end copyright >
use crate::models::Model;
use crate::models::*;
use bytesize::ByteSize;
use log::*;
use std::collections::HashSet;
use std::io::Write;
use std::str;
use crate::train::TrainedRMI;
use std::fs::File;
use std::io::BufWriter;
use std::path::Path;
use std::fmt;
/// Where the generated RMI parameter data lives.
enum StorageConf {
    /// Parameters are hardcoded directly into the generated data header.
    Embed,
    /// Parameters are written to binary files in the given directory and
    /// loaded at runtime by a generated `load()` function.
    Disk(String)
}
/// How one RMI layer's model parameters are emitted in the generated code.
enum LayerParams {
    /// (layer index, params) — small, uniform params emitted as named
    /// `const` scalars.
    Constant(usize, Vec<ModelParam>),
    /// (layer index, params per model, params) — uniform-typed params
    /// emitted as a typed C array indexed by model.
    Array(usize, usize, Vec<ModelParam>),
    /// (layer index, params per model, params) — mixed-typed params
    /// emitted as a raw byte array accessed via pointer casts.
    MixedArray(usize, usize, Vec<ModelParam>)
}
// Name of the generated C constant for parameter `$idx` of layer `$layer`,
// e.g. `L0_PARAMETER2`.
macro_rules! constant_name {
    ($layer:expr, $idx: expr) => {
        format!("L{}_PARAMETER{}", $layer, $idx)
    };
}
// Name of the generated C parameter array for layer `$layer`,
// e.g. `L1_PARAMETERS`.
macro_rules! array_name {
    ($layer: expr) => {
        format!("L{}_PARAMETERS", $layer)
    }
}
impl LayerParams {
    /// Picks the storage strategy for a layer's parameters:
    /// mixed types force `MixedArray`; uniform types become `Array` when
    /// indexed access is requested or they exceed 4096 bytes; otherwise
    /// they are emitted as `Constant`s.
    fn new(idx: usize,
           array_access: bool,
           params_per_model: usize,
           params: Vec<ModelParam>) -> LayerParams {
        // first, if the underlying data is mixed, we can only support array mode.
        let first_param = params.first().unwrap();
        let mixed = !params.iter().all(|p| first_param.is_same_type(p));
        if mixed {
            return LayerParams::MixedArray(idx, params_per_model, params);
        }
        let param_size_bytes: usize = params.iter().map(|p| p.size()).sum();
        if array_access || param_size_bytes > 4096 {
            return LayerParams::Array(idx, params_per_model, params);
        }
        return LayerParams::Constant(idx, params);
    }
    /// Emits the parameters as hardcoded C declarations (constants or a
    /// typed array literal). Panics on `MixedArray`, which cannot be
    /// hardcoded.
    fn to_code<T: Write>(&self, target: &mut T) -> Result<(), std::io::Error> {
        match self {
            LayerParams::Constant(idx, params) => {
                // One `const` per parameter, named via `constant_name!`.
                for (p_idx, param) in params.iter().enumerate() {
                    writeln!(
                        target,
                        "const {} {}{} = {};",
                        param.c_type(),
                        constant_name!(idx, p_idx),
                        param.c_type_mod(),
                        param.c_val()
                    )?;
                }
            }
            LayerParams::Array(idx, _, params) => {
                // Single brace-initialized array; all params share a type.
                write!(
                    target,
                    "const {} {}[] = {{",
                    params[0].c_type(),
                    array_name!(idx)
                )?;
                let (last, rest) = params.split_last().unwrap();
                for param in rest {
                    write!(target, "{},", param.c_val())?;
                }
                write!(target, "{}", last.c_val())?;
                writeln!(target, "}};")?;
            },
            LayerParams::MixedArray(_, _, _) => {
                panic!("Cannot hardcode mixed array.");
            }
        };
        return Result::Ok(());
    }
    /// Whether the generated loader must heap-allocate the array rather
    /// than using a fixed-size global.
    /// NOTE(review): the `>= 4 * 1024` threshold here is off by one from
    /// the `> 4096` used in `new` — presumably intentional, but confirm.
    fn requires_malloc(&self) -> bool {
        return match self {
            LayerParams::Array(_, _, params) => {
                let array_size: usize = params.iter().map(|p| p.size()).sum();
                return array_size >= 4 * 1024;
            },
            LayerParams::MixedArray(_, _, _) => true,
            LayerParams::Constant(_, _) => false,
        };
    }
    /// C element type for the malloc'd pointer (raw `char` for mixed data).
    /// Only valid when `requires_malloc()` is true.
    fn pointer_type(&self) -> &'static str {
        assert!(self.requires_malloc());
        return match self {
            LayerParams::Array(_, _, params) => params[0].c_type(),
            LayerParams::MixedArray(_, _, _) => "char",
            LayerParams::Constant(_, _) => panic!("No pointer type for constant params")
        };
    }
    /// Emits the (uninitialized) C declaration for on-disk parameters:
    /// a fixed-size global array, or a bare pointer when malloc is needed.
    fn to_decl<T: Write>(&self, target: &mut T) -> Result<(), std::io::Error> {
        match self {
            LayerParams::Constant(_, _) => {
                panic!("Cannot forward-declare constants");
            }
            LayerParams::Array(idx, _, params) => {
                if !self.requires_malloc() {
                    let num_items: usize = params.iter().map(|p| p.len()).sum();
                    writeln!(
                        target,
                        "{} {}[{}];",
                        params[0].c_type(),
                        array_name!(idx),
                        num_items
                    )?;
                } else {
                    writeln!(
                        target,
                        "{}* {};",
                        params[0].c_type(),
                        array_name!(idx)
                    )?;
                }
            },
            LayerParams::MixedArray(idx, _, _) => {
                assert!(self.requires_malloc());
                writeln!(
                    target,
                    "char* {};",
                    array_name!(idx)
                )?;
            }
        };
        return Result::Ok(());
    }
    /// Serializes the raw parameter bytes to `target` (the on-disk data
    /// file). For uniform arrays, asserts every param matches the first's
    /// type. Panics for `Constant`, which is never written to disk.
    fn write_to<T: Write>(&self, target: &mut T) -> Result<(), std::io::Error> {
        match self {
            LayerParams::Array(_idx, _, params) |
            LayerParams::MixedArray(_idx, _, params) => {
                let (first, rest) = params.split_first().unwrap();
                first.write_to(target)?;
                for itm in rest {
                    if let LayerParams::Array(_, _, _) = self {
                        assert!(first.is_same_type(itm));
                    }
                    itm.write_to(target)?;
                }
                return Ok(());
            },
            LayerParams::Constant(_, _) =>
                panic!("Cannot write constant parameters to binary file.")
        };
    }
    /// All parameters of this layer, flattened across models.
    fn params(&self) -> &[ModelParam] {
        return match self {
            LayerParams::Array(_, _, params) |
            LayerParams::MixedArray(_, _, params)
            => params,
            LayerParams::Constant(_, params) => params
        };
    }
    /// The layer index this parameter set belongs to.
    fn index(&self) -> usize {
        return match self {
            LayerParams::Array(idx, _, _) |
            LayerParams::MixedArray(idx, _, _)
            => *idx,
            LayerParams::Constant(idx, _) => *idx
        };
    }
    /// Number of parameters per model (for `Constant` layers this is the
    /// whole parameter list, since they hold a single model).
    fn params_per_model(&self) -> usize {
        return match self {
            LayerParams::Array(_idx, ppm, _params) |
            LayerParams::MixedArray(_idx, ppm, _params)
            => *ppm,
            LayerParams::Constant(_, params) => params.len()
        };
    }
    /// Total byte size of all parameters on this layer.
    fn size(&self) -> usize {
        return self.params().iter().map(|p| p.size()).sum();
    }
    /// Emits a C expression for parameter `parameter_index` of a
    /// single-model layer: the named constant for `Constant` layers,
    /// otherwise an array access at model index 0.
    fn access_by_const<T: Write>(
        &self,
        target: &mut T,
        parameter_index: usize,
    ) -> Result<(), std::io::Error> {
        if let LayerParams::Constant(idx, _) = self {
            write!(target, "{}", constant_name!(idx, parameter_index))?;
            return Result::Ok(());
        }
        return self.access_by_ref(target, "0", parameter_index);
    }
    /// Emits a C expression accessing parameter `parameter_index` of the
    /// model selected by the C expression `model_index`.
    fn access_by_ref<T: Write>(
        &self,
        target: &mut T,
        model_index: &str,
        parameter_index: usize
    ) -> Result<(), std::io::Error> {
        // A single array-valued parameter is passed by name (decays to a
        // pointer on the C side).
        if self.params()[0].is_array() {
            assert_eq!(self.params().len(), 1,
                       "Layer params with array had more than one member.");
            write!(target, "{}", array_name!(self.index()))?;
            return Result::Ok(());
        }
        match self {
            LayerParams::Constant(idx, _) => {
                panic!(
                    "Cannot access constant parameters by reference on layer {}",
                    idx
                );
            }
            LayerParams::Array(idx, params_per_model, params) => {
                if params[0].is_array() {
                    assert_eq!(params.len(), 1);
                }
                // Uniform stride: model offset plus parameter offset.
                let expr = format!("{}*{} + {}",
                                   params_per_model, model_index, parameter_index);
                write!(target, "{}[{}]", array_name!(idx), expr)?;
            },
            LayerParams::MixedArray(idx, params_per_model, params) => {
                // determine the number of bytes for each model
                let mut bytes_per_model = 0;
                for item in params.iter().take(*params_per_model) {
                    bytes_per_model += item.size();
                }
                // determine the byte offset of this parameter
                let mut offset = 0;
                for item in params.iter().take(parameter_index) {
                    offset += item.size();
                }
                // we have to determine the type of the index being accessed
                // and add the appropiate cast.
                let c_type = params[parameter_index].c_type();
                let ptr_expr = format!("{} + ({} * {}) + {}",
                                       array_name!(idx),
                                       model_index, bytes_per_model,
                                       offset);
                write!(target, "*(({new_type}*) ({ptr_expr}))",
                       new_type=c_type, ptr_expr=ptr_expr)?;
            }
        };
        return Result::Ok(());
    }
    /// Returns a new `LayerParams` with one last-layer error value (as an
    /// extra `Int` parameter) appended to each model's parameters, so the
    /// error lookup shares a cache line with the model.
    fn with_zipped_errors(&self, lle: Vec<u64>) -> LayerParams {
        let params = self.params();
        // integrate the errors into the model parameters of the last
        // layer to save a cache miss.
        // TODO we should add padding to make sure each of these are
        // cache-aligned. Also a lot of unneeded copying going on here...
        let combined_lle_params: Vec<ModelParam> =
            params.chunks(self.params_per_model())
            .zip(lle)
            .flat_map(|(mod_params, err)| {
                let mut to_r: Vec<ModelParam> = Vec::new();
                to_r.extend_from_slice(mod_params);
                to_r.push(ModelParam::Int(err));
                to_r
            }).collect();
        let is_constant = if let LayerParams::Constant(_, _) = self {
            true
        } else {
            false
        };
        // NOTE(review): `is_constant` is passed as `new`'s `array_access`
        // flag, which looks inverted (an Array layer passes `false` here
        // and could be reclassified as Constant if small enough — which
        // `access_by_ref` would then panic on). In practice multi-model
        // last layers exceed the 4096-byte threshold, so `Array` is still
        // chosen by size; confirm intent before changing.
        return LayerParams::new(self.index(), is_constant, self.params_per_model() + 1,
                                combined_lle_params);
    }
}
impl fmt::Display for LayerParams {
    /// Human-readable summary used in trace logging: variant name, layer
    /// index, parameter counts, and whether heap allocation is required.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let malloc = self.requires_malloc();
        match self {
            LayerParams::Constant(idx, params) => write!(
                f,
                "Constant(idx: {}, len: {}, malloc: {})",
                idx,
                params.len(),
                malloc
            ),
            LayerParams::Array(idx, ppm, params) => write!(
                f,
                "Array(idx: {}, ppm: {}, len: {}, malloc: {})",
                idx,
                ppm,
                params.len(),
                malloc
            ),
            LayerParams::MixedArray(idx, ppm, params) => write!(
                f,
                "MixedArray(idx: {}, ppm: {}, len: {}, malloc: {})",
                idx,
                ppm,
                params.len(),
                malloc
            ),
        }
    }
}
/// Flattens every model's parameters on one layer into a single
/// `LayerParams`, requesting array access whenever the layer holds more
/// than one model.
fn params_for_layer(layer_idx: usize, models: &[Box<dyn Model>]) -> LayerParams {
    let per_model = models[0].params().len();
    let mut all_params = Vec::new();
    for model in models {
        all_params.extend(model.params());
    }
    LayerParams::new(layer_idx, models.len() > 1, per_model, all_params)
}
// Expands to a C expression converting the previous layer's prediction
// (`fpred` for float models, `ipred` for int models) into an index,
// clamped to `[0, $bound - 1]` when `$needs_check` says the model cannot
// guarantee in-range predictions.
macro_rules! model_index_from_output {
    ($from: expr, $bound: expr, $needs_check: expr) => {
        match $from {
            ModelDataType::Float => {
                if $needs_check {
                    format!("FCLAMP(fpred, {}.0 - 1.0)", $bound)
                } else {
                    format!("(uint64_t) fpred")
                }
            }
            ModelDataType::Int => {
                if $needs_check {
                    format!("(ipred > {0} - 1 ? {0} - 1 : ipred)", $bound)
                } else {
                    format!("ipred")
                }
            }
        }
    };
}
/// Total parameter footprint of the RMI in bytes, assuming every model on
/// a layer has the same size as that layer's first model. When last-layer
/// errors are reported, each leaf model contributes 8 extra bytes (u64).
pub fn rmi_size(rmi: &[Vec<Box<dyn Model>>], report_last_layer_errors: bool) -> u64 {
    let mut total: usize = rmi
        .iter()
        .map(|layer| {
            let per_model: usize = layer[0].params().iter().map(|p| p.size()).sum();
            per_model * layer.len()
        })
        .sum();
    if report_last_layer_errors {
        total += rmi.last().unwrap().len() * 8;
    }
    total as u64
}
/// Emits the three generated artifacts for a trained RMI: the C++
/// implementation (`code_output`), the parameter data header
/// (`data_output`), and the public header (`header_output`).
///
/// * `namespace` — C++ namespace and file-name prefix.
/// * `total_rows` — number of indexed keys; the final prediction is
///   clamped to this bound.
/// * `rmi` — models per layer, root layer first.
/// * `last_layer_errors` — optional per-leaf error bounds; when present,
///   `lookup` gains a `size_t* err` out-parameter.
/// * `storage` — embed parameters in the header, or write them to disk
///   and generate `load()`.
/// * `build_time` — training wall time in ns, emitted as a constant.
fn generate_code<T: Write>(
    code_output: &mut T,
    data_output: &mut T,
    header_output: &mut T,
    namespace: &str,
    total_rows: usize,
    rmi: Vec<Vec<Box<dyn Model>>>,
    last_layer_errors: Option<Vec<u64>>,
    storage: StorageConf,
    build_time: u128,
) -> Result<(), std::io::Error> {
    // construct the code for the model parameters.
    let mut layer_params: Vec<LayerParams> = rmi
        .iter()
        .enumerate()
        .map(|(layer_idx, models)| params_for_layer(layer_idx, models))
        .collect();
    let report_last_layer_errors = last_layer_errors.is_some();
    // C snippet (built in-memory) that stores the error bound into *err.
    let mut report_lle: Vec<u8> = Vec::new();
    if report_last_layer_errors {
        if let Some(lle) = last_layer_errors {
            assert!(!lle.is_empty());
            if lle.len() > 1 {
                // Multi-model last layer: zip each model's error next to
                // its parameters and read it back by model index.
                let old_last = layer_params.pop().unwrap();
                let new_last = old_last.with_zipped_errors(lle);
                write!(report_lle, " *err = ")?;
                new_last.access_by_ref(&mut report_lle, "modelIndex",
                                       new_last.params_per_model() - 1)?;
                writeln!(report_lle, ";")?;
                layer_params.push(new_last);
            } else {
                // Single leaf model: the error bound is a literal.
                write!(report_lle, " *err = {};", lle[0])?;
            }
        }
    }
    trace!("Layer parameters:");
    for lps in layer_params.iter() {
        trace!("{}", lps);
    }
    writeln!(data_output, "namespace {} {{", namespace)?;
    // Lines of the generated `load()` function (disk storage only).
    let mut read_code = Vec::new();
    match &storage {
        // embed the data directly inside of the header files
        StorageConf::Embed => {
            for lp in layer_params.iter() {
                lp.to_code(data_output)?;
            }
        },
        // store the data on disk, add code to load it
        StorageConf::Disk(path) => {
            read_code.push("bool load(char const* dataPath) {".to_string());
            for lp in layer_params.iter() {
                match lp {
                    // constants are still put directly in the header
                    LayerParams::Constant(_idx, _) => lp.to_code(data_output)?,
                    LayerParams::Array(idx, _, _) |
                    LayerParams::MixedArray(idx, _, _) => {
                        let data_path = Path::new(&path).join(format!("{}_{}", namespace, array_name!(idx)));
                        let f = File::create(data_path).expect("Could not write data file -- does the RMI data directory exist?");
                        let mut bw = BufWriter::new(f);
                        lp.write_to(&mut bw)?; // write to data file
                        lp.to_decl(data_output)?; // write to source code
                        read_code.push(" {".to_string());
                        read_code.push(format!(" std::ifstream infile(std::filesystem::path(dataPath) / \"{ns}_{fn}\", std::ios::in | std::ios::binary);",
                                               ns=namespace, fn=array_name!(idx)));
                        read_code.push(" if (!infile.good()) return false;".to_string());
                        if lp.requires_malloc() {
                            read_code.push(format!(" {} = ({}*) malloc({});",
                                                   array_name!(idx), lp.pointer_type(), lp.size()));
                            read_code.push(format!(" if ({} == NULL) return false;",
                                                   array_name!(idx)));
                        }
                        read_code.push(format!(" infile.read((char*){fn}, {size});",
                                               fn=array_name!(idx), size=lp.size()));
                        read_code.push(" if (!infile.good()) return false;".to_string());
                        read_code.push(" }".to_string());
                    }
                }
            }
            read_code.push(" return true;".to_string());
            read_code.push("}".to_string());
        }
    };
    // Generated `cleanup()` frees every malloc'd parameter array.
    let mut free_code = Vec::new();
    free_code.push("void cleanup() {".to_string());
    // generate free code
    for lp in layer_params.iter() {
        if !lp.requires_malloc() { continue; }
        if let LayerParams::Array(idx, _, _) | LayerParams::MixedArray(idx, _, _) = lp {
            free_code.push(format!(" free({});", array_name!(idx)));
            continue;
        }
        panic!();
    }
    free_code.push("}".to_string());
    writeln!(data_output, "}} // namespace")?;
    // get all of the required stdlib function signatures together
    // TODO assumes all layers are homogenous
    let mut decls = HashSet::new();
    let mut sigs = HashSet::new();
    for layer in rmi.iter() {
        for stdlib in layer[0].standard_functions() {
            decls.insert(stdlib.decl().to_string());
            sigs.insert(stdlib.code().to_string());
        }
    }
    writeln!(code_output, "#include \"{}.h\"", namespace)?;
    writeln!(code_output, "#include \"{}_data.h\"", namespace)?;
    writeln!(code_output, "#include <math.h>")?;
    writeln!(code_output, "#include <cmath>")?;
    writeln!(code_output, "#include <fstream>")?;
    writeln!(code_output, "#include <filesystem>")?;
    writeln!(code_output, "#include <iostream>")?;
    writeln!(code_output, "namespace {} {{", namespace)?;
    for ln in read_code {
        writeln!(code_output, "{}", ln)?;
    }
    for ln in free_code {
        writeln!(code_output, "{}", ln)?;
    }
    for decl in decls {
        writeln!(code_output, "{}", decl)?;
    }
    for sig in sigs {
        writeln!(code_output, "{}", sig)?;
    }
    // next, the model sigs
    sigs = HashSet::new();
    for layer in rmi.iter() {
        sigs.insert(layer[0].code());
    }
    for sig in sigs {
        writeln!(code_output, "{}", sig)?;
    }
    // Helper that clamps a float prediction into [0, bound].
    writeln!(
        code_output,
        "
inline size_t FCLAMP(double inp, double bound) {{
if (inp < 0.0) return 0;
return (inp > bound ? bound : (size_t)inp);
}}\n"
    )?;
    let lookup_sig = if report_last_layer_errors {
        "uint64_t lookup(uint64_t key, size_t* err)"
    } else {
        "uint64_t lookup(uint64_t key)"
    };
    writeln!(code_output, "{} {{", lookup_sig)?;
    // determine if we have any layers with float (fpred) or int (ipred) outputs
    let mut needed_vars = HashSet::new();
    if rmi.len() > 1 {
        needed_vars.insert("size_t modelIndex;");
    }
    for layer in rmi.iter() {
        match layer[0].output_type() {
            ModelDataType::Int => needed_vars.insert("uint64_t ipred;"),
            ModelDataType::Float => needed_vars.insert("double fpred;"),
        };
    }
    for var in needed_vars {
        writeln!(code_output, " {}", var)?;
    }
    let model_size_bytes = rmi_size(&rmi, report_last_layer_errors);
    info!("Generated model size: {:?} ({} bytes)", ByteSize(model_size_bytes), model_size_bytes);
    // Walk the layers root-to-leaf, chaining each prediction into the
    // next layer's model index.
    let mut last_model_output = ModelDataType::Int;
    let mut needs_bounds_check = true;
    for (layer_idx, layer) in rmi.into_iter().enumerate() {
        let layer_param = &layer_params[layer_idx];
        let required_type = layer[0].input_type();
        let current_model_output = layer[0].output_type();
        let var_name = match current_model_output {
            ModelDataType::Int => "ipred",
            ModelDataType::Float => "fpred",
        };
        let num_parameters = layer[0].params().len();
        if layer.len() == 1 {
            // use constant indexing, only one model
            write!(
                code_output,
                " {} = {}(",
                var_name,
                layer[0].function_name()
            )?;
            for pidx in 0..num_parameters {
                layer_param.access_by_const(code_output, pidx)?;
                write!(code_output, ", ")?;
            }
        } else {
            // we need to get the model index based on the previous
            // prediction, and then use ref accessing
            writeln!(
                code_output,
                " modelIndex = {};",
                model_index_from_output!(last_model_output, layer.len(), needs_bounds_check)
            )?;
            write!(
                code_output,
                " {} = {}(",
                var_name,
                layer[0].function_name()
            )?;
            for pidx in 0..num_parameters {
                layer_param.access_by_ref(code_output, "modelIndex", pidx)?;
                write!(code_output, ", ")?;
            }
        }
        writeln!(code_output, "({})key);", required_type.c_type())?;
        last_model_output = layer[0].output_type();
        needs_bounds_check = layer[0].needs_bounds_check();
    }
    writeln!(code_output, "{}", str::from_utf8(&report_lle).unwrap())?;
    writeln!(
        code_output,
        " return {};",
        model_index_from_output!(last_model_output, total_rows, true)
    )?; // always bounds check the last level
    writeln!(code_output, "}}")?;
    writeln!(code_output, "}} // namespace")?;
    // write out our forward declarations
    writeln!(header_output, "#include <cstddef>")?;
    writeln!(header_output, "#include <cstdint>")?;
    writeln!(header_output, "namespace {} {{", namespace)?;
    if let StorageConf::Disk(_) = storage {
        writeln!(header_output, "bool load(char const* dataPath);")?;
    }
    writeln!(header_output, "void cleanup();")?;
    if !report_last_layer_errors {
        writeln!(header_output, "#ifdef EXTERN_RMI_LOOKUP")?;
        writeln!(header_output, "extern \"C\" uint64_t lookup(uint64_t key);")?;
        writeln!(header_output, "#endif")?;
    }
    writeln!(
        header_output,
        "const size_t RMI_SIZE = {};",
        model_size_bytes
    )?;
    // BUILD_TIME_NS is emitted as u64; guard against overflow.
    assert!(build_time <= u128::from(std::u64::MAX));
    writeln!(
        header_output,
        "const uint64_t BUILD_TIME_NS = {};",
        build_time
    )?;
    writeln!(header_output, "const char NAME[] = \"{}\";", namespace)?;
    writeln!(header_output, "{};", lookup_sig)?;
    writeln!(header_output, "}}")?;
    return Result::Ok(());
}
/// Writes the three generated files (`<ns>.cpp`, `<ns>_data.h`, `<ns>.h`)
/// for a trained RMI.
///
/// * `last_layer_errors` — whether the generated `lookup` reports error
///   bounds (requires on-disk storage).
/// * `data_dir` — when `Some`, parameters are written to this directory
///   and loaded at runtime; when `None`, they are embedded directly.
///
/// # Panics
///
/// Panics if any output file cannot be created, or if embedding is
/// combined with last-layer error tracking.
pub fn output_rmi(namespace: &str,
                  last_layer_errors: bool,
                  trained_model: TrainedRMI,
                  num_rows: usize,
                  build_time: u128,
                  data_dir: Option<&str>) -> Result<(), std::io::Error> {
    let cpp_file = File::create(format!("{}.cpp", namespace))
        .expect("Could not write RMI CPP file");
    let data_file = File::create(format!("{}_data.h", namespace))
        .expect("Could not write RMI data file");
    let header_file = File::create(format!("{}.h", namespace))
        .expect("Could not write RMI header file");
    let mut code_writer = BufWriter::new(cpp_file);
    let mut data_writer = BufWriter::new(data_file);
    let mut header_writer = BufWriter::new(header_file);
    let lle = if last_layer_errors {
        Some(trained_model.last_layer_max_l1s)
    } else {
        None
    };
    let conf = if let Some(dir) = data_dir {
        StorageConf::Disk(String::from(dir))
    } else {
        assert!(!last_layer_errors,
                "Cannot directly embed RMI data and track last level errors.");
        StorageConf::Embed
    };
    generate_code(
        &mut code_writer,
        &mut data_writer,
        &mut header_writer,
        namespace,
        num_rows,
        trained_model.rmi,
        lle,
        conf,
        build_time,
    )
}
|
///// chapter 2 "using variables and types"
///// program section:
//
fn main() {
    // Heap-allocate an i32; `Box<i32>` forwards `Display` to the inner value.
    let boxed = Box::new(5_i32);
    println!("{}", boxed);
}
///// output should be:
/*
*/// end of output
|
use crate::api::*;
use std::{
os::raw::c_short,
sync::{Arc, Mutex},
};
use vigem_client as vgm;
use vigem_client::ClientExt;
/// Cloneable handle to the ViGEm bus driver client.
#[derive(Clone)]
pub struct Bus {
    // Shared, mutex-guarded driver client; the `Arc` makes `Bus` cheaply
    // `Clone`.
    client: Arc<Mutex<vgm::Client>>,
}
impl Bus {
    /// Connects to the ViGEm bus driver, translating driver errors into
    /// this crate's `Error` type.
    pub fn new() -> Result<Bus, Error> {
        let raw_client = vgm::Client::new().map_with_vgp_error()?;
        Ok(Bus {
            client: Arc::new(Mutex::new(raw_client)),
        })
    }
    /// Plugs a new virtual device into the bus and returns a handle to it.
    pub fn plug_in(&mut self) -> Result<Device, Error> {
        let device = self.client.plug_in().map_with_vgp_error()?;
        Ok(Device { device })
    }
}
/// A single virtual gamepad plugged into the ViGEm bus.
pub struct Device {
    // Underlying driver-level device handle.
    device: vgm::Device,
}
impl Device {
    /// Translates a crate-level `Input` into the driver's input type and
    /// forwards it to the virtual device.
    ///
    /// Thumb-stick positions are given in `[-1.0, 1.0]` and scaled to the
    /// driver's signed 16-bit range.
    pub fn put_input(&mut self, input: Input) -> Result<(), Error> {
        let input = match input {
            Input::Move { thumb_stick, x, y } => match thumb_stick {
                ThumbStick::Left => vgm::Input::MovedLeftThumbStick(
                    (32767f32 * x) as c_short,
                    (32767f32 * y) as c_short,
                ),
                ThumbStick::Right => vgm::Input::MovedRightThumbStick(
                    (32767f32 * x) as c_short,
                    (32767f32 * y) as c_short,
                ),
            },
            Input::Press(button) => match button {
                Button::DpadDown => vgm::Input::Pressed(vgm::Button::DpadDown),
                Button::DpadUp => vgm::Input::Pressed(vgm::Button::DpadUp),
                Button::DpadLeft => vgm::Input::Pressed(vgm::Button::DpadLeft),
                Button::DpadRight => vgm::Input::Pressed(vgm::Button::DpadRight),
                Button::North => vgm::Input::Pressed(vgm::Button::Y),
                Button::South => vgm::Input::Pressed(vgm::Button::A),
                // NOTE(review): West->B and East->X look swapped relative
                // to the usual Xbox layout (X is the west face button, B
                // the east one) — confirm this mapping is intentional.
                Button::West => vgm::Input::Pressed(vgm::Button::B),
                Button::East => vgm::Input::Pressed(vgm::Button::X),
                Button::Start => vgm::Input::Pressed(vgm::Button::Start),
                Button::Select => vgm::Input::Pressed(vgm::Button::Guide),
                Button::TriggerLeft => vgm::Input::Pressed(vgm::Button::LeftShoulder),
                Button::TriggerRight => vgm::Input::Pressed(vgm::Button::RightShoulder),
                // Analog triggers are modelled as digital: press = 127.
                Button::TriggerLeft2 => vgm::Input::PressedLeftTrigger(127),
                Button::TriggerRight2 => vgm::Input::PressedRightTrigger(127),
                Button::ThumbStickLeft => vgm::Input::Pressed(vgm::Button::LeftThumb),
                Button::ThumbStickRight => vgm::Input::Pressed(vgm::Button::RightThumb),
            },
            Input::Release(button) => match button {
                Button::DpadDown => vgm::Input::Released(vgm::Button::DpadDown),
                Button::DpadUp => vgm::Input::Released(vgm::Button::DpadUp),
                Button::DpadLeft => vgm::Input::Released(vgm::Button::DpadLeft),
                Button::DpadRight => vgm::Input::Released(vgm::Button::DpadRight),
                Button::North => vgm::Input::Released(vgm::Button::Y),
                Button::South => vgm::Input::Released(vgm::Button::A),
                Button::West => vgm::Input::Released(vgm::Button::B),
                Button::East => vgm::Input::Released(vgm::Button::X),
                Button::Start => vgm::Input::Released(vgm::Button::Start),
                Button::Select => vgm::Input::Released(vgm::Button::Guide),
                Button::TriggerLeft => vgm::Input::Released(vgm::Button::LeftShoulder),
                Button::TriggerRight => vgm::Input::Released(vgm::Button::RightShoulder),
                // Releasing an analog trigger means "pressing" it to 0.
                Button::TriggerLeft2 => vgm::Input::PressedLeftTrigger(0),
                Button::TriggerRight2 => vgm::Input::PressedRightTrigger(0),
                Button::ThumbStickLeft => vgm::Input::Released(vgm::Button::LeftThumb),
                Button::ThumbStickRight => vgm::Input::Released(vgm::Button::RightThumb),
            },
        };
        self.device.put_input(input).map_with_vgp_error()
    }
    /// Polls the device for host feedback; rumble is translated, LED
    /// output is reported as `Unsupported`, absence as `None`.
    pub fn get_output(&mut self) -> Result<Output, Error> {
        match self.device.get_output() {
            Some(output) => match output {
                vgm::Output::Rumble(large_motor, small_motor) => Ok(Output::Rumble {
                    large_motor: large_motor.into(),
                    small_motor: small_motor.into(),
                }),
                vgm::Output::Led(_) => Ok(Output::Unsupported),
            },
            None => Ok(Output::None),
        }
    }
    /// Unplugs the virtual device from the bus, consuming the handle.
    pub fn unplug(self) -> Result<(), Error> {
        self.device.unplug().map_with_vgp_error()
    }
}
|
// Copyright 2018 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Test suite for different supported serialization formats.
use rand::{thread_rng, Rng};
use serde::Deserialize;
use pwbox::{rcrypto::RustCrypto, sodium::Sodium, ErasedPwBox, Eraser, Suite};
const PASSWORD: &str = "correct horse battery staple";
/// Seals a random 32-byte secret, erases it, runs it through the supplied
/// `serialize`/`deserialize` pair, and asserts the secret survives the
/// round trip.
fn roundtrip<V, S, D>(serialize: S, deserialize: D)
where
    S: Fn(&ErasedPwBox) -> V,
    D: Fn(&V) -> ErasedPwBox,
{
    let mut rng = thread_rng();
    let secret: [u8; 32] = rng.gen();
    let mut eraser = Eraser::new();
    eraser.add_suite::<Sodium>();
    // Seal with the Sodium suite, then type-erase for serialization.
    let sealed = Sodium::build_box(&mut rng).seal(PASSWORD, &secret).unwrap();
    let erased = eraser.erase(&sealed).unwrap();
    let serialized = serialize(&erased);
    let recovered = deserialize(&serialized);
    let restored = eraser.restore(&recovered).unwrap();
    assert_eq!(secret, &*restored.open(PASSWORD).unwrap());
}
/// An erased box must survive a JSON encode/decode cycle.
#[test]
fn json_roundtrip() {
    let encode = |pwbox: &ErasedPwBox| serde_json::to_string(pwbox).expect("serialize");
    let decode = |json: &String| serde_json::from_str(json).expect("deserialize");
    roundtrip(encode, decode);
}
#[test]
fn json_serialization_compatibility() {
    // Taken from `go-ethereum` keystore test vectors:
    // <https://github.com/ethereum/go-ethereum/blob/2714e8f091117b4f110198008348bfc19233ed60/
    // accounts/keystore/testdata/keystore/aaa>
    const JSON: &str = r#"{
"cipher": "aes-128-ctr",
"ciphertext": "cb664472deacb41a2e995fa7f96fe29ce744471deb8d146a0e43c7898c9ddd4d",
"cipherparams": { "iv": "dfd9ee70812add5f4b8f89d0811c9158" },
"kdf": "scrypt",
"kdfparams": {
"dklen": 32, "n": 8, "p": 16, "r": 8,
"salt": "0d6769bf016d45c479213990d6a08d938469c4adad8a02ce507b4a4e7b7739f1"
},
"mac": "bac9af994b15a45dd39669fc66f9aa8a3b9dd8c22cb16e4d8d7ea089d0f1a1a9"
}"#;
    // Shadows the module-level PASSWORD: this vector uses its own password.
    const PASSWORD: &str = "foobar";
    let mut eraser = Eraser::new();
    // The go-ethereum vector uses scrypt + aes-128-ctr, handled by the
    // RustCrypto suite rather than Sodium.
    eraser.add_suite::<RustCrypto>();
    let erased: ErasedPwBox = serde_json::from_str(JSON).unwrap();
    assert!(eraser.restore(&erased).unwrap().open(PASSWORD).is_ok());
}
/// An erased box must survive a YAML encode/decode cycle.
#[test]
fn yaml_roundtrip() {
    let encode = |pwbox: &ErasedPwBox| serde_yaml::to_string(pwbox).expect("serialize");
    let decode = |yaml: &String| serde_yaml::from_str(yaml).expect("deserialize");
    roundtrip(encode, decode);
}
#[test]
fn yaml_serialization_example() {
    // Hand-written YAML fixture with the box nested under a `secret` key,
    // exercising deserialization inside a containing document.
    const YAML: &str = r#"
secret:
kdf: scrypt-nacl
cipher: xsalsa20-poly1305
ciphertext: 6ebc1234418b494777d6e53f09f1c5a81b82d390ac0bf129c4dbb6a299ca4058
mac: 6fc1d3998030960a456436ce2ff3210c
kdfparams:
salt: d1946ce416f3c6d418a2db97a01e2427da87212bb4103c94ec78bb88103bf81c
memlimit: 16777216
opslimit: 524288
cipherparams:
iv: 80132c7db2994c3a9229247faac621b944e3e37f39aa4440
description: |
Super-secret key.
DO NOT decrypt.
"#;
    // Wrapper struct mirroring the YAML document shape.
    #[derive(Deserialize)]
    struct Container {
        secret: ErasedPwBox,
    }
    let mut eraser = Eraser::new();
    eraser.add_suite::<Sodium>();
    let restored: Container = serde_yaml::from_str(YAML).unwrap();
    // The sealed payload is a 32-byte secret.
    assert_eq!(restored.secret.len(), 32);
    let restored = eraser.restore(&restored.secret).unwrap();
    assert!(restored.open(PASSWORD).is_ok());
}
/// An erased box must survive a TOML encode/decode cycle.
#[test]
fn toml_serialization() {
    let encode = |pwbox: &ErasedPwBox| toml::to_string(pwbox).expect("serialize");
    let decode = |text: &String| toml::from_str(text).expect("deserialize");
    roundtrip(encode, decode);
}
#[test]
fn toml_deserialization_inner() {
    use pwbox::Error;
    // TOML fixture where the box sits under `[key]` next to unrelated
    // fields, exercising deserialization as a nested table.
    const TOML: &str = r#"
some_data = 5
other_data = 'foobar'
[key]
ciphertext = 'cd9d2fb2355d8c60d92dcc860abc0c4b20ddd12dd52a4dd53caca0a2f87f7f5f'
mac = '83ae22646d7834f254caea78862eafda'
kdf = 'scrypt-nacl'
cipher = 'xsalsa20-poly1305'
[key.kdfparams]
salt = '87d68fb57d9c2331cf2bd9fdd7551057798bd36d0d2999481311cfae39863691'
memlimit = 16777216
opslimit = 524288
[key.cipherparams]
iv = 'db39c466e2f8ae7fbbc857df48d99254017b059624af7106'
"#;
    // Generic container: `T` is the erased box before opening, the raw
    // key bytes after.
    #[derive(Deserialize)]
    struct Test<T> {
        some_data: u64,
        other_data: Option<String>,
        key: T,
    }
    impl Test<ErasedPwBox> {
        // Decrypts `key` in place, carrying the other fields through.
        fn open(self, eraser: &Eraser, password: &str) -> Result<Test<Vec<u8>>, Error> {
            Ok(Test {
                some_data: self.some_data,
                other_data: self.other_data,
                key: eraser.restore(&self.key)?.open(password)?.to_vec(),
            })
        }
    }
    let mut eraser = Eraser::new();
    eraser.add_suite::<Sodium>();
    let test: Test<ErasedPwBox> = toml::from_str(TOML).unwrap();
    let decrypted_test = test.open(&eraser, PASSWORD).unwrap();
    assert_eq!(decrypted_test.key.len(), 32);
}
// A pwbox must survive a round trip through the binary CBOR format.
#[test]
fn cbor_roundtrip() {
roundtrip(
|boxed| serde_cbor::to_vec(boxed).expect("serialize"),
|bytes| serde_cbor::from_slice(bytes).expect("deserialize"),
);
}
|
// Helper macro to test against errors in validate function.
//
// Expands to a `#[test]` named `$name` that runs `jsonprima::validate`
// on `$contents` and asserts the returned errors match
// `$expected_errors`, a `Vec<(error_code, index_start, index_end)>`.
// Both lists are sorted by start position before pairwise comparison.
#[macro_export]
macro_rules! test {
($name:ident, $contents:expr, $expected_errors:expr) => {
#[test]
fn $name() {
// Which errors do we expect?
let mut expected_errors: Vec<(&str, usize, usize)> = $expected_errors;
// Which is the JSON document to validate?
let contents: &str = $contents;
// Which are the actual errors returned from validate function?
let mut actual_errors = jsonprima::validate(contents);
// Sort errors by the start position so the two lists can be
// compared element by element.
expected_errors.sort_by(|a, b| a.1.cmp(&b.1));
actual_errors.sort_by(|a, b| a.index_start.cmp(&b.index_start));
// The lists must have the same length before pairwise comparison.
if actual_errors.len() != expected_errors.len() {
panic!(
"Expected to find {} errors but found {}. \n{:#?}",
expected_errors.len(),
actual_errors.len(),
actual_errors
);
}
// Compare each actual error against its expected counterpart.
for (actual_error, &(expected_error_code, expected_error_index_start, expected_error_index_end)) in
actual_errors.into_iter().zip(expected_errors.iter())
{
// Check if error codes match.
if actual_error.err.code() != expected_error_code {
panic!(
"Expected error code \"{}\" to equal \"{}\"",
actual_error.err.code(),
expected_error_code
);
}
// Check if error start positions match.
if actual_error.index_start != expected_error_index_start {
panic!(
"Expected start error position {} to equal {}",
actual_error.index_start, expected_error_index_start
);
}
// Check if error end positions match.
if actual_error.index_end != expected_error_index_end {
panic!(
"Expected end error position {} to equal {}",
actual_error.index_end, expected_error_index_end
);
}
}
}
};
}
|
use crate::byte::Byte;
use crate::word::Word;
use crate::interrupt::{Interrupt, Result};
/// Signed-address memory: a fixed-size heap-allocated byte array
/// addressed by `isize` values in `[MIN_ADDR, MAX_ADDR]`.
pub struct MemorySpace {
// Boxed so the (~43 MB) array lives on the heap, not the stack.
memory: Box<[Byte; MemorySpace::SIZE]>,
}
/// Build a `Box<[T; $len]>` directly on the heap, avoiding the stack
/// allocation of `Box::new([v; LEN])` (which would overflow for the
/// ~43 MB array used by `MemorySpace`).
macro_rules! box_array {
($val:expr ; $len:expr) => {{
// Use a generic function so that the pointer cast remains type-safe
fn vec_to_boxed_array<T>(vec: Vec<T>) -> Box<[T; $len]> {
let boxed_slice = vec.into_boxed_slice();
let ptr = ::std::boxed::Box::into_raw(boxed_slice) as *mut [T; $len];
// SAFETY: the boxed slice was produced from a Vec of exactly
// `$len` elements (see the `vec![$val; $len]` call below), so
// it has the same size and layout as `[T; $len]`.
unsafe { Box::from_raw(ptr) }
}
vec_to_boxed_array(vec![$val; $len])
}};
}
impl MemorySpace {
/// Total number of addressable bytes (3^16).
pub const SIZE: usize = 43046721;
/// Highest valid signed address.
pub const MAX_ADDR: isize = Self::SIZE as isize/2;
/// Lowest valid signed address.
pub const MIN_ADDR: isize = -(Self::SIZE as isize)/2;
pub const INDEX_WIDTH: usize = 2;

/// A memory space with every byte initialised to `Byte::ZERO`.
pub fn new() -> MemorySpace {
MemorySpace {
memory: box_array![Byte::ZERO; MemorySpace::SIZE],
}
}

/// Map a signed address onto an index into the backing array.
/// Addresses outside `[MIN_ADDR, MAX_ADDR]` raise `BadCode`.
fn to_offset(addr: isize) -> Result<usize> {
match addr {
a if (Self::MIN_ADDR..=Self::MAX_ADDR).contains(&a) => {
Ok((a + Self::MAX_ADDR) as usize % Self::SIZE)
}
_ => Err(Interrupt::BadCode),
}
}

/// Read one byte.
pub fn get_byte(&self, addr: isize) -> Result<Byte> {
Self::to_offset(addr).map(|index| self.memory[index])
}

/// Write one byte.
pub fn set_byte(&mut self, addr: isize, value: Byte) -> Result<()> {
let index = Self::to_offset(addr)?;
self.memory[index] = value;
Ok(())
}

/// Read a word starting at `addr`; `MemoryFault` if it would run
/// past the top of memory.
pub fn get_word(&self, addr: isize) -> Result<Word> {
let index = Self::to_offset(addr)?;
if addr + Word::BYTE_COUNT as isize > Self::MAX_ADDR {
return Err(Interrupt::MemoryFault);
}
Ok(Word::from_bytes(&self.memory[index..index + Word::BYTE_COUNT]))
}

/// Write a word starting at `addr`; `MemoryFault` if it would run
/// past the top of memory.
pub fn set_word(&mut self, addr: isize, value: Word) -> Result<()> {
let index = Self::to_offset(addr)?;
if addr + Word::BYTE_COUNT as isize > Self::MAX_ADDR {
return Err(Interrupt::MemoryFault);
}
self.memory[index..index + Word::BYTE_COUNT].copy_from_slice(&value.bytes);
Ok(())
}
}
// SIZE must equal 3^(WORD digits not used for indexing).
#[test]
fn test_size() {
assert_eq!(MemorySpace::SIZE, 3usize.pow((Word::WIDTH - MemorySpace::INDEX_WIDTH) as u32));
}
// Offset mapping: MIN_ADDR -> 0, MAX_ADDR -> SIZE-1, one past either
// end is rejected with BadCode.
#[test]
fn test_offset() {
assert_eq!(MemorySpace::to_offset(MemorySpace::MIN_ADDR), Ok(0));
assert_eq!(MemorySpace::to_offset(MemorySpace::MAX_ADDR), Ok(MemorySpace::SIZE - 1));
assert_eq!(MemorySpace::to_offset(MemorySpace::MIN_ADDR-1), Err(Interrupt::BadCode));
assert_eq!(MemorySpace::to_offset(MemorySpace::MAX_ADDR+1), Err(Interrupt::BadCode));
}
// Byte accessors: bounds errors, address->backing-array mapping
// (address 0 lands at index MAX_ADDR), and writes at both extremes.
#[test]
fn test_getset_byte() {
let mut memspace = MemorySpace::new();
assert_eq!(memspace.get_byte(MemorySpace::MIN_ADDR-1), Err(Interrupt::BadCode));
assert_eq!(memspace.set_byte(MemorySpace::MIN_ADDR-1, Byte::ONE), Err(Interrupt::BadCode));
assert_eq!(memspace.get_byte(MemorySpace::MAX_ADDR+1), Err(Interrupt::BadCode));
assert_eq!(memspace.set_byte(MemorySpace::MAX_ADDR+1, Byte::ONE), Err(Interrupt::BadCode));
assert_eq!(memspace.get_byte(0), Ok(Byte::ZERO));
// Address 0 maps to backing index MAX_ADDR.
memspace.memory[MemorySpace::MAX_ADDR as usize] = Byte::ONE;
assert_eq!(memspace.get_byte(0), Ok(Byte::ONE));
memspace.set_byte(MemorySpace::MAX_ADDR, Byte::TERN).unwrap();
assert_eq!(memspace.get_byte(MemorySpace::MAX_ADDR), Ok(Byte::TERN));
memspace.set_byte(MemorySpace::MIN_ADDR, Byte::ONE).unwrap();
assert_eq!(memspace.get_byte(MemorySpace::MIN_ADDR), Ok(Byte::ONE));
}
// Word accessors: same bounds errors as bytes plus MemoryFault when a
// word would run past MAX_ADDR; word reads see byte-level writes.
#[test]
fn test_getset_word() {
let mut memspace = MemorySpace::new();
assert_eq!(memspace.get_word(MemorySpace::MIN_ADDR-1), Err(Interrupt::BadCode));
assert_eq!(memspace.set_word(MemorySpace::MIN_ADDR-1, Word::ONE), Err(Interrupt::BadCode));
assert_eq!(memspace.get_word(MemorySpace::MAX_ADDR), Err(Interrupt::MemoryFault));
assert_eq!(memspace.set_word(MemorySpace::MAX_ADDR, Word::ONE), Err(Interrupt::MemoryFault));
assert_eq!(memspace.get_word(MemorySpace::MAX_ADDR+1), Err(Interrupt::BadCode));
assert_eq!(memspace.set_word(MemorySpace::MAX_ADDR+1, Word::ONE), Err(Interrupt::BadCode));
assert_eq!(memspace.get_word(0), Ok(Word::ZERO));
memspace.memory[MemorySpace::MAX_ADDR as usize] = Byte::ONE;
assert_eq!(memspace.get_word(0), Ok(Word::ONE));
memspace.set_word(0, Word::TERN).unwrap();
assert_eq!(memspace.get_word(0), Ok(Word::TERN));
memspace.set_word(10, Word::TERN).unwrap();
assert_eq!(memspace.get_word(10), Ok(Word::TERN));
}
|
use crate::{inventory::get_inventory_list, prelude::*};
/// Width of the map viewport in terminal cells.
pub const MAP_VIEW_WIDTH: usize = 80;
/// Height of the map viewport in terminal cells.
pub const MAP_VIEW_HEIGHT: usize = 43;
/// Blank columns padding tooltip text on each side.
const TOOLTIP_HORIZONTAL_PADDING: i32 = 1;
/// Outcome of one frame of input handling inside an item menu.
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum ItemMenuResult {
/// Player closed the menu (Escape).
Cancel,
/// No relevant key pressed this frame; menu stays open.
NoResponse,
/// Player picked an item.
Selected,
}
/// Handle input in the "use item" menu; on a selection, queue a
/// `UseItemCommand` for the player and selected item.
pub fn ui_inventory_use_input(context: &mut BTerm, world: &mut World) -> ItemMenuResult {
let (action, selection) = ui_inventory_menu_input(context, world);
if let Some((user, item)) = selection {
world.spawn_command(UseItemCommand { user, item });
}
action
}
/// Handle input in the "drop item" menu; on a selection, queue a
/// `DropItemCommand` for the player and selected item.
pub fn ui_inventory_drop_input(context: &mut BTerm, world: &mut World) -> ItemMenuResult {
let (action, selection) = ui_inventory_menu_input(context, world);
if let Some((dropper, item)) = selection {
world.spawn_command(DropItemCommand { dropper, item });
}
action
}
/// Shared input handling for the inventory menus.
///
/// Returns the menu action plus, on `Selected`, the `(player, item)`
/// pair for the inventory slot matching the pressed letter key.
fn ui_inventory_menu_input(
context: &mut BTerm,
world: &mut World,
) -> (ItemMenuResult, Option<(Entity, Entity)>) {
// Guard clauses: no key, Escape, or no player entity -> no selection.
let key = match context.key {
Some(VirtualKeyCode::Escape) => return (ItemMenuResult::Cancel, None),
Some(key) => key,
None => return (ItemMenuResult::NoResponse, None),
};
let player = match world.resource_entity::<Player>() {
Ok(player) => player,
Err(_) => return (ItemMenuResult::NoResponse, None),
};
let inventory = get_inventory_list(world, player);
// `letter_to_option` maps 'a'.. to 0..; anything out of range
// (including negative results cast to a huge usize) yields `None`.
let selection = letter_to_option(key);
match inventory.get(selection as usize) {
Some((item, _)) => (ItemMenuResult::Selected, Some((player, *item))),
None => (ItemMenuResult::NoResponse, None),
}
}
/// Convert index to a letter, starting with 'a' -> 0
/// ```
/// assert_eq!(index_to_letter(0), 'a' as FontCharType);
/// assert_eq!(index_to_letter(25), 'z' as FontCharType);
/// ```
fn index_to_letter(i: usize) -> FontCharType {
// Offset from the codepoint of 'a' rather than a magic 97.
('a' as FontCharType) + i as FontCharType
}
/// Fill a rectangle (inclusive of its far edges) with blank glyphs in
/// the given colors.
fn draw_fill_box(context: &mut BTerm, x: i32, y: i32, width: i32, height: i32, fg: RGB, bg: RGB) {
let blank = to_cp437(' ');
for col in x..=(x + width) {
for row in y..=(y + height) {
context.set(col, row, fg, bg, blank);
}
}
}
/// Fix draw_box bug which fills box with #000000 instead of bg.
/// See https://github.com/amethyst/bracket-lib/issues/174
fn draw_box_bugfix(context: &mut BTerm, x: i32, y: i32, width: i32, height: i32, fg: RGB, bg: RGB) {
// Draw the border ourselves, then paint the interior with `bg`.
context.draw_hollow_box(x, y, width, height, fg, bg);
draw_fill_box(context, x + 1, y + 1, width - 2, height - 2, fg, bg);
}
/// Draw the bottom status panel (HP readout + bar, game log) and any
/// mouse tooltips. The panel occupies rows 43..=49 below the map view.
pub fn draw_ui(context: &mut BTerm, world: &World, config: &Config) {
draw_box_bugfix(context, 0, 43, 79, 6, config.ui.fg, config.ui.bg);
// Player health: text at (12, 43) on the border, bar to its right.
if let Some((_, (_, stats))) = world.query::<(&Player, &CombatStats)>().into_iter().next() {
let health = format!(" HP: {} / {} ", stats.hp, stats.max_hp);
context.print_color(12, 43, config.ui_title.fg, config.ui_title.bg, &health);
context.draw_bar_horizontal(
28,
43,
51,
stats.hp,
stats.max_hp,
config.ui_hp_bar.fg,
config.ui_hp_bar.bg,
);
}
// Game log: newest entries first, printed into panel rows 44..=48.
// Note the bound check runs after printing, so row 48 is the last
// line drawn before the loop stops.
if let Some((_, log)) = world.query::<&GameLog>().into_iter().next() {
for (i, msg) in log.entries.iter().rev().enumerate() {
let y = 44 + i;
context.print(2, y, msg);
if y >= 48 {
break;
}
}
}
// let mouse_pos = context.mouse_pos();
// context.set_bg(mouse_pos.0, mouse_pos.1, config.ui_tooltip.bg);
draw_tooltips(context, world, config);
}
/// Draw a tooltip listing the names of all entities on the visible map
/// tile under the mouse cursor.
fn draw_tooltips(context: &mut BTerm, world: &World, config: &Config) {
let (mx, my) = context.mouse_pos();
for (_, map) in world.query::<&mut TileMap>().into_iter() {
// Only tooltip tiles the player can currently see.
if !map.is_tile_visible(mx, my) {
continue;
}
// One line per named entity on the hovered tile.
let tooltip = map
.get_entities_on_tile(mx, my)
.iter()
.filter_map(|entity| {
let mut query = world.query_one::<&Name>(*entity).ok()?;
let Name(name) = query.get().expect("Unfiltered query");
Some(name.clone())
})
.collect::<Vec<_>>();
// Width fits the longest name plus padding on both sides.
let tooltip_width = 2 * TOOLTIP_HORIZONTAL_PADDING
+ tooltip.iter().map(|s| s.len() as i32).max().unwrap_or(0);
let tooltip_height = tooltip.len() as i32;
// Place left of the cursor when it would overflow the right edge,
// otherwise just right of it; clamp to the screen.
let tooltip_x = i32::max(
0,
if mx >= map.get_width() - tooltip_width {
mx - tooltip_width
} else {
mx + 1
},
);
let tooltip_y = i32::max(0, i32::min(my, map.get_height() - tooltip_height));
for (i, s) in tooltip.into_iter().enumerate() {
context.print_color(
tooltip_x,
tooltip_y + i as i32,
config.ui_tooltip.fg,
config.ui_tooltip.bg,
// Left-align and pad each line to the tooltip width.
format!(" {:<pad$}", s, pad = (tooltip_width - 1) as usize),
);
}
}
}
/// Visual styling shared by the menu-drawing helpers.
struct MenuBoxStyle {
// Blank cells between the box border and its contents.
pad: i32,
fg: RGB,
bg: RGB,
// Colors for emphasized text (title, footer, hotkey letters).
highlight_fg: RGB,
highlight_bg: RGB,
}
/// Draw a bordered menu box with `title` on the top border and
/// `footer` on the bottom border, widening the box as needed so both
/// captions fit.
fn draw_menu_box(
context: &mut BTerm,
style: &MenuBoxStyle,
title: &str,
footer: &str,
x: i32,
y: i32,
min_width: i32,
min_height: i32,
) {
const TITLE_OFFSET_X: i32 = 3;
// Widest of: requested width, title with margins, footer with margins.
let width = min_width
.max(title.len() as i32 + TITLE_OFFSET_X * 2)
.max(footer.len() as i32 + TITLE_OFFSET_X * 2);
let height = i32::max(2, min_height);
// Box outline plus filled interior.
draw_box_bugfix(context, x, y, width, height, style.fg, style.bg);
// Title, overlaid on the top border.
context.print_color(
x + TITLE_OFFSET_X,
y,
style.highlight_fg,
style.highlight_bg,
title,
);
// Footer, overlaid on the bottom border.
context.print_color(
x + TITLE_OFFSET_X,
y + height,
style.highlight_fg,
style.highlight_bg,
footer,
);
}
/// Draw a letter-keyed selection menu: each option is rendered as
/// "(a) option text" inside a `draw_menu_box` frame.
fn draw_select_menu<S: AsRef<str>>(
context: &mut BTerm,
style: &MenuBoxStyle,
title: &str,
footer: &str,
x: i32,
y: i32,
options: &[S],
) {
let inner_x = x + style.pad;
let inner_y = y + style.pad;
let inner_height = options.len() as i32;
// 4 extra columns for the "(a) " hotkey prefix.
let inner_width = 4 + options
.iter()
.map(|s| s.as_ref().len() as i32)
.max()
.unwrap_or(0);
draw_menu_box(
context,
style,
title,
footer,
x,
y,
inner_width + style.pad * 2 - 1,
inner_height + style.pad * 2 - 1,
);
// One row per option: "(", hotkey letter (highlighted), ")", label.
for (i, s) in options.iter().enumerate() {
let item_y = inner_y + i as i32;
context.set(inner_x, item_y, style.fg, style.bg, to_cp437('('));
context.set(
inner_x + 1,
item_y,
style.highlight_fg,
style.highlight_bg,
index_to_letter(i),
);
context.set(inner_x + 2, item_y, style.fg, style.bg, to_cp437(')'));
context.print(inner_x + 4, item_y, s.as_ref());
}
}
/// Draw the player's inventory as a letter-keyed selection menu,
/// vertically centered around row 25. No-op when there is no player.
pub fn draw_inventory_menu(context: &mut BTerm, world: &World, config: &Config, title: &str) {
if let Ok(player) = world.resource_entity::<Player>() {
// Menu rows are the item names; order matches the list consumed
// by `ui_inventory_menu_input`, so hotkeys line up.
let menu_options = get_inventory_list(world, player)
.into_iter()
.map(|(_, name)| name)
.collect::<Vec<_>>();
draw_select_menu(
context,
&MenuBoxStyle {
pad: 2,
fg: config.ui.fg,
bg: config.ui.bg,
highlight_fg: config.ui_title.fg,
highlight_bg: config.ui_title.bg,
},
title,
"ESCAPE to cancel",
15,
25 - menu_options.len() as i32 / 2,
&menu_options,
);
}
}
#[cfg(test)]
mod tests {
use super::*;
// `index_to_letter` maps 0..=25 onto 'a'..='z'.
#[test]
fn test_index_to_letter() {
assert_eq!(index_to_letter(0), 'a' as FontCharType);
assert_eq!(index_to_letter(25), 'z' as FontCharType);
}
}
|
use std::convert::TryFrom;
use anyhow::Result;
use crate::evaluator::new_error;
use crate::evaluator::objects;
/// The interpreter's built-in functions.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Function {
Len,
First,
Last,
Rest,
Push,
Puts,
}
// Registry of all builtins. NOTE(review): `by_index` resolves builtins
// by position in this array, so the order is presumably baked into
// compiled code — confirm before reordering.
static FUNCTIONS: [Function; 6] = [
Function::Len,
Function::First,
Function::Last,
Function::Rest,
Function::Push,
Function::Puts,
];
impl Function {
/// Iterate over every builtin in registration order.
pub fn iterator() -> std::slice::Iter<'static, Function> {
FUNCTIONS.iter()
}
/// Builtin at position `idx` of the registry.
///
/// Panics when `idx` is out of range.
pub fn by_index(idx: usize) -> Function {
FUNCTIONS[idx].clone()
}
/// Source-level name of the builtin.
pub fn name(&self) -> &'static str {
match self {
Self::Len => "len",
Self::First => "first",
Self::Last => "last",
Self::Rest => "rest",
Self::Push => "push",
Self::Puts => "puts",
}
}
}
impl TryFrom<&str> for Function {
type Error = &'static str;
fn try_from(value: &str) -> Result<Self, Self::Error> {
match value {
"len" => Ok(Self::Len),
"first" => Ok(Self::First),
"last" => Ok(Self::Last),
"rest" => Ok(Self::Rest),
"push" => Ok(Self::Push),
"puts" => Ok(Self::Puts),
_ => Err("Not found builtin function."),
}
}
}
impl From<Function> for objects::Object {
/// Wrap the builtin in an `objects::Builtin` object value.
fn from(value: Function) -> Self {
let builtin = objects::Builtin { func: value };
builtin.into()
}
}
impl Function {
pub fn call(&self, args: &[objects::Object]) -> Result<Option<objects::Object>> {
match self {
Self::Len => len(args),
Self::First => first(args),
Self::Last => last(args),
Self::Rest => rest(args),
Self::Push => push(args),
Self::Puts => puts(args),
}
}
}
/// `len(x)`: number of chars in a string or elements in an array.
fn len(args: &[objects::Object]) -> Result<Option<objects::Object>> {
// Exactly one argument is accepted.
let arg = match args {
[arg] => arg,
_ => {
return new_error(&format!(
"wrong number of arguments. got={}, want=1",
args.len()
))
}
};
let count = match arg {
objects::Object::StringLit(s) => s.value.chars().count(),
objects::Object::Array(a) => a.elements.len(),
other => {
return new_error(&format!(
"argument to 'len' not supported, got {}",
other.o_type()
))
}
};
let value = i64::try_from(count).or_else(|e| new_error(&e.to_string()))?;
Ok(Some(objects::Integer { value }.into()))
}
/// `first(arr)`: the first element of an array, or no value when empty.
fn first(args: &[objects::Object]) -> Result<Option<objects::Object>> {
match args {
[objects::Object::Array(arr)] => Ok(arr.elements.first().cloned()),
[other] => new_error(&format!(
"argument to 'first' must be Array, got {}",
other.o_type()
)),
_ => new_error(&format!(
"wrong number of arguments. got={}, want=1",
args.len()
)),
}
}
/// `last(arr)`: the last element of an array, or no value when empty.
fn last(args: &[objects::Object]) -> Result<Option<objects::Object>> {
match args {
[objects::Object::Array(arr)] => Ok(arr.elements.last().cloned()),
[other] => new_error(&format!(
"argument to 'last' must be Array, got {}",
other.o_type()
)),
_ => new_error(&format!(
"wrong number of arguments. got={}, want=1",
args.len()
)),
}
}
/// `rest(arr)`: a new array without the first element, or no value
/// when the array is empty.
fn rest(args: &[objects::Object]) -> Result<Option<objects::Object>> {
match args {
[objects::Object::Array(arr)] => {
let tail = match arr.elements.split_first() {
Some((_, tail)) => tail,
// Empty array: no value rather than an error.
None => return Ok(None),
};
Ok(Some(
objects::Array {
elements: tail.to_vec(),
}
.into(),
))
}
[other] => new_error(&format!(
"argument to 'rest' must be Array, got {}",
other.o_type()
)),
_ => new_error(&format!(
"wrong number of arguments. got={}, want=1",
args.len()
)),
}
}
/// `push(arr, value)`: a new array with `value` appended; the input
/// array is left untouched.
fn push(args: &[objects::Object]) -> Result<Option<objects::Object>> {
match args {
[objects::Object::Array(arr), value] => {
let mut elements = Vec::with_capacity(arr.elements.len() + 1);
elements.extend_from_slice(&arr.elements);
elements.push(value.clone());
Ok(Some(objects::Array { elements }.into()))
}
[other, _] => new_error(&format!(
"argument to 'push' must be Array, got {}",
other.o_type()
)),
_ => new_error(&format!(
"wrong number of arguments. got={}, want=2",
args.len()
)),
}
}
/// `puts(...)`: print each argument on its own line; yields no value.
fn puts(args: &[objects::Object]) -> Result<Option<objects::Object>> {
args.iter().for_each(|arg| println!("{}", arg));
Ok(None)
}
|
use std::collections::HashMap;
use std::env;
use std::fs;
/// A point on the wire grid; (0, 0) is the central port both wires
/// start from.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
struct Coordinate {
x: i32,
y: i32,
}
/// Walk one wire segment, returning every cell entered (the starting
/// cell excluded) paired with its cumulative step count.
///
/// `op` is a direction character (`'U'`/`'D'`/`'L'`/`'R'`) and a
/// distance; `len` is the wire's running step counter, incremented
/// once per cell and shared across segments.
///
/// Panics on an unknown direction character.
fn simulate(curr: Coordinate, op: (char, i32), len: &mut i32) -> Vec<(Coordinate, i32)> {
    let (dir, dist) = op;
    // Per-direction unit step; collapses four duplicated loops into one.
    let (dx, dy) = match dir {
        'U' => (0, 1),
        'D' => (0, -1),
        'L' => (-1, 0),
        'R' => (1, 0),
        _ => panic!("Unexpected match"),
    };
    // `max(0)` guards the cast in case of a degenerate (negative) distance.
    let mut coords: Vec<(Coordinate, i32)> = Vec::with_capacity(dist.max(0) as usize);
    for i in 1..=dist {
        coords.push((
            Coordinate {
                x: curr.x + i * dx,
                y: curr.y + i * dy,
            },
            *len,
        ));
        *len += 1;
    }
    coords
}
/// Trace one wire description (e.g. `"R8,U5,L5,D3"`) into a map from
/// each visited coordinate to the fewest steps taken to reach it.
fn get_wire(line: &str) -> HashMap<Coordinate, i32> {
    // Split the line into (direction char, distance) ops.
    let ops: Vec<(char, i32)> = line
        .split(',')
        .map(|x| {
            let mut chars = x.chars();
            (
                chars.next().unwrap(),
                chars.collect::<String>().parse().unwrap(),
            )
        })
        .collect();
    let mut wire: HashMap<Coordinate, i32> = HashMap::new();
    let mut curr: Coordinate = Coordinate { x: 0, y: 0 };
    // Step counter starts at 1: the first cell entered is one step away.
    let mut dist: i32 = 1;
    for op in ops {
        let coords = simulate(curr, op, &mut dist);
        for &(c, d) in &coords {
            // Keep the first (smallest) step count for revisited cells.
            wire.entry(c).or_insert(d);
        }
        // The next segment continues from the end of this one.
        if let Some(&(end, _)) = coords.last() {
            curr = end;
        }
    }
    wire
}
/// Read the puzzle input (one wire description per non-empty line) and
/// trace the first two wires.
///
/// Panics if the file cannot be read or holds fewer than two wires.
fn get_wires(filename: &str) -> (HashMap<Coordinate, i32>, HashMap<Coordinate, i32>) {
    // Read the file to a Vec<String>
    let file_lines: Vec<String> = fs::read_to_string(filename)
        .expect("Failed to read the problem input")
        .split('\n')
        .filter(|x| !x.is_empty())
        .map(String::from)
        .collect();
    (get_wire(&file_lines[0]), get_wire(&file_lines[1]))
}
/// Smallest combined step count over all intersections of the two
/// wires, ignoring the shared origin. Returns `i32::MAX` when the
/// wires never cross (matching the previous sentinel behavior).
fn get_min_value(a: HashMap<Coordinate, i32>, b: HashMap<Coordinate, i32>) -> i32 {
    a.iter()
        // The origin is where both wires start, not an intersection.
        .filter(|(k, _)| !(k.x == 0 && k.y == 0))
        // Keep only cells both wires visit; combine their step counts.
        .filter_map(|(k, da)| b.get(k).map(|db| da + db))
        .min()
        .unwrap_or(std::i32::MAX)
}
/// Entry point: expects the puzzle-input filename as the first CLI
/// argument and prints the minimum combined step count.
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
panic!("Please supply a filename");
}
let (a, b) = get_wires(&args[1]);
let min: i32 = get_min_value(a, b);
dbg!(min);
}
|
#[macro_use]
mod xml;
mod make_invertible;
use cgmath::{Matrix4, One};
use convert::image_namer::ImageNamer;
use db::{Database, ModelId};
use skeleton::{Skeleton, Transform, SMatrix};
use primitives::{self, Primitives, DynamicState};
use nitro::Model;
use time;
use util::BiVec;
use connection::Connection;
use self::xml::Xml;
use util::tree::NodeIdx;
/// Seconds per animation frame; model animations are authored at 60 fps.
static FRAME_LENGTH: f64 = 1.0 / 60.0; // 60 fps
/// Read-only state threaded through every COLLADA section writer.
struct Ctx<'a> {
model_id: ModelId,
model: &'a Model,
db: &'a Database,
conn: &'a Connection,
image_namer: &'a ImageNamer,
// Object matrices, forced invertible for inverse-bind computation.
objects: &'a [Matrix4<f64>],
prims: &'a Primitives,
skel: &'a Skeleton,
}
/// Build the complete COLLADA (.dae) document for one model and return
/// it as a string.
pub fn write(
db: &Database,
conn: &Connection,
image_namer: &ImageNamer,
model_id: ModelId,
) -> String {
let model = &db.models[model_id];
// We need invertible matrices since we're obliged to give values for
// inverse bind matrices.
use self::make_invertible::make_invertible;
let objects = &model.objects.iter()
.map(|o| make_invertible(&o.matrix))
.collect::<Vec<_>>();
let uv_mats = &model.materials.iter()
.map(|mat| mat.texture_mat)
.collect::<Vec<_>>();
let state = DynamicState { objects, uv_mats };
let prims = &Primitives::build(model, primitives::PolyType::TrisAndQuads, state);
let skel = &Skeleton::build(model, objects);
let ctx = Ctx { model_id, model, db, conn, image_namer, objects, prims, skel };
let mut xml = Xml::with_capacity(1024 * 1024); // 1MiB
xml!(xml;
(r#"<?xml version="1.0" encoding="utf-8"?>"#);
<COLLADA xmlns=["http://www.collada.org/2005/11/COLLADASchema"] version=["1.4.1"]>;
);
asset(&mut xml, &ctx);
library_images(&mut xml, &ctx);
library_materials(&mut xml, &ctx);
library_effects(&mut xml, &ctx);
// Geometry-dependent sections (including the <scene>) are skipped for
// models with no vertices.
if !ctx.prims.vertices.is_empty() {
library_geometries(&mut xml, &ctx);
library_controllers(&mut xml, &ctx);
library_animations(&mut xml, &ctx);
library_animation_clips(&mut xml, &ctx);
library_visual_scenes(&mut xml, &ctx);
scene(&mut xml, &ctx);
}
xml!(xml;
/COLLADA>;
);
xml.string()
}
/// Write the `<asset>` element with creation/modification timestamps.
fn asset(xml: &mut Xml, _ctx: &Ctx) {
let now = time::now_utc();
// ISO 8601 UTC, e.g. 2024-01-01T00:00:00Z.
let iso8601_datetime = time::strftime("%FT%TZ", &now).unwrap();
xml!(xml;
<asset>;
<created>(iso8601_datetime)</created>;
<modified>(iso8601_datetime)</modified>;
/asset>;
);
}
/// Write `<library_images>`: one `<image>` per distinct texture image
/// used by this model's materials, referencing `<name>.png` files.
fn library_images(xml: &mut Xml, ctx: &Ctx) {
use std::collections::HashSet;
// Find the names for all the images this model uses
// (a HashSet because several materials may share one image).
let image_names = (0..ctx.model.materials.len())
.filter_map(|material_id| {
match ctx.conn.models[ctx.model_id].materials[material_id].image_id() {
Ok(Some(image_id)) => Some(image_id),
_ => None,
}
})
.filter_map(|ids| ctx.image_namer.names.get(&ids))
.collect::<HashSet<_>>();
xml!(xml;
<library_images>;
for name in (image_names) {
<image id=["image-"(name)]>;
<init_from>(name)".png"</init_from>;
/image>;
}
/library_images>;
);
}
/// Write `<library_materials>`: one `<material>` per model material,
/// each pointing at the matching `effect<i>` entry.
fn library_materials(xml: &mut Xml, ctx: &Ctx) {
xml!(xml;
<library_materials>;
for (i, mat) in (ctx.model.materials.iter().enumerate()) {
<material id=["material"(i)] name=[(mat.name.print_safe())]>;
<instance_effect url=["#effect"(i)]/>;
/material>;
}
/library_materials>;
);
}
/// Write `<library_effects>`: a `<profile_COMMON>` phong effect per
/// material, with sampler/surface params when the material is textured
/// and transparency entries when its texture has alpha.
fn library_effects(xml: &mut Xml, ctx: &Ctx) {
xml!(xml;
<library_effects>;
);
for (material_id, mat) in ctx.model.materials.iter().enumerate() {
let mat_conn = &ctx.conn.models[ctx.model_id].materials[material_id];
// Name of the texture image, if this material resolved to one.
let image_name = match mat_conn.image_id() {
Ok(Some(image_id)) => ctx.image_namer.names.get(&image_id),
_ => None,
};
xml!(xml;
<effect id=["effect"(material_id)] name=[(mat.name.print_safe())]>;
<profile_COMMON>;
);
if let Some(name) = image_name {
// Map the hardware repeat/mirror flags onto COLLADA wrap modes.
let wrap = |repeat, mirror| {
match (repeat, mirror) {
(false, _) => "CLAMP",
(true, false) => "WRAP",
(true, true) => "MIRROR",
}
};
xml!(xml;
<newparam sid=["Image-surface"]>;
<surface type=["2D"]>;
<init_from>"image-"(name)</init_from>;
<format>"A8R8G8B8"</format>;
/surface>;
/newparam>;
<newparam sid=["Image-sampler"]>;
<sampler2D>;
<source>"Image-surface"</source>;
<wrap_s>(wrap(mat.params.repeat_s(), mat.params.mirror_s()))</wrap_s>;
<wrap_t>(wrap(mat.params.repeat_t(), mat.params.mirror_t()))</wrap_t>;
<minfilter>"NEAREST"</minfilter>;
<magfilter>"NEAREST"</magfilter>;
<mipfilter>"NEAREST"</mipfilter>;
/sampler2D>;
/newparam>;
);
}
// Lookup the texture we're using and find out if it has transparency.
let has_transparency = match mat_conn.image_id() {
Ok(Some(image_id)) => {
let texture_id = image_id.0;
let params = ctx.db.textures[texture_id].params;
let alpha_type = params.format().alpha_type(params);
use nds::Alpha;
match alpha_type {
Alpha::Opaque => false,
Alpha::Transparent | Alpha::Translucent => true,
}
}
_ => false,
};
// Phong shading: textured materials sample the image for diffuse,
// untextured ones use the flat diffuse color + alpha.
xml!(xml;
<technique sid=["common"]>;
<phong>;
<emission>;
<color>(mat.emission[0])" "(mat.emission[1])" "(mat.emission[2])" 1"</color>;
/emission>;
<ambient>;
<color>(mat.ambient[0])" "(mat.ambient[1])" "(mat.ambient[2])" 1"</color>;
/ambient>;
<diffuse>;
if (image_name.is_some()) {
<texture texture=["Image-sampler"] texcoord=["tc"]/>;
} else {
<color>(mat.diffuse[0])" "(mat.diffuse[1])" "(mat.diffuse[2])" "(mat.alpha)</color>;
}
/diffuse>;
<specular>;
<color>(mat.specular[0])" "(mat.specular[1])" "(mat.specular[2])" 1"</color>;
/specular>;
if (has_transparency) {
<transparent>;
<texture texture=["Image-sampler"] texcoord=["tc"]/>;
/transparent>;
}
if (mat.alpha != 1.0) {
<transparency>;
<float>(mat.alpha)</float>;
/transparency>;
}
/phong>;
/technique>;
);
xml!(xml;
/profile_COMMON>;
/effect>;
);
}
xml!(xml;
/library_effects>;
);
}
/// Write `<library_geometries>`: one `<mesh>` with position (always),
/// texcoord/color/normal sources (only when used by some draw call),
/// and a `<polylist>` of tris/quads per draw call.
fn library_geometries(xml: &mut Xml, ctx: &Ctx) {
let model_name = ctx.model.name;
xml!(xml;
<library_geometries>;
<geometry id=["geometry"] name=[(model_name.print_safe())]>;
<mesh>;
);
let verts = &ctx.prims.vertices;
// Positions
xml!(xml;
<source id=[("positions")]>;
<float_array id=[("positions-array")] count=[(3 * verts.len())]>
for v in (verts) {
(v.position[0])" "(v.position[1])" "(v.position[2])" "
}
</float_array>;
<technique_common>;
<accessor source=["#positions-array"] count=[(verts.len())] stride=["3"]>;
<param name=["X"] type=["float"]/>;
<param name=["Y"] type=["float"]/>;
<param name=["Z"] type=["float"]/>;
/accessor>;
/technique_common>;
/source>;
);
// Texcoords — only emitted when some draw call actually uses them.
let has_texcoords = ctx.prims.draw_calls.iter().any(|call| call.used_texcoords);
if has_texcoords {
xml!(xml;
<source id=["texcoords"]>;
<float_array id=["texcoords-array"] count=[(2 * verts.len())]>
for v in (verts) {
(v.texcoord[0])" "(v.texcoord[1])" "
}
</float_array>;
<technique_common>;
<accessor source=["#texcoords-array"] count=[(verts.len())] stride=["2"]>;
<param name=["S"] type=["float"]/>;
<param name=["T"] type=["float"]/>;
/accessor>;
/technique_common>;
/source>;
);
}
// Vertex colors
let has_colors = ctx.prims.draw_calls.iter().any(|call| call.used_vertex_color);
if has_colors {
xml!(xml;
<source id=["colors"]>;
<float_array id=["colors-array"] count=[(3 * verts.len())]>
for v in (verts) {
(v.color[0])" "(v.color[1])" "(v.color[2])" "
}
</float_array>;
<technique_common>;
<accessor source=["#colors-array"] count=[(verts.len())] stride=["3"]>;
<param name=["R"] type=["float"]/>;
<param name=["G"] type=["float"]/>;
<param name=["B"] type=["float"]/>;
/accessor>;
/technique_common>;
/source>;
);
}
// Normals
let has_normals = ctx.prims.draw_calls.iter().any(|call| call.used_normals);
if has_normals {
xml!(xml;
<source id=["normals"]>;
<float_array id=["normals-array"] count=[(3 * verts.len())]>
for v in (verts) {
(v.normal[0])" "(v.normal[1])" "(v.normal[2])" "
}
</float_array>;
<technique_common>;
<accessor source=["#normals-array"] count=[(verts.len())] stride=["3"]>;
<param name=["X"] type=["float"]/>;
<param name=["Y"] type=["float"]/>;
<param name=["Z"] type=["float"]/>;
/accessor>;
/technique_common>;
/source>;
);
}
// The <vertices> element binds whichever sources were emitted above.
xml!(xml;
<vertices id=["vertices"]>;
<input semantic=["POSITION"] source=["#positions"]/>;
if (has_texcoords) {
<input semantic=["TEXCOORD"] source=["#texcoords"]/>;
}
if (has_colors) {
<input semantic=["COLOR"] source=["#colors"]/>;
}
if (has_normals) {
<input semantic=["NORMAL"] source=["#normals"]/>;
}
/vertices>;
);
// One <polylist> per draw call
for call in &ctx.prims.draw_calls {
let indices = &ctx.prims.indices[call.index_range.clone()];
// Remember indices come in groups of four.
// [a, b, c, 0xffff] = triangle(a, b, c)
// [a, b, c, d] = quad(a, b, c, d)
let num_polys = indices.len() / 4;
xml!(xml;
<polylist material=["material"(call.mat_id)] count=[(num_polys)]>;
<input semantic=["VERTEX"] source=["#vertices"] offset=["0"]/>;
<vcount>
for inds in (indices.chunks(4)) {
if (inds[3] == 0xffff) { "3 " }
else { "4 " }
}
</vcount>;
<p>
for &ind in (indices) {
if (ind != 0xffff) { (ind)" " }
}
</p>;
/polylist>;
);
}
xml!(xml;
/mesh>;
/geometry>;
/library_geometries>;
);
}
/// Write `<library_controllers>`: the `<skin>` binding the mesh to the
/// skeleton — joint names, inverse bind matrices, the weight table,
/// and per-vertex joint/weight index pairs.
fn library_controllers(xml: &mut Xml, ctx: &Ctx) {
xml!(xml;
<library_controllers>;
<controller id=["controller"]>;
<skin source=["#geometry"]>;
);
let num_joints = ctx.skel.tree.node_count();
// XML IDs of the joint <node>s
xml!(xml;
<source id=["controller-joints"]>;
<Name_array id=["controller-joints-array"] count=[(num_joints)]>
for j in (ctx.skel.tree.node_idxs()) {
"joint"(j)" "
}
</Name_array>;
<technique_common>;
<accessor source=["#controller-joints-array"] count=[(num_joints)]>;
<param name=["JOINT"] type=["Name"]/>;
/accessor>;
/technique_common>;
/source>;
);
// Inverse bind matrices (ie. rest world-to-locals)
xml!(xml;
<source id=["controller-bind-poses"]>;
<float_array id=["controller-bind-poses-array"] count=[(16 * num_joints)]>
for j in (ctx.skel.tree.node_idxs()) {
MATRIX(&ctx.skel.tree[j].rest_world_to_local)" "
}
</float_array>;
<technique_common>;
<accessor source=["#controller-bind-poses-array"] count=[(num_joints)] stride=["16"]>;
<param name=["TRANSFORM"] type=["float4x4"]/>;
/accessor>;
/technique_common>;
/source>;
);
// We gives weights by first giving a list of all weights we're going to
// use and then giving indices into the list with the vertices, so we start
// by gathering all weights into a list. Since weights are floats, we can't
// insert them into a HashMap directly, so we first encode them as a
// fixed-point number. Remember to decode them when they come out!
let mut weights_lut = BiVec::new();
let encode = |x: f32| (x * 4096.0) as u32;
let decode = |x: u32| x as f64 / 4096.0;
// NOTE(review): `weights_lut` was just created, so this `clear()` is a
// no-op; presumably `BiVec::push` deduplicates values so `idx()` below
// finds a unique position — confirm in `util::BiVec`.
weights_lut.clear();
for w in &ctx.skel.weights {
weights_lut.push(encode(w.weight));
}
// Here is the list of all weights.
xml!(xml;
<source id=["controller-weights"]>;
<float_array id=["controller-weights-array"] count=[(weights_lut.len())]>
for &weight in (weights_lut.iter()) {
(decode(weight))" "
}
</float_array>;
<technique_common>;
<accessor source=["#controller-weights-array"] count=[(weights_lut.len())]>;
<param name=["WEIGHT"] type=["float"]/>;
/accessor>;
/technique_common>;
/source>;
);
xml!(xml;
<joints>;
<input semantic=["JOINT"] source=["#controller-joints"]/>;
<input semantic=["INV_BIND_MATRIX"] source=["#controller-bind-poses"]/>;
/joints>;
);
let num_verts = ctx.prims.vertices.len();
// Per vertex: how many influences it has, then (joint, weight-index)
// pairs into the sources declared above.
xml!(xml;
<vertex_weights count=[(num_verts)]>;
<input semantic=["JOINT"] source=["#controller-joints"] offset=["0"]/>;
<input semantic=["WEIGHT"] source=["#controller-weights"] offset=["1"]/>;
<vcount>
for vi in (0 .. ctx.prims.vertices.len()) {
(ctx.skel.vert_weights(vi).len())" "
}
</vcount>;
<v>
for vi in (0 .. ctx.prims.vertices.len()) {
for w in (ctx.skel.vert_weights(vi)) {
(w.joint)" "
(weights_lut.idx(&encode(w.weight)))" "
}
}
</v>;
/vertex_weights>;
);
xml!(xml;
/skin>;
/controller>;
/library_controllers>;
);
}
/// Write `<library_animations>`: for every animation attached to this
/// model and every joint driven by an object matrix, emit time /
/// matrix / interpolation sources, a sampler, and a channel targeting
/// the joint's `transform`.
fn library_animations(xml: &mut Xml, ctx: &Ctx) {
let anims = &ctx.conn.models[ctx.model_id].animations;
if anims.is_empty() { return; }
xml!(xml;
<library_animations>;
);
for &anim_id in anims {
let anim = &ctx.db.animations[anim_id];
let num_frames = anim.num_frames;
for joint_index in ctx.skel.tree.node_idxs() {
let joint = &ctx.skel.tree[joint_index];
// Only joints backed by an object matrix can be animated.
let object_id = match joint.local_to_parent {
Transform::SMatrix(SMatrix::Object { object_idx }) => object_idx,
_ => continue,
};
xml!(xml;
<animation id=["anim"(anim_id)"-joint"(joint_index)]>;
);
// Time: one sample per frame at FRAME_LENGTH spacing.
xml!(xml;
<source id=["anim"(anim_id)"-joint"(joint_index)"-time"]>;
<float_array id=["anim"(anim_id)"-joint"(joint_index)"-time-array"] count=[(num_frames)]>
for frame in (0..num_frames) {
(frame as f64 * FRAME_LENGTH)" "
}
</float_array>;
<technique_common>;
<accessor source=["#anim"(anim_id)"-joint"(joint_index)"-time-array"] count=[(num_frames)]>;
<param name=["TIME"] type=["float"]/>;
/accessor>;
/technique_common>;
/source>;
);
// Matrix: sampled per frame; identity when the object has no curve.
xml!(xml;
<source id=["anim"(anim_id)"-joint"(joint_index)"-matrix"]>;
<float_array id=["anim"(anim_id)"-joint"(joint_index)"-matrix-array"] count=[(16 * num_frames)]>
for frame in (0..num_frames) {
MATRIX(
&anim.objects_curves.get(object_id as usize)
.map(|trs| trs.sample_at(frame))
.unwrap_or_else(|| Matrix4::one())
)" "
}
</float_array>;
<technique_common>;
<accessor source=["#anim"(anim_id)"-joint"(joint_index)"-matrix-array"] count=[(num_frames)] stride=["16"]>;
<param name=["TRANSFORM"] type=["float4x4"]/>;
/accessor>;
/technique_common>;
/source>;
);
// Interpolation (all LINEAR)
xml!(xml;
<source id=["anim"(anim_id)"-joint"(joint_index)"-interpolation"]>;
<Name_array id=["anim"(anim_id)"-joint"(joint_index)"-interpolation-array"] count=[(num_frames)]>
for _frame in (0..num_frames) {
"LINEAR "
}
</Name_array>;
<technique_common>;
<accessor source=["#anim"(anim_id)"-joint"(joint_index)"-interpolation-array"] count=[(num_frames)]>;
<param name=["INTERPOLATION"] type=["Name"]/>;
/accessor>;
/technique_common>;
/source>;
);
// Sampler ties the three sources together...
xml!(xml;
<sampler id=["anim"(anim_id)"-joint"(joint_index)"-sampler"]>;
<input semantic=["INPUT"] source=["#anim"(anim_id)"-joint"(joint_index)"-time"]/>;
<input semantic=["OUTPUT"] source=["#anim"(anim_id)"-joint"(joint_index)"-matrix"]/>;
<input semantic=["INTERPOLATION"] source=["#anim"(anim_id)"-joint"(joint_index)"-interpolation"]/>;
/sampler>;
);
// ...and the channel routes it to the joint node's transform.
xml!(xml;
<channel
source=["#anim"(anim_id)"-joint"(joint_index)"-sampler"]
target=["joint"(joint_index)"/transform"]/>;
);
xml!(xml;
/animation>;
);
}
}
xml!(xml;
/library_animations>;
);
}
/// Writes `<library_animation_clips>`: one `<animation_clip>` per animation,
/// ending at the last frame's timestamp and instancing every per-joint
/// `<animation>` emitted by `library_animations`.
fn library_animation_clips(xml: &mut Xml, ctx: &Ctx) {
    let anims = &ctx.conn.models[ctx.model_id].animations;
    if anims.is_empty() { return; }
    xml!(xml;
        <library_animation_clips>;
    );
    for &anim_id in anims {
        let anim = &ctx.db.animations[anim_id];
        // At least one frame is required to compute the clip end time.
        assert!(anim.num_frames != 0);
        let end_time = (anim.num_frames - 1) as f64 * FRAME_LENGTH;
        xml!(xml;
            <animation_clip id=["anim"(anim_id)] name=[(anim.name.print_safe())] end=[(end_time)]>;
            for j in (ctx.skel.tree.node_idxs()) {
                <instance_animation url=["#anim"(anim_id)"-joint"(j)]/>;
            }
            /animation_clip>;
        );
    }
    xml!(xml;
        /library_animation_clips>;
    );
}
/// Writes `<library_visual_scenes>` with a single scene ("scene0"): the
/// skeleton's joint hierarchy plus a node that instances the skin controller
/// and binds one material per model material.
fn library_visual_scenes(xml: &mut Xml, ctx: &Ctx) {
    let model_name = ctx.model.name;
    xml!(xml;
        <library_visual_scenes>;
        <visual_scene id=["scene0"] name=[(model_name.print_safe())]>;
    );
    // Emit the <node type="JOINT"> tree before the controller that skins to it.
    joint_hierarchy(xml, ctx);
    xml!(xml;
        <node id=["node"] name=[(model_name.print_safe())] type=["NODE"]>;
        <instance_controller url=["#controller"]>;
        <skeleton>"#joint"(ctx.skel.root)</skeleton>;
        <bind_material>;
        <technique_common>;
        for i in (0..ctx.model.materials.len()) {
            <instance_material symbol=["material"(i)] target=["#material"(i)]>;
            <bind_vertex_input semantic=["tc"] input_semantic=["TEXCOORD"]/>;
            /instance_material>;
        }
        /technique_common>;
        /bind_material>;
        /instance_controller>;
        /node>;
    );
    xml!(xml;
        /visual_scene>;
        /library_visual_scenes>;
    );
}
/// Writes the `<node type="JOINT">` hierarchy for the skeleton into the
/// visual scene, rooted at `ctx.skel.root`. Each node carries its
/// local-to-parent matrix under `sid="transform"`.
fn joint_hierarchy(xml: &mut Xml, ctx: &Ctx) {
    /// Write the name for a joint that will appear in DCC programs.
    fn joint_name(ctx: &Ctx, node: NodeIdx) -> String {
        match ctx.skel.tree[node].local_to_parent {
            // Fixed: `format!` with no interpolation / a lone "{}"
            // (clippy::useless_format); direct conversions are clearer.
            Transform::Root =>
                "__ROOT__".to_string(),
            Transform::SMatrix(SMatrix::Object { object_idx }) =>
                ctx.model.objects[object_idx as usize].name.print_safe().to_string(),
            Transform::SMatrix(SMatrix::InvBind { inv_bind_idx }) =>
                format!("__INV_BIND{}", inv_bind_idx),
            Transform::SMatrix(SMatrix::Uninitialized { stack_pos }) =>
                format!("__UNINITIALIZED{}", stack_pos),
        }
    }
    // Recursive tree walker: emits one <node> per joint, its transform
    // matrix, then recurses into the children before closing the node.
    fn rec(xml: &mut Xml, ctx: &Ctx, node: NodeIdx) {
        let tree = &ctx.skel.tree;
        xml!(xml;
            <node
                id=["joint"(node)]
                sid=["joint"(node)]
                name=[(joint_name(ctx, node))]
                type=["JOINT"]>;
        );
        // Resolve the node's local-to-parent matrix; Root and Uninitialized
        // fall back to the identity.
        let mat = match tree[node].local_to_parent {
            Transform::Root =>
                Matrix4::one(),
            Transform::SMatrix(SMatrix::Object { object_idx }) =>
                ctx.objects[object_idx as usize],
            Transform::SMatrix(SMatrix::InvBind { inv_bind_idx }) =>
                ctx.model.inv_binds[inv_bind_idx as usize],
            Transform::SMatrix(SMatrix::Uninitialized { .. }) =>
                Matrix4::one(),
        };
        xml!(xml;
            <matrix sid=["transform"]>MATRIX(&mat)</matrix>;
        );
        for child in tree.children(node) {
            rec(xml, ctx, child);
        }
        xml!(xml;
            /node>;
        );
    }
    rec(xml, ctx, ctx.skel.root)
}
/// Writes the COLLADA `<scene>` element, instancing the single visual scene
/// ("#scene0") produced by `library_visual_scenes`.
fn scene(xml: &mut Xml, _ctx: &Ctx) {
    xml!(xml;
        <scene>;
        <instance_visual_scene url=["#scene0"]/>;
        /scene>;
    );
}
|
/// Result type for Error
pub type ParseResult<T, E = ParseError> = std::result::Result<T, E>;
/// Any kind of error encountered during parsing
#[derive(Debug, Error)]
#[allow(missing_docs)]
pub enum ParseError {
#[error("io error: {0}")]
Io(#[from] std::io::Error),
#[error("missing version header")]
MissingVersionHeader,
#[error("error parsing int: {0}")]
Int(#[from] std::num::ParseIntError),
#[error("error parsing float: {0}")]
Float(#[from] std::num::ParseFloatError),
#[error("missing component in color")]
MissingColorComponent,
#[error("invalid timing point: {0}")]
InvalidTimingPoint(&'static str),
#[error("invalid additions: {0}")]
InvalidAdditions(u32),
#[error("invalid hit object type: {0}")]
InvalidObjectType(i32),
#[error("invalid slider spline type: {0}")]
InvalidSliderType(String),
#[error("invalid sample set: {0}")]
InvalidSampleSet(u32),
#[error("invalid sample set: {0}")]
InvalidSampleSetString(String),
#[error("invalid game mode: {0}")]
InvalidGameMode(u8),
#[error("invalid grid size: {0}")]
InvalidGridSize(u8),
#[error("custom: {0}")]
Custom(String),
}
|
// Peripheral driver modules, each paired with a re-export of its primary
// driver type so callers can `use` the driver directly.
pub mod aon;
pub use aon::AONDriver;
pub mod prci;
pub use prci::PRCIDriver;
pub mod spi;
pub use spi::SPIDriver;
pub mod gpio;
pub use gpio::GPIODriver;
pub mod uart;
pub use uart::UartDriver;
pub mod clint;
pub use clint::CLINTDriver;
|
// Reader/writer type aliases for the CSR register's fields. Each `*_R` alias
// reads a single-bit field; each `*_W` alias writes a single bit at the fixed
// offset given by the const parameter `O`.
#[doc = "Register `CSR` reader"]
pub type R = crate::R<CSR_SPEC>;
#[doc = "Register `CSR` writer"]
pub type W = crate::W<CSR_SPEC>;
#[doc = "Field `WUF` reader - Wakeup flag"]
pub type WUF_R = crate::BitReader;
#[doc = "Field `SBF` reader - Standby flag"]
pub type SBF_R = crate::BitReader;
#[doc = "Field `EWUP1` reader - Enable WKUP pin 1"]
pub type EWUP1_R = crate::BitReader;
#[doc = "Field `EWUP1` writer - Enable WKUP pin 1"]
pub type EWUP1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EWUP2` reader - Enable WKUP pin 2"]
pub type EWUP2_R = crate::BitReader;
#[doc = "Field `EWUP2` writer - Enable WKUP pin 2"]
pub type EWUP2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EWUP4` reader - Enable WKUP pin 4"]
pub type EWUP4_R = crate::BitReader;
#[doc = "Field `EWUP4` writer - Enable WKUP pin 4"]
pub type EWUP4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EWUP5` reader - Enable WKUP pin 5"]
pub type EWUP5_R = crate::BitReader;
#[doc = "Field `EWUP5` writer - Enable WKUP pin 5"]
pub type EWUP5_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EWUP6` reader - Enable WKUP pin 6"]
pub type EWUP6_R = crate::BitReader;
#[doc = "Field `EWUP6` writer - Enable WKUP pin 6"]
pub type EWUP6_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EWUP7` reader - Enable WKUP pin 7"]
pub type EWUP7_R = crate::BitReader;
#[doc = "Field `EWUP7` writer - Enable WKUP pin 7"]
pub type EWUP7_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Field accessors for the CSR reader: each extracts one bit of the raw
// register value by masking against the bit's position.
impl R {
    #[doc = "Bit 0 - Wakeup flag"]
    #[inline(always)]
    pub fn wuf(&self) -> WUF_R {
        WUF_R::new(self.bits & (1 << 0) != 0)
    }
    #[doc = "Bit 1 - Standby flag"]
    #[inline(always)]
    pub fn sbf(&self) -> SBF_R {
        SBF_R::new(self.bits & (1 << 1) != 0)
    }
    #[doc = "Bit 8 - Enable WKUP pin 1"]
    #[inline(always)]
    pub fn ewup1(&self) -> EWUP1_R {
        EWUP1_R::new(self.bits & (1 << 8) != 0)
    }
    #[doc = "Bit 9 - Enable WKUP pin 2"]
    #[inline(always)]
    pub fn ewup2(&self) -> EWUP2_R {
        EWUP2_R::new(self.bits & (1 << 9) != 0)
    }
    #[doc = "Bit 11 - Enable WKUP pin 4"]
    #[inline(always)]
    pub fn ewup4(&self) -> EWUP4_R {
        EWUP4_R::new(self.bits & (1 << 11) != 0)
    }
    #[doc = "Bit 12 - Enable WKUP pin 5"]
    #[inline(always)]
    pub fn ewup5(&self) -> EWUP5_R {
        EWUP5_R::new(self.bits & (1 << 12) != 0)
    }
    #[doc = "Bit 13 - Enable WKUP pin 6"]
    #[inline(always)]
    pub fn ewup6(&self) -> EWUP6_R {
        EWUP6_R::new(self.bits & (1 << 13) != 0)
    }
    #[doc = "Bit 14 - Enable WKUP pin 7"]
    #[inline(always)]
    pub fn ewup7(&self) -> EWUP7_R {
        EWUP7_R::new(self.bits & (1 << 14) != 0)
    }
}
// Field writer accessors for the CSR writer; each returns a one-bit writer
// positioned at the field's offset.
impl W {
    #[doc = "Bit 8 - Enable WKUP pin 1"]
    #[inline(always)]
    #[must_use]
    pub fn ewup1(&mut self) -> EWUP1_W<CSR_SPEC, 8> {
        EWUP1_W::new(self)
    }
    #[doc = "Bit 9 - Enable WKUP pin 2"]
    #[inline(always)]
    #[must_use]
    pub fn ewup2(&mut self) -> EWUP2_W<CSR_SPEC, 9> {
        EWUP2_W::new(self)
    }
    #[doc = "Bit 11 - Enable WKUP pin 4"]
    #[inline(always)]
    #[must_use]
    pub fn ewup4(&mut self) -> EWUP4_W<CSR_SPEC, 11> {
        EWUP4_W::new(self)
    }
    #[doc = "Bit 12 - Enable WKUP pin 5"]
    #[inline(always)]
    #[must_use]
    pub fn ewup5(&mut self) -> EWUP5_W<CSR_SPEC, 12> {
        EWUP5_W::new(self)
    }
    #[doc = "Bit 13 - Enable WKUP pin 6"]
    #[inline(always)]
    #[must_use]
    pub fn ewup6(&mut self) -> EWUP6_W<CSR_SPEC, 13> {
        EWUP6_W::new(self)
    }
    #[doc = "Bit 14 - Enable WKUP pin 7"]
    #[inline(always)]
    #[must_use]
    pub fn ewup7(&mut self) -> EWUP7_W<CSR_SPEC, 14> {
        EWUP7_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe contract: the caller must ensure the raw value is valid for this
    // register (no reserved-bit constraints are checked here).
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "power control/status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`csr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`csr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CSR_SPEC;
impl crate::RegisterSpec for CSR_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`csr::R`](R) reader structure"]
impl crate::Readable for CSR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`csr::W`](W) writer structure"]
impl crate::Writable for CSR_SPEC {
    // No fields are cleared or set by writing 0/1 during a modify.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CSR to value 0"]
impl crate::Resettable for CSR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// The size of an array is known at compile time, while a slice's size is not:
// an array has type [T; size], a slice has type &[T].
use std::mem;
// Borrow a slice: prints its first element and its length.
//
// Panics if the slice is empty (`slice[0]` is an unchecked index).
fn analyze_slice(slice: &[i32]) {
    println!("first element of the slice: {}", slice[0]);
    // Fixed output grammar: "element" -> "elements".
    println!("the slice has {} elements", slice.len());
}
fn main() {
    // Fixed-size array (the type annotation is redundant).
    let xs: [i32; 5] = [1, 2, 3, 4, 5];
    // All elements can be initialized to the same value.
    let ys: [i32; 500] = [0; 500];
    // Indexing starts at zero.
    println!("first element of the array: {}", xs[0]);
    println!("second element of the array: {}", xs[1]);
    // `len` returns the number of elements.
    println!("array size: {}", xs.len());
    // Arrays live on the stack (the original comment wrongly said the heap);
    // size_of_val reports the full inline size: 5 * 4 = 20 bytes.
    println!("array occupies {} bytes", mem::size_of_val(&xs));
    // Arrays can be automatically borrowed as slices.
    println!("borrow the whole array as a slice");
    analyze_slice(&xs);
    // Slices can point to a section of an array. (Fixed typo: "arrat".)
    println!("borrow a section of the array as a slice");
    analyze_slice(&ys[1..4]);
    // Fixed: `xs[5]` with a constant out-of-bounds index is rejected at
    // compile time (deny-by-default `unconditional_panic`). Use the checked
    // `get`, which returns None for out-of-range indices.
    println!("{:?}", xs.get(5));
}
|
//! module subtle implements functions that are often useful in cryptographic code but require
//! careful thought to use correctly.
/// constant_time_byte_eq returns 1 if x == y and 0 otherwise.
pub fn constant_time_byte_eq(x: u8, y: u8) -> isize {
    // x == y  =>  diff == 0  =>  diff - 1 wraps to 0xFFFF_FFFF, so the sign
    // bit (bit 31) is set exactly when the bytes are equal.
    let diff = (x ^ y) as u32;
    (diff.wrapping_sub(1) >> 31) as isize
}
/// constant_time_compare returns 1 if the two slices, x and y, have equal contents and 0
/// otherwise. The time taken is a function of the length of the slices and is independent of the
/// contents.
pub fn constant_time_compare(x: &[u8], y: &[u8]) -> isize {
    if x.len() != y.len() {
        return 0;
    }
    // OR together every per-byte difference; the accumulator is 0 only when
    // all bytes match.
    let diff = x.iter().zip(y.iter()).fold(0u8, |acc, (a, b)| acc | (a ^ b));
    // Inlined constant_time_byte_eq(diff, 0): bit 31 of diff - 1 is set
    // exactly when diff == 0.
    ((diff as u32).wrapping_sub(1) >> 31) as isize
}
/// constant_time_copy copies the contents of y into x (a slice of equal length) if v == 1. If v ==
/// 0, x is left unchanged. Its behavior is undefined if v takes any other value.
pub fn constant_time_copy(v: isize, x: &mut [u8], y: &[u8]) {
    if x.len() != y.len() {
        panic!("subtle: slices have different lengths");
    }
    let keep = (v - 1) as u8; // 0xff when v == 0, 0x00 when v == 1
    let replace = !keep;      // the complementary mask
    for (dst, &src) in x.iter_mut().zip(y.iter()) {
        *dst = (*dst & keep) | (src & replace);
    }
}
/// constant_time_eq returns 1 if x == y and 0 otherwise.
pub fn constant_time_eq(x: i32, y: i32) -> isize {
    // Fixed: the subtraction must happen AFTER widening to u64. The original
    // computed `(x ^ y) as u32 - 1`, which for x == y is `0u32 - 1`: an
    // overflow panic in debug builds, and 0xFFFF_FFFF in release — whose
    // bit 63 is 0, so equal inputs wrongly returned 0. Widening first makes
    // `0u64 - 1` wrap to all-ones, setting bit 63 exactly when x == y
    // (this matches Go's crypto/subtle ConstantTimeEq).
    (((x ^ y) as u32 as u64).wrapping_sub(1) >> 63) as isize
}
/// constant_time_less_or_eq returns 1 if x <= y and 0 otherwise. Its behavior is undefined if x or
/// y are negative or > 2**31 - 1.
pub fn constant_time_less_or_eq(x: isize, y: isize) -> isize {
    let x32 = x as i32;
    let y32 = y as i32;
    // x <= y  <=>  x - y - 1 < 0, i.e. the sign bit of (x - y - 1) is 1.
    (((x32 - y32).wrapping_sub(1) >> 31) & 1) as isize
}
/// constant_time_select returns x if v == 1 and y if v == 0. Its behavior is undefined if v takes
/// any other value.
pub fn constant_time_select(v: isize, x: isize, y: isize) -> isize {
    let mask = v - 1; // all zeros when v == 1, all ones when v == 0
    (!mask & x) | (mask & y)
}
|
mod class;
/// Entry point: parses the class file named by the first CLI argument and
/// prints its contents.
fn main() {
    let mut args = std::env::args();
    args.next(); // skip the program name
    let class_file = args.next().expect("Must specify class file");
    let data = &std::fs::read(&class_file).expect("Unable to read file");
    let parsed = class::parse(data);
    parsed.print();
}
|
//! An analog of a Python String.
//!
//! To return to Python you must use ```into_raw``` method and return a raw pointer.
//! You can create them using the ```from``` trait method, from both ```&str``` and ```String```.
//!
//! # Safety
//! When passed from Python you can convert from PyString to an owned string
//! (```from_ptr_to_string``` method) or to a ```&str``` slice (```to_str``` method), or
//! to a PyString reference (```from_ptr``` method). Those operations are unsafe
//! as they require dereferencing a raw pointer.
//!
//! # Examples
//!
//! ```
//! use rustypy::PyString;
//! let pystr = PyString::from("Hello world!");
//!
//! // prepare to return to Python:
//! let ptr = pystr.into_raw();
//! // convert from raw pointer to an owned String
//! let rust_string = unsafe { PyString::from_ptr_to_string(ptr) };
//! ```
use libc::c_char;
use std::ffi::CString;
use std::convert::From;
use std::fmt;
/// An analog of a Python string.
///
/// Read the [module docs](index.html) for more information.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct PyString {
    // Owned NUL-terminated buffer backing the string (no interior NULs).
    _inner: CString,
}
impl PyString {
    /// Get a PyString from a previously boxed raw pointer.
    ///
    /// # Safety
    /// Ensure that the passed ptr is valid and was produced by `into_raw`.
    /// `Box::from_raw` takes back ownership of the allocation, so the pointer
    /// must not be used or freed again afterwards (double free otherwise).
    pub unsafe fn from_ptr(ptr: *mut PyString) -> PyString {
        if ptr.is_null() {
            panic!("trying to deref a null ptr!");
        }
        *Box::from_raw(ptr)
    }
    /// Constructs an owned String from a raw pointer.
    ///
    /// # Safety
    /// Ensure that the passed ptr is valid; like `from_ptr`, this consumes
    /// the allocation behind `ptr`. Panics if the bytes are not valid UTF-8.
    pub unsafe fn from_ptr_to_string(ptr: *mut PyString) -> String {
        if ptr.is_null() {
            panic!("trying to deref a null ptr!");
        }
        let pystr = *(Box::from_raw(ptr));
        String::from(pystr._inner.to_str().unwrap())
    }
    /// Returns PyString as a raw pointer. Use this whenever you want to return
    /// a PyString to Python.
    pub fn into_raw(self) -> *mut PyString {
        Box::into_raw(Box::new(self))
    }
    /// Return a PyString from a raw char pointer.
    ///
    /// Copies the C string into an owned buffer; the caller keeps ownership
    /// of `ptr`. Assumes `ptr` points to a valid NUL-terminated C string
    /// (`CStr::from_ptr` contract).
    pub unsafe fn from_raw(ptr: *const c_char) -> PyString {
        PyString {
            _inner: CStr::from_ptr(ptr).to_owned(),
        }
    }
}
impl fmt::Display for PyString {
    /// Formats the wrapped C string as UTF-8 text.
    ///
    /// Panics if the inner bytes are not valid UTF-8.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Write the &str directly instead of allocating an intermediate
        // String as the original did.
        write!(f, "{}", self._inner.to_str().unwrap())
    }
}
impl<'a> From<&'a str> for PyString {
    /// Copies a string slice to a PyString.
    ///
    /// Panics if `s` contains an interior NUL byte (`CString::new` fails).
    fn from(s: &'a str) -> PyString {
        PyString {
            _inner: CString::new(s).unwrap(),
        }
    }
}
impl From<String> for PyString {
    /// Copies a String to a PyString.
    ///
    /// Panics if `s` contains an interior NUL byte (`CString::new` fails).
    fn from(s: String) -> PyString {
        PyString {
            _inner: CString::new(s).unwrap(),
        }
    }
}
impl From<PyString> for String {
    /// Converts the PyString into an owned String.
    ///
    /// Panics if the inner bytes are not valid UTF-8.
    fn from(s: PyString) -> String {
        // Consume the inner CString directly; the original went through
        // Display (`to_string`), which validated and then copied the bytes
        // a second time.
        s._inner.into_string().unwrap()
    }
}
/// Destructs the PyString, mostly to be used from Python.
///
/// Null pointers are ignored; any other pointer must have come from
/// `into_raw` and must not be used again after this call.
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn pystring_free(ptr: *mut PyString) {
    if ptr.is_null() {
        return;
    }
    // Reconstruct the Box and drop it explicitly to free the allocation;
    // the bare `Box::from_raw(ptr);` statement relied on an implicit,
    // easy-to-misread immediate drop.
    drop(Box::from_raw(ptr));
}
use std::ffi::CStr;
/// Creates a PyString wrapper from a raw c_char pointer
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn pystring_new(ptr: *const c_char) -> *mut PyString {
    // Copy the C string into an owned CString, wrap it, and hand ownership
    // back to the caller as a boxed raw pointer.
    PyString {
        _inner: CStr::from_ptr(ptr).to_owned(),
    }
    .into_raw()
}
/// Consumes the wrapper and returns a raw c_char pointer. Afterwards is not necessary
/// to destruct it as it has already been consumed.
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn pystring_get_str(ptr: *mut PyString) -> *const c_char {
    // Takes ownership of the PyString (freeing its Box) and transfers the
    // inner CString buffer to the caller via `into_raw`; the caller now owns
    // that buffer and must NOT also call `pystring_free` on `ptr`.
    let pystr: PyString = PyString::from_ptr(ptr);
    pystr._inner.into_raw()
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Round-trips a &str through PyString -> raw pointer (as handed to
    /// Python) -> owned String.
    #[test]
    fn pystring_operations() {
        let source = "test string";
        let owned_pystr = PyString::from(source).into_raw();
        let back_from_py = unsafe { PyString::from_ptr_to_string(owned_pystr) };
        assert_eq!(back_from_py, "test string");
        // Removed a leftover `{ String::from(source); }` block that built and
        // immediately dropped a String with no observable effect.
    }
}
|
use anyhow::{anyhow, Result};
use futures::{StreamExt, TryStreamExt};
use imagepullsecret_sync::config::{Config, RegistryAuth};
use k8s_openapi::api::core::v1::{LocalObjectReference, Namespace, Secret, ServiceAccount};
use kube::{
api::{ListParams, Meta, PatchParams, PostParams},
Api, Client,
};
use kube_runtime::watcher;
use serde_json::json;
#[derive(Clone)]
/// Syncs registry image-pull secrets (read from the config secret
/// `cfg_ns`/`cfg_name`) into active namespaces, and wires each synced secret
/// into the namespace's service account.
pub struct SyncWorker<'a> {
    cfg_ns: &'a str,       // namespace holding the config secret
    cfg_name: &'a str,     // name of the config secret
    cfg_data_key: &'a str, // key inside the secret's data holding the configs
    sa_name: &'a str,      // service account to patch with imagePullSecrets
    client: Client,        // Kubernetes API client
}
impl<'a> SyncWorker<'a> {
pub fn new(client: Client, cfg_ns: &'a str, cfg_name: &'a str) -> Self {
SyncWorker {
client,
cfg_ns,
cfg_name,
cfg_data_key: "registry_secrets", // todo
sa_name: "default", // todo
}
}
async fn ensure(&self, all_ns: Vec<String>, configs: Vec<Config>) {
for ns in all_ns.iter() {
for cfg in configs.iter() {
match self.ensure_registry_secret(ns, cfg).await {
Ok(skip) => {
if !skip {
if let Err(e) = self.ensure_patch_sa(ns, &cfg.server).await {
warn!("patch '{}/{}' to default err: {}", ns, cfg.server, e);
}
}
}
Err(e) => {
info!("ensure '{}/{}' registry_secret err: {}", ns, cfg.server, e);
}
}
}
}
}
pub async fn watch_ns(&self) -> Result<()> {
info!("watching all active ns ...");
let ns_api = Api::<Namespace>::all(self.client.clone());
let lp = ListParams::default().fields("status.phase=Active");
let mut w = watcher(ns_api, lp).boxed();
while let Some(event) = w.try_next().await? {
match event {
watcher::Event::Applied(ns) => match self.read_config().await {
Ok(configs) => {
let all_ns = Vec::from(vec![ns.name()]);
self.ensure(all_ns, configs).await;
}
Err(e) => {
error!("applied ns {}, but read_config err: {}", ns.name(), e);
}
},
watcher::Event::Restarted(nss) => {
// if read config err stop watch
let configs = self.read_config().await?;
let all_ns = nss.iter().map(|ns| ns.name()).collect();
self.ensure(all_ns, configs).await;
}
_ => {}
}
}
Ok(())
}
pub async fn watch_cfg_secret(&self) -> Result<()> {
info!("watching secret '{}/{}' ...", self.cfg_ns, self.cfg_name);
let secret_api = Api::<Secret>::namespaced(self.client.clone(), self.cfg_ns);
let lp = ListParams::default().fields(&format!("metadata.name={}", self.cfg_name));
let mut w = watcher(secret_api, lp).boxed();
while let Some(event) = w.try_next().await? {
match event {
watcher::Event::Applied(s) => match self.read_data(s).await {
Ok(configs) => match self.get_all_ns().await {
Ok(all_ns) => self.ensure(all_ns, configs).await,
Err(e) => error!("get all ns err: {}", e),
},
Err(e) => {
error!("applied {} cfg, but read_data err: {}", self.cfg_name, e);
}
},
_ => {}
}
}
Ok(())
}
async fn get_all_ns(&self) -> Result<Vec<String>> {
let ns_api = Api::<Namespace>::all(self.client.clone());
let lp = ListParams::default().fields("status.phase=Active");
let all_ns = ns_api.list(&lp).await?;
Ok(all_ns.items.iter().map(|item| item.name()).collect())
}
async fn ensure_registry_secret(&self, ns: &str, cfg: &Config) -> Result<bool> {
if !cfg.namespaces.contains(&format!("*")) && !cfg.namespaces.contains(&format!("{}", ns)) {
info!("secret '{}' don't need sync to ns '{}'", cfg.server, ns);
return Ok(true);
}
let auth = RegistryAuth::new(
cfg.username.clone(),
cfg.password.clone(),
cfg.server.clone(),
);
let key = ".dockerconfigjson";
let secret_api = Api::<Secret>::namespaced(self.client.clone(), self.cfg_ns);
match secret_api.get(&cfg.server).await {
Ok(s) => {
if let Some(map) = s.data {
if let Some(data) = map.get(key) {
if base64::encode(&data.0) != auth.base64_encode() {
let js = json!({ "data": {key: auth.base64_encode() } });
let p = serde_json::to_vec(&js)?;
let pp = PatchParams::default();
secret_api.patch(&cfg.server, &pp, p).await?;
}
} else {
warn!("not found {} in map", key);
}
} else {
warn!("secret's data field is None");
}
}
Err(kube::Error::Api(e)) => {
if e.code == 404 {
let s: Secret = serde_json::from_value(json!({
"apiVersion": "v1",
"data": {
".dockerconfigjson": auth.base64_encode(),
},
"kind": "Secret",
"metadata": {
"name": cfg.server,
"namespace": ns,
},
"type": "kubernetes.io/dockerconfigjson"
}
))?;
let pp = PostParams::default();
secret_api.create(&pp, &s).await?;
}
}
Err(e) => return Err(anyhow!("query {} err: {}", cfg.server, e)),
}
Ok(false)
}
async fn ensure_patch_sa(&self, ns: &str, secret_name: &str) -> Result<()> {
let sa_api = Api::<ServiceAccount>::namespaced(self.client.clone(), ns);
let mut found = false;
let mut new_secrets: Vec<LocalObjectReference> = Vec::new();
match sa_api.get(self.sa_name).await {
Ok(sa) => {
if let Some(ipss) = sa.image_pull_secrets {
for item in ipss {
if item.name == Some(String::from(secret_name)) {
found = true
}
new_secrets.push(item);
}
}
}
Err(e) => return Err(anyhow!("get {}/default sa err: {}", ns, e)),
}
if !found {
let p = serde_json::to_vec(&json!({ "imagePullSecrets": new_secrets }))?;
let pp = PatchParams::default();
sa_api.patch(self.sa_name, &pp, p).await?;
}
Ok(())
}
async fn read_config(&self) -> Result<Vec<Config>> {
let secret_api = Api::<Secret>::namespaced(self.client.clone(), self.cfg_ns);
let secret = secret_api.get(self.cfg_name).await?;
self.read_data(secret).await
}
async fn read_data(&self, secret: Secret) -> Result<Vec<Config>> {
match secret.data {
Some(map) => match map.get(self.cfg_data_key) {
Some(byte_str) => {
let configs: Vec<Config> = serde_yaml::from_slice(&byte_str.0)?;
return Ok(configs);
}
None => Err(anyhow!("read secret data field {} err", self.cfg_data_key)),
},
None => Err(anyhow!("read secret data err")),
}
}
}
|
fn main() {
    let mut s = String::from("hello world");
    //let word = first_word(&s);
    let word = first_word_slice(&s);
    // Fixed: `word` is a slice borrowing from `s`, so it must be used before
    // `s.clear()` takes a mutable borrow — the original order failed to
    // compile (E0502: cannot borrow `s` as mutable while borrowed as
    // immutable).
    println!("the first word is {}", word);
    s.clear();
}
// Without slices: returns the byte index just past the first word — the
// index of the first space, or the string's length if there is no space.
//
// Changed the parameter from `&String` to the more general `&str`; existing
// `&String` call sites still work through deref coercion.
fn first_word(s: &str) -> usize {
    match s.as_bytes().iter().position(|&b| b == b' ') {
        Some(i) => {
            println!("index is {}", i);
            i
        }
        None => {
            // No space found: the whole string is one word.
            println!("length of word is {}", s.len());
            s.len()
        }
    }
}
// String Slices
/// Returns the first space-delimited word of `s` as a subslice; when `s`
/// contains no space, the whole string is returned.
fn first_word_slice(s: &str) -> &str {
    match s.as_bytes().iter().position(|&b| b == b' ') {
        Some(i) => &s[..i],
        None => s,
    }
}
|
use std::collections::HashMap;
/// A simple string-to-i32 map with panicking add/delete semantics.
pub struct Dictionary {
    // Backing storage for the key/value pairs.
    hash_map: HashMap<String, i32>,
}
impl Dictionary {
    /// Creates an empty Dictionary.
    ///
    /// Fixed: the original signature was `new(&self)`, which required an
    /// already-existing instance to construct one — making it impossible to
    /// ever build the first Dictionary. It is now an associated function.
    pub fn new() -> Dictionary {
        Dictionary {
            hash_map: Default::default(),
        }
    }
    /// Inserts `key` -> `number`. Panics if the key already exists.
    pub fn add_key(&mut self, key: &str, number: i32) {
        if !self.hash_map.contains_key(key) {
            self.hash_map.insert(key.to_string(), number);
        } else {
            panic!("Key already exists")
        }
    }
    /// Removes `key`. Panics if the key does not exist.
    ///
    /// Fixed: the original condition was inverted — it "removed" only when
    /// the key was ABSENT (a no-op) and panicked whenever the key was
    /// actually present.
    pub fn delete_key(&mut self, key: &str) {
        if self.hash_map.remove(key).is_none() {
            panic!("Key Does not exist")
        }
    }
    /// Gets a value from the dictionary, or None if the key is absent.
    pub fn get_value(&self, value: &str) -> Option<&i32> {
        self.hash_map.get(value)
    }
}
|
/// An enum to represent all characters in the Coptic block.
///
/// Variants cover code points U+2C80 through U+2CFE; note that the enum has
/// no variants for U+2CF4..=U+2CF8. Each variant's doc comment lists its
/// code point and glyph. Convert with `Into<char>` / `TryFrom<char>`.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum Coptic {
    /// \u{2c80}: 'Ⲁ'
    CapitalLetterAlfa,
    /// \u{2c81}: 'ⲁ'
    SmallLetterAlfa,
    /// \u{2c82}: 'Ⲃ'
    CapitalLetterVida,
    /// \u{2c83}: 'ⲃ'
    SmallLetterVida,
    /// \u{2c84}: 'Ⲅ'
    CapitalLetterGamma,
    /// \u{2c85}: 'ⲅ'
    SmallLetterGamma,
    /// \u{2c86}: 'Ⲇ'
    CapitalLetterDalda,
    /// \u{2c87}: 'ⲇ'
    SmallLetterDalda,
    /// \u{2c88}: 'Ⲉ'
    CapitalLetterEie,
    /// \u{2c89}: 'ⲉ'
    SmallLetterEie,
    /// \u{2c8a}: 'Ⲋ'
    CapitalLetterSou,
    /// \u{2c8b}: 'ⲋ'
    SmallLetterSou,
    /// \u{2c8c}: 'Ⲍ'
    CapitalLetterZata,
    /// \u{2c8d}: 'ⲍ'
    SmallLetterZata,
    /// \u{2c8e}: 'Ⲏ'
    CapitalLetterHate,
    /// \u{2c8f}: 'ⲏ'
    SmallLetterHate,
    /// \u{2c90}: 'Ⲑ'
    CapitalLetterThethe,
    /// \u{2c91}: 'ⲑ'
    SmallLetterThethe,
    /// \u{2c92}: 'Ⲓ'
    CapitalLetterIauda,
    /// \u{2c93}: 'ⲓ'
    SmallLetterIauda,
    /// \u{2c94}: 'Ⲕ'
    CapitalLetterKapa,
    /// \u{2c95}: 'ⲕ'
    SmallLetterKapa,
    /// \u{2c96}: 'Ⲗ'
    CapitalLetterLaula,
    /// \u{2c97}: 'ⲗ'
    SmallLetterLaula,
    /// \u{2c98}: 'Ⲙ'
    CapitalLetterMi,
    /// \u{2c99}: 'ⲙ'
    SmallLetterMi,
    /// \u{2c9a}: 'Ⲛ'
    CapitalLetterNi,
    /// \u{2c9b}: 'ⲛ'
    SmallLetterNi,
    /// \u{2c9c}: 'Ⲝ'
    CapitalLetterKsi,
    /// \u{2c9d}: 'ⲝ'
    SmallLetterKsi,
    /// \u{2c9e}: 'Ⲟ'
    CapitalLetterO,
    /// \u{2c9f}: 'ⲟ'
    SmallLetterO,
    /// \u{2ca0}: 'Ⲡ'
    CapitalLetterPi,
    /// \u{2ca1}: 'ⲡ'
    SmallLetterPi,
    /// \u{2ca2}: 'Ⲣ'
    CapitalLetterRo,
    /// \u{2ca3}: 'ⲣ'
    SmallLetterRo,
    /// \u{2ca4}: 'Ⲥ'
    CapitalLetterSima,
    /// \u{2ca5}: 'ⲥ'
    SmallLetterSima,
    /// \u{2ca6}: 'Ⲧ'
    CapitalLetterTau,
    /// \u{2ca7}: 'ⲧ'
    SmallLetterTau,
    /// \u{2ca8}: 'Ⲩ'
    CapitalLetterUa,
    /// \u{2ca9}: 'ⲩ'
    SmallLetterUa,
    /// \u{2caa}: 'Ⲫ'
    CapitalLetterFi,
    /// \u{2cab}: 'ⲫ'
    SmallLetterFi,
    /// \u{2cac}: 'Ⲭ'
    CapitalLetterKhi,
    /// \u{2cad}: 'ⲭ'
    SmallLetterKhi,
    /// \u{2cae}: 'Ⲯ'
    CapitalLetterPsi,
    /// \u{2caf}: 'ⲯ'
    SmallLetterPsi,
    /// \u{2cb0}: 'Ⲱ'
    CapitalLetterOou,
    /// \u{2cb1}: 'ⲱ'
    SmallLetterOou,
    /// \u{2cb2}: 'Ⲳ'
    CapitalLetterDialectDashPAlef,
    /// \u{2cb3}: 'ⲳ'
    SmallLetterDialectDashPAlef,
    /// \u{2cb4}: 'Ⲵ'
    CapitalLetterOldAin,
    /// \u{2cb5}: 'ⲵ'
    SmallLetterOldAin,
    /// \u{2cb6}: 'Ⲷ'
    CapitalLetterCryptogrammicEie,
    /// \u{2cb7}: 'ⲷ'
    SmallLetterCryptogrammicEie,
    /// \u{2cb8}: 'Ⲹ'
    CapitalLetterDialectDashPKapa,
    /// \u{2cb9}: 'ⲹ'
    SmallLetterDialectDashPKapa,
    /// \u{2cba}: 'Ⲻ'
    CapitalLetterDialectDashPNi,
    /// \u{2cbb}: 'ⲻ'
    SmallLetterDialectDashPNi,
    /// \u{2cbc}: 'Ⲽ'
    CapitalLetterCryptogrammicNi,
    /// \u{2cbd}: 'ⲽ'
    SmallLetterCryptogrammicNi,
    /// \u{2cbe}: 'Ⲿ'
    CapitalLetterOldOou,
    /// \u{2cbf}: 'ⲿ'
    SmallLetterOldOou,
    /// \u{2cc0}: 'Ⳁ'
    CapitalLetterSampi,
    /// \u{2cc1}: 'ⳁ'
    SmallLetterSampi,
    /// \u{2cc2}: 'Ⳃ'
    CapitalLetterCrossedShei,
    /// \u{2cc3}: 'ⳃ'
    SmallLetterCrossedShei,
    /// \u{2cc4}: 'Ⳅ'
    CapitalLetterOldShei,
    /// \u{2cc5}: 'ⳅ'
    SmallLetterOldShei,
    /// \u{2cc6}: 'Ⳇ'
    CapitalLetterOldEsh,
    /// \u{2cc7}: 'ⳇ'
    SmallLetterOldEsh,
    /// \u{2cc8}: 'Ⳉ'
    CapitalLetterAkhmimicKhei,
    /// \u{2cc9}: 'ⳉ'
    SmallLetterAkhmimicKhei,
    /// \u{2cca}: 'Ⳋ'
    CapitalLetterDialectDashPHori,
    /// \u{2ccb}: 'ⳋ'
    SmallLetterDialectDashPHori,
    /// \u{2ccc}: 'Ⳍ'
    CapitalLetterOldHori,
    /// \u{2ccd}: 'ⳍ'
    SmallLetterOldHori,
    /// \u{2cce}: 'Ⳏ'
    CapitalLetterOldHa,
    /// \u{2ccf}: 'ⳏ'
    SmallLetterOldHa,
    /// \u{2cd0}: 'Ⳑ'
    CapitalLetterLDashShapedHa,
    /// \u{2cd1}: 'ⳑ'
    SmallLetterLDashShapedHa,
    /// \u{2cd2}: 'Ⳓ'
    CapitalLetterOldHei,
    /// \u{2cd3}: 'ⳓ'
    SmallLetterOldHei,
    /// \u{2cd4}: 'Ⳕ'
    CapitalLetterOldHat,
    /// \u{2cd5}: 'ⳕ'
    SmallLetterOldHat,
    /// \u{2cd6}: 'Ⳗ'
    CapitalLetterOldGangia,
    /// \u{2cd7}: 'ⳗ'
    SmallLetterOldGangia,
    /// \u{2cd8}: 'Ⳙ'
    CapitalLetterOldDja,
    /// \u{2cd9}: 'ⳙ'
    SmallLetterOldDja,
    /// \u{2cda}: 'Ⳛ'
    CapitalLetterOldShima,
    /// \u{2cdb}: 'ⳛ'
    SmallLetterOldShima,
    /// \u{2cdc}: 'Ⳝ'
    CapitalLetterOldNubianShima,
    /// \u{2cdd}: 'ⳝ'
    SmallLetterOldNubianShima,
    /// \u{2cde}: 'Ⳟ'
    CapitalLetterOldNubianNgi,
    /// \u{2cdf}: 'ⳟ'
    SmallLetterOldNubianNgi,
    /// \u{2ce0}: 'Ⳡ'
    CapitalLetterOldNubianNyi,
    /// \u{2ce1}: 'ⳡ'
    SmallLetterOldNubianNyi,
    /// \u{2ce2}: 'Ⳣ'
    CapitalLetterOldNubianWau,
    /// \u{2ce3}: 'ⳣ'
    SmallLetterOldNubianWau,
    /// \u{2ce4}: 'ⳤ'
    SymbolKai,
    /// \u{2ce5}: '⳥'
    SymbolMiRo,
    /// \u{2ce6}: '⳦'
    SymbolPiRo,
    /// \u{2ce7}: '⳧'
    SymbolStauros,
    /// \u{2ce8}: '⳨'
    SymbolTauRo,
    /// \u{2ce9}: '⳩'
    SymbolKhiRo,
    /// \u{2cea}: '⳪'
    SymbolShimaSima,
    /// \u{2ceb}: 'Ⳬ'
    CapitalLetterCryptogrammicShei,
    /// \u{2cec}: 'ⳬ'
    SmallLetterCryptogrammicShei,
    /// \u{2ced}: 'Ⳮ'
    CapitalLetterCryptogrammicGangia,
    /// \u{2cee}: 'ⳮ'
    SmallLetterCryptogrammicGangia,
    /// \u{2cef}: '⳯'
    CombiningNiAbove,
    /// \u{2cf0}: '⳰'
    CombiningSpiritusAsper,
    /// \u{2cf1}: '⳱'
    CombiningSpiritusLenis,
    /// \u{2cf2}: 'Ⳳ'
    CapitalLetterBohairicKhei,
    /// \u{2cf3}: 'ⳳ'
    SmallLetterBohairicKhei,
    /// \u{2cf9}: '⳹'
    OldNubianFullStop,
    /// \u{2cfa}: '⳺'
    OldNubianDirectQuestionMark,
    /// \u{2cfb}: '⳻'
    OldNubianIndirectQuestionMark,
    /// \u{2cfc}: '⳼'
    OldNubianVerseDivider,
    /// \u{2cfd}: '⳽'
    FractionOneHalf,
    /// \u{2cfe}: '⳾'
    FullStop,
}
impl Into<char> for Coptic {
fn into(self) -> char {
match self {
Coptic::CapitalLetterAlfa => 'Ⲁ',
Coptic::SmallLetterAlfa => 'ⲁ',
Coptic::CapitalLetterVida => 'Ⲃ',
Coptic::SmallLetterVida => 'ⲃ',
Coptic::CapitalLetterGamma => 'Ⲅ',
Coptic::SmallLetterGamma => 'ⲅ',
Coptic::CapitalLetterDalda => 'Ⲇ',
Coptic::SmallLetterDalda => 'ⲇ',
Coptic::CapitalLetterEie => 'Ⲉ',
Coptic::SmallLetterEie => 'ⲉ',
Coptic::CapitalLetterSou => 'Ⲋ',
Coptic::SmallLetterSou => 'ⲋ',
Coptic::CapitalLetterZata => 'Ⲍ',
Coptic::SmallLetterZata => 'ⲍ',
Coptic::CapitalLetterHate => 'Ⲏ',
Coptic::SmallLetterHate => 'ⲏ',
Coptic::CapitalLetterThethe => 'Ⲑ',
Coptic::SmallLetterThethe => 'ⲑ',
Coptic::CapitalLetterIauda => 'Ⲓ',
Coptic::SmallLetterIauda => 'ⲓ',
Coptic::CapitalLetterKapa => 'Ⲕ',
Coptic::SmallLetterKapa => 'ⲕ',
Coptic::CapitalLetterLaula => 'Ⲗ',
Coptic::SmallLetterLaula => 'ⲗ',
Coptic::CapitalLetterMi => 'Ⲙ',
Coptic::SmallLetterMi => 'ⲙ',
Coptic::CapitalLetterNi => 'Ⲛ',
Coptic::SmallLetterNi => 'ⲛ',
Coptic::CapitalLetterKsi => 'Ⲝ',
Coptic::SmallLetterKsi => 'ⲝ',
Coptic::CapitalLetterO => 'Ⲟ',
Coptic::SmallLetterO => 'ⲟ',
Coptic::CapitalLetterPi => 'Ⲡ',
Coptic::SmallLetterPi => 'ⲡ',
Coptic::CapitalLetterRo => 'Ⲣ',
Coptic::SmallLetterRo => 'ⲣ',
Coptic::CapitalLetterSima => 'Ⲥ',
Coptic::SmallLetterSima => 'ⲥ',
Coptic::CapitalLetterTau => 'Ⲧ',
Coptic::SmallLetterTau => 'ⲧ',
Coptic::CapitalLetterUa => 'Ⲩ',
Coptic::SmallLetterUa => 'ⲩ',
Coptic::CapitalLetterFi => 'Ⲫ',
Coptic::SmallLetterFi => 'ⲫ',
Coptic::CapitalLetterKhi => 'Ⲭ',
Coptic::SmallLetterKhi => 'ⲭ',
Coptic::CapitalLetterPsi => 'Ⲯ',
Coptic::SmallLetterPsi => 'ⲯ',
Coptic::CapitalLetterOou => 'Ⲱ',
Coptic::SmallLetterOou => 'ⲱ',
Coptic::CapitalLetterDialectDashPAlef => 'Ⲳ',
Coptic::SmallLetterDialectDashPAlef => 'ⲳ',
Coptic::CapitalLetterOldAin => 'Ⲵ',
Coptic::SmallLetterOldAin => 'ⲵ',
Coptic::CapitalLetterCryptogrammicEie => 'Ⲷ',
Coptic::SmallLetterCryptogrammicEie => 'ⲷ',
Coptic::CapitalLetterDialectDashPKapa => 'Ⲹ',
Coptic::SmallLetterDialectDashPKapa => 'ⲹ',
Coptic::CapitalLetterDialectDashPNi => 'Ⲻ',
Coptic::SmallLetterDialectDashPNi => 'ⲻ',
Coptic::CapitalLetterCryptogrammicNi => 'Ⲽ',
Coptic::SmallLetterCryptogrammicNi => 'ⲽ',
Coptic::CapitalLetterOldOou => 'Ⲿ',
Coptic::SmallLetterOldOou => 'ⲿ',
Coptic::CapitalLetterSampi => 'Ⳁ',
Coptic::SmallLetterSampi => 'ⳁ',
Coptic::CapitalLetterCrossedShei => 'Ⳃ',
Coptic::SmallLetterCrossedShei => 'ⳃ',
Coptic::CapitalLetterOldShei => 'Ⳅ',
Coptic::SmallLetterOldShei => 'ⳅ',
Coptic::CapitalLetterOldEsh => 'Ⳇ',
Coptic::SmallLetterOldEsh => 'ⳇ',
Coptic::CapitalLetterAkhmimicKhei => 'Ⳉ',
Coptic::SmallLetterAkhmimicKhei => 'ⳉ',
Coptic::CapitalLetterDialectDashPHori => 'Ⳋ',
Coptic::SmallLetterDialectDashPHori => 'ⳋ',
Coptic::CapitalLetterOldHori => 'Ⳍ',
Coptic::SmallLetterOldHori => 'ⳍ',
Coptic::CapitalLetterOldHa => 'Ⳏ',
Coptic::SmallLetterOldHa => 'ⳏ',
Coptic::CapitalLetterLDashShapedHa => 'Ⳑ',
Coptic::SmallLetterLDashShapedHa => 'ⳑ',
Coptic::CapitalLetterOldHei => 'Ⳓ',
Coptic::SmallLetterOldHei => 'ⳓ',
Coptic::CapitalLetterOldHat => 'Ⳕ',
Coptic::SmallLetterOldHat => 'ⳕ',
Coptic::CapitalLetterOldGangia => 'Ⳗ',
Coptic::SmallLetterOldGangia => 'ⳗ',
Coptic::CapitalLetterOldDja => 'Ⳙ',
Coptic::SmallLetterOldDja => 'ⳙ',
Coptic::CapitalLetterOldShima => 'Ⳛ',
Coptic::SmallLetterOldShima => 'ⳛ',
Coptic::CapitalLetterOldNubianShima => 'Ⳝ',
Coptic::SmallLetterOldNubianShima => 'ⳝ',
Coptic::CapitalLetterOldNubianNgi => 'Ⳟ',
Coptic::SmallLetterOldNubianNgi => 'ⳟ',
Coptic::CapitalLetterOldNubianNyi => 'Ⳡ',
Coptic::SmallLetterOldNubianNyi => 'ⳡ',
Coptic::CapitalLetterOldNubianWau => 'Ⳣ',
Coptic::SmallLetterOldNubianWau => 'ⳣ',
Coptic::SymbolKai => 'ⳤ',
Coptic::SymbolMiRo => '⳥',
Coptic::SymbolPiRo => '⳦',
Coptic::SymbolStauros => '⳧',
Coptic::SymbolTauRo => '⳨',
Coptic::SymbolKhiRo => '⳩',
Coptic::SymbolShimaSima => '⳪',
Coptic::CapitalLetterCryptogrammicShei => 'Ⳬ',
Coptic::SmallLetterCryptogrammicShei => 'ⳬ',
Coptic::CapitalLetterCryptogrammicGangia => 'Ⳮ',
Coptic::SmallLetterCryptogrammicGangia => 'ⳮ',
Coptic::CombiningNiAbove => '⳯',
Coptic::CombiningSpiritusAsper => '⳰',
Coptic::CombiningSpiritusLenis => '⳱',
Coptic::CapitalLetterBohairicKhei => 'Ⳳ',
Coptic::SmallLetterBohairicKhei => 'ⳳ',
Coptic::OldNubianFullStop => '⳹',
Coptic::OldNubianDirectQuestionMark => '⳺',
Coptic::OldNubianIndirectQuestionMark => '⳻',
Coptic::OldNubianVerseDivider => '⳼',
Coptic::FractionOneHalf => '⳽',
Coptic::FullStop => '⳾',
}
}
}
/// Fallible conversion from a `char` to its `Coptic` variant.
///
/// Returns `Err(())` for any character that is not an assigned code point
/// of this Unicode block's enum (including gaps inside the block).
impl std::convert::TryFrom<char> for Coptic {
    type Error = ();
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            'Ⲁ' => Ok(Coptic::CapitalLetterAlfa),
            'ⲁ' => Ok(Coptic::SmallLetterAlfa),
            'Ⲃ' => Ok(Coptic::CapitalLetterVida),
            'ⲃ' => Ok(Coptic::SmallLetterVida),
            'Ⲅ' => Ok(Coptic::CapitalLetterGamma),
            'ⲅ' => Ok(Coptic::SmallLetterGamma),
            'Ⲇ' => Ok(Coptic::CapitalLetterDalda),
            'ⲇ' => Ok(Coptic::SmallLetterDalda),
            'Ⲉ' => Ok(Coptic::CapitalLetterEie),
            'ⲉ' => Ok(Coptic::SmallLetterEie),
            'Ⲋ' => Ok(Coptic::CapitalLetterSou),
            'ⲋ' => Ok(Coptic::SmallLetterSou),
            'Ⲍ' => Ok(Coptic::CapitalLetterZata),
            'ⲍ' => Ok(Coptic::SmallLetterZata),
            'Ⲏ' => Ok(Coptic::CapitalLetterHate),
            'ⲏ' => Ok(Coptic::SmallLetterHate),
            'Ⲑ' => Ok(Coptic::CapitalLetterThethe),
            'ⲑ' => Ok(Coptic::SmallLetterThethe),
            'Ⲓ' => Ok(Coptic::CapitalLetterIauda),
            'ⲓ' => Ok(Coptic::SmallLetterIauda),
            'Ⲕ' => Ok(Coptic::CapitalLetterKapa),
            'ⲕ' => Ok(Coptic::SmallLetterKapa),
            'Ⲗ' => Ok(Coptic::CapitalLetterLaula),
            'ⲗ' => Ok(Coptic::SmallLetterLaula),
            'Ⲙ' => Ok(Coptic::CapitalLetterMi),
            'ⲙ' => Ok(Coptic::SmallLetterMi),
            'Ⲛ' => Ok(Coptic::CapitalLetterNi),
            'ⲛ' => Ok(Coptic::SmallLetterNi),
            'Ⲝ' => Ok(Coptic::CapitalLetterKsi),
            'ⲝ' => Ok(Coptic::SmallLetterKsi),
            'Ⲟ' => Ok(Coptic::CapitalLetterO),
            'ⲟ' => Ok(Coptic::SmallLetterO),
            'Ⲡ' => Ok(Coptic::CapitalLetterPi),
            'ⲡ' => Ok(Coptic::SmallLetterPi),
            'Ⲣ' => Ok(Coptic::CapitalLetterRo),
            'ⲣ' => Ok(Coptic::SmallLetterRo),
            'Ⲥ' => Ok(Coptic::CapitalLetterSima),
            'ⲥ' => Ok(Coptic::SmallLetterSima),
            'Ⲧ' => Ok(Coptic::CapitalLetterTau),
            'ⲧ' => Ok(Coptic::SmallLetterTau),
            'Ⲩ' => Ok(Coptic::CapitalLetterUa),
            'ⲩ' => Ok(Coptic::SmallLetterUa),
            'Ⲫ' => Ok(Coptic::CapitalLetterFi),
            'ⲫ' => Ok(Coptic::SmallLetterFi),
            'Ⲭ' => Ok(Coptic::CapitalLetterKhi),
            'ⲭ' => Ok(Coptic::SmallLetterKhi),
            'Ⲯ' => Ok(Coptic::CapitalLetterPsi),
            'ⲯ' => Ok(Coptic::SmallLetterPsi),
            'Ⲱ' => Ok(Coptic::CapitalLetterOou),
            'ⲱ' => Ok(Coptic::SmallLetterOou),
            'Ⲳ' => Ok(Coptic::CapitalLetterDialectDashPAlef),
            'ⲳ' => Ok(Coptic::SmallLetterDialectDashPAlef),
            'Ⲵ' => Ok(Coptic::CapitalLetterOldAin),
            'ⲵ' => Ok(Coptic::SmallLetterOldAin),
            'Ⲷ' => Ok(Coptic::CapitalLetterCryptogrammicEie),
            'ⲷ' => Ok(Coptic::SmallLetterCryptogrammicEie),
            'Ⲹ' => Ok(Coptic::CapitalLetterDialectDashPKapa),
            'ⲹ' => Ok(Coptic::SmallLetterDialectDashPKapa),
            'Ⲻ' => Ok(Coptic::CapitalLetterDialectDashPNi),
            'ⲻ' => Ok(Coptic::SmallLetterDialectDashPNi),
            'Ⲽ' => Ok(Coptic::CapitalLetterCryptogrammicNi),
            'ⲽ' => Ok(Coptic::SmallLetterCryptogrammicNi),
            'Ⲿ' => Ok(Coptic::CapitalLetterOldOou),
            'ⲿ' => Ok(Coptic::SmallLetterOldOou),
            'Ⳁ' => Ok(Coptic::CapitalLetterSampi),
            'ⳁ' => Ok(Coptic::SmallLetterSampi),
            'Ⳃ' => Ok(Coptic::CapitalLetterCrossedShei),
            'ⳃ' => Ok(Coptic::SmallLetterCrossedShei),
            'Ⳅ' => Ok(Coptic::CapitalLetterOldShei),
            'ⳅ' => Ok(Coptic::SmallLetterOldShei),
            'Ⳇ' => Ok(Coptic::CapitalLetterOldEsh),
            'ⳇ' => Ok(Coptic::SmallLetterOldEsh),
            'Ⳉ' => Ok(Coptic::CapitalLetterAkhmimicKhei),
            'ⳉ' => Ok(Coptic::SmallLetterAkhmimicKhei),
            'Ⳋ' => Ok(Coptic::CapitalLetterDialectDashPHori),
            'ⳋ' => Ok(Coptic::SmallLetterDialectDashPHori),
            'Ⳍ' => Ok(Coptic::CapitalLetterOldHori),
            'ⳍ' => Ok(Coptic::SmallLetterOldHori),
            'Ⳏ' => Ok(Coptic::CapitalLetterOldHa),
            'ⳏ' => Ok(Coptic::SmallLetterOldHa),
            'Ⳑ' => Ok(Coptic::CapitalLetterLDashShapedHa),
            'ⳑ' => Ok(Coptic::SmallLetterLDashShapedHa),
            'Ⳓ' => Ok(Coptic::CapitalLetterOldHei),
            'ⳓ' => Ok(Coptic::SmallLetterOldHei),
            'Ⳕ' => Ok(Coptic::CapitalLetterOldHat),
            'ⳕ' => Ok(Coptic::SmallLetterOldHat),
            'Ⳗ' => Ok(Coptic::CapitalLetterOldGangia),
            'ⳗ' => Ok(Coptic::SmallLetterOldGangia),
            'Ⳙ' => Ok(Coptic::CapitalLetterOldDja),
            'ⳙ' => Ok(Coptic::SmallLetterOldDja),
            'Ⳛ' => Ok(Coptic::CapitalLetterOldShima),
            'ⳛ' => Ok(Coptic::SmallLetterOldShima),
            'Ⳝ' => Ok(Coptic::CapitalLetterOldNubianShima),
            'ⳝ' => Ok(Coptic::SmallLetterOldNubianShima),
            'Ⳟ' => Ok(Coptic::CapitalLetterOldNubianNgi),
            'ⳟ' => Ok(Coptic::SmallLetterOldNubianNgi),
            'Ⳡ' => Ok(Coptic::CapitalLetterOldNubianNyi),
            'ⳡ' => Ok(Coptic::SmallLetterOldNubianNyi),
            'Ⳣ' => Ok(Coptic::CapitalLetterOldNubianWau),
            'ⳣ' => Ok(Coptic::SmallLetterOldNubianWau),
            'ⳤ' => Ok(Coptic::SymbolKai),
            '⳥' => Ok(Coptic::SymbolMiRo),
            '⳦' => Ok(Coptic::SymbolPiRo),
            '⳧' => Ok(Coptic::SymbolStauros),
            '⳨' => Ok(Coptic::SymbolTauRo),
            '⳩' => Ok(Coptic::SymbolKhiRo),
            '⳪' => Ok(Coptic::SymbolShimaSima),
            'Ⳬ' => Ok(Coptic::CapitalLetterCryptogrammicShei),
            'ⳬ' => Ok(Coptic::SmallLetterCryptogrammicShei),
            'Ⳮ' => Ok(Coptic::CapitalLetterCryptogrammicGangia),
            'ⳮ' => Ok(Coptic::SmallLetterCryptogrammicGangia),
            '⳯' => Ok(Coptic::CombiningNiAbove),
            '⳰' => Ok(Coptic::CombiningSpiritusAsper),
            '⳱' => Ok(Coptic::CombiningSpiritusLenis),
            'Ⳳ' => Ok(Coptic::CapitalLetterBohairicKhei),
            'ⳳ' => Ok(Coptic::SmallLetterBohairicKhei),
            '⳹' => Ok(Coptic::OldNubianFullStop),
            '⳺' => Ok(Coptic::OldNubianDirectQuestionMark),
            '⳻' => Ok(Coptic::OldNubianIndirectQuestionMark),
            '⳼' => Ok(Coptic::OldNubianVerseDivider),
            '⳽' => Ok(Coptic::FractionOneHalf),
            '⳾' => Ok(Coptic::FullStop),
            // Any other character, including unassigned code points inside
            // the block, has no variant.
            _ => Err(()),
        }
    }
}
impl Into<u32> for Coptic {
    /// Returns the Unicode scalar value (code point) of the corresponding
    /// character.
    fn into(self) -> u32 {
        // Casting a `char` to `u32` yields its code point directly; the
        // previous implementation round-tripped through the
        // `escape_unicode` string form and re-parsed it as hex, which is
        // both slower and fragile.
        let c: char = self.into();
        c as u32
    }
}
impl std::convert::TryFrom<u32> for Coptic {
    type Error = ();
    /// Maps a code point to its `Coptic` variant, rejecting values that
    /// are not valid `char`s or not part of this block.
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        char::try_from(u)
            .map_err(|_| ())
            .and_then(|c| Self::try_from(c))
    }
}
impl Iterator for Coptic {
    type Item = Self;
    /// Yields the next assigned character of the block.
    ///
    /// Two fixes over the previous version: the iterator now advances
    /// `*self` (previously `next` recomputed the same successor forever,
    /// so a `for` loop never terminated), and unassigned code-point gaps
    /// inside the block are skipped instead of ending iteration early.
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        // '⳾' (FullStop) is the highest code point with a variant.
        let last = '⳾' as u32;
        let mut index: u32 = (*self).into();
        while index < last {
            index += 1;
            // Skip code points that have no corresponding variant.
            if let Ok(next) = Self::try_from(index) {
                *self = next;
                return Some(next);
            }
        }
        None
    }
}
impl Coptic {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        Self::CapitalLetterAlfa
    }
    /// The character's name, in sentence case
    pub fn name(&self) -> String {
        let debug_repr = std::format!("Coptic{:#?}", self);
        string_morph::to_sentence_case(&debug_repr)
    }
}
|
pub mod matrix_test;
pub mod opengl_test;
pub mod quaternion_test;
pub mod ecs_test; |
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
use reverie_syscalls::LocalMemory;
use reverie_syscalls::Syscall;
use syscalls::syscall;
use syscalls::SyscallArgs;
use syscalls::Sysno;
use super::ffi;
use super::thread;
use super::thread::GuestTransitionErr;
use super::thread::PidTid;
use super::thread::Thread;
use super::tool::Tool;
use super::tool::ToolGlobal;
use super::utils;
use super::vdso;
use crate::signal::guard;
/// Signal used to tell every tracked guest thread to exit in a controlled
/// way (see `exit_group_with_thread`).
pub const CONTROLLED_EXIT_SIGNAL: libc::c_int = libc::SIGSTKFLT;
/// Implement the thread notifier trait for any global tools
impl<T> thread::EventSink for T
where
    T: ToolGlobal,
{
    /// Forwards thread-start notifications to the global tool.
    #[inline]
    fn on_new_thread(pid_tid: PidTid) {
        T::global().on_thread_start(pid_tid.tid);
    }
    /// Forwards thread-exit notifications to the global tool.
    /// (`#[inline]` added for consistency with `on_new_thread` — both are
    /// trivial forwarders.)
    #[inline]
    fn on_thread_exit(pid_tid: PidTid) {
        T::global().on_thread_exit(pid_tid.tid);
    }
}
/// Raw syscall entry point: looks up the current thread record and runs
/// the intercepted syscall through `handle_syscall_with_thread`.
pub extern "C" fn handle_syscall<T: ToolGlobal>(
    syscall: isize,
    arg1: usize,
    arg2: usize,
    arg3: usize,
    arg4: usize,
    arg5: usize,
    arg6: usize,
    wrapper_sp: *mut ffi::syscall_stackframe,
) -> usize {
    // Without a thread record we cannot track this thread at all.
    let mut thread = match Thread::<T>::current() {
        Some(t) => t,
        None => terminate(1),
    };
    let outcome = handle_syscall_with_thread::<T>(
        &mut thread,
        syscall,
        arg1,
        arg2,
        arg3,
        arg4,
        arg5,
        arg6,
        wrapper_sp,
    );
    match outcome {
        Ok(return_code) => return_code,
        Err(GuestTransitionErr::ExitNow) => terminate(0),
        Err(GuestTransitionErr::ExitingElsewhere) => 0,
    }
}
/// Handle the critical section for the given system call on the given thread
///
/// Leaves guest execution for the duration of the call, dispatches on the
/// syscall number (with special handling for the process/thread-creating
/// and exiting syscalls), then re-enters guest execution.
#[allow(clippy::if_same_then_else)]
fn handle_syscall_with_thread<T: ToolGlobal>(
    thread: &mut Thread<T>,
    syscall: isize,
    arg1: usize,
    arg2: usize,
    arg3: usize,
    arg4: usize,
    arg5: usize,
    arg6: usize,
    wrapper_sp: *mut ffi::syscall_stackframe,
) -> Result<usize, GuestTransitionErr> {
    // Keep exit signals out while we are inside the critical section.
    let _guard = guard::enter_signal_exclusion_zone();
    thread.leave_guest_execution()?;
    let sys_no = Sysno::from(syscall as i32);
    let result = if sys_no == Sysno::clone && arg2 != 0 {
        // clone with a caller-supplied stack: go through the raw clone
        // wrapper so the child resumes at the saved return address.
        thread.maybe_fork_as_guest(|| unsafe {
            ffi::clone_syscall(
                arg1,
                arg2 as *mut libc::c_void,
                arg3 as *mut i32,
                arg4 as *mut i32,
                arg5,
                (*wrapper_sp).ret,
            )
        })?
    } else if sys_no == Sysno::clone {
        // clone without a new stack (fork-like): route through the tool's
        // syscall hook instead of the raw wrapper.
        thread.maybe_fork_as_guest(|| {
            let args = SyscallArgs::new(arg1, arg2, arg3, arg4, arg5, arg6);
            let syscall = Syscall::from_raw(sys_no, args);
            T::global()
                .syscall(syscall, &LocalMemory::new())
                .map_or_else(|e| -e.into_raw() as usize, |x| x as usize)
        })?
    } else if utils::is_vfork(sys_no, arg1) {
        thread.maybe_fork_as_guest(|| unsafe {
            let pid = ffi::vfork_syscall();
            if pid == 0 {
                // Child
                // Even though this function doesn't return, this is
                // safe because the thread is in `Guest` and that state
                // will be correct in the child application when the
                // jmp takes it there
                ffi::vfork_return_from_child(wrapper_sp)
            } else {
                // parent
                pid
            }
        })?
    } else if sys_no == Sysno::clone3 {
        let cl_args = unsafe { &*(arg1 as *const ffi::clone_args) };
        if cl_args.stack == 0 {
            // No stack supplied: forward the syscall unchanged.
            thread.maybe_fork_as_guest(|| unsafe {
                syscall!(sys_no, arg1, arg2, arg3, arg4, arg5, arg6)
                    .map_or_else(|e| -e.into_raw() as usize, |x| x as usize)
            })?
        } else {
            // NOTE(review): arg4 is replaced with 0 in this call — confirm
            // that is intentional for the clone3 wrapper.
            thread.maybe_fork_as_guest(|| unsafe {
                ffi::clone3_syscall(arg1, arg2, arg3, 0, arg5, (*wrapper_sp).ret)
            })?
        }
    } else if sys_no == Sysno::exit {
        // intercept the exit syscall and end this thread
        // in a predictable and trackable way
        if thread.try_exit() {
            terminate(arg1);
        }
        0
    } else if sys_no == Sysno::exit_group {
        // intercept the exit_group syscall and signal all the threads to exit
        // in a predictable and trackable way
        exit_group_with_thread(thread, arg1)
    } else {
        // All remaining syscalls: let the tool observe/handle them while
        // logically executing as the guest.
        let args = SyscallArgs::new(arg1, arg2, arg3, arg4, arg5, arg6);
        let syscall = Syscall::from_raw(sys_no, args);
        thread.execute_as_guest(|| {
            T::global()
                .syscall(syscall, &LocalMemory::new())
                .map_or_else(|e| -e.into_raw() as usize, |x| x as usize)
        })?
    };
    thread.enter_guest_execution()?;
    Ok(result)
}
/// Terminate this thread with no notifications
///
/// Issues the raw `exit` syscall directly, so only this thread ends.
fn terminate(exit_code: usize) -> ! {
    // SAFETY: a raw `exit` syscall with a single integer argument; it does
    // not return on success.
    unsafe {
        syscalls::syscall1(Sysno::exit, exit_code).expect("Exit should succeed");
    }
    unreachable!("The thread should have ended by now");
}
/// Perform an exit group with the current thread
///
/// Marks this thread as exiting, signals every other tracked thread with
/// `CONTROLLED_EXIT_SIGNAL`, then waits for them to exit before
/// terminating (or hands control to the tool on timeout).
fn exit_group_with_thread<T: ToolGlobal>(thread: &mut Thread<T>, exit_code: usize) -> usize {
    thread.try_exit();
    if let Some(exiting_pid) = thread::exit_all(|_, process_and_thread_id| unsafe {
        // SAFETY: raw tgkill with a pid/tid pair supplied by the thread
        // registry and our controlled exit signal.
        syscalls::syscall3(
            Sysno::tgkill,
            process_and_thread_id.pid as usize,
            process_and_thread_id.tid as usize,
            CONTROLLED_EXIT_SIGNAL as usize,
        )
        .expect("Signaling thread failed");
    }) {
        if !thread::wait_for_all_to_exit(exiting_pid, T::global().get_exit_timeout()) {
            // Other threads did not exit in time; let the tool decide.
            T::global().on_exit_timeout()
        } else {
            terminate(exit_code)
        }
    } else {
        // `exit_all` returned None — presumably a group exit is already in
        // progress elsewhere; confirm against `thread::exit_all`.
        0
    }
}
/// Public entry point for a group exit from the current thread, a no-op
/// returning 0 when the current thread is not tracked.
pub fn exit_group<T: ToolGlobal>(exit_code: usize) -> usize {
    match Thread::<T>::current() {
        Some(mut thread) => exit_group_with_thread(&mut thread, exit_code),
        None => 0,
    }
}
/// If any thread receives the exit signal call, this handler will gracefully
/// exit that thread
pub extern "C" fn handle_exit_signal<T: ToolGlobal>(
    _: libc::c_int,
    _: *const libc::siginfo_t,
    _: *const libc::c_void,
) {
    match Thread::<T>::current() {
        // Tracked thread: terminate only if we won the race to exit it.
        Some(mut thread) => {
            if thread.try_exit() {
                terminate(0);
            }
        }
        // Untracked thread: nothing to clean up, just end it.
        None => terminate(0),
    }
}
/// vDSO `clock_gettime` replacement: delegates to the global tool.
extern "C" fn handle_vdso_clock_gettime<T: ToolGlobal>(
    clockid: libc::clockid_t,
    tp: *mut libc::timespec,
) -> i32 {
    T::global().vdso_clock_gettime(clockid, tp)
}
/// vDSO `getcpu` replacement: delegates to the global tool.
extern "C" fn handle_vdso_getcpu<T: ToolGlobal>(
    cpu: *mut u32,
    node: *mut u32,
    _unused: usize,
) -> i32 {
    T::global().vdso_getcpu(cpu, node, _unused)
}
/// vDSO `gettimeofday` replacement: delegates to the global tool.
extern "C" fn handle_vdso_gettimeofday<T: ToolGlobal>(
    tv: *mut libc::timeval,
    tz: *mut libc::timezone,
) -> i32 {
    T::global().vdso_gettimeofday(tv, tz)
}
/// vDSO `time` replacement: delegates to the global tool.
extern "C" fn handle_vdso_time<T: ToolGlobal>(tloc: *mut libc::time_t) -> i32 {
    T::global().vdso_time(tloc)
}
/// Installs a replacement for a patched vDSO entry point.
///
/// Saves the real vDSO function pointer into the `vdso` module and returns
/// the matching `handle_vdso_*` trampoline, or `None` for syscalls that
/// are not intercepted.
pub extern "C" fn handle_vdso<T: ToolGlobal>(
    syscall: isize,
    actual_fn: ffi::void_void_fn,
) -> Option<ffi::void_void_fn> {
    use core::mem::transmute;
    // SAFETY: the transmutes only convert between function-pointer types;
    // correctness relies on each saved pointer and returned trampoline
    // matching the signature the guest expects for that vDSO symbol.
    unsafe {
        match Sysno::from(syscall as i32) {
            Sysno::clock_gettime => {
                vdso::clock_gettime = transmute(actual_fn as *const ());
                transmute(handle_vdso_clock_gettime::<T> as *const ())
            }
            Sysno::getcpu => {
                vdso::getcpu = transmute(actual_fn as *const ());
                transmute(handle_vdso_getcpu::<T> as *const ())
            }
            Sysno::gettimeofday => {
                vdso::gettimeofday = transmute(actual_fn as *const ());
                transmute(handle_vdso_gettimeofday::<T> as *const ())
            }
            Sysno::time => {
                vdso::time = transmute(actual_fn as *const ());
                transmute(handle_vdso_time::<T> as *const ())
            }
            _ => None,
        }
    }
}
/// `rdtsc` trap handler: delegates timestamp-counter reads to the tool.
pub extern "C" fn handle_rdtsc<T: ToolGlobal>() -> u64 {
    T::global().rdtsc()
}
|
use std::ops::Range;
use std::ops::RangeTo;
use std::ops::RangeFrom;
use std::ops::RangeFull;
use std::iter::Enumerate;
use nom::*;
use std::str;
use std::str::FromStr;
use std::str::Utf8Error;
/// A single lexical token produced by the lexer.
#[derive(Debug, Clone, PartialEq)]
pub enum Token {
    Comment(String),
    Float(f64),
    Integer(i64),
    Char(char),
    String(String),
    Bool(bool),
    Identifier(String),
    Symbol(Symbol),
    Keyword(Keyword),
    // Appended by `Lexer::lex_tokens` to mark the end of input.
    EOF,
    Illegal // for error
}
/// Reserved words of the language recognized by `parse_keywords_ident`.
#[derive(Debug, Clone, PartialEq)]
pub enum Keyword {
    While,
    If,
    Else,
    Break,
    True,
    False,
    Def,
    Continue,
    Return
}
/// Punctuation and operator tokens.
#[derive(Debug, Clone, PartialEq)]
pub enum Symbol {
    LParenthesis, // (
    RParenthesis, // )
    LBrace, // {
    RBrace, // }
    LBracket, // [
    RBracket, // ]
    LineEnd, // \n
    SemiColon, // ;
    Assign, // =
    Equal, // ==
    NotEqual, // !=
    GT, // >
    GE, // >=
    LT, // <
    LE, // <=
    Plus, // +
    Minus, // -
    Mult, // *
    Div, // /
    Mod, // %
    Not, // !
    Comma // ,
}
/// A sliceable view over a token buffer; the input type for the
/// token-level nom parsers.
#[derive(Debug, Clone, PartialEq, Copy)]
pub struct Tokens<'a> {
    pub toks: &'a [Token],
    // Absolute offsets of this view within the original token buffer.
    pub start: usize,
    pub end: usize
}
impl<'a> Tokens<'a> {
    /// Wraps a token vector in a view that spans the whole buffer.
    pub fn new(vec: &'a Vec<Token>) -> Self {
        let end = vec.len();
        Tokens {
            toks: &vec[..],
            start: 0,
            end,
        }
    }
}
// Number of tokens remaining in this view.
impl<'a> InputLength for Tokens<'a> {
    #[inline(always)]
    fn input_len(&self) -> usize {
        self.toks.len()
    }
}
// A single token always has length one.
impl InputLength for Token {
    #[inline(always)]
    fn input_len(&self) -> usize { 1 }
}
impl<'a> Slice<Range<usize>> for Tokens<'a> {
    #[inline]
    fn slice(&self, range: Range<usize>) -> Self {
        // Narrow the token slice while keeping start/end absolute with
        // respect to the original buffer.
        Tokens {
            toks: self.toks.slice(range.clone()),
            start: self.start + range.start,
            end: self.start + range.end
        }
    }
}
impl<'a> Slice<RangeTo<usize>> for Tokens<'a> {
    #[inline]
    fn slice(&self, range: RangeTo<usize>) -> Self {
        self.slice(0..range.end)
    }
}
impl<'a> Slice<RangeFrom<usize>> for Tokens<'a> {
    #[inline]
    fn slice(&self, range: RangeFrom<usize>) -> Self {
        // `self.end - self.start` is the local length of this view.
        self.slice(range.start..self.end - self.start)
    }
}
impl<'a> Slice<RangeFull> for Tokens<'a> {
    #[inline]
    fn slice(&self, _: RangeFull) -> Self {
        Tokens {
            toks: self.toks,
            start: self.start,
            end: self.end,
        }
    }
}
// Iteration support required by the nom combinators.
impl<'a> InputIter for Tokens<'a> {
    type Item = &'a Token;
    type RawItem = Token;
    type Iter = Enumerate<::std::slice::Iter<'a, Token>>;
    type IterElem = ::std::slice::Iter<'a, Token>;
    #[inline]
    fn iter_indices(&self) -> Enumerate<::std::slice::Iter<'a, Token>> {
        self.toks.iter().enumerate()
    }
    #[inline]
    fn iter_elements(&self) -> ::std::slice::Iter<'a, Token> {
        self.toks.iter()
    }
    #[inline]
    fn position<P>(&self, predicate: P) -> Option<usize>
    where
        P: Fn(Self::RawItem) -> bool,
    {
        // The predicate takes tokens by value, hence the clone per item.
        self.toks.iter().position(|b| predicate(b.clone()))
    }
    #[inline]
    fn slice_index(&self, count: usize) -> Option<usize> {
        // A split at `count` is valid whenever it is within bounds.
        if self.toks.len() >= count {
            Some(count)
        } else {
            None
        }
    }
}
// todo generate operator lexer by macros
// operator
named!(left_parenthesis<&[u8], Token>,
    do_parse!(tag!("(") >> (Token::Symbol(Symbol::LParenthesis)))
);
named!(right_parenthesis<&[u8], Token>,
    do_parse!(tag!(")") >> (Token::Symbol(Symbol::RParenthesis)))
);
named!(left_bracket<&[u8], Token>,
    do_parse!(tag!("[") >> (Token::Symbol(Symbol::LBracket)))
);
named!(right_bracket<&[u8], Token>,
    do_parse!(tag!("]") >> (Token::Symbol(Symbol::RBracket)))
);
named!(left_brace<&[u8], Token>,
    do_parse!(tag!("{") >> (Token::Symbol(Symbol::LBrace)))
);
named!(right_brace<&[u8], Token>,
    do_parse!(tag!("}") >> (Token::Symbol(Symbol::RBrace)))
);
// Newlines are significant and become their own token.
named!(line_end<&[u8], Token>,
    do_parse!(tag!("\n") >> (Token::Symbol(Symbol::LineEnd)))
);
named!(semi_colon<&[u8], Token>,
    do_parse!(tag!(";") >> (Token::Symbol(Symbol::SemiColon)))
);
named!(assign_op<&[u8], Token>,
    do_parse!(tag!("=") >> (Token::Symbol(Symbol::Assign)))
);
named!(equal_op<&[u8], Token>,
    do_parse!(tag!("==") >> (Token::Symbol(Symbol::Equal)))
);
named!(not_equal_op<&[u8], Token>,
    do_parse!(tag!("!=") >> (Token::Symbol(Symbol::NotEqual)))
);
named!(greater_than<&[u8], Token>,
    do_parse!(tag!(">") >> (Token::Symbol(Symbol::GT)))
);
named!(greater_eq<&[u8], Token>,
    do_parse!(tag!(">=") >> (Token::Symbol(Symbol::GE)))
);
named!(lesser_than<&[u8], Token>,
    do_parse!(tag!("<") >> (Token::Symbol(Symbol::LT)))
);
named!(lesser_eq<&[u8], Token>,
    do_parse!(tag!("<=") >> (Token::Symbol(Symbol::LE)))
);
named!(plus_op<&[u8], Token>,
    do_parse!(tag!("+") >> (Token::Symbol(Symbol::Plus)))
);
named!(minus_op<&[u8], Token>,
    do_parse!(tag!("-") >> (Token::Symbol(Symbol::Minus)))
);
named!(mult_op<&[u8], Token>,
    do_parse!(tag!("*") >> (Token::Symbol(Symbol::Mult)))
);
named!(div_op<&[u8], Token>,
    do_parse!(tag!("/") >> (Token::Symbol(Symbol::Div)))
);
named!(mod_op<&[u8], Token>,
    do_parse!(tag!("%") >> (Token::Symbol(Symbol::Mod)))
);
named!(not_op<&[u8], Token>,
    do_parse!(tag!("!") >> (Token::Symbol(Symbol::Not)))
);
named!(comma<&[u8], Token>,
    do_parse!(tag!(",") >> (Token::Symbol(Symbol::Comma)))
);
// all operators
// Two-character operators (==, !=, >=, <=) are listed before their
// one-character prefixes so `alt!` tries them first.
named!(lex_operator<&[u8], Token>, alt!(
    equal_op|
    not_equal_op|
    greater_eq|
    lesser_eq|
    greater_than|
    lesser_than|
    plus_op|
    minus_op|
    mult_op |
    div_op|
    mod_op|
    not_op|
    line_end|
    assign_op
));
// punctuation
named!(lex_punctuation<&[u8], Token>, alt!(
    left_parenthesis |
    right_parenthesis|
    left_bracket|
    right_bracket|
    left_brace|
    right_brace|
    semi_colon|
    comma
));
// keywords and identifier
/// Classifies an identifier-shaped lexeme as either a reserved word, a
/// boolean literal, or a plain identifier.
fn parse_keywords_ident(c: &str, rest: Option<&str>) -> Token {
    let mut s = c.to_owned();
    s.push_str(rest.unwrap_or(""));
    match s.as_ref() {
        "if" => Token::Keyword(Keyword::If),
        "else" => Token::Keyword(Keyword::Else),
        "while" => Token::Keyword(Keyword::While),
        "def" => Token::Keyword(Keyword::Def),
        // `break` and `continue` are declared in the Keyword enum but were
        // previously missing here, so they lexed as plain identifiers.
        "break" => Token::Keyword(Keyword::Break),
        "continue" => Token::Keyword(Keyword::Continue),
        "true" => Token::Bool(true),
        "false" => Token::Bool(false),
        "return" => Token::Keyword(Keyword::Return),
        _ => Token::Identifier(s)
    }
}
// Succeeds iff every byte of the input satisfies the predicate, returning
// the whole input as the parsed value (with nothing left over); otherwise
// fails with a custom error code.
macro_rules! check(
    ($input:expr, $submac:ident!( $($args:tt)* )) => (
        {
            let mut failed = false;
            for &idx in $input {
                if !$submac!(idx, $($args)*) {
                    failed = true;
                    break;
                }
            }
            if failed {
                IResult::Error(Err::Code(ErrorKind::Custom(20)))
            } else {
                IResult::Done(&b""[..], $input)
            }
        }
    );
    ($input:expr, $f:expr) => (
        check!($input, call!($f));
    );
);
// A single alphabetic byte: the first character of an identifier/keyword.
named!(take_1_char, flat_map!(take!(1), check!(is_alphabetic)));
// An identifier or reserved word: one alphabetic character followed by an
// optional alphanumeric tail, classified by `parse_keywords_ident`.
named!(lex_reserved_ident<&[u8], Token>,
    do_parse!(
        c: map_res!(call!(take_1_char), str::from_utf8) >>
        rest: opt!(complete!(map_res!(alphanumeric, str::from_utf8))) >>
        (parse_keywords_ident(c, rest))
    )
);
// integer
// Radix-prefixed literals (0x…, 0o…, 0b…) are tried before plain decimal.
named!(lex_integer<&[u8], Token>,
    do_parse!(
        number: alt_complete!(
            map_opt!(preceded!(tag!("0x"), hex_digit), |v| vu8_to_token(v, 16)) |
            map_opt!(preceded!(tag!("0o"), oct_digit), |v| vu8_to_token(v, 8)) |
            map_opt!(preceded!(tag!("0b"), take_while!(is_one_or_zero)), |v| vu8_to_token(v, 2)) |
            map_opt!(digit, |v| vu8_to_token(v, 10))
        ) >> (u32_to_token(number))
    ));
/// Parses digit bytes in the given radix into an `i64`.
///
/// Returns `None` on overflow, invalid digits, or non-UTF-8 input; the
/// previous version panicked on non-UTF-8 bytes even though `map_opt!`
/// expects a soft failure.
#[inline]
fn vu8_to_token(n: &[u8], radix: u32) -> Option<i64> {
    let s = str::from_utf8(n).ok()?;
    i64::from_str_radix(s, radix).ok()
}
// Wraps a parsed integer value in a `Token::Integer`.
// NOTE(review): despite the name, this takes an `i64`; kept as-is for
// call-site compatibility.
#[inline]
fn u32_to_token(number: i64) -> Token {
    Token::Integer(number)
}
/// True for the two binary digit bytes, used by the 0b… integer parser.
#[inline]
fn is_one_or_zero(c: u8) -> bool {
    b"01".contains(&c)
}
// float
// Exponent suffix: e/E, an optional sign, then digits.
named!(parse_float_exp, recognize!(do_parse!(
    alt!(tag!("e") | tag!("E"))
    >> opt!(alt!(tag!("+") | tag!("-")))
    >> digit
    >> ())));
// A float literal: a '.' with digits on at least one side, plus an
// optional exponent.
named!(lex_float<&[u8], Token>,
    do_parse!(
        float: map_res!( recognize!( do_parse!(
            alt!(
                delimited!(digit, tag!("."), opt!(complete!(digit))) |
                delimited!(opt!(digit), tag!("."), digit))
            >> opt!(complete!(parse_float_exp))
            >> ())),
            str::from_utf8)
        >> (parse_float_token(float)))
);
// Converts recognized float text into a `Token::Float`.
// Assumes the input (produced by `lex_float`'s grammar) is a valid f64
// literal; the unwrap would panic otherwise.
#[inline]
fn parse_float_token(float: &str) -> Token {
    Token::Float(f64::from_str(float).unwrap())
}
// Strings
// Parses string-interior bytes up to (but not consuming) the closing
// quote; a backslash causes the following byte to be taken literally.
fn pis(input: &[u8]) -> IResult<&[u8], Vec<u8>> {
    let (i1, c1) = try_parse!(input, take!(1));
    match c1 {
        // Closing quote: stop without consuming it so the surrounding
        // `delimited!` can match it.
        b"\"" => IResult::Done(input, vec![]),
        b"\\" => {
            let (i2, c2) = try_parse!(i1, take!(1));
            pis(i2).map(|done| concat_slice_vec(c2, done))
        }
        c => pis(i1).map(|done| concat_slice_vec(c, done)),
    }
}
/// Prepends the slice `c` to the byte vector `done`.
fn concat_slice_vec(c: &[u8], done: Vec<u8>) -> Vec<u8> {
    // Pre-size the output, then append head slice followed by the tail.
    let mut joined = Vec::with_capacity(c.len() + done.len());
    joined.extend_from_slice(c);
    joined.extend(done);
    joined
}
/// Validates the bytes as UTF-8 and copies them into an owned `String`.
fn convert_vec_utf8(v: Vec<u8>) -> Result<String, Utf8Error> {
    str::from_utf8(v.as_slice()).map(String::from)
}
// A double-quoted string body, validated as UTF-8.
named!(string<String>,
    delimited!(
        tag!("\""),
        map_res!(pis, convert_vec_utf8),
        tag!("\"")
    )
);
named!(lex_string<&[u8], Token>,
    do_parse!(
        s: string >>
        (Token::String(s))
    )
);
// char
// A single-quoted character literal; escape handling lives in `pic`.
named!(lex_char<&[u8], Token>,
    delimited!(
        char!('\''),
        map!(pic, |x| Token::Char(x)),
        char!('\'')
    )
);
// Parses the single character between quotes, resolving C-style escapes.
fn pic(input: &[u8]) -> IResult<&[u8], char> {
    let (i1, c1) = try_parse!(input, take!(1));
    match c1 {
        b"\\" => {
            let (i2, c2) = try_parse!(i1, take!(1));
            match c2 {
                b"a" => IResult::Done(i2, '\x07'),
                b"b" => IResult::Done(i2, '\x08'),
                b"f" => IResult::Done(i2, '\x0c'),
                b"n" => IResult::Done(i2, '\n'),
                b"r" => IResult::Done(i2, '\r'),
                b"t" => IResult::Done(i2, '\t'),
                b"v" => IResult::Done(i2, '\x0b'),
                b"\\" => IResult::Done(i2, '\\'),
                // NOTE(review): \' and \0 are not accepted, so a quote
                // character cannot be written as a char literal — confirm
                // this is intended.
                _ => IResult::Error(Err::Code(ErrorKind::Custom(20)))
            }
        }
        // An immediate quote means an empty char literal: error.
        b"\'" => IResult::Error(Err::Code(ErrorKind::Char)),
        c => IResult::Done(i1, char::from(c[0]))
    }
}
// Illegal tokens
// Fallback: consume one byte and mark it illegal so lexing can continue.
named!(lex_illegal<&[u8], Token>,
    do_parse!(take!(1) >> (Token::Illegal))
);
// comment
// Line comment body runs up to (not including) the newline.
named!(lex_line_comment<&[u8], Token>,
    preceded!(
        tag!("//"),
        map_res!(take_until!("\n"), |x| {
            str::from_utf8(x).map(|y| Token::Comment(y.to_owned()))
        })
    )
);
named!(lex_multiline_comment<&[u8], Token>,
    delimited!(
        tag!("/*"),
        map_res!(take_until!("*/"), |x| {
            str::from_utf8(x).map(|y| Token::Comment(y.to_owned()))
        }),
        tag!("*/")
    )
);
named!(lex_comment<&[u8], Token>,alt_complete!(
    lex_line_comment | lex_multiline_comment
));
// all
// Order matters: comments before operators ("//" vs "/"), floats before
// integers (so "1.5" is not lexed as 1 then '.'), with the one-byte
// illegal fallback last.
named!(lex_token<&[u8], Token>, alt_complete!(
    lex_comment |
    lex_float |
    lex_integer |
    lex_punctuation |
    lex_string |
    lex_reserved_ident |
    lex_operator |
    lex_char |
    lex_illegal
));
/// Entry point of the lexer.
pub struct Lexer;
impl Lexer {
    /// Lexes the entire input and appends `Token::EOF` to the stream.
    pub fn lex_tokens(bytes: &[u8]) -> IResult<&[u8], Vec<Token>> {
        lex_tokens(bytes).map(|result| [&result[..], &vec![Token::EOF][..]].concat())
    }
}
// Skips spaces and tabs between tokens (newlines are tokens themselves).
named!(pub space, eat_separator!(&b" \t"[..]));
macro_rules! wsl (
    ($i:expr, $($args:tt)*) => ({
        sep!($i, space, $($args)*)
    })
);
named!(lex_tokens<&[u8], Vec<Token>>, wsl!(many0!(lex_token)));
#[cfg(test)]
mod tests {
    use super::*;
    // Single-operator sanity check.
    #[test]
    fn op_test() {
        assert_eq!(equal_op(b"==").to_result(), Ok(Token::Symbol(Symbol::Equal)));
    }
    // Integer radix prefixes and float forms.
    #[test]
    fn number_test() {
        assert_eq!(lex_integer(b"123").to_result(), Ok(Token::Integer(123)));
        assert_eq!(lex_integer(b"0x123").to_result(), Ok(Token::Integer(0x123)));
        assert_eq!(lex_integer(b"0o100").to_result(), Ok(Token::Integer(0o100)));
        assert_eq!(lex_integer(b"0b100").to_result(), Ok(Token::Integer(0b100)));
        assert_eq!(lex_float(b"1.32").to_result(), Ok(Token::Float(1.32)));
        assert_eq!(lex_float(b"1.32e4").to_result(), Ok(Token::Float(1.32e4)));
    }
    // A backslash escape keeps the following byte literally.
    #[test]
    fn string_test() {
        assert_eq!(lex_string(b"\"hello \\\n world\"").to_result(),
            Ok(Token::String("hello \n world".to_string())));
    }
    #[test]
    fn char_test() {
        assert_eq!(lex_char(b"'a'").to_result(), Ok(Token::Char('a')));
        assert_eq!(lex_char(b"'\\\\'").to_result(), Ok(Token::Char('\\')));
        assert_eq!(lex_char(b"''").to_result(), Err(Err::Code(ErrorKind::Char)));
    }
    #[test]
    fn comment_test() {
        assert_eq!(lex_comment(b"//line comment\ni = 2").to_result(),
            Ok(Token::Comment("line comment".to_string())));
        assert_eq!(lex_comment(b"/* one\ntwo\n three\n */").to_result(),
            Ok(Token::Comment(" one\ntwo\n three\n ".to_string())));
    }
    // End-to-end lex of a small program, checking the full token stream.
    #[test]
    fn lex_reserved_ident() {
        let s = b"
str = \"123456\" // comment
/* hello
world!*/
c = '\\n'
d = 12.3
d = 12.3e3
even = -0
odd = 0
i = 1
while i < 10 {
if i % 2 == 0 {
even = even + i;
} else {
odd = odd + 1;
}
i = i + 1
}
(even + odd) * 12 / 12 - 4
";
        let tokens = Lexer::lex_tokens(s).to_result().unwrap();
        let result = vec![
            Token::Symbol(Symbol::LineEnd),
            Token::Identifier("str".to_string()),
            Token::Symbol(Symbol::Assign),
            Token::String("123456".to_string()),
            Token::Comment(" comment".to_string()),
            Token::Symbol(Symbol::LineEnd),
            Token::Comment(" hello\n world!".to_string()),
            Token::Symbol(Symbol::LineEnd),
            Token::Identifier("c".to_string()),
            Token::Symbol(Symbol::Assign),
            Token::Char('\n'),
            Token::Symbol(Symbol::LineEnd),
            Token::Identifier("d".to_string()),
            Token::Symbol(Symbol::Assign),
            Token::Float(12.3),
            Token::Symbol(Symbol::LineEnd),
            Token::Identifier("d".to_string()),
            Token::Symbol(Symbol::Assign),
            Token::Float(12.3e3),
            Token::Symbol(Symbol::LineEnd),
            Token::Identifier("even".to_string()),
            Token::Symbol(Symbol::Assign),
            Token::Symbol(Symbol::Minus),
            Token::Integer(0),
            Token::Symbol(Symbol::LineEnd),
            Token::Identifier("odd".to_string()),
            Token::Symbol(Symbol::Assign),
            Token::Integer(0),
            Token::Symbol(Symbol::LineEnd),
            Token::Identifier("i".to_string()),
            Token::Symbol(Symbol::Assign),
            Token::Integer(1),
            Token::Symbol(Symbol::LineEnd),
            Token::Keyword(Keyword::While),
            Token::Identifier("i".to_string()),
            Token::Symbol(Symbol::LT),
            Token::Integer(10),
            Token::Symbol(Symbol::LBrace),
            Token::Symbol(Symbol::LineEnd),
            Token::Keyword(Keyword::If),
            Token::Identifier("i".to_string()),
            Token::Symbol(Symbol::Mod),
            Token::Integer(2),
            Token::Symbol(Symbol::Equal),
            Token::Integer(0),
            Token::Symbol(Symbol::LBrace),
            Token::Symbol(Symbol::LineEnd),
            Token::Identifier("even".to_string()),
            Token::Symbol(Symbol::Assign),
            Token::Identifier("even".to_string()),
            Token::Symbol(Symbol::Plus),
            Token::Identifier("i".to_string()),
            Token::Symbol(Symbol::SemiColon),
            Token::Symbol(Symbol::LineEnd),
            Token::Symbol(Symbol::RBrace),
            Token::Keyword(Keyword::Else),
            Token::Symbol(Symbol::LBrace),
            Token::Symbol(Symbol::LineEnd),
            Token::Identifier("odd".to_string()),
            Token::Symbol(Symbol::Assign),
            Token::Identifier("odd".to_string()),
            Token::Symbol(Symbol::Plus),
            Token::Integer(1),
            Token::Symbol(Symbol::SemiColon),
            Token::Symbol(Symbol::LineEnd),
            Token::Symbol(Symbol::RBrace),
            Token::Symbol(Symbol::LineEnd),
            Token::Identifier("i".to_string()),
            Token::Symbol(Symbol::Assign),
            Token::Identifier("i".to_string()),
            Token::Symbol(Symbol::Plus),
            Token::Integer(1),
            Token::Symbol(Symbol::LineEnd),
            Token::Symbol(Symbol::RBrace),
            Token::Symbol(Symbol::LineEnd),
            Token::Symbol(Symbol::LParenthesis),
            Token::Identifier("even".to_string()),
            Token::Symbol(Symbol::Plus),
            Token::Identifier("odd".to_string()),
            Token::Symbol(Symbol::RParenthesis),
            Token::Symbol(Symbol::Mult),
            Token::Integer(12),
            Token::Symbol(Symbol::Div),
            Token::Integer(12),
            Token::Symbol(Symbol::Minus),
            Token::Integer(4),
            Token::Symbol(Symbol::LineEnd),
            Token::EOF
        ];
        assert_eq!(tokens, result)
    }
}
/// Reads `filename` and returns its lines as owned `String`s.
///
/// Uses `str::lines`, so a trailing newline no longer produces a spurious
/// final empty element (the old `split("\n")` did) and `\r\n` endings are
/// handled.
///
/// # Panics
/// Panics if the file cannot be read.
pub fn read_lines(filename: String) -> Vec<String> {
    std::fs::read_to_string(&filename)
        .expect("Failed to read input")
        .lines()
        .map(|s| s.to_string())
        .collect()
}
|
use metrics::*;
use specs::prelude::*;
use std::time::Instant;
use tokio::prelude::Sink;
use types::*;
use websocket::OwnedMessage;
use std::mem;
use std::sync::mpsc::{channel, Receiver};
/// ECS system that drains queued outgoing websocket messages and flushes
/// every connection's sink.
pub struct PollComplete {
    // Receives (connection, message) pairs queued by other systems.
    channel: Receiver<(ConnectionId, OwnedMessage)>,
}
impl PollComplete {
    /// Creates the system reading from the given message channel.
    pub fn new(channel: Receiver<(ConnectionId, OwnedMessage)>) -> Self {
        Self { channel }
    }
}
impl<'a> System<'a> for PollComplete {
    type SystemData = (Write<'a, Connections>, ReadExpect<'a, MetricsHandler>);
    /// Drains every queued message into its connection's sink, then polls
    /// each sink to completion, reporting metrics best-effort.
    fn run(&mut self, (mut conns, metrics): Self::SystemData) {
        let start = Instant::now();
        let mut cnt = 0;
        while let Ok((id, msg)) = self.channel.try_recv() {
            cnt += 1;
            match conns.0.get_mut(&id) {
                Some(ref mut conn) => {
                    Connections::send_sink(&mut conn.sink, msg);
                }
                // The connection probably closed,
                // do nothing
                None => trace!(
                    target: "server",
                    "Tried to send message to closed connection {:?}",
                    id
                ),
            }
        }
        // Metrics are best-effort: swallow failures with `.err()` like the
        // other metrics calls below, instead of panicking mid-frame as the
        // previous `.unwrap()` did.
        metrics.count("packets-sent", cnt).err();
        for conn in conns.iter_mut() {
            conn.sink
                .poll_complete()
                .map_err(|e| {
                    info!("poll_complete failed with error {:?}", e);
                })
                .err();
        }
        metrics
            .time_duration("poll-complete", Instant::now() - start)
            .err();
    }
}
use dispatch::SystemInfo;
use std::any::Any;
impl SystemInfo for PollComplete {
    type Dependencies = ();
    fn name() -> &'static str {
        // NOTE(review): uses line!(), so the "name" is the module path
        // plus a line number rather than the type name — confirm intent.
        concat!(module_path!(), "::", line!())
    }
    fn new() -> Self {
        // This system needs a channel; it must be built via `new_args`.
        unimplemented!();
    }
    fn new_args(mut a: Box<Any>) -> Self {
        let r = a
            .downcast_mut::<Receiver<(ConnectionId, OwnedMessage)>>()
            .unwrap();
        // Replace the channel within the box with a
        // dummy one, which will be dropped immediately
        // anyway
        Self::new(mem::replace(r, channel().1))
    }
}
|
use clap::{App, Arg, ArgMatches};
use result::{FdownError, Result};
use std::env;
use std::ffi::OsString;
// Command-line flag / option names.
const CONFIG: &'static str = "config";
const COUNT: &'static str = "count";
const CATEGORY: &'static str = "category";
// Default config-file path used when --config is not given.
const DEFAULT_CONFIG: &'static str = "~/.fdown";
const SUBS: &'static str = "subs";
const UNSAVE: &'static str = "unsave";
/// Parsed command-line arguments for `fdown`.
pub struct Args<'a> {
    matches: ArgMatches<'a>,
}
impl<'a> Args<'a> {
    /// Parses arguments from the process environment.
    pub fn parse() -> Result<Args<'a>> {
        Args::parse_from(env::args_os())
    }
    /// Parses arguments from an arbitrary iterator (also used by tests).
    fn parse_from<I, T>(itr: I) -> Result<Args<'a>>
        where I: IntoIterator<Item = T>,
              T: Into<OsString> {
        let matches = try!(parse_cmd_line_from(itr));
        Ok(Args { matches: matches })
    }
    /// Path of the config file (`--config`), defaulting to `~/.fdown`.
    pub fn config_file_location(&self) -> &str {
        // Return something else and error if file not found
        return self.matches.value_of(CONFIG).unwrap_or(DEFAULT_CONFIG);
    }
    /// Whether `--subs` was passed.
    pub fn list_subs(&self) -> bool {
        self.matches.occurrences_of(SUBS) > 0
    }
    /// Whether `-U`/`--unsave` was passed.
    pub fn should_unsave(&self) -> bool {
        self.matches.occurrences_of(UNSAVE) > 0
    }
    /// Category filter (`-C`/`--category`), if given.
    pub fn filter_category(&self) -> Option<&str> {
        self.matches.value_of(CATEGORY)
    }
    /// Number of entries to download (`-c`/`--count`), defaulting to 20.
    // NOTE(review): panics on a non-numeric value; consider surfacing a
    // Result instead.
    pub fn number_of_entries(&self) -> usize {
        self.matches.value_of(COUNT).unwrap_or("20").parse::<usize>().unwrap()
    }
    /// Currently hard-coded to false.
    pub fn write_to_directory(&self) -> bool {
        false
    }
}
/// Builds the clap argument parser and runs it over `itr`.
fn parse_cmd_line_from<'a, I, T>(itr: I) -> Result<ArgMatches<'a>>
    where I: IntoIterator<Item = T>,
          T: Into<OsString> {
    let builder = App::new("fdown")
        .version("0.0.1")
        .author("George Madrid (gmadrid@gmail.com)")
        .arg(Arg::with_name(CATEGORY)
            .short("C")
            .long(CATEGORY)
            .help("Only process entries in this category")
            .takes_value(true))
        .arg(Arg::with_name(CONFIG)
            .long(CONFIG)
            .takes_value(true)
            .help("Location of the config file"))
        .arg(Arg::with_name(COUNT)
            .long(COUNT)
            .short("c")
            .takes_value(true)
            .help("Number of entries to download"))
        .arg(Arg::with_name(SUBS)
            .long(SUBS)
            .help("List the subscriptions"))
        .arg(Arg::with_name(UNSAVE)
            .short("U")
            .long(UNSAVE)
            .help("Unsave the entry after saving it.")
            // --unsave only makes sense together with a category filter.
            .requires(CATEGORY));
    builder.get_matches_from_safe(itr).map_err(FdownError::from)
}
#[cfg(test)]
mod tests {
    use super::*;
    // First element ("foo") stands in for the program name.
    fn args_from<'a, 'b, 'c>(lst: &'a [&'b str]) -> Args<'c> {
        Args::parse_from(lst.iter()).unwrap()
    }
    #[test]
    fn count() {
        // Test default
        let args = args_from(&["foo"]);
        assert_eq!(20, args.number_of_entries());
        let args = args_from(&["foo", "--count", "57"]);
        assert_eq!(57, args.number_of_entries());
    }
    #[test]
    #[should_panic]
    fn count_missing() {
        args_from(&["foo", "--count"]);
    }
    #[test]
    fn filter_category() {
        let args = args_from(&["foo"]);
        assert_eq!(None, args.filter_category());
        let args = args_from(&["foo", "-C", "quux"]);
        assert_eq!("quux", args.filter_category().unwrap());
        let args = args_from(&["foo", "--category", "bam"]);
        assert_eq!("bam", args.filter_category().unwrap());
    }
    #[test]
    #[should_panic]
    fn filter_category_missing() {
        args_from(&["foo", "-C"]);
    }
    #[test]
    fn subs() {
        let args = args_from(&["foo", "--subs"]);
        assert_eq!(true, args.list_subs());
        let args = args_from(&["foo"]);
        assert_eq!(false, args.list_subs());
    }
    #[test]
    fn unsave() {
        let args = args_from(&["foo", "-C", "cat"]);
        assert_eq!(false, args.should_unsave());
        let args = args_from(&["foo", "-C", "cat", "-U"]);
        assert_eq!(true, args.should_unsave());
        let args = args_from(&["foo", "-C", "cat", "--unsave"]);
        assert_eq!(true, args.should_unsave());
    }
    // --unsave requires --category (see the clap builder).
    #[test]
    #[should_panic]
    fn unsave_no_cat() {
        args_from(&["foo", "-U"]);
    }
    #[test]
    fn config_file_location() {
        let args = Args::parse_from(["foo", "--config", "foobar"].iter()).unwrap();
        assert_eq!("foobar", args.config_file_location());
        // Test default
        let args = Args::parse_from(["foo"].iter()).unwrap();
        assert_eq!("~/.fdown", args.config_file_location());
    }
    #[should_panic]
    #[test]
    fn config_file_location_missing() {
        Args::parse_from(["foo", "--config"].iter()).unwrap();
    }
}
|
use std::io::{BufRead, Write};
use actix::prelude::*;
use futures::{future, prelude::*, stream};
use crate::error::CliError;
use git_lfs_spec::transfer::custom;
/// An inbound git-lfs custom-transfer event, decoded from one stdin line.
#[derive(Debug, Clone)]
struct Input(custom::Event);
impl Message for Input {
    // Each Input is answered with an Output event or a CLI error.
    type Result = Result<Output, CliError>;
}
/// An outbound git-lfs custom-transfer event, serialized to stdout.
#[derive(Debug, Clone)]
struct Output(custom::Event);
impl Message for Output {
    // Delivery is fire-and-forget; no meaningful payload either way.
    type Result = Result<(), ()>;
}
/// Actor that drives a git-lfs custom-transfer session over stdin/stdout.
///
/// `engine` is populated once the protocol's `Init` event has been
/// received; until then no transfer requests are accepted.
// The hand-written `impl Default` (engine: None) is exactly what the
// derive produces, so derive it instead.
#[derive(Default)]
pub struct Transfer {
    engine: Option<actix::Addr<Engine>>,
}
impl Actor for Transfer {
    type Context = Context<Self>;
    /// On start-up, turn stdin into a stream of parsed protocol events.
    ///
    /// Each line is expected to hold one JSON-encoded `custom::Event`;
    /// I/O failures map to `CliError::Io` and decode failures to
    /// `CliError::SerdeJsonError`, both surfacing as stream errors.
    fn started(&mut self, ctx: &mut <Self as Actor>::Context) {
        let mut read_it =
            std::io::BufReader::new(std::io::stdin())
                .lines()
                .map(|r| -> Result<Input, CliError> {
                    r.map_err(CliError::Io).and_then(|buf| {
                        serde_json::from_str(&buf)
                            .map(Input)
                            .map_err(CliError::SerdeJsonError)
                    })
                });
        // TODO: Convert to stdin-reading style used in Clean implementation.
        // NOTE(review): this futures-0.1 poll_fn wraps a *blocking* iterator,
        // so polling blocks until a full stdin line is available.
        ctx.add_stream(stream::poll_fn(move || -> Poll<Option<Input>, CliError> {
            read_it.next().transpose().map(Async::Ready)
        }));
    }
}
impl StreamHandler<Input, CliError> for Transfer {
fn handle(&mut self, event: Input, ctx: &mut <Self as Actor>::Context) {
match (self.engine.clone(), event) {
(None, Input(custom::Event::Init(init))) => {
self.engine = Some(Engine::new(init).start());
println!("{{}}");
}
(None, event) => {
panic!("{}", CliError::UnexpectedEvent(event.0));
}
(Some(_), Input(custom::Event::Init(init))) => {
panic!("{}", CliError::UnexpectedEvent(custom::Event::Init(init)));
}
(Some(_), Input(custom::Event::Terminate)) => {
System::current().stop();
}
(Some(engine), event) => {
ctx.wait(actix::fut::wrap_future(engine.send(event.clone())).then(
move |res, actor: &mut Self, ctx| match res.unwrap() {
Ok(response) => {
println!(
"{}",
serde_json::to_string(&response.0)
.expect("Failed to serialize an event")
);
actix::fut::ok(())
}
Err(err) => {
panic!("{:?}", err);
actix::fut::err(())
}
},
));
}
};
}
fn error(&mut self, err: CliError, ctx: &mut Context<Self>) -> Running {
panic!("{:?}", err);
}
}
impl Handler<Output> for Transfer {
    type Result = <Output as Message>::Result;
    /// Serialize the outgoing event as JSON and write it to stdout, where
    /// the git-lfs parent process reads it.
    fn handle(&mut self, event: Output, _ctx: &mut <Self as Actor>::Context) -> Self::Result {
        let json = serde_json::to_string(&event.0).expect("Failed to serialize an event");
        println!("{}", json);
        Ok(())
    }
}
/// Actor that performs the actual object transfers.
struct Engine {
    init: custom::Init, // the Init event that configured this session
}
impl Engine {
    fn new(init: custom::Init) -> Self {
        Self { init }
    }
}
impl Actor for Engine {
    type Context = Context<Self>;
}
impl Handler<Input> for Engine {
    type Result = ResponseActFuture<Self, Output, CliError>;
    /// Execute one transfer request.
    ///
    /// Downloads stream the IPFS block for the object's sha256-derived CID
    /// into a file (named after the oid) in the current directory, printing
    /// a Progress event per chunk and finishing with a Complete event.
    /// Uploads are acknowledged without doing any work (see comment below).
    fn handle(&mut self, event: Input, ctx: &mut <Self as Actor>::Context) -> Self::Result {
        match (event.0, &self.init.operation) {
            (custom::Event::Download(download), custom::Operation::Download) => {
                let cid = crate::ipfs::sha256_to_cid(cid::Codec::DagProtobuf, &download.object.oid);
                if let Some(cid) = cid {
                    let oid = download.object.oid.clone();
                    let mut output_path = std::env::current_dir().unwrap();
                    output_path.push(&download.object.oid);
                    let mut output = std::fs::File::create(&output_path).unwrap();
                    Box::new(
                        // Stream block chunks; each chunk is written to the
                        // output file and its byte length forwarded.
                        actix::fut::wrap_stream(
                            ipfs_api::IpfsClient::default()
                                .block_get(&crate::ipfs::Path::ipfs(cid.clone()).to_string())
                                .map_err(|err| err.to_string())
                                .map_err(CliError::IpfsApiError)
                                .and_then(move |x| {
                                    output.write_all(&x).map(|_| x.len()).map_err(CliError::Io)
                                }),
                        )
                        // Accumulate total bytes while emitting Progress events.
                        .fold(0, move |mut bytes_so_far, x, actor: &mut Self, ctx| {
                            bytes_so_far += x as u64;
                            println!(
                                "{}",
                                serde_json::to_string(&custom::Event::Progress(
                                    custom::Progress {
                                        oid: oid.clone(),
                                        bytes_so_far,
                                        bytes_since_last: x as u64,
                                    }
                                    .into()
                                ))
                                .expect("Failed to serialize an event")
                            );
                            // TODO: Don't disobey actix style and just print events here, there must be a better way...
                            // ctx.spawn(actix::fut::wrap_future(actor.transfer.send(
                            //     Output(custom::Event::Progress(custom::Progress {
                            //         oid: oid.clone(),
                            //         bytes_so_far,
                            //         bytes_since_last: x,
                            //     })),
                            // ).then(|_| {
                            //     future::ok(())
                            // })));
                            actix::fut::ok(bytes_so_far)
                        })
                        .map(move |_, _, _| {
                            Output(custom::Event::Complete(
                                custom::Complete {
                                    oid: download.object.oid.clone(),
                                    error: None,
                                    path: Some(output_path),
                                }
                                .into(),
                            ))
                        }),
                    )
                } else {
                    // No CID could be derived: report a 404-style Complete.
                    Box::new(actix::fut::wrap_future::<_, Self>(future::ok(Output(
                        custom::Event::Complete(
                            custom::Complete {
                                oid: download.object.oid.clone(),
                                error: Some(custom::Error {
                                    code: 404,
                                    message: "Object not found".to_string(),
                                }),
                                path: None,
                            }
                            .into(),
                        ),
                    ))))
                }
            }
            // Upload transfer is dummy, the smudge filter adds files to IPFS already
            // TODO: just check the sha256 hash with a /api/v0/block/get
            (custom::Event::Upload(upload), custom::Operation::Upload) => Box::new(
                actix::fut::wrap_future::<_, Self>(future::ok(Output(custom::Event::Complete(
                    custom::Complete {
                        oid: upload.object.oid,
                        error: None,
                        path: None,
                    }
                    .into(),
                )))),
            ),
            // Event kind does not match the negotiated operation.
            (event, _) => Box::new(actix::fut::wrap_future::<_, Self>(future::err(
                CliError::UnexpectedEvent(event),
            ))),
        }
    }
}
|
use std::default::Default;
use std::from_str;
use std::fmt;
use types::elements::*;
use types::vertexs::*;
// Obj file structure
// NOTE(review): this file is written in pre-1.0 Rust (`fmt::Show`, `box`
// expressions, `std::from_str`) and will not build on a modern compiler.
pub struct Obj {
    // Vertex data
    pub geometric_vertices: Vec<GeometricVertex>,
    pub texture_vertices: Vec<TextureVertex>,
    pub vertex_normals: Vec<VertexNormals>,
    pub parameter_space_vertices: Vec<ParameterSpaceVertex>,
    // Elements
    pub points: Vec<Point>,
    pub faces: Vec<Face>,
    // Free-form curve/surfave body statements
    // Connectivity between free-form surfaces
    // Grouping
    // Display/render attributes
}
// One parsed line of an .obj file, tagged by statement type.
// `Failed` marks a line that could not be parsed; `Unused` marks a line
// type that is recognised but not represented here.
pub enum LineType {
    GeometricVertexLine(GeometricVertex),
    TextureVertexLine(TextureVertex),
    VertexNormalsLine(VertexNormals),
    ParameterSpaceVertexLine(ParameterSpaceVertex),
    PointLine(Point),
    FaceLine(Face),
    Failed,
    Unused,
}
// Delegate formatting to the wrapped vertex/element; `Failed`/`Unused`
// render as the empty string.
impl fmt::Show for LineType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            GeometricVertexLine(vertex) => write!(f, "{}", vertex),
            TextureVertexLine(vertex) => write!(f, "{}", vertex),
            VertexNormalsLine(vertex) => write!(f, "{}", vertex),
            ParameterSpaceVertexLine(vertex) => write!(f, "{}", vertex),
            PointLine(element) => write!(f, "{}", element),
            FaceLine(element) => write!(f, "{}", element),
            _ => write!(f, ""),
        }
    }
}
// An empty model: every vertex and element list starts empty.
impl Default for Obj {
    fn default() -> Obj {
        Obj {
            geometric_vertices: Vec::new(),
            texture_vertices: Vec::new(),
            vertex_normals: Vec::new(),
            parameter_space_vertices: Vec::new(),
            points: Vec::new(),
            faces: Vec::new(),
        }
    }
}
impl Obj {
    // All vertex statements (geometric, texture, normal, parameter-space),
    // each copied into a LineType wrapper, collected into a boxed Vec.
    pub fn vertices(&self) -> Box<Vec<LineType>> {
        box self.geometric_vertices.iter().map(|v| GeometricVertexLine(*v)).chain(
            self.texture_vertices.iter().map(|v| TextureVertexLine(*v)).chain(
                self.vertex_normals.iter().map(|v| VertexNormalsLine(*v)).chain(
                    self.parameter_space_vertices.iter().map(|v| ParameterSpaceVertexLine(*v))))).collect()
    }
    // All element statements (points, then faces).
    pub fn elements(&self) -> Box<Vec<LineType>> {
        box self.points.iter().map(|e| PointLine(*e)).chain(
            self.faces.iter().map(|e| FaceLine(*e))).collect()
    }
    // Vertices followed by elements, as one list.
    pub fn all(&self) -> Box<Vec<LineType>> {
        box self.vertices().iter().map(|x| *x).chain(
            self.elements().iter().map(|x| *x)).collect()
    }
}
// Generic function to pull the numeric data from a line in an obj file
// Splits on single spaces and keeps only tokens that parse as T,
// silently dropping anything that does not (including the keyword).
pub fn extract_line_data<T: from_str::FromStr>(line: &str) -> Vec<T> {
    line.trim().split(' ').filter_map(from_str).collect()
}
/// Print two fixed integers on one formatted line.
fn main() {
    let (x, y) = (5, 10);
    println!("x = {} and y = {}", x, y);
}
//! Simple echo websocket server.
//! Open `http://localhost:8080/ws/index.html` in browser
//! or [python console client](https://github.com/actix/actix-web/blob/master/examples/websocket-client.py)
//! could be used for testing.
#![allow(unused_variables)]
extern crate actix;
extern crate actix_web;
extern crate env_logger;
use actix::prelude::*;
use actix_web::{
http, middleware, server, fs, ws, App, HttpRequest, HttpResponse, Error};
/// do websocket handshake and start `MyWebSocket` actor
fn ws_index(r: HttpRequest) -> Result<HttpResponse, Error> {
    ws::start(r, MyWebSocket)
}
/// websocket connection is long running connection, it easier
/// to handle with an actor
struct MyWebSocket;
impl Actor for MyWebSocket {
    // Runs inside a websocket-aware context so it can send frames.
    type Context = ws::WebsocketContext<Self>;
}
/// Handler for `ws::Message`
impl StreamHandler<ws::Message, ws::ProtocolError> for MyWebSocket {
    /// Echo every incoming frame back to the client: pings get pongs,
    /// text and binary frames are mirrored, close stops the actor, and
    /// all other frame types are ignored.
    fn handle(&mut self, msg: ws::Message, ctx: &mut Self::Context) {
        println!("WS: {:?}", msg);
        match msg {
            ws::Message::Ping(payload) => ctx.pong(&payload),
            ws::Message::Text(payload) => ctx.text(payload),
            ws::Message::Binary(payload) => ctx.binary(payload),
            ws::Message::Close(_) => ctx.stop(),
            _ => {}
        }
    }
}
// Configure logging, build the app (logger middleware, /ws/ websocket
// route, static file handler), bind 127.0.0.1:8080 and run the system.
fn main() {
    ::std::env::set_var("RUST_LOG", "actix_web=info");
    env_logger::init();
    let sys = actix::System::new("ws-example");
    server::new(
        || App::new()
            // enable logger
            .middleware(middleware::Logger::default())
            // websocket route
            .resource("/ws/", |r| r.method(http::Method::GET).f(ws_index))
            // static files
            .handler("/", fs::StaticFiles::new("../static/")
                .index_file("index.html")))
        // start http server on 127.0.0.1:8080
        .bind("127.0.0.1:8080").unwrap()
        .start();
    println!("Started http server: 127.0.0.1:8080");
    let _ = sys.run();
}
|
extern crate gtk;
extern crate gio;
use gtk::{
WidgetExt, WindowExt, ContainerExt,
TextViewExt, TextBufferExt, GtkApplicationExt,
DialogExt, FileChooserExt, BinExt, Cast
};
use gio::{
ApplicationExt, SimpleActionExt, ActionMapExt,
MenuExt, FileExt
};
/// Entry point: register the application actions and menu bar, then open
/// the first editor window.
fn main() {
    match gtk::Application::new("com.github.nixiesquid.note_bit", gio::APPLICATION_HANDLES_OPEN) {
        Ok(app) => {
            app.connect_activate(|app| {
                // "new_window": open an additional editor window.
                let new_window_action = gio::SimpleAction::new("new_window", None);
                {
                    let app = app.clone();
                    new_window_action.connect_activate(move |_, _| {
                        create_window(&app);
                    });
                }
                // "open": choose a file and load it into a new window.
                let open_action = gio::SimpleAction::new("open", None);
                {
                    let app = app.clone();
                    open_action.connect_activate(move |_, _| {
                        if let Some(file) = run_file_chooser_dialog() {
                            open(file, app.clone());
                        }
                    });
                }
                // "quit": shut the application down.
                let quit_action = gio::SimpleAction::new("quit", None);
                {
                    let app = app.clone();
                    // Fix: this handler was attached to `new_window_action`,
                    // which left "quit" without a handler and made "New
                    // Window" quit the app instead.
                    quit_action.connect_activate(move |_, _| {
                        app.quit();
                    });
                }
                app.add_action(&new_window_action);
                app.add_action(&open_action);
                app.add_action(&quit_action);
                {
                    use gio::{ Menu, MenuItem };
                    let menubar = Menu::new();
                    // file menu
                    let submenu_file = Menu::new();
                    let create_new_window = MenuItem::new("New Window", "app.new_window");
                    // Fix: the action is registered under the name "open",
                    // so the menu item must target "app.open" (the previous
                    // "app.open_action" pointed at a nonexistent action).
                    let open = MenuItem::new("Open", "app.open");
                    let quit = MenuItem::new("Quit", "app.quit");
                    submenu_file.append_item(&create_new_window);
                    submenu_file.append_item(&open);
                    submenu_file.append_item(&quit);
                    // edit menu
                    let submenu_edit = Menu::new();
                    let copy = MenuItem::new("Copy", "win.copy");
                    let paste = MenuItem::new("Paste", "win.paste");
                    // Fix: restore `&copy` (the source contained the
                    // HTML-entity corruption `©`).
                    submenu_edit.append_item(&copy);
                    submenu_edit.append_item(&paste);
                    menubar.append_submenu("File", &submenu_file);
                    menubar.append_submenu("Edit", &submenu_edit);
                    app.set_menubar(&menubar);
                }
                create_window(&app);
            });
            app.run(&[""]);
        },
        Err(_) => {
            println!("Application start up error");
        }
    };
}
fn create_window(app: >k::Application) -> gtk::ApplicationWindow {
let win = gtk::ApplicationWindow::new(&app);
win.set_default_size(800, 600);
win.set_title("NoteBit");
let scr_win = gtk::ScrolledWindow::new(None, None);
let txt_view = gtk::TextView::new();
scr_win.add(&txt_view);
win.add(&scr_win);
let copy_action = gio::SimpleAction::new("copy", None);
{
let txt_view = txt_view.clone();
copy_action.connect_activate(move |_, _| {
let clipboard = txt_view.get_clipboard(&gdk::SELECTION_CLIPBOARD);
txt_view.get_buffer().unwrap().copy_clipboard(&clipboard);
});
}
let paste_action = gio::SimpleAction::new("paste", None);
{
let txt_view = txt_view.clone();
paste_action.connect_activate(move |_, _| {
let clipboard = txt_view.get_clipboard(&gdk::SELECTION_CLIPBOARD);
let buf = txt_view.get_buffer().unwrap();
buf.paste_clipboard(&clipboard, None, txt_view.get_editable());
});
}
win.add_action(©_action);
win.add_action(&paste_action);
win.show_all();
win
}
/// Show a modal "Open File" chooser and return the selected file, or
/// `None` if the user cancelled (or no filename was available).
fn run_file_chooser_dialog() -> Option<gio::File> {
    let dialog = gtk::FileChooserDialog::new::<gtk::Window>
        (
            Some("Open File"),
            None,
            gtk::FileChooserAction::Open
        );
    // Fix: button label typo "Calncel" -> "Cancel".
    dialog.add_button("Cancel", gtk::ResponseType::Cancel.into());
    dialog.add_button("Open", gtk::ResponseType::Accept.into());
    let file = if dialog.run() == gtk::ResponseType::Accept.into() {
        // The chooser can report Accept without a filename; treat that
        // as "nothing chosen".
        dialog.get_filename().map(|path| gio::File::new_for_path(path.as_path()))
    } else {
        None
    };
    dialog.destroy();
    file
}
// Open `file` in a fresh window and title the window after the file name.
fn open(file: gio::File, app: gtk::Application) {
    let win = create_window(&app);
    load_file(file.clone(), win.clone());
    win.set_title(file.get_basename().unwrap().to_str().unwrap());
}
// Read the file's contents (must be valid UTF-8) into the window's text
// buffer, reached by walking window -> ScrolledWindow -> TextView.
// Load failures are silently ignored.
fn load_file(file: gio::File, win: gtk::ApplicationWindow) {
    if let Ok((v, _)) = file.load_contents(None) {
        let text = String::from_utf8(v).unwrap();
        let scr_win = win.get_child().unwrap().downcast::<gtk::ScrolledWindow>().ok().unwrap();
        let txt_view = scr_win.get_child().unwrap().downcast::<gtk::TextView>().ok().unwrap();
        let buf = txt_view.get_buffer().unwrap();
        buf.set_text(&text);
    }
}
|
use ::Provider;
use ::Temperature;
use ::TemperatureUnits;
/// A `Provider` that always reports the same fixed temperature
/// (useful as a stand-in for testing).
pub struct FakeProvider;
impl Provider for FakeProvider {
    fn get_temperature(&self) -> Option<Temperature> {
        // 23 whole units plus 500 milli-units, Celsius (presumably 23.5 °C).
        Some(Temperature {
            digit: 23,
            milli: 500,
            unit: TemperatureUnits::Celsius,
        })
    }
}
|
use crate::connection::{ConnectionError, Credentials, Request as EthaneRequest};
use reqwest::blocking::Client;
use reqwest::header::HeaderMap;
/// Wraps a blocking http client
pub struct Http {
    /// The domain where requests are sent
    address: String,
    // Optional credentials turned into an Authorization header per request.
    credentials: Option<Credentials>,
    client: Client,
}
impl Http {
    /// Create a connector that POSTs to `address`, optionally attaching
    /// HTTP credentials to every request.
    pub fn new(address: &str, credentials: Option<Credentials>) -> Self {
        let client = Client::new();
        Self {
            address: String::from(address),
            credentials,
            client,
        }
    }
    /// Assemble the headers sent with every JSON request: an optional
    /// `Authorization` header followed by JSON content-type/accept.
    fn json_request_headers(&self) -> HeaderMap {
        let mut headers = HeaderMap::new();
        if let Some(credentials) = self.credentials.as_ref() {
            let auth = credentials.to_auth_string().parse().unwrap();
            headers.insert("Authorization", auth);
        }
        headers.insert("Content-Type", "application/json".parse().unwrap());
        headers.insert("Accept", "application/json".parse().unwrap());
        headers
    }
}
impl EthaneRequest for Http {
    /// POST `cmd` to the configured address with the JSON headers and
    /// return the response body; any transport failure (send or body
    /// read) maps to `ConnectionError::HttpError`.
    fn request(&mut self, cmd: String) -> Result<String, ConnectionError> {
        self.client
            .post(&self.address)
            .headers(self.json_request_headers())
            .body(cmd)
            .send()
            .map_err(|e| ConnectionError::HttpError(e.to_string()))?
            .text()
            .map_err(|e| ConnectionError::HttpError(e.to_string()))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Verify that credentials and the JSON headers end up in the header map.
    #[test]
    fn prepare_request() {
        let address = "http://127.0.0.1";
        let credentials = Credentials::Basic(String::from("check!"));
        let client = Http::new(address, Some(credentials));
        let headers = client.json_request_headers();
        assert_eq!(headers.get("Authorization").unwrap(), "Basic check!");
        assert_eq!(headers.get("Content-Type").unwrap(), "application/json");
        assert_eq!(headers.get("Accept").unwrap(), "application/json");
    }
}
|
//! OpenLibrary work schemas.
use friendly::scalar;
use crate::arrow::*;
use crate::ids::index::IdIndex;
use crate::prelude::*;
pub use super::source::OLWorkRecord;
use super::source::Row;
/// Work row in extracted Parquet.
#[derive(Debug, Clone, ArrowField, ArrowSerialize, ArrowDeserialize)]
pub struct WorkRec {
    pub id: i32,               // sequential work id assigned at extract time
    pub key: String,           // OpenLibrary work key
    pub title: Option<String>,
}
/// Work-author link in extracted Parquet.
#[derive(Debug, Clone, ArrowField, ArrowSerialize, ArrowDeserialize)]
pub struct WorkAuthorRec {
    pub id: i32,     // work id
    pub pos: i16,    // author's position in the work's author list
    pub author: i32, // interned author id
}
/// Work-subject record in extracted Parquet.
#[derive(Debug, Clone, ArrowField, ArrowSerialize, ArrowDeserialize)]
pub struct WorkSubjectRec {
    pub id: i32, // work id
    pub subject: String,
}
/// Process author source records into Parquet.
///
/// This must be run **after** the author processor.
pub struct WorkProcessor {
    last_id: i32,                               // last work id handed out
    author_ids: IdIndex<String>,                // author key -> numeric id
    rec_writer: TableWriter<WorkRec>,           // works.parquet
    author_writer: TableWriter<WorkAuthorRec>,  // work-authors.parquet
    subject_writer: TableWriter<WorkSubjectRec>, // work-subjects.parquet
}
impl WorkProcessor {
    /// Create a new work processor.
    ///
    /// Loads the author id index produced by the author processor and
    /// opens the three output Parquet tables.
    pub fn new() -> Result<WorkProcessor> {
        let author_ids = IdIndex::load_standard("authors.parquet")?;
        let rec_writer = TableWriter::open("works.parquet")?;
        let author_writer = TableWriter::open("work-authors.parquet")?;
        let subject_writer = TableWriter::open("work-subjects.parquet")?;
        Ok(WorkProcessor {
            last_id: 0,
            author_ids,
            rec_writer,
            author_writer,
            subject_writer,
        })
    }
}
impl ObjectWriter<Row<OLWorkRecord>> for WorkProcessor {
    /// Write one work row plus its author links and subject rows.
    fn write_object(&mut self, row: Row<OLWorkRecord>) -> Result<()> {
        // Works get sequential numeric ids in stream order.
        self.last_id += 1;
        let id = self.last_id;
        self.rec_writer.write_object(WorkRec {
            id,
            key: row.key.clone(),
            title: row.record.title.clone(),
        })?;
        // Link each listed author that has a key, preserving its position
        // in the author list (idiom: enumerate instead of index loop).
        for (pos, author) in row.record.authors.iter().enumerate() {
            if let Some(akey) = author.key() {
                let aid = self.author_ids.intern(akey)?;
                self.author_writer.write_object(WorkAuthorRec {
                    id,
                    pos: pos as i16,
                    author: aid,
                })?;
            }
        }
        for subject in row.record.subjects {
            self.subject_writer
                .write_object(WorkSubjectRec { id, subject })?;
        }
        Ok(())
    }
    /// Flush all writers, persist the (possibly grown) author id index,
    /// and return the number of work records written.
    fn finish(self) -> Result<usize> {
        let nr = self.rec_writer.finish()?;
        info!("wrote {} work records", scalar(nr));
        let na = self.author_writer.finish()?;
        info!("wrote {} work-author records", scalar(na));
        let ns = self.subject_writer.finish()?;
        info!("wrote {} work-subject records", scalar(ns));
        self.author_ids
            .save_standard("author-ids-after-works.parquet")?;
        Ok(self.last_id as usize)
    }
}
|
use std::{env, fs};
/// Read the puzzle input named on the command line and run both parts.
/// Each input line is "<opponent> <ours>", e.g. "A X"; we keep columns
/// 0 and 2 of the trimmed line.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        panic!("Supply a file to run against");
    }
    let content = fs::read_to_string(&args[1]).expect("Reading file went wrong");
    let plays: Vec<(char, char)> = content
        .lines()
        .map(|line| {
            let chars: Vec<char> = line.trim().chars().collect();
            (chars[0], chars[2])
        })
        .collect();
    part1(&plays);
    part2(&plays);
}
/// Score one round of rock-paper-scissors: the value of our shape
/// (X=1, Y=2, Z=3) plus the outcome bonus (0 loss, 3 draw, 6 win).
/// Panics on an unknown action; an unknown opponent column scores 0 bonus.
fn get_score(play: (char, char)) -> u64 {
    let (theirs, ours) = play;
    let shape_score: u64 = match ours {
        'X' => 1,
        'Y' => 2,
        'Z' => 3,
        other => panic!("Unsupported action: {}", other),
    };
    let outcome_score: u64 = match theirs {
        'A' => match ours { 'X' => 3, 'Y' => 6, _ => 0 },
        'B' => match ours { 'X' => 0, 'Y' => 3, _ => 6 },
        'C' => match ours { 'X' => 6, 'Y' => 0, _ => 3 },
        _ => 0,
    };
    shape_score + outcome_score
}
fn part1(n: &Vec<(char, char)>) {
let mut score = 0;
for play in n {
score += get_score(*play)
}
println!("Part 1: {}", score);
}
/// Translate a desired outcome code (X=lose, Y=draw, Z=win) into the
/// shape we must play against the opponent's shape (A/B/C).
/// Unknown combinations map to the sentinel '0'.
fn get_action(play: (char, char)) -> char {
    let (theirs, outcome) = play;
    match outcome {
        // Lose: pick the shape the opponent's shape beats.
        'X' => match theirs { 'A' => 'Z', 'B' => 'X', 'C' => 'Y', _ => '0' },
        // Draw: mirror the opponent's shape.
        'Y' => match theirs { 'A' => 'X', 'B' => 'Y', 'C' => 'Z', _ => '0' },
        // Win: pick the shape that beats the opponent's shape.
        'Z' => match theirs { 'A' => 'Y', 'B' => 'Z', 'C' => 'X', _ => '0' },
        _ => '0',
    }
}
/// Print the part-2 total, treating the second column as the desired
/// outcome and deriving the shape to play via `get_action`.
/// Takes a slice instead of `&Vec` (idiomatic; `&plays` still coerces).
fn part2(n: &[(char, char)]) {
    let score: u64 = n
        .iter()
        .map(|play| get_score((play.0, get_action(*play))))
        .sum();
    println!("Part 2: {}", score);
}
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use core::ops::{Index, IndexMut, Range, RangeFrom, RangeTo, RangeFull};
use crate::string::{ByteStr};
// Single-byte access; panics if `idx` is out of bounds (delegates to the
// inner byte container's indexing).
impl Index<usize> for ByteStr {
    type Output = u8;
    fn index(&self, idx: usize) -> &u8 {
        &self.0[idx]
    }
}
impl IndexMut<usize> for ByteStr {
    fn index_mut(&mut self, idx: usize) -> &mut u8 {
        &mut self.0[idx]
    }
}
// `s[..]` is the whole string, so no sub-slicing is needed.
impl Index<RangeFull> for ByteStr {
    type Output = ByteStr;
    fn index(&self, _: RangeFull) -> &ByteStr { self }
}
impl IndexMut<RangeFull> for ByteStr {
    fn index_mut(&mut self, _: RangeFull) -> &mut ByteStr { self }
}
impl Index<RangeTo<usize>> for ByteStr {
type Output = ByteStr;
fn index(&self, idx: RangeTo<usize>) -> &ByteStr {
self.0[idx].as_ref()
}
}
impl IndexMut<RangeTo<usize>> for ByteStr {
fn index_mut(&mut self, idx: RangeTo<usize>) -> &mut ByteStr {
self.0[idx].as_mut()
}
}
impl Index<RangeFrom<usize>> for ByteStr {
type Output = ByteStr;
fn index(&self, idx: RangeFrom<usize>) -> &ByteStr {
self.0[idx].as_ref()
}
}
impl IndexMut<RangeFrom<usize>> for ByteStr {
fn index_mut(&mut self, idx: RangeFrom<usize>) -> &mut ByteStr {
self.0[idx].as_mut()
}
}
impl Index<Range<usize>> for ByteStr {
type Output = ByteStr;
fn index(&self, idx: Range<usize>) -> &ByteStr {
self.0[idx].as_ref()
}
}
impl IndexMut<Range<usize>> for ByteStr {
fn index_mut(&mut self, idx: Range<usize>) -> &mut ByteStr {
self.0[idx].as_mut()
}
}
|
use crate::{
pipeline::{
render_mesh::RenderMesh,
light::IsLight,
material::IsMaterial,
},
camera::{
IsCamera,
}
};
use glow::HasContext;
use luminance::{
context::GraphicsContext,
pipeline::PipelineState,
render_state::RenderState,
tess::TessSliceIndex,
face_culling::{
FaceCulling,
FaceCullingOrder,
FaceCullingMode
}
};
use cgmath::*;
use std::ops::Deref;
use std::sync::atomic::Ordering;
/// Per-frame 3D renderer state: draw counters, the lights collected
/// during the frame, and the shared render state (back-face culling).
pub struct Renderer3D<'a> {
    pub mesh_count: usize,
    pub vert_count: usize, // vertices submitted this frame (reset each frame)
    pub lights: Vec<&'a dyn IsLight>, // lights registered for this frame
    pub light_count: usize, // number of lights used in the previous frame
    pub render_state: RenderState,
}
impl<'a> Renderer3D<'a> {
    /// Create a renderer with back-face culling (CCW front faces).
    pub fn new() -> Self {
        Self {
            mesh_count: 0,
            vert_count: 0,
            lights: Vec::new(),
            light_count: 0,
            render_state: RenderState::default().set_face_culling(FaceCulling::new(FaceCullingOrder::CCW, FaceCullingMode::Back)),
        }
    }
    /// Clear the colour/depth buffers and reset the per-frame vertex counter.
    pub fn prepare_frame(&mut self, gl: &glow::Context) {
        unsafe {
            // Fix: set the clear colour *before* clearing. Previously
            // `clear` ran first, so the very first frame was cleared with
            // the driver's default colour rather than this one.
            gl.clear_color(127.0 / 255.0, 103.0 / 255.0, 181.0 / 255.0, 1.0);
            gl.clear(glow::COLOR_BUFFER_BIT | glow::DEPTH_BUFFER_BIT);
        }
        self.vert_count = 0;
    }
    /// Reset every registered light's counter and drop the frame's light
    /// list, remembering how many lights were used.
    pub fn finish_frame(&mut self) {
        //TODO: Optimise this, no need to loop through every light to reset lol
        for light in &self.lights {
            light.count_ref().store(0, Ordering::Relaxed);
        }
        self.light_count = self.lights.len();
        self.lights = Vec::new();
    }
    //Function really only used internally, as it's quite unintuitive lol
    /// Register a light for the current frame.
    pub fn use_light(&mut self, light: &'a dyn IsLight) {
        self.lights.push(light);
    }
    /// Draw one mesh with the given material, camera, and model matrix,
    /// uploading light/camera/material uniforms before rendering.
    pub fn draw_mesh<C: IsCamera>(&mut self, surface: &mut crate::surface::Surface, gl: &glow::Context, camera: C, mesh: &RenderMesh, material: &dyn IsMaterial, model_matrix: Matrix4<f32>) {
        let back_buffer = surface.back_buffer().expect("Couldn't get the backbuffer!");
        let projection = camera.get_proj(surface.width(), surface.height());
        let view = camera.get_view();
        surface.pipeline_builder().pipeline(
            &back_buffer,
            &PipelineState::default(),
            |_, mut shd_gate| {
                material.bind_texture(gl);
                shd_gate.shade(&material.program(), |iface, mut rdr_gate| {
                    let handle = material.program().deref().handle();
                    for light in &self.lights {
                        light.upload_fields(gl, handle);
                    }
                    camera.upload_fields(gl, handle);
                    iface.projection.update(projection.into());
                    iface.view.update(view.into());
                    material.upload_fields(gl);
                    rdr_gate.render(&self.render_state, |mut tess_gate| {
                        iface.model.update(model_matrix.into()); //tc = transform component
                        tess_gate.render(mesh.tess.slice(..))
                    })
                });
                // Unbind so later passes don't inherit this material's texture.
                unsafe { gl.bind_texture(glow::TEXTURE_2D, None); }
            }
        );
        self.vert_count += mesh.vert_count;
    }
}
|
/// Converts `self` into a value of type `T`.
pub trait Mapper<T> {
    fn map(&self) -> T;
    // NOTE(review): the default implementation ignores `self` and returns
    // `destination` unchanged — confirm implementors are expected to
    // override this with a real merge/copy.
    fn map_to(&self, destination: T) -> T { destination }
}
use crate::eval::LNum;
/// The pieces of a hex literal (prefix already stripped): the whole part,
/// an optional fraction after '.', and an optional exponent after 'p'/'P'.
struct HexStrComp<'a> {
    whole_part: &'a str,
    fractional_part: Option<&'a str>,
    exponent_part: Option<&'a str>,
}
/// Split a hex-literal body into whole / fractional / exponent components.
/// More than one exponent marker or radix point is an error.
fn get_componenets(s: &str) -> Result<HexStrComp, String> {
    // The exponent marker may be lower- or upper-case.
    let exponent_splitter = if s.contains("p") { "p" } else { "P" };
    let exponent_pieces: Vec<&str> = s.split(exponent_splitter).collect();
    let (remaining, exponent_part) = match exponent_pieces.as_slice() {
        [body] => (*body, None), // no exponent component
        [body, exp] => (*body, Some(*exp)),
        _ => return Err("Invalid components".to_string()),
    };
    // With the exponent removed, split what is left on the radix point.
    let fractional_pieces: Vec<&str> = remaining.split(".").collect();
    let (whole_part, fractional_part) = match fractional_pieces.as_slice() {
        [whole] => (*whole, None),
        [whole, frac] => (*whole, Some(*frac)),
        _ => return Err("invalid components (fractional)".to_string()),
    };
    Ok(HexStrComp {
        whole_part,
        fractional_part,
        exponent_part,
    })
}
/**
 * Parse a floating hexdecimal number
 * (no negatives need to be handled here)
 **/
// NOTE(review): two suspected divergences from Lua's own hex-float rules,
// left untouched because the in-file test `test_hex_parsing` encodes them:
//  * fractional digits are scaled by 10^-n rather than 16^-n;
//  * the exponent after 'p'/'P' is parsed base-16, while Lua uses decimal.
// Also, `components` below re-splits the *whole* input on '.', so when an
// exponent is present `components[1].len()` includes the exponent text and
// over-counts the fractional digit count — TODO confirm intended.
fn parse_lua_hex_internal(s: &str) -> Result<LNum, String> {
    let hex_str_comp: HexStrComp;
    match get_componenets(s) {
        Ok(hsc) => hex_str_comp = hsc,
        Err(e) => return Err(e),
    }
    // at this point we are going to build up the final value
    // TODO this could be more performant with cleverness
    let components: Vec<_> = s.split(".").collect();
    if components.len() == 0 {
        return Err("Empty string cannot be hex parsed".to_string());
    }
    if hex_str_comp.fractional_part.is_none() && hex_str_comp.exponent_part.is_none() {
        // this is actually an integer, treat it as such
        let result_i64 = i64::from_str_radix(hex_str_comp.whole_part, 16);
        match result_i64 {
            Ok(v) => return Ok(LNum::Int(v)),
            Err(e) => return Err(e.to_string()),
        }
    }
    // otherwise we are actually going to be using a fractional component
    let whole_part_s = hex_str_comp.whole_part;
    let mut result: f64;
    match i64::from_str_radix(whole_part_s, 16) {
        Ok(v) => result = v as f64,
        Err(e) => return Err(e.to_string()),
    }
    match hex_str_comp.fractional_part {
        None => {}
        Some(v) => {
            match i64::from_str_radix(v, 16) {
                Ok(m) => {
                    // the fractional component is equal to 10^(-n) * the component
                    // 0.4 => 4 * 10^-1
                    // 0.40 => 4 * 10^-2
                    let fractional_component: f64 =
                        (m as f64) * (10 as f64).powf(-(components[1].len() as f64));
                    result = result + fractional_component;
                }
                Err(e) => return Err(e.to_string()),
            }
        }
    }
    match hex_str_comp.exponent_part {
        None => {}
        Some(exp_s) => {
            match i64::from_str_radix(exp_s, 16) {
                Ok(m) => {
                    // this exponent component does *2^(val)
                    let multiplier = (2 as f64).powf(m as f64);
                    result = result * multiplier;
                }
                Err(e) => return Err(e.to_string()),
            }
        }
    }
    return Ok(LNum::Float(result));
}
/// Strip the leading "0x"/"0X" marker and parse the remainder as a Lua
/// hexadecimal literal; errors when the marker is absent.
pub fn parse_lua_hex(n: &str) -> Result<LNum, String> {
    let prefix = if n.starts_with("0x") { "0x" } else { "0X" };
    if !n.starts_with(prefix) {
        return Err("not a hex string".to_string());
    }
    // trim_start_matches (not a single strip) keeps the original behaviour
    // of removing repeated prefixes.
    parse_lua_hex_internal(n.trim_start_matches(prefix))
}
// Integer, fractional, exponent, and non-hex-prefix cases.
// NOTE(review): the 0x10.5 == 16.5 expectation matches this parser's
// base-10 fraction scaling, not Lua's 16^-n rule (see note on the parser).
#[test]
fn test_hex_parsing() {
    assert_eq!(parse_lua_hex(&String::from("0x4")), Ok(LNum::Int(4)));
    assert_eq!(parse_lua_hex(&String::from("0x10")), Ok(LNum::Int(16)));
    assert_eq!(
        parse_lua_hex(&String::from("0x10.5")),
        Ok(LNum::Float(16.5))
    );
    assert_eq!(parse_lua_hex("0x1p4"), Ok(LNum::Float(16.0)));
    assert_eq!(
        parse_lua_hex(&String::from("10.5")),
        Err(String::from("not a hex string"))
    );
}
|
use self::Msg::*;
use gtk::prelude::*;
use gtk::Orientation::Vertical;
use img_dedup::SimilarPair;
use log::debug;
use relm::{connect, Relm, Widget};
use relm_attributes::widget;
use relm_derive::Msg;
use std::collections::BinaryHeap;
// Widget state: a max-heap of similar image pairs and the pair on screen.
pub struct Model {
    files: BinaryHeap<SimilarPair>,
    current_pair: Option<SimilarPair>,
}
#[derive(Msg)]
pub enum Msg {
    SetFiles(BinaryHeap<SimilarPair>), // replace the work queue
    Next,                              // advance to the next pair
}
#[widget]
impl Widget for CompareWidget {
    // Initial model: the supplied heap of pairs, nothing selected yet.
    fn model(_relm: &Relm<Self>, files: BinaryHeap<SimilarPair>) -> Model {
        Model {
            files,
            current_pair: None,
        }
    }
    fn update(&mut self, event: Msg) {
        match event {
            // Replace the work queue and immediately display the first pair.
            SetFiles(files) => {
                self.model.files = files;
                self.next();
            }
            Next => self.next(),
        };
    }
    // Layout: a vertical box with a title label, the two images being
    // compared, and a button that emits Msg::Next.
    view! {
        gtk::Box {
            orientation: Vertical,
            gtk::Label {
                text: "Image Deduplicator",
            },
            #[name="leftimage"]
            gtk::Image {
            },
            #[name="rightimage"]
            gtk::Image {
            },
            #[name="nextbutton"]
            gtk::Button {
                clicked => Next,
                label: "Next Pair",
            },
        },
    }
}
// Maybe more of this should be moved to the library
impl CompareWidget {
    /// Pop the most-similar remaining pair and show both of its images;
    /// does nothing further once the heap is empty.
    fn next(&mut self) {
        self.model.current_pair = self.model.files.pop();
        if let Some(pair) = self.model.current_pair.as_ref() {
            debug!("{:?}", pair);
            self.leftimage.set_from_file(&pair.left.borrow().path);
            self.rightimage.set_from_file(&pair.right.borrow().path);
        }
    }
}
|
// NOTE(review): this block appears to be macro/derive expansion output
// pasted back into the source: it reads from an undeclared `bytes` buffer
// (never filled from `reader`) and contains unit-arithmetic offsets such
// as `(() / 8)` that cannot compile. Kept verbatim — regenerate it from
// the originating derive instead of hand-editing.
// Intended shape: decode a Connect packet field by field, capturing
// ConnectFlags so the optional Will/Username/Password fields are only
// decoded when their flag bits are set.
#[async_trait] impl < R > FromReader < crate :: ERROR, R > for Connect where
Self : Sized, R : Read + std :: marker :: Unpin + std :: marker :: Send
{
    async fn from_reader(reader : & mut R) -> Result < Self, crate :: ERROR >
    {
        let c_flags : ConnectFlags ;
        Ok(Connect(< Protocol > ::
        from_bytes(& bytes[(() / 8) ..((() + (< Protocol > :: SIZE_IN_BITS)) / 8)])
        ?, < ProtocolLevel > ::
        from_bytes(& bytes[(() / 8) ..((() + (< ProtocolLevel > :: SIZE_IN_BITS)) /8)]) ?,
        {
            let result : ConnectFlags = < ConnectFlags > ::
            from_bytes(& bytes[(() / 8) ..((() + (< ConnectFlags > :: SIZE_IN_BITS))/ 8)]) ?;
            c_flags = result . clone();
            result
        },
        < KeepAlive > ::from_bytes(& bytes[(() / 8) ..((() + (< KeepAlive > :: SIZE_IN_BITS)) / 8)])?,
        < Properties > ::from_bytes(& bytes[(() / 8) ..((() + (< Properties > :: SIZE_IN_BITS)) / 8)])?,
        < ClientID > ::from_bytes(& bytes[(() / 8) ..((() + (< ClientID > :: SIZE_IN_BITS)) / 8)])?,
        {
            match c_flags . WillFlag
            {
                true => Some(< WillProperties > ::from_bytes(& bytes[(() / 8) ..((() +(< WillProperties > ::SIZE_IN_BITS)) / 8)]) ?),
                false=> None
            }
        },
        {
            match c_flags . WillFlag
            {
                true => Some(<WillTopic > ::from_bytes(& bytes[(() / 8) ..((() +(< WillTopic > :: SIZE_IN_BITS))/ 8)]) ?),
                false => None
            }
        },
        {
            match c_flags . WillFlag
            {
                true => Some(< WillPayload > ::from_bytes(& bytes[(() / 8) ..((() +(< WillPayload > ::SIZE_IN_BITS)) / 8)]) ?),
                false => None
            }
        },
        {
            match c_flags . UserNameFlag
            {
                true => Some(< Username > ::from_bytes(& bytes[(() / 8) ..((() + (< Username > :: SIZE_IN_BITS))/ 8)]) ?),
                false => None
            }
        },
        {
            match c_flags . PasswordFlag
            {
                true =>
                Some(< Password > ::
                from_bytes(& bytes
                [(() / 8) ..
                ((() +
                (< Password > :: SIZE_IN_BITS))
                / 8)]) ?), false => None
            }
        }))
    }
}
extern crate rand;
use std::sync::{Arc, Mutex};
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread;
use std::time;
/// A scored element produced by the producer thread.
#[derive(Clone)]
struct Item {
    score: i32,      // randomly generated score (random i32 % 10000)
    is_unique: bool, // true until a matching score is discovered
}

impl Item {
    /// Build an item that is initially assumed to be unique.
    pub fn new(score: i32) -> Item {
        Item {
            score,
            is_unique: true,
        }
    }
    /// Mark (or unmark) this item as unique.
    pub fn set_isunique(&mut self, value: bool) {
        self.is_unique = value;
    }
}
/// Remembers the first item seen and watches later items for a score match.
struct ItemTracker {
    first: Option<Item>,
}

impl ItemTracker {
    /// Start with no recorded first item.
    pub fn new() -> ItemTracker {
        ItemTracker { first: None }
    }
    /// Record the very first item; compare every later item against it.
    /// Returns true exactly when `item`'s score duplicates the first
    /// item's score, flagging both items as non-unique.
    pub fn process_item(&mut self, mut item: Item) -> bool {
        match self.first.as_mut() {
            None => {
                self.first = Some(item);
                false
            }
            Some(first_item) => {
                if first_item.score != item.score {
                    return false;
                }
                first_item.set_isunique(false);
                item.set_isunique(false);
                println!("We have found an duplicate of the first item!");
                true
            }
        }
    }
}
// Producer/consumer demo: one thread appends randomly-scored items to a
// shared vector until another thread spots a duplicate of the first
// item's score and raises the `done` flag.
fn main() {
    let items_store = Arc::new(Mutex::new(Vec::<Item>::new()));
    let done = Arc::new(AtomicBool::new(false));
    let producer_store = items_store.clone();
    let producer_done = done.clone();
    // Producer: push random items as fast as the lock allows until done.
    let producer = thread::spawn(move || {
        while !producer_done.load(Ordering::Relaxed) {
            let mut items = producer_store.lock().unwrap();
            let score = rand::random::<i32>() % 10000;
            items.push(Item::new(score));
        }
    });
    let consumer_store = items_store.clone();
    // Consumer: periodically scan only the items appended since the last
    // pass; stop everything once a duplicate is found.
    let consumer = thread::spawn(move || {
        let mut tracker = ItemTracker::new();
        let mut index = 0; // first not-yet-processed position in the vector
        'consumer: loop {
            std::thread::sleep(time::Duration::from_millis(10));
            let mut items = consumer_store.lock().unwrap();
            for ref mut item in items[index..].iter_mut() {
                if tracker.process_item(item.clone())
                {
                    done.store(true, Ordering::Relaxed);
                    break 'consumer;
                }
            }
            index = items.len();
        }
    });
    producer.join().unwrap();
    consumer.join().unwrap();
    println!("Done");
}
use crypto::Secret;
use parameters::{ClientTransportParameters, ServerTransportParameters, TransportParameters};
use types::Side;
use super::{QuicError, QuicResult};
use codec::Codec;
use std::str;
use std::io::Cursor;
use super::QUIC_VERSION;
use hex;
use ring::aead::AES_256_GCM;
use ring::digest::SHA256;
use snow;
use snow::NoiseBuilder;
use snow::params::NoiseParams;
lazy_static! {
static ref PARAMS: NoiseParams = "Noise_IK_25519_AESGCM_SHA256".parse().unwrap();
}
// Placeholder client static key used when `client_session` is given no
// identity. NOTE(review): this is a fixed, publicly visible value, so it
// provides no real client authentication — confirm it is intended only for
// anonymous clients.
const STATIC_DUMMY_SECRET : [u8; 32] = [
    0xe0, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14,
    0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24,
    0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c,
    0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55
];
/// Client-side handshake state: holds the key material, the in-progress
/// Noise session, and the local/remote transport parameters.
pub struct ClientSession {
    static_key : [u8; 32], // client secret
    remote_key : [u8; 32], // server public
    session : Option<snow::Session>, // noise snow session
    params_remote : Option<ServerTransportParameters>, // authenticated remote transport parameters
    params_local : ClientTransportParameters, // transport parameters
}
/// Server-side handshake state; the Noise session is built lazily in
/// `set_prologue`, and `auth_check` lets the application accept or reject
/// the client's static public key.
pub struct ServerSession {
    static_key : [u8; 32], // server secret
    session : Option<snow::Session>, // noise snow session
    params_remote : Option<ClientTransportParameters>, // authenticated remote transport parameters
    params_local : ServerTransportParameters, // transport parameters
    auth_check : fn([u8; 32]) -> bool // application specific auth. check
}
/// Handshake operations shared by `ClientSession` and `ServerSession`.
pub trait Session {
    /// Feeds one received handshake message into the Noise state machine.
    /// Returns an optional response message to send back and, once the
    /// handshake completes, the derived 1-RTT secret.
    ///
    /// (The parameter was previously anonymous — `&[u8]` with no name —
    /// a pre-2018-edition form that is invalid in newer editions.)
    fn process_handshake_message(&mut self, msg: &[u8]) -> QuicResult<HandshakeResult>;
    /// Sets the Noise prologue; only meaningful before the handshake starts.
    fn set_prologue(&mut self, prologue : &[u8]) -> QuicResult<()>;
    /// Returns the peer's transport parameters once they are authenticated.
    fn get_transport_parameters(&self) -> Option<TransportParameters>;
}
/// Default authentication hook: logs the client's static public key and
/// accepts every client unconditionally.
fn no_auth(pk: [u8; 32]) -> bool {
    let identity = hex::encode(&pk);
    println!("debug : client identity : {}", identity);
    true
}
/// Builds a fresh `ClientSession` for the given server public key.
/// When no client identity is supplied, the well-known dummy secret is
/// used instead.
pub fn client_session(
    remote_key: [u8; 32],
    static_key: Option<[u8; 32]>,
    params: ClientTransportParameters,
) -> ClientSession {
    let static_key = static_key.unwrap_or(STATIC_DUMMY_SECRET);
    ClientSession {
        static_key,
        remote_key,
        session: None,
        params_remote: None,
        params_local: params,
    }
}
/// Builds a fresh `ServerSession` with the permissive `no_auth` check.
pub fn server_session(
    static_key: [u8; 32],
    params: ServerTransportParameters,
) -> ServerSession {
    ServerSession {
        static_key,
        session: None,
        params_remote: None,
        params_local: params,
        auth_check: no_auth,
    }
}
/// Client side of the Noise IK handshake.
impl Session for ClientSession {
    // Clients fix their prologue in `create_handshake_request`, so setting
    // it through the trait is always an error.
    fn set_prologue(&mut self, _prologue : &[u8]) -> QuicResult<()> {
        Err(QuicError::General("setting prologue on client".to_owned()))
    }
    // Available only after the server's handshake response has been
    // processed successfully.
    fn get_transport_parameters(&self) -> Option<TransportParameters> {
        match &self.params_remote {
            Some(p) => Some(p.parameters.clone()),
            None => None,
        }
    }
    /// Processes the server's handshake response: decrypts it, decodes the
    /// server transport parameters from the payload, and exports the 1-RTT
    /// key material. Panics if called before `create_handshake_request`
    /// (the Noise session does not exist yet).
    fn process_handshake_message(&mut self, msg: &[u8]) -> QuicResult<HandshakeResult> {
        let session = self.session.as_mut().unwrap();
        let mut payload = vec![0u8; 65535];
        match session.read_message(msg, &mut payload) {
            Ok(n) => {
                // parse server transport parameters
                self.params_remote = Some({
                    let mut read = Cursor::new(&payload[..n]);
                    ServerTransportParameters::decode(&mut read)?
                });
                // Sanity: IK completes in one round-trip, so the handshake
                // must be finished after this message.
                assert!(session.is_initiator());
                assert!(session.is_handshake_finished());
                // export key material
                let (k1, k2) = session.export().unwrap();
                let secret = Secret::For1Rtt(&AES_256_GCM, &SHA256, k1.to_vec(), k2.to_vec());
                println!("debug : params_remote = {:?}", &self.params_remote);
                println!("debug : exporting key material from Noise:");
                println!("debug : i->r : {}", hex::encode(k1));
                println!("debug : i<-r : {}", hex::encode(k2));
                // The client sends no further handshake message.
                Ok((None, Some(secret)))
            },
            Err(_) => Err(QuicError::General("failed to decrypt noise".to_owned()))
        }
    }
}
impl ClientSession {
    /// Builds the initiator Noise session with the given prologue and
    /// returns the first (and only) client handshake message, which
    /// carries the encoded client transport parameters as its payload.
    ///
    /// Panics if called more than once on the same session.
    pub fn create_handshake_request(&mut self, prologue : &[u8]) -> QuicResult<Vec<u8>> {
        // sanity check
        if let Some(_) = self.session {
            panic!("Multiple calls to create_handshake_request");
        }
        // build Noise session
        self.session = Some({
            let builder = NoiseBuilder::new(PARAMS.clone());
            builder
                .prologue(prologue)
                .local_private_key(&self.static_key)
                .remote_public_key(&self.remote_key)
                .build_initiator().unwrap()
        });
        // serialize parameters
        let session = self.session.as_mut().unwrap();
        let mut payload = Vec::new();
        self.params_local.encode(&mut payload);
        // 65535 is the maximum Noise message size.
        let mut msg = vec![0u8; 65535];
        let len = session.write_message(&payload, &mut msg).unwrap();
        Ok(msg[..len].to_owned())
    }
}
/// Server side of the Noise IK handshake.
impl Session for ServerSession {
    /// Builds the responder Noise session with the given prologue.
    /// Rejected once a handshake message has already been processed.
    fn set_prologue(&mut self, prologue : &[u8]) -> QuicResult<()> {
        match self.session {
            Some(_) =>
                Err(QuicError::General("setting prologue after processing handshake request".to_owned())),
            None => {
                self.session = Some({
                    let builder = NoiseBuilder::new(PARAMS.clone());
                    builder
                        .local_private_key(&self.static_key)
                        .prologue(prologue)
                        .build_responder().unwrap()
                });
                Ok(())
            }
        }
    }
    /// Available only after the client's handshake request has been
    /// processed successfully.
    fn get_transport_parameters(&self) -> Option<TransportParameters> {
        match &self.params_remote {
            Some(p) => Some(p.parameters.clone()),
            None => None,
        }
    }
    /// Processes the client's handshake request: decrypts it, validates
    /// the advertised version and the client identity, produces the
    /// handshake response, and exports the 1-RTT key material.
    ///
    /// Panics if called before `set_prologue` created the Noise session.
    fn process_handshake_message(&mut self, msg: &[u8]) -> QuicResult<HandshakeResult> {
        println!("debug : process handshake message");
        let session = self.session.as_mut().unwrap();
        let mut payload = vec![0u8; 65535];
        match session.read_message(msg, &mut payload) {
            Ok(n) => {
                // parse client transport parameters
                let parameters = {
                    let mut read = Cursor::new(&payload[..n]);
                    ClientTransportParameters::decode(&mut read)?
                };
                self.params_remote = Some(parameters.clone());
                println!("debug : client parameters {:?}", &parameters);
                // validate initial_version (this is the only supported version)
                if parameters.initial_version != QUIC_VERSION {
                    // Bug fix: this path used to report "failed to decrypt
                    // noise", masking the real cause (a version mismatch).
                    return Err(
                        QuicError::General("unsupported initial_version".to_owned())
                    );
                };
                // validate client identity (IK: the client's static key is
                // transmitted and authenticated during the handshake)
                let auth_ok = match session.get_remote_static() {
                    None => false,
                    Some(key) => {
                        let mut pk = [0u8; 32];
                        pk[..].clone_from_slice(key);
                        (self.auth_check)(pk)
                    }
                };
                if !auth_ok {
                    return Err(
                        QuicError::General("client identity rejected".to_owned())
                    );
                }
                // create handshake response carrying the server parameters
                let resp = {
                    let mut payload = Vec::new();
                    let mut msg = vec![0u8; 65535];
                    self.params_local.encode(&mut payload);
                    let len = session.write_message(&payload, &mut msg).unwrap();
                    assert!(session.is_handshake_finished());
                    msg[..len].to_owned()
                };
                // export transport keys
                println!("debug : exporting key material from Noise:");
                assert!(!session.is_initiator());
                assert!(session.is_handshake_finished());
                let (k1, k2) = session.export().unwrap();
                let secret = Secret::For1Rtt(
                    &AES_256_GCM,
                    &SHA256,
                    k1.to_vec(),
                    k2.to_vec()
                );
                println!("debug : i->r : {}", hex::encode(k1));
                println!("debug : i<-r : {}", hex::encode(k2));
                Ok((Some(resp), Some(secret)))
            },
            Err(_) => Err(QuicError::General("failed to decrypt noise".to_owned()))
        }
    }
}
/// Exposes which side of the connection a session type represents.
pub trait QuicSide {
    fn side(&self) -> Side;
}
// Client sessions always report the client side.
impl QuicSide for ClientSession {
    fn side(&self) -> Side {
        Side::Client
    }
}
// Server sessions always report the server side.
impl QuicSide for ServerSession {
    fn side(&self) -> Side {
        Side::Server
    }
}
// (optional handshake message to send back, optional derived 1-RTT secret)
type HandshakeResult = (Option<Vec<u8>>, Option<Secret>);
/// Serializes `val` into a freshly allocated byte vector via its `Codec`
/// implementation.
fn to_vec<T: Codec>(val: &T) -> Vec<u8> {
    let mut out = Vec::new();
    val.encode(&mut out);
    out
}
// ALPN protocol identifier; "hq-11" presumably names QUIC draft-11 —
// NOTE(review): confirm against the QUIC_VERSION constant used above.
const ALPN_PROTOCOL: &str = "hq-11";
|
// Multiples of 3 and 5
// If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.
// Find the sum of all the multiples of 3 or 5 below 1000.
/// Sum of all natural numbers strictly below `limit` that are multiples
/// of 3 or 5 (Project Euler #1).
fn sum_of_multiples(limit: u32) -> u32 {
    (1..limit).filter(|&n| n % 3 == 0 || n % 5 == 0).sum()
}

fn main() {
    // Bug fix: the problem asks for multiples *below* 1000, so the range
    // is `1..1000`. The original iterated `1..1001`, wrongly including
    // 1000 (a multiple of 5) in the sum.
    for num in (1..1000u32).filter(|&n| n % 3 == 0 || n % 5 == 0) {
        println!("num = {}", num);
    }
    println!("Sum = {}", sum_of_multiples(1000));
}
|
pub use super::models::Session;
pub use super::models::new::Session as NewSession;
use db::{DatabaseConnection, SelectError};
generate_crud_fns!(sessions, NewSession, Session);
/// Fetches a session from the database along with whether it's the current session.
pub fn get_session(conn: &DatabaseConnection, id: i32) -> Result<(bool, Session), SelectError> {
    // Fast path: if the requested id is the latest (current) session,
    // return it flagged as current.
    match get_latest_session(conn) {
        Ok(ref s) if s.id == id => return Ok((true, s.clone())),
        Ok(_) => (),
        Err(SelectError::NoSuchValue()) => (), // Pass to try below
        Err(e @ SelectError::DieselError(_)) => return Err(e),
    }
    // Otherwise fetch the session directly; it is not the current one.
    let res = generate_select_body!(single, conn, sessions, Session, (id, id))?;
    Ok((false, res))
}
/// Returns the newest session that is not force-archived — the "current"
/// session by this module's definition.
pub fn get_latest_session(conn: &DatabaseConnection) -> Result<Session, SelectError> {
    use diesel::prelude::*;
    use schema::sessions::dsl::*;
    let res = sessions
        .filter(force_archive.eq(false))
        .order(created.desc())
        .first::<Session>(conn.raw())?;
    Ok(res)
}
/// Returns every session, newest first, each paired with a flag saying
/// whether it is the "current" session.
///
/// The current session is the newest one that is not force-archived,
/// matching the row `get_latest_session` would return. (Bug fix: the
/// previous version only ever considered the very first row, so no row
/// was flagged current whenever the newest session was force-archived.)
pub fn get_all(conn: &DatabaseConnection) -> Result<Vec<(bool, Session)>, SelectError> {
    use diesel::prelude::*;
    use schema::sessions::dsl::*;
    let res = sessions.order(created.desc()).load::<Session>(conn.raw())?;
    let mut current_found = false;
    Ok(res.into_iter()
        .map(|it| {
            // First non-archived row in descending order is current.
            let is_current = !current_found && !it.force_archive;
            if is_current {
                current_found = true;
            }
            (is_current, it)
        })
        .collect())
}
|
use std::fs::File;
use std::io::{BufReader, BufWriter};
use crate::cartridge::{MirrorMode, Rom, RomMapper};
use crate::savable::Savable;
use super::Mapper;
/// State for iNES mapper 4 (commonly known as MMC3): bank-switching
/// registers, nametable mirroring, the scanline IRQ counter, and PRG RAM.
pub struct Mapper4 {
    rom: Rom,
    // Bank-select target (0-7): which `registers` slot the next odd
    // $8000-$9FFF write updates (see write_prg).
    target: u8,
    // PRG banking mode, from bit 6 of the bank-select write.
    prg_mode: bool,
    // CHR bank layout inversion, from bit 7 of the bank-select write.
    chr_invert: bool,
    mirror_mode: MirrorMode,
    // Raw bank numbers as written through the bank-data register.
    registers: [u8; 8],
    // Resolved byte offsets into PRG ROM for the four 8 KiB CPU windows.
    prg_banks: [usize; 4],
    // Resolved byte offsets into CHR for the eight 1 KiB PPU windows.
    chr_banks: [usize; 8],
    // Value reloaded into irq_counter when it reaches zero.
    irq_reload: u8,
    irq_counter: u8,
    irq_enable: bool,
    // Set when the IRQ counter fires; consumed by poll_irq.
    pending_irq: Option<bool>,
    // 8 KiB of PRG RAM mapped at $6000-$7FFF.
    ram: Vec<u8>,
}
impl Mapper4 {
    /// Wraps `rom` with power-on defaults: all banks at offset 0, IRQ
    /// disabled, horizontal mirroring, and 8 KiB of zeroed PRG RAM.
    /// (Callers are expected to invoke `reset` to set up the fixed PRG
    /// banks before use.)
    pub fn new(rom: Rom) -> Self {
        Self {
            rom,
            target: 0,
            prg_mode: false,
            chr_invert: false,
            mirror_mode: MirrorMode::Horizontal,
            registers: [0; 8],
            prg_banks: [0; 4],
            chr_banks: [0; 8],
            irq_reload: 0,
            irq_counter: 0,
            irq_enable: false,
            pending_irq: None,
            ram: vec![0; 0x2000],
        }
    }
}
// Marker-trait implementation; RomMapper adds no methods here.
impl RomMapper for Mapper4 {}
/// Save-state (de)serialization.
///
/// `load` must read fields in exactly the order `save` wrote them:
/// ROM state, scalar fields, then registers/chr_banks interleaved,
/// then prg_banks, then the PRG RAM bytes one at a time. Changing
/// either side alone breaks existing save files.
impl Savable for Mapper4 {
    fn save(&self, output: &mut BufWriter<File>) -> bincode::Result<()> {
        self.rom.save(output)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.target)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.prg_mode)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.chr_invert)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.mirror_mode)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.irq_reload)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.irq_counter)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.irq_enable)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.pending_irq)?;
        // registers and chr_banks are interleaved pairwise.
        for i in 0..8 {
            bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.registers[i])?;
            bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.chr_banks[i])?;
        }
        for i in 0..4 {
            bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.prg_banks[i])?;
        }
        // PRG RAM is written element by element (8 KiB).
        for i in 0..0x2000 {
            bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.ram[i])?;
        }
        Ok(())
    }
    fn load(&mut self, input: &mut BufReader<File>) -> bincode::Result<()> {
        self.rom.load(input)?;
        self.target = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.prg_mode = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.chr_invert = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.mirror_mode = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.irq_reload = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.irq_counter = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.irq_enable = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.pending_irq = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        for i in 0..8 {
            self.registers[i] = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
            self.chr_banks[i] = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        }
        for i in 0..4 {
            self.prg_banks[i] = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        }
        for i in 0..0x2000 {
            self.ram[i] = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        }
        Ok(())
    }
}
/// CPU/PPU bus interface plus IRQ handling for mapper 4.
impl Mapper for Mapper4 {
    /// CPU read: $6000-$7FFF is PRG RAM; $8000-$FFFF is one of four
    /// switchable 8 KiB PRG ROM windows resolved through `prg_banks`.
    fn read_prg(&mut self, addr: u16) -> u8 {
        match addr {
            0x6000..=0x7FFF => self.ram[(addr & 0x1FFF) as usize],
            0x8000..=0xFFFF => {
                // Select which 8 KiB window this address falls into.
                let reg_index = match addr {
                    0x8000..=0x9FFF => 0,
                    0xA000..=0xBFFF => 1,
                    0xC000..=0xDFFF => 2,
                    0xE000..=0xFFFF => 3,
                    _ => 0,
                };
                // Bank base + offset within the 8 KiB window.
                let index = self.prg_banks[reg_index] + (addr & 0x1FFF) as usize;
                self.rom.prg[index]
            }
            _ => 0,
        }
    }
    /// CPU write: within each $2000-wide range, the address parity (even
    /// vs. odd) selects which of the two paired registers is written.
    fn write_prg(&mut self, addr: u16, data: u8) {
        let even = addr & 0x1 == 0;
        match addr {
            0x6000..=0x7FFF => self.ram[(addr & 0x1FFF) as usize] = data,
            // Bank select (even): low 3 bits choose the target register,
            // bit 6 the PRG mode, bit 7 the CHR layout inversion.
            0x8000..=0x9FFF if even => {
                self.target = data & 0x7;
                self.prg_mode = data & 0x40 != 0;
                self.chr_invert = data & 0x80 != 0;
            }
            // Bank data (odd): store the raw value, then recompute every
            // derived CHR and PRG bank offset.
            0x8000..=0x9FFF => {
                self.registers[self.target as usize] = data;
                // CHR: registers 0-1 select 2 KiB pairs (low bit forced
                // clear via & 0xFE), registers 2-5 select 1 KiB banks;
                // chr_invert swaps which half of the pattern table gets
                // the 2 KiB pairs.
                match self.chr_invert {
                    true => {
                        self.chr_banks[0] = self.registers[2] as usize * 0x400;
                        self.chr_banks[1] = self.registers[3] as usize * 0x400;
                        self.chr_banks[2] = self.registers[4] as usize * 0x400;
                        self.chr_banks[3] = self.registers[5] as usize * 0x400;
                        self.chr_banks[4] = (self.registers[0] & 0xFE) as usize * 0x400;
                        self.chr_banks[5] = (self.registers[0] & 0xFE) as usize * 0x400 + 0x400;
                        self.chr_banks[6] = (self.registers[1] & 0xFE) as usize * 0x400;
                        self.chr_banks[7] = (self.registers[1] & 0xFE) as usize * 0x400 + 0x400;
                    }
                    false => {
                        self.chr_banks[0] = (self.registers[0] & 0xFE) as usize * 0x400;
                        self.chr_banks[1] = (self.registers[0] & 0xFE) as usize * 0x400 + 0x400;
                        self.chr_banks[2] = (self.registers[1] & 0xFE) as usize * 0x400;
                        self.chr_banks[3] = (self.registers[1] & 0xFE) as usize * 0x400 + 0x400;
                        self.chr_banks[4] = self.registers[2] as usize * 0x400;
                        self.chr_banks[5] = self.registers[3] as usize * 0x400;
                        self.chr_banks[6] = self.registers[4] as usize * 0x400;
                        self.chr_banks[7] = self.registers[5] as usize * 0x400;
                    }
                }
                // PRG: one window is switchable (register 6) and one is
                // fixed to the second-to-last 8 KiB bank; prg_mode swaps
                // which is which. Window 1 always follows register 7.
                match self.prg_mode {
                    true => {
                        self.prg_banks[0] = (self.rom.header.prg_count() * 2 - 2) * 0x2000;
                        self.prg_banks[2] = (self.registers[6] & 0x3F) as usize * 0x2000;
                    }
                    false => {
                        self.prg_banks[0] = (self.registers[6] & 0x3F) as usize * 0x2000;
                        self.prg_banks[2] = (self.rom.header.prg_count() * 2 - 2) * 0x2000;
                    }
                }
                self.prg_banks[1] = (self.registers[7] & 0x3F) as usize * 0x2000;
            }
            // Mirroring control (even write in $A000-$BFFF).
            0xA000..=0xBFFF if even => match data & 0x1 != 0 {
                true => self.mirror_mode = MirrorMode::Horizontal,
                false => self.mirror_mode = MirrorMode::Vertical,
            },
            // IRQ latch (even) / IRQ counter reset (odd).
            0xC000..=0xDFFF if even => self.irq_reload = data,
            0xC000..=0xDFFF => self.irq_counter = 0,
            // IRQ disable-and-acknowledge (even) / IRQ enable (odd).
            0xE000..=0xFFFF if even => {
                self.irq_enable = false;
                self.pending_irq = None;
            }
            0xE000..=0xFFFF => self.irq_enable = true,
            _ => {}
        }
    }
    /// PPU read: CHR is addressed through the eight 1 KiB bank windows.
    /// When the header reports zero CHR banks, `chr` acts as CHR RAM and
    /// is addressed directly (see `write_chr`).
    fn read_chr(&mut self, addr: u16) -> u8 {
        if self.rom.header.chr_count() == 0 {
            return self.rom.chr[addr as usize];
        }
        let reg_index = match addr {
            0x0000..=0x03FF => 0,
            0x0400..=0x07FF => 1,
            0x0800..=0x0BFF => 2,
            0x0C00..=0x0FFF => 3,
            0x1000..=0x13FF => 4,
            0x1400..=0x17FF => 5,
            0x1800..=0x1BFF => 6,
            0x1C00..=0x1FFF => 7,
            _ => 0,
        };
        let index = self.chr_banks[reg_index] + (addr & 0x3FF) as usize;
        self.rom.chr[index]
    }
    /// PPU write: only meaningful for CHR RAM (zero CHR banks in the
    /// header); writes to CHR ROM are silently ignored.
    fn write_chr(&mut self, addr: u16, data: u8) {
        if self.rom.header.chr_count() == 0 {
            self.rom.chr[addr as usize] = data;
        }
    }
    /// Four-screen cartridges override the register-selected mirroring.
    fn mirror_mode(&self) -> MirrorMode {
        match self.rom.header.four_screen() {
            true => MirrorMode::FourScreen,
            false => self.mirror_mode,
        }
    }
    /// Restores power-on state: the last two 8 KiB PRG banks are mapped
    /// into the upper CPU windows; everything else is cleared.
    fn reset(&mut self) {
        self.target = 0;
        self.prg_mode = false;
        self.chr_invert = false;
        self.mirror_mode = MirrorMode::Horizontal;
        self.irq_reload = 0;
        self.irq_counter = 0;
        self.irq_enable = false;
        self.pending_irq = None;
        self.registers.fill(0);
        self.chr_banks.fill(0);
        self.prg_banks[0] = 0;
        self.prg_banks[1] = 0x2000;
        self.prg_banks[2] = (self.rom.header.prg_count() * 2 - 2) as usize * 0x2000;
        self.prg_banks[3] = (self.rom.header.prg_count() * 2 - 1) as usize * 0x2000;
    }
    /// Clocks the scanline counter: reload at zero, otherwise decrement;
    /// raises a pending IRQ when the counter hits zero while enabled.
    fn inc_scanline(&mut self) {
        match self.irq_counter == 0 {
            true => self.irq_counter = self.irq_reload,
            false => self.irq_counter -= 1,
        }
        if self.irq_counter == 0 && self.irq_enable {
            self.pending_irq = Some(true);
        }
    }
    /// Returns (and clears) the pending IRQ flag.
    fn poll_irq(&mut self) -> bool {
        self.pending_irq.take().is_some()
    }
}
|
/// RGB color with `f64` channels. Values are unclamped; channel math is
/// plain component-wise arithmetic (see the operator impls below).
/// `Debug` and `PartialEq` are derived so colors can be logged and
/// compared in tests — a backward-compatible addition.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Color {
    pub r: f64,
    pub g: f64,
    pub b: f64,
}
impl Color {
    /// Uniform grey: every channel set to `c`.
    pub fn grey(c: f64) -> Self {
        Self { r: c, g: c, b: c }
    }

    /// Pure black — grey with all channels at zero.
    pub fn black() -> Self {
        Self::grey(0.0)
    }
}
impl std::ops::Add<Color> for Color {
    type Output = Color;

    /// Component-wise color addition.
    fn add(self, rhs: Color) -> Color {
        let Color { r, g, b } = self;
        Color {
            r: r + rhs.r,
            g: g + rhs.g,
            b: b + rhs.b,
        }
    }
}
impl std::ops::Mul<Color> for Color {
    type Output = Color;

    /// Component-wise (Hadamard) color multiplication.
    fn mul(self, rhs: Color) -> Color {
        let Color { r, g, b } = rhs;
        Color {
            r: self.r * r,
            g: self.g * g,
            b: self.b * b,
        }
    }
}
impl std::ops::Mul<f64> for Color {
    type Output = Color;

    /// Uniform scaling of all three channels by `s`.
    fn mul(self, s: f64) -> Color {
        let Color { r, g, b } = self;
        Color {
            r: r * s,
            g: g * s,
            b: b * s,
        }
    }
}
impl std::iter::Sum for Color {
    /// Sums colors component-wise, starting from black.
    fn sum<I>(iter: I) -> Color
    where
        I: Iterator<Item = Color>,
    {
        iter.fold(Color::black(), |acc, c| acc + c)
    }
}
|
/*
* Copyright (C) 2019-2021 TON Labs. All Rights Reserved.
*
* Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use
* this file except in compliance with the License.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific TON DEV software governing permissions and
* limitations under the License.
*/
//! Function and event param types.
use std::fmt;
use Param;
use crate::{AbiError, contract::ABI_VERSION_2_0};
use crate::contract::{ABI_VERSION_1_0, ABI_VERSION_2_1, AbiVersion};
use ton_types::{BuilderData, Result, error};
/// Function and event param types.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParamType {
    /// uint<M>: unsigned integer type of M bits.
    Uint(usize),
    /// int<M>: signed integer type of M bits.
    Int(usize),
    /// varuint<M>: variable length unsigned integer type of maximum M bytes.
    VarUint(usize),
    /// varint<M>: variable length integer type of maximum M bytes.
    VarInt(usize),
    /// bool: boolean value.
    Bool,
    /// Tuple: several values combined into tuple.
    Tuple(Vec<Param>),
    /// T[]: dynamic array of elements of the type T.
    Array(Box<ParamType>),
    /// T[k]: fixed-size array of k elements of the type T.
    FixedArray(Box<ParamType>, usize),
    /// cell - tree of cells
    Cell,
    /// hashmap - values dictionary
    Map(Box<ParamType>, Box<ParamType>),
    /// TON message address
    Address,
    /// byte array
    Bytes,
    /// fixed size byte array
    FixedBytes(usize),
    /// UTF8 string
    String,
    /// Nanograms
    Token,
    /// Timestamp
    Time,
    /// Message expiration time
    Expire,
    /// Public key
    PublicKey,
    /// Optional parameter
    Optional(Box<ParamType>),
    /// Parameter stored in reference
    Ref(Box<ParamType>),
}
impl fmt::Display for ParamType {
    /// Delegates to `type_signature` for the canonical textual form.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.type_signature())
    }
}
impl ParamType {
/// Returns type signature according to ABI specification
pub fn type_signature(&self) -> String {
match self {
ParamType::Uint(size) => format!("uint{}", size),
ParamType::Int(size) => format!("int{}", size),
ParamType::VarUint(size) => format!("varuint{}", size),
ParamType::VarInt(size) => format!("varint{}", size),
ParamType::Bool => "bool".to_owned(),
ParamType::Tuple(params) => {
let mut signature = "".to_owned();
for param in params {
signature += ",";
signature += ¶m.kind.type_signature();
}
signature.replace_range(..1, "(");
signature + ")"
},
ParamType::Array(ref param_type) => format!("{}[]", param_type.type_signature()),
ParamType::FixedArray(ref param_type, size) =>
format!("{}[{}]", param_type.type_signature(), size),
ParamType::Cell => "cell".to_owned(),
ParamType::Map(key_type, value_type) =>
format!("map({},{})", key_type.type_signature(), value_type.type_signature()),
ParamType::Address => format!("address"),
ParamType::Bytes => format!("bytes"),
ParamType::FixedBytes(size) => format!("fixedbytes{}", size),
ParamType::String => format!("string"),
ParamType::Token => format!("gram"),
ParamType::Time => format!("time"),
ParamType::Expire => format!("expire"),
ParamType::PublicKey => format!("pubkey"),
ParamType::Optional(ref param_type) => format!("optional({})", param_type.type_signature()),
ParamType::Ref(ref param_type) => format!("ref({})", param_type.type_signature()),
}
}
pub fn set_components(&mut self, components: Vec<Param>) -> Result<()> {
match self {
ParamType::Tuple(params) => {
if components.len() == 0 {
Err(error!(AbiError::EmptyComponents))
} else {
Ok(*params = components)
}
}
ParamType::Array(array_type) => {
array_type.set_components(components)
}
ParamType::FixedArray(array_type, _) => {
array_type.set_components(components)
}
ParamType::Map(_, value_type) => {
value_type.set_components(components)
}
ParamType::Optional(inner_type) => {
inner_type.set_components(components)
}
ParamType::Ref(inner_type) => {
inner_type.set_components(components)
}
_ => {
if components.len() != 0 {
Err(error!(AbiError::UnusedComponents))
} else {
Ok(())
}
},
}
}
/// Check if parameter type is supoorted in particular ABI version
pub fn is_supported(&self, abi_version: &AbiVersion) -> bool {
match self {
ParamType::Time | ParamType::Expire | ParamType::PublicKey => abi_version >= &ABI_VERSION_2_0,
ParamType::String | ParamType::Optional(_)| ParamType::VarInt(_) | ParamType::VarUint(_) => abi_version >= &ABI_VERSION_2_1,
ParamType::Ref(_) => false,
_ => abi_version >= &ABI_VERSION_1_0,
}
}
pub fn get_map_key_size(&self) -> Result<usize> {
match self {
ParamType::Int(size) | ParamType::Uint(size) => Ok(*size),
ParamType::Address => Ok(crate::token::STD_ADDRESS_BIT_LENGTH),
_ => Err(error!(AbiError::InvalidData {
msg: "Only integer and std address values can be map keys".to_owned()
}))
}
}
pub(crate) fn varint_size_len(size: usize) -> usize {
8 - ((size - 1) as u8).leading_zeros() as usize
}
pub(crate) fn is_large_optional(&self) -> bool {
self.max_bit_size() >= BuilderData::bits_capacity() ||
self.max_refs_count() >= BuilderData::references_capacity()
}
pub(crate) fn max_refs_count(&self) -> usize {
match self {
// in-cell serialized types
ParamType::Uint(_) | ParamType::Int(_) | ParamType::VarUint(_) |ParamType::VarInt(_)
| ParamType::Bool | ParamType::Address | ParamType::Token | ParamType::Time
| ParamType::Expire |ParamType::PublicKey => 0,
// reference serialized types
ParamType::Array(_) | ParamType::FixedArray(_, _) | ParamType::Cell | ParamType::String
| ParamType::Map(_, _) | ParamType::Bytes | ParamType::FixedBytes(_)
| ParamType::Ref(_) => 1,
// tuple refs is sum of inner types refs
ParamType::Tuple(params) => {
params
.iter()
.fold(0, |acc, param| acc + param.kind.max_refs_count())
},
// large optional is serialized into reference
ParamType::Optional(param_type) => {
if param_type.is_large_optional() {
1
} else {
param_type.max_refs_count()
}
},
}
}
pub(crate) fn max_bit_size(&self) -> usize {
match self {
ParamType::Uint(size) => *size,
ParamType::Int(size) => *size,
ParamType::VarUint(size) => Self::varint_size_len(*size) + (size - 1) * 8,
ParamType::VarInt(size) => Self::varint_size_len(*size) + (size - 1) * 8,
ParamType::Bool => 1,
ParamType::Array(_) => 33,
ParamType::FixedArray(_, _) => 1,
ParamType::Cell => 0,
ParamType::Map(_, _) => 1,
ParamType::Address => 591,
ParamType::Bytes | ParamType::FixedBytes(_) => 0,
ParamType::String => 0,
ParamType::Token => 124,
ParamType::Time => 64,
ParamType::Expire => 32,
ParamType::PublicKey => 257,
ParamType::Ref(_) => 0,
ParamType::Tuple(params) => {
params
.iter()
.fold(0, |acc, param| acc + param.kind.max_bit_size())
},
ParamType::Optional(param_type) => {
if param_type.is_large_optional() {
1
} else {
1 + param_type.max_bit_size()
}
}
}
}
}
|
#![allow(dead_code)]
/// # Render engine middleware
/// Currently only `HandlebarsEngine` is supported
///
/// ## Features
/// * Init Template engine
/// * Adding Template paths
/// * useful additional helpers with strong params checking
/// * helpers logger for critical situations
///
/// ## Helpers
/// * `link` - css link helper
/// * `script` - js link helper
///
/// it includes the paths ./views/**/*
use iron::prelude::*;
use iron::status;
use hbs::{HandlebarsEngine, DirectorySource};
use handlebars::{Handlebars, RenderError, RenderContext, Helper, Context};
use std::error::Error;
use std::collections::BTreeMap;
use rustc_serialize::json::{self, Json, ToJson};
use rustc_serialize::json::DecoderError::*;
use rustc_serialize::Decodable;
use hbs::{Template};
use handlebars::Renderable;
// NOTE(review): currently only referenced from the commented-out body of
// `ifgt_helper` — confirm whether it is still needed.
const DEBUG_RENDER: bool = true;
/// Alias for Basic Data struct
pub type BaseDataMap = BTreeMap<String, Json>;
/// Alias for basic Iron Response Result
pub type RenderResult = IronResult<Response>;
/// Template Render struct
pub struct Render {
    // Data handed to the template engine when rendering.
    pub data : BaseDataMap
}
/// BaseDataMap Json decoder trait
pub trait BaseDataMapDecoder {
    // Decodes this map into any target type implementing Decodable.
    fn decode<J: Decodable>(&self) -> J;
}
/// Implementation of Json decoding
/// from a BaseDataMap (which implements the Decodable trait)
/// to a specific generic type.
/// It is useful for struct init via BaseDataMap data.
/// For example - models.
impl BaseDataMapDecoder for BaseDataMap {
    /// Json decoder for BaseDataMap
    ///
    /// Round-trips the map through its JSON string form and decodes it
    /// into `J`. Panics with diagnostics on any decode failure, so it is
    /// only suitable where malformed data is a programming error.
    fn decode<J: Decodable>(&self) -> J {
        let json_obj: Json = Json::Object(self.to_owned());
        match json::decode(&json_obj.to_string()) {
            Ok(decoded) => decoded,
            Err(err) => {
                let mut msg = "Json parse error";
                // ExpectedError means a field was present but had the
                // wrong type.
                if let ExpectedError(_, _) = err {
                    msg = "Validation field expected (wrong type for field)"
                }
                panic!("\
                \n\n |> Validator::new error: {:?}\
                \n |> Validation fields: {:?}\
                \n |> Message: {}\
                \n |> At source code: => ", err, json_obj, msg);
            }
        }
    }
}
/// Basic render with status Ok, template name and data.
/// Basic usage:
/// `Render::new("my/template/path", ())`
impl Render {
    /// Render Template file with status 200
    pub fn new<T: ToJson>(name: &str, data: T) -> RenderResult {
        let mut resp = Response::new();
        // Template is resolved by the HandlebarsEngine after-middleware.
        resp.set_mut(Template::new(name, data)).set_mut(status::Ok);
        Ok(resp)
    }
}
/// Init Template renderer and add Template paths.
/// It is invoked as an after-middleware.
pub fn template_render(paths: Vec<&str>) -> HandlebarsEngine {
    // First init Handlebars
    let mut hregistry = Handlebars::new();
    // Register the custom helpers defined below.
    hregistry.register_helper("link", Box::new(link_helper));
    hregistry.register_helper("script", Box::new(script_helper));
    hregistry.register_helper("active", Box::new(active_page_helper));
    hregistry.register_helper("ifeq", Box::new(ifeq_helper));
    hregistry.register_helper("ifgt", Box::new(ifgt_helper));
    // HandlebarsEngine instance built from the configured registry.
    let mut template = HandlebarsEngine::from(hregistry);
    // Add a directory source, all files with .html suffix
    // will be loaded as template
    for path in paths.iter() {
        template.add(Box::new(DirectorySource::new(path, ".html")));
    }
    // load templates from all registered sources
    if let Err(r) = template.reload() {
        // Panic: missing or broken templates are unrecoverable at startup.
        panic!("{:?}", r.description());
    }
    template
}
/// Css link Helper
/// usage: `{{#link ["some/url1", "some/url2"]}}{{/link}}`
fn link_helper(_: &Context, h: &Helper, _: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
let css_links = try!(h.param(0)
.and_then(|v| v.value().as_array())
.ok_or(RenderError::new("|> link_helper - param 1 with array type is required")));
let mut css = "".to_owned();
for link in css_links.iter() {
let link = try!(link
.as_string()
.ok_or(RenderError::new("|> link_helper - array param with string type is required")));
css = format!("{}\t<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", css, link);
}
try!(rc.writer.write(css.into_bytes().as_ref()));
Ok(())
}
/// Js link Helper
/// usage: `{{#script ["some/url1", "some/url2"]}}{{/script}}`
fn script_helper(_: &Context, h: &Helper, _: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
let js_links = try!(h.param(0)
.and_then(|v| v.value().as_array())
.ok_or(RenderError::new("|> script_helper - param 1 with array type is required")));
let mut js = "".to_owned();
for link in js_links.iter() {
let link = try!(link
.as_string()
.ok_or(RenderError::new("|> script_helper - array param with string type is required")));
js = format!("{}\t<script type=\"text/javascript\" charset=\"utf-8\" src=\"{}\"></script>\n", js, link);
}
try!(rc.writer.write(js.into_bytes().as_ref()));
Ok(())
}
/// Active helper.
/// It checks whether a value is the same as the expected value.
/// usage: `{{#active "pages" module }}{{/active}}`
/// The value must be pre-initialized in the Handler!
fn active_page_helper(_: &Context, h: &Helper, _: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
// println!("ACTIVE=> {:?} {:?}\n", h.param(0), h.param(1));
let exact_page = try!(h.param(0)
.and_then(|v| v.value().as_string())
.ok_or(RenderError::new("|> active_page - param 1 with string type is required")));
let active_page = try!(h.param(1)
.and_then(|v| v.value().as_string())
.ok_or(RenderError::new("|> active_page - param 2 with string type is required")));
let mut active = "".to_owned();
if exact_page == active_page {
active = "active".to_owned();
}
try!(rc.writer.write(active.into_bytes().as_ref()));
Ok(())
}
fn ifeq_helper(ctx: &Context, h: &Helper, hbs: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
let value = try!(h.param(0)
.and_then(|v| Some(v.value()) )
.ok_or(RenderError::new("|> ifeq_helper - param 1 with is required")));
let eq_field = try!(h.param(1)
.and_then(|v| Some(v.value()) )
.ok_or(RenderError::new("|> ifeq_helper - param 2 with is required")));
println!("==> {:?}\n\n", h.param(0));
let is_true = value == eq_field;
if is_true {
if let Some(tpl) = h.template() {
tpl.render(ctx, hbs, rc)?;
}
} else {
if let Some(tpl) = h.inverse() {
tpl.render(ctx, hbs, rc)?;
}
}
Ok(())
}
fn ifgt_helper(_ctx: &Context, _h: &Helper, _hbs: &Handlebars, rc: &mut RenderContext) -> Result<(), RenderError> {
let active = "".to_owned();;
/* let value = try!(h.param(0)
.and_then(|v| Some(v.value()) )
.ok_or(RenderError::new("|> ifgt_helper - param 1 withis required")));
if DEBUG_RENDER {
let eq_field = try!(h.param(1)
.and_then(|v| Some(v.value()))
.ok_or(RenderError::new("|> ifgt_helper - param 2 with is required")));
println!("IFGT==> {:?}\n\n", value);
active = "5 23".to_owned();
println!("IFGT==>>> {:?}\n\n", value);
let (_, _, _) = (eq_field, hbs, ctx);
}*/
try!(rc.writer.write(active.into_bytes().as_ref()));
Ok(())
}
|
/// Reverses the byte order of a 32-bit value,
/// e.g. `0x12345678` -> `0x78563412`.
///
/// Equivalent to the previous hand-rolled mask-and-shift expression, but
/// uses the standard library's `u32::swap_bytes`.
pub fn reverse_endian_u32(hash: u32) -> u32 {
    hash.swap_bytes()
}
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// SyntheticsVariableParser : Details of the parser to use for the global variable.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SyntheticsVariableParser {
    // Parser type; serialized under the JSON key "type" (a Rust keyword,
    // hence the `_type` field name).
    #[serde(rename = "type")]
    pub _type: crate::models::SyntheticsGlobalVariableParserType,
    /// Regex or JSON path used for the parser. Not used with type `raw`.
    #[serde(rename = "value", skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
}
impl SyntheticsVariableParser {
    /// Details of the parser to use for the global variable; `value`
    /// starts unset and is serialized only when present.
    pub fn new(_type: crate::models::SyntheticsGlobalVariableParserType) -> SyntheticsVariableParser {
        Self { _type, value: None }
    }
}
|
// `match` is a valid method for handling `Option`. However, you may
// eventually find heavy usage tedious, especially with operations only
// valid with an input. In these cases, combinators can be used to
// manage control flow in a modular fashion.
//
// `Option` has a built-in method called `map()`, a combinator for the simple
// mapping of `Some -> Some` and `None -> None`. Multiple `map()` calls can
// be chained together for even more flexibility.
//
// In the following example, `process()` replaces all functions previous
// to it while staying compact.
#![allow(dead_code)]
// The raw ingredient and its three processing stages, each a newtype
// wrapping the output of the previous stage.
#[derive(Debug)] enum Food { Apple, Carrot, Potato }
#[derive(Debug)] struct Peeled(Food);
#[derive(Debug)] struct Chopped(Food);
#[derive(Debug)] struct Cooked(Food);
// Peeling. Deliberately written as an explicit `match` so it can be
// contrasted with the combinator-based `process()` below.
fn peel(food: Option<Food>) -> Option<Peeled> {
    match food {
        Some(food) => Some(Peeled(food)),
        None => None,
    }
}
// Chopping; also a deliberate explicit `match` (see `process()`).
fn chop(peeled: Option<Peeled>) -> Option<Chopped> {
    match peeled {
        Some(Peeled(food)) => Some(Chopped(food)),
        None => None,
    }
}
// Cooking, expressed with the `map()` combinator.
fn cook(chopped: Option<Chopped>) -> Option<Cooked> {
    chopped.map(|Chopped(inner)| Cooked(inner))
}
fn process(food: Option<Food>) -> Option<Cooked> {
food.map(|f| Peeled(f))
.map(|Peeled(f)| Chopped(f))
.map(|Chopped(f)| Cooked(f))
}
// Reports whether the (possibly absent) cooked food is edible.
fn eat(food: Option<Cooked>) {
    if let Some(meal) = food {
        println!("Mmm. I love {:?}", meal);
    } else {
        println!("Oh no! It wasn't edible");
    }
}
fn main() {
let apple = Some(Food::Apple);
let carrot = Some(Food::Carrot);
let potato = None;
let cooked_apple = cook(chop(peel(apple)));
let cooked_carrot = cook(chop(peel(carrot)));
let cooked_potato = process(potato);
eat(cooked_apple);
eat(cooked_carrot);
eat(cooked_potato);
}
|
// loop, while & for
fn main() {
    // Demonstrate each looping construct in turn.
    loop_fn();
    while_fn();
    for_fn();
}
fn for_fn() {
    // this is FAST
    {
        println!("Iteration through array: for");
        let array = [1, 2, 3, 4, 5, 6, 7, 8];
        // Iterating by reference avoids per-element bounds checks.
        for value in &array {
            println!("{}", value);
        }
    }
    // Rocket launch counter: for
    {
        println!("Rocket launch counter");
        // Count 3, 2, 1 by reversing the half-open range.
        for tick in (1..4).rev() {
            println!("{}", tick);
        }
        println!("LIFTOFF!!!");
    }
}
/// Demonstrates `while`: a countdown and (the slow way) array iteration.
fn while_fn() {
    // Rocket launch counter implemented with `while`.
    {
        println!("Rocket launch counter");
        let mut counter = 3;
        while counter != 0 {
            println!("{}", counter);
            counter -= 1;
        }
        println!("LIFTOFF!!!");
    }
    // this is SLOW (a bounds check per indexed access; prefer `for`)
    {
        println!("Iteration through array: while");
        let array = [1, 2, 3, 4, 5, 6, 7, 8];
        let mut index = 0;
        // `index < array.len()` instead of `index <= array.len() - 1`:
        // the subtraction form underflows (panics in debug builds) as soon
        // as the slice is empty.
        while index < array.len() {
            println!("{}", array[index]);
            index += 1;
        }
    }
}
fn loop_fn() {
    let mut counter = 0;
    // `loop` is an expression: `break` can carry the loop's resulting value.
    let result = loop {
        counter += 1;
        if counter < 10 {
            continue;
        }
        break counter * 2;
    };
    println!("Result is : {}", result);
    println!("Counter is at : {}", counter);
}
|
extern crate futures;
extern crate tokio;
extern crate tokio_stomp;
use std::time::Duration;
use tokio_stomp::*;
use tokio::executor::current_thread::{run, spawn};
use futures::future::ok;
use futures::prelude::*;
// Stream data from the UK national rail datafeed.
// See http://nrodwiki.rockshore.net/index.php/Darwin:Push_Port for more information
fn main() {
    // Dummy usernames/passwords
    let username = "d3user";
    let password = "d3password";
    // Destination queue comes from the environment; fail fast if unset.
    let queue = std::env::var("STOMP_QUEUE").expect("Env var STOMP_QUEUE not found");
    let uri = "datafeeds.nationalrail.co.uk:61613";
    // `connect` yields a driver future plus an unbounded channel for
    // sending client frames to the broker.
    let (fut, tx) = tokio_stomp::connect(uri.into(), Some(username.into()), Some(password.into())).unwrap();
    tx.unbounded_send(ClientMsg::Subscribe {
        destination: queue.into(),
        id: "1".into(),
        ack: None,
    }).unwrap();
    // NOTE(review): presumably gives the subscription time to register
    // before we start draining frames — confirm whether it is needed.
    std::thread::sleep(Duration::from_secs(1));
    let fut = fut.for_each(move |item| {
        // Print every frame header as `key:value` (headers are raw bytes).
        for (k, v) in item.extra_headers {
            println!("{}:{}", String::from_utf8_lossy(&k), String::from_utf8_lossy(&v))
        }
        // Only MESSAGE frames carry a payload we care about.
        if let ServerMsg::Message { body, .. } = item.content {
            println!("{}\n", String::from_utf8_lossy(&body.unwrap()));
        }
        ok(())
    }).map_err(|e| eprintln!("{}", e));
    // Drive the stream to completion on the current-thread executor.
    run(|_| spawn(fut));
}
|
extern crate env_logger;
extern crate fuse;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate jsonfs;
#[macro_use]
extern crate serde_json;
mod setup;
use std::fs::OpenOptions;
use std::io::prelude::*;
#[test]
/// A write of invalid JSON must be rejected and must leave the file's
/// contents untouched.
fn bad_write_does_nothing() {
    // Mount the JSON filesystem; keep the guard alive for the whole test.
    let _fs = setup::get_fs();
    let path = setup::from_mntpt("target_for_bad_writes");
    // The file starts out holding an empty JSON object.
    {
        let mut file = OpenOptions::new()
            .read(true)
            .open(&path)
            // Fixed: this message previously claimed "read-write mode".
            .expect("unable to open in read-only mode");
        let mut actual: [u8; 256] = [0; 256];
        let n = file.read(&mut actual).expect("unable to read from file");
        let expected = b"{}\n";
        // assert_eq! reports both values on failure, unlike assert!(a == b).
        assert_eq!(n, expected.len());
        assert_eq!(&actual[..n], expected);
    }
    // Appending invalid JSON must be rejected by the filesystem.
    {
        let mut file = OpenOptions::new()
            .append(true)
            .open(&path)
            // Fixed: this message previously claimed "read-write mode".
            .expect("unable to open in append mode");
        assert!(file.write(b"thisisnotavalidjsonobject").is_err());
    }
    // ...and the rejected write must not have altered the contents.
    {
        let mut file = OpenOptions::new()
            .read(true)
            .open(&path)
            .expect("unable to open in read-only mode");
        let mut actual: [u8; 256] = [0; 256];
        let n = file.read(&mut actual).expect("unable to read from file");
        let expected = b"{}\n";
        assert_eq!(n, expected.len());
        assert_eq!(&actual[..n], expected);
    }
}
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
mod nullbuf;
pub use nullbuf::NullTerminatedBuf;
#[macro_use]
#[allow(unused_macros)]
pub mod str_enum;
pub mod case;
pub mod convert;
pub mod json;
|
use core::task::{Context, Poll, Waker};
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::net::SocketAddr;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::time::Duration;
use futures::executor::block_on;
use futures::future::Future;
use futures::future::{BoxFuture, FutureExt};
use futures::lock::Mutex;
use smoltcp::iface::{Interface, InterfaceBuilder, Routes};
use smoltcp::time::Instant;
pub use smoltcp::wire::IpAddress;
pub use smoltcp::{Error as SmoltcpError, Result as SmoltcpResult};
use super::socket::{
AsyncSocket, OnTcpSocketData, OnUdpSocketData, Socket, SocketConnectionError,
SocketReceiveError, SocketSendError, SocketType,
};
use super::virtual_tun::{
OnVirtualTunRead, OnVirtualTunWrite, VirtualTunInterface as VirtualTunDevice,
VirtualTunReadError, VirtualTunWriteError,
};
#[cfg(feature = "vpn")]
use super::vpn_client::{
PhyReceive, PhyReceiveError, PhySend, PhySendError, VpnClient, VpnConnectionError,
VpnDisconnectionError,
};
#[cfg(feature = "log")]
use log::{debug, error, info, warn};
use smoltcp::phy::Device;
use smoltcp::phy::TapInterface as TapDevice;
use smoltcp::phy::TunInterface as TunDevice;
use smoltcp::socket::{
SocketHandle, SocketSet, TcpSocket, TcpSocketBuffer, UdpSocket, UdpSocketBuffer,
};
pub use smoltcp::wire::{IpCidr, IpEndpoint, Ipv4Address, Ipv6Address};
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use std::pin::Pin;
use std::sync::Arc;
#[cfg(feature = "async")]
use tokio::io::{AsyncRead, AsyncWrite};
use crate::DEFAULT_MTU;
/// Error returned by an `OnPollWait` callback when the stack should not
/// wait before its next poll.
pub enum PollWaitError {
    NoPollWait,
}
/// Callback invoked with the suggested wait duration between polls.
pub type OnPollWait = Arc<dyn Fn(Duration) -> std::result::Result<(), PollWaitError> + Send + Sync>;
/// Errors produced while "spinning" sockets (draining received data).
#[derive(Debug)]
pub enum SpinError {
    // No data callback registered for the socket's type.
    NoCallback,
    // The socket handle was not found in the stack's handle map.
    NoSocket,
    Unknown(String),
}
/// Applies `f` to `arg` and returns the result. Exists so that
/// `for_each_device!` can feed the matched enum payload into a
/// caller-supplied closure without macro parsing ambiguity.
#[doc(hidden)]
pub fn __feed__<T, R>(arg: T, f: impl FnOnce(T) -> R) -> R {
    f(arg)
}
// Dispatches `$closure` over whichever `SmolStack<DeviceT>` variant
// `$scrutinee` holds, so per-device code is written once. The closure is
// generic over the device type via `__feed__`.
macro_rules! for_each_device {
    (
        $scrutinee:expr, $closure:expr $(,)?
    ) => {{
        use $crate::async_smoltcp::SmolStackWithDevice;
        use $crate::async_smoltcp::__feed__;
        match $scrutinee {
            SmolStackWithDevice::VirtualTun(inner) => __feed__(inner, $closure),
            SmolStackWithDevice::Tun(inner) => __feed__(inner, $closure),
            SmolStackWithDevice::Tap(inner) => __feed__(inner, $closure),
        }
    }};
}
/// Per-socket data callbacks. At most one of `on_tcp`/`on_udp` is set,
/// matching the socket's type.
struct OnSocketData {
    pub on_tcp: Option<OnTcpSocketData>,
    pub on_udp: Option<OnUdpSocketData>,
}
impl OnSocketData {
    /// Callback bundle for a TCP socket.
    pub fn tcp(t: Option<OnTcpSocketData>) -> OnSocketData {
        OnSocketData {
            on_tcp: t,
            on_udp: None,
        }
    }
    /// Callback bundle for a UDP socket.
    pub fn udp(t: Option<OnUdpSocketData>) -> OnSocketData {
        OnSocketData {
            on_tcp: None,
            on_udp: t,
        }
    }
    /// No callbacks registered.
    pub fn none() -> OnSocketData {
        OnSocketData {
            on_tcp: None,
            on_udp: None,
        }
    }
}
/// A smoltcp network stack bound to one concrete device type.
pub struct SmolStack<DeviceT>
where
    DeviceT: for<'d> Device<'d>,
{
    pub sockets: SocketSet<'static, 'static, 'static>,
    pub interface: Interface<'static, 'static, 'static, DeviceT>,
    // Maps each smoltcp handle to its socket type and data callbacks.
    socket_handles: HashMap<SocketHandle, (SocketType, OnSocketData)>,
    // Set to true to make the background stack thread exit its loop.
    should_stack_thread_stop: Arc<AtomicBool>,
    // Wakers parked by async readers/writers. Only the VPN/VirtualTun
    // constructor populates these; `new_tap`/`new_tun` leave them `None`.
    read_wake_deque: Option<Arc<Mutex<VecDeque<Waker>>>>,
    write_wake_deque: Option<Arc<Mutex<VecDeque<Waker>>>>,
}
/// `SmolStack` specialized over each supported device back-end, so callers
/// can hold one type regardless of the underlying device.
pub enum SmolStackWithDevice {
    VirtualTun(SmolStack<VirtualTunDevice>),
    Tun(SmolStack<TunDevice>),
    Tap(SmolStack<TapDevice>),
}
impl SmolStackWithDevice {
    /// Spawns the background "stack_thread" that drives the stack:
    /// poll -> spin sockets -> wake one pending writer -> sleep 300 ms,
    /// until `should_stop()` becomes true.
    ///
    /// NOTE(review): `get_write_wake_deque` unwraps an `Option` that is
    /// `None` for Tun/Tap stacks, so `run` panics for those — confirm it is
    /// only used with the VPN/VirtualTun construction path.
    pub fn run(stack: Arc<Mutex<Self>>) {
        std::thread::Builder::new()
            .name("stack_thread".to_string())
            .spawn(move || {
                let stack = stack.clone();
                let write_wake_deque_ = block_on(stack.clone().lock()).get_write_wake_deque();
                loop {
                    // Advance the interface; any poll error aborts the thread.
                    block_on(stack.clone().lock()).poll().unwrap();
                    // Deliver pending received data (the Result is discarded).
                    block_on(stack.clone().lock()).spin_all();
                    //TODO: use wake deque future mutex or normal mutex?
                    let waker = block_on(write_wake_deque_.clone().lock()).pop_front();
                    match waker {
                        Some(waker) => {
                            //info!("waking from write wake deque");
                            waker.wake()
                        }
                        None => {}
                    }
                    std::thread::sleep(std::time::Duration::from_millis(300));
                    if block_on(stack.clone().lock()).should_stop() {
                        info!("end of stack_thread");
                        break;
                    }
                }
            })
            .unwrap();
    }
}
impl SmolStackWithDevice {
#[cfg(feature = "vpn")]
pub fn new_from_vpn(
interface_name: &str,
//address: Option<IpCidr>,
default_v4_gateway: Option<Ipv4Address>,
default_v6_gateway: Option<Ipv6Address>,
mtu: Option<usize>,
ip_addrs: Option<Vec<IpCidr>>,
vpn_client: Arc<Mutex<dyn VpnClient + Send>>,
) -> SmolStackWithDevice {
let mtu = mtu.unwrap_or(DEFAULT_MTU);
let vpn_client_ = vpn_client.clone();
let on_virtual_tun_read = Arc::new(
move |buffer: &mut [u8]| -> std::result::Result<usize, VirtualTunReadError> {
let mut vpn_client_ = block_on(vpn_client_.lock());
let mut size = 0;
match vpn_client_.phy_receive(None, &mut |openvpn_buffer: &[u8]| {
for i in 0..openvpn_buffer.len() {
buffer[i] = openvpn_buffer[i]
}
size = openvpn_buffer.len();
}) {
Ok(()) => Ok(size),
Err(PhyReceiveError::NoDataAvailable) => Err(VirtualTunReadError::WouldBlock),
//TODO: do not panic below and treat the error?
Err(PhyReceiveError::Unknown(error_string)) => {
panic!("openvpn_client.receive() unknown error: {}", error_string);
}
}
},
);
let vpn_client_ = vpn_client.clone();
let on_virtual_tun_write = Arc::new(
move |f: &mut dyn FnMut(&mut [u8]),
size: usize|
-> std::result::Result<(), VirtualTunWriteError> {
let mut buffer = vec![0; size];
f(buffer.as_mut_slice());
match block_on(vpn_client_.lock()).phy_send(buffer.as_slice()) {
Ok(()) => Ok(()),
Err(PhySendError::Unknown(error_string)) => {
//TODO: not panic here, just treat the error
panic!(error_string);
}
}
},
);
let on_poll_wait = Arc::new(
|duration: std::time::Duration| -> std::result::Result<(), PollWaitError> { Ok(()) },
);
let read_wake_deque = Arc::new(Mutex::new(VecDeque::<Waker>::new()));
let write_wake_deque = Arc::new(Mutex::new(VecDeque::<Waker>::new()));
//let data_from_socket_ = data_from_socket.clone();
let read_wake_deque_ = read_wake_deque.clone();
let read_wake_deque_ = read_wake_deque.clone();
let on_dns_udp_data = Arc::new(
move |buffer: &[u8], address: Option<IpEndpoint>| -> std::result::Result<(), ()> {
info!("on_dns_udp_data for buffer with len {}", buffer.len());
//data_from_socket_.lock().unwrap().push_back((buffer.iter().cloned().collect(), address));
let waker = block_on(read_wake_deque_.lock()).pop_front();
match waker {
Some(waker) => {
info!("waking from read wake deque");
waker.wake();
}
None => {}
}
Ok(())
},
);
let device = VirtualTunDevice::new(
interface_name,
on_virtual_tun_read,
on_virtual_tun_write,
mtu,
)
.unwrap();
let default_ip_address = IpCidr::new(IpAddress::v4(192, 168, 69, 2), 24);
let mut routes = Routes::new(BTreeMap::new());
let default_v4_gateway = default_v4_gateway.unwrap_or(Ipv4Address::new(192, 168, 69, 100));
routes
.add_default_ipv4_route(default_v4_gateway)
.unwrap();
if default_v6_gateway.is_some() {
//TODO: find a good ipv6 to use
let default_v6_gateway =
default_v6_gateway.unwrap_or(Ipv6Address::new(1, 1, 1, 1, 1, 1, 1, 1));
routes
.add_default_ipv6_route(default_v6_gateway)
.unwrap();
}
let ip_addrs = ip_addrs.unwrap_or(vec![default_ip_address]);
let interface = InterfaceBuilder::new(device)
.ip_addrs(ip_addrs)
.routes(routes)
.finalize();
let socket_set = SocketSet::new(vec![]);
SmolStackWithDevice::VirtualTun(SmolStack {
sockets: socket_set,
interface: interface,
socket_handles: HashMap::new(),
should_stack_thread_stop: Arc::new(AtomicBool::new(false)),
read_wake_deque: Some(read_wake_deque.clone()),
write_wake_deque: Some(write_wake_deque.clone()),
})
}
#[cfg(feature = "tap")]
pub fn new_tap(
interface_name: &str,
address: Option<IpCidr>,
default_v4_gateway: Option<Ipv4Address>,
default_v6_gateway: Option<Ipv6Address>,
mtu: Option<usize>,
ip_addrs: Option<Vec<IpCidr>>,
) -> SmolStackWithDevice {
let mtu = mtu.unwrap_or(DEFAULT_MTU);
let device = TapDevice::new(interface_name).unwrap();
let default_ip_address = address.unwrap_or(IpCidr::new(IpAddress::v4(192, 168, 69, 2), 24));
let mut routes = Routes::new(BTreeMap::new());
let ip_addrs = ip_addrs.unwrap_or(vec![default_ip_address]);
let default_v4_gateway = default_v4_gateway.unwrap_or(Ipv4Address::new(192, 168, 69, 100));
routes
.add_default_ipv4_route(default_v4_gateway)
.unwrap();
if default_v6_gateway.is_some() {
//TODO: find a good ipv6 to use
let default_v6_gateway =
default_v6_gateway.unwrap_or(Ipv6Address::new(1, 1, 1, 1, 1, 1, 1, 1));
routes
.add_default_ipv6_route(default_v6_gateway)
.unwrap();
}
let interface = InterfaceBuilder::new(device)
.ip_addrs(ip_addrs)
.routes(routes)
.finalize();
let socket_set = SocketSet::new(vec![]);
SmolStackWithDevice::Tap(SmolStack {
sockets: socket_set,
interface: interface,
socket_handles: HashMap::new(),
should_stack_thread_stop: Arc::new(AtomicBool::new(false)),
read_wake_deque: None,
write_wake_deque: None,
})
}
#[cfg(feature = "tun")]
pub fn new_tun(
interface_name: &str,
address: Option<IpCidr>,
default_v4_gateway: Option<Ipv4Address>,
default_v6_gateway: Option<Ipv6Address>,
mtu: Option<usize>,
ip_addrs: Option<Vec<IpCidr>>,
) -> SmolStackWithDevice {
let mtu = mtu.unwrap_or(DEFAULT_MTU);
let device = TunDevice::new(interface_name).unwrap();
let default_ip_address = address.unwrap_or(IpCidr::new(IpAddress::v4(192, 168, 69, 2), 24));
let mut routes = Routes::new(BTreeMap::new());
let ip_addrs = ip_addrs.unwrap_or(vec![default_ip_address]);
let default_v4_gateway = default_v4_gateway.unwrap_or(Ipv4Address::new(192, 168, 69, 100));
routes
.add_default_ipv4_route(default_v4_gateway)
.unwrap();
if default_v6_gateway.is_some() {
//TODO: find a good ipv6 to use
let default_v6_gateway =
default_v6_gateway.unwrap_or(Ipv6Address::new(1, 1, 1, 1, 1, 1, 1, 1));
routes
.add_default_ipv6_route(default_v6_gateway)
.unwrap();
}
let interface = InterfaceBuilder::new(device)
.ip_addrs(ip_addrs)
.routes(routes)
.finalize();
let socket_set = SocketSet::new(vec![]);
SmolStackWithDevice::Tun(SmolStack {
sockets: socket_set,
interface: interface,
socket_handles: HashMap::new(),
should_stack_thread_stop: Arc::new(AtomicBool::new(false)),
read_wake_deque: None,
write_wake_deque: None,
})
}
    /// Returns the shared write-waker deque.
    ///
    /// NOTE(review): unwraps the `Option`, so this panics for Tun/Tap
    /// stacks (their deques are `None`); only the VPN/VirtualTun
    /// constructor populates them.
    fn get_write_wake_deque(&self) -> Arc<Mutex<VecDeque<Waker>>> {
        for_each_device!(self, |stack| stack
            .write_wake_deque
            .as_ref()
            .unwrap()
            .clone())
    }
    /// True once someone has asked the background stack thread to stop.
    fn should_stop(&self) -> bool {
        for_each_device!(self, |stack| {
            stack.should_stack_thread_stop.load(Ordering::Relaxed)
        })
    }
    /// Creates a TCP socket with 65000-byte rx/tx buffers, registers its
    /// optional data callback, and returns the smoltcp handle.
    pub fn add_tcp_socket(
        &mut self,
        //stack: Arc<Mutex<Self>>,
        on_tcp_socket_data: Option<OnTcpSocketData>,
    ) -> Result<SocketHandle, ()> {
        for_each_device!(self, |stack_| {
            let rx_buffer = TcpSocketBuffer::new(vec![0; 65000]);
            let tx_buffer = TcpSocketBuffer::new(vec![0; 65000]);
            let socket = TcpSocket::new(rx_buffer, tx_buffer);
            let handle = stack_.sockets.add(socket);
            // Remember the socket's type and callback for spin_all().
            stack_.socket_handles.insert(
                handle,
                (SocketType::TCP, OnSocketData::tcp(on_tcp_socket_data)),
            );
            Ok(handle)
        })
    }
pub fn add_udp_socket(
&mut self,
//stack: Arc<Mutex<Self>>,
on_udp_socket_data: Option<OnUdpSocketData>,
) -> Result<SocketHandle, ()> {
for_each_device!(self, |stack_| {
let rx_buffer = UdpSocketBuffer::new(Vec::new(), vec![0; 1024]);
let tx_buffer = UdpSocketBuffer::new(Vec::new(), vec![0; 1024]);
let socket = UdpSocket::new(rx_buffer, tx_buffer);
let handle = stack_.sockets.add(socket);
stack_.socket_handles.insert(
handle,
(SocketType::TCP, OnSocketData::udp(on_udp_socket_data)),
);
Ok(handle)
})
}
    /// Queues `data` on the TCP socket's send buffer; returns bytes queued.
    pub fn tcp_socket_send(
        &mut self,
        socket_handle: SocketHandle,
        data: &[u8],
    ) -> Result<usize, SocketSendError> {
        for_each_device!(self, |stack| {
            let mut socket = stack.sockets.get::<TcpSocket>(socket_handle);
            socket.send_slice(data).map_err(|e| e.into())
        })
    }
    /// Queues one UDP datagram to `addr`; reports the full `data.len()` on
    /// success (the datagram is sent whole or not at all).
    pub fn udp_socket_send(
        &mut self,
        socket_handle: SocketHandle,
        data: &[u8],
        addr: SocketAddr,
    ) -> Result<usize, SocketSendError> {
        for_each_device!(self, |stack| {
            let mut socket = stack.sockets.get::<UdpSocket>(socket_handle);
            socket
                .send_slice(data, addr.into())
                .map(|_| data.len())
                .map_err(|e| e.into())
        })
    }
    /// Initiates a TCP connection to `addr` using local port `src_port`.
    fn tcp_connect(
        &mut self,
        socket_handle: SocketHandle,
        addr: SocketAddr,
        src_port: u16,
    ) -> Result<(), SocketConnectionError> {
        for_each_device!(self, |stack| {
            let mut socket = stack.sockets.get::<TcpSocket>(socket_handle);
            socket.connect(addr, src_port).map_err(|e| e.into())
        })
    }
pub fn poll(&mut self) -> SmoltcpResult<bool> {
for_each_device!(self, |stack| {
let timestamp = Instant::now();
match stack.interface.poll(&mut stack.sockets, timestamp) {
Ok(b) => Ok(b),
Err(e) => {
panic!("{}", e);
}
}
})
}
    /// Drains any received TCP data for `socket_handle`, handing each chunk
    /// to `on_tcp_socket_data`. The whole chunk is always marked consumed;
    /// callback errors are deliberately ignored (delivery is best-effort).
    pub fn spin_tcp<'b, DeviceT: for<'d> Device<'d>>(
        stack: &mut SmolStack<DeviceT>,
        socket_handle: &SocketHandle,
        on_tcp_socket_data: OnTcpSocketData,
    ) -> std::result::Result<(), SpinError> {
        let mut socket = stack.sockets.get::<TcpSocket>(socket_handle.clone());
        if socket.can_recv() {
            socket
                .recv(|data| {
                    match on_tcp_socket_data(data) {
                        Ok(_) => {}
                        Err(_) => {}
                    }
                    // Tell smoltcp the entire buffer was consumed.
                    (data.len(), ())
                })
                .unwrap();
        }
        Ok(())
    }
    /// Drains one received UDP datagram for `socket_handle` and forwards
    /// the payload plus source address/port to `on_udp_socket_data`.
    pub fn spin_udp<DeviceT: for<'d> Device<'d>>(
        stack: &mut SmolStack<DeviceT>,
        socket_handle: &SocketHandle,
        on_udp_socket_data: OnUdpSocketData,
    ) -> std::result::Result<(), SpinError> {
        let mut socket = stack.sockets.get::<UdpSocket>(socket_handle.clone());
        if socket.can_recv() {
            let (buffer, endpoint) = socket.recv().unwrap();
            let addr = endpoint.addr;
            let port = endpoint.port;
            // Convert smoltcp's address type into std's IpAddr.
            let addr: IpAddr = match addr {
                IpAddress::Ipv4(ipv4) => IpAddr::V4(ipv4.into()),
                IpAddress::Ipv6(ipv6) => IpAddr::V6(ipv6.into()),
                // Unspecified/other address kinds cannot be represented.
                _ => return Err(SpinError::Unknown("spin address conversion error".into())),
            };
            match on_udp_socket_data(buffer, addr, port) {
                Ok(_) => {}
                Err(_) => return Err(SpinError::NoCallback),
            }
        }
        Ok(())
    }
    /// Runs `spin_tcp`/`spin_udp` over every registered socket, delivering
    /// pending received data to the per-socket callbacks.
    pub fn spin_all(&mut self) -> std::result::Result<(), SpinError> {
        for_each_device!(self, |stack| {
            // Collect handles first so the map isn't borrowed while the
            // sockets are spun below.
            let mut smol_socket_handles = Vec::<SocketHandle>::new();
            for (smol_socket_handle, _) in stack.socket_handles.iter() {
                smol_socket_handles.push(smol_socket_handle.clone());
            }
            for smol_socket_handle in smol_socket_handles.iter_mut() {
                let (socket_type, on_socket_data) = stack
                    .socket_handles
                    .get(&smol_socket_handle)
                    .ok_or(SpinError::NoSocket)
                    .unwrap();
                match socket_type {
                    SocketType::TCP => {
                        // Panics (unwrap) if no TCP callback was registered.
                        let on_tcp_socket_data = on_socket_data
                            .on_tcp
                            .as_ref()
                            .ok_or(SpinError::NoCallback)
                            .unwrap();
                        // NOTE(review): the spin Result is discarded here.
                        SmolStackWithDevice::spin_tcp(
                            stack,
                            smol_socket_handle,
                            on_tcp_socket_data.clone(),
                        );
                    }
                    SocketType::UDP => {
                        // Panics (unwrap) if no UDP callback was registered.
                        let on_udp_socket_data = on_socket_data
                            .on_udp
                            .as_ref()
                            .ok_or(SpinError::NoCallback)
                            .unwrap();
                        // NOTE(review): the spin Result is discarded here.
                        SmolStackWithDevice::spin_udp(
                            stack,
                            smol_socket_handle,
                            on_udp_socket_data.clone(),
                        );
                    }
                    _ => unimplemented!("socket type not implemented yet"),
                }
            }
        });
        Ok(())
    }
}
/// Async handle to one TCP socket living inside a shared stack.
pub struct SmolSocket {
    socket_handle: SocketHandle,
    stack: Arc<Mutex<SmolStackWithDevice>>,
    // (received packets, wakers of readers waiting for a packet)
    queue: Arc<Mutex<(VecDeque<Vec<u8>>, VecDeque<Waker>)>>,
}
impl SmolSocket {
    /// Registers a new TCP socket on `stack_ref` whose receive callback
    /// copies each packet into an internal queue and wakes one pending
    /// reader (see `AsyncRead::poll_read`).
    pub fn new(
        stack: Arc<Mutex<SmolStackWithDevice>>,
        stack_ref: &mut SmolStackWithDevice,
    ) -> Result<SmolSocket, ()> {
        //TODO: DOES THIS MUTEX BLOCK A FUTURE????
        let queue = Arc::new(Mutex::new((
            VecDeque::<Vec<u8>>::new(),
            VecDeque::<Waker>::new(),
        )));
        let queue_ = queue.clone();
        let on_data = Arc::new(move |data: &[u8]| -> Result<usize, SocketReceiveError> {
            //TODO: can/should I block here?
            let mut queue = block_on(queue_.lock());
            // Buffer the packet and wake the oldest waiting reader, if any.
            queue.0.push_back(data.to_vec());
            if let Some(waker) = queue.1.pop_front() {
                waker.wake();
            }
            Ok(data.len())
        });
        let socket_handle = stack_ref.add_tcp_socket(Some(on_data));
        Ok(SmolSocket {
            socket_handle: socket_handle.map_err(|_| ())?,
            stack: stack.clone(),
            queue: queue.clone(),
        })
    }
}
#[cfg(feature = "async")]
impl AsyncRead for SmolSocket {
    /// Pops one buffered packet into `buf`, or parks this task's waker so
    /// the stack's receive callback can wake it when data arrives.
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut tokio::io::ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        //self.stack.so
        // Acquire the queue lock by polling the lock future once.
        let mut queue = match self.queue.lock().boxed().as_mut().poll(cx) {
            Poll::Pending => return Poll::Pending,
            Poll::Ready(queue) => queue,
        };
        if let Some(packet) = queue.0.pop_front() {
            //TODO: can we always put everything here without error?
            buf.put_slice(packet.as_slice());
            Poll::Ready(Ok(()))
        } else {
            // No data yet: register for a wake-up from the receive callback.
            queue.1.push_back(cx.waker().clone());
            Poll::Pending
        }
    }
}
#[cfg(feature = "async")]
impl AsyncWrite for SmolSocket {
    /// Forwards `buf` to the stack's TCP send buffer.
    ///
    /// NOTE(review): the send result is discarded and `buf.len()` is always
    /// reported as written, even if the stack queued fewer bytes or errored.
    fn poll_write(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<Result<usize, std::io::Error>> {
        let mut stack = match self.stack.lock().boxed().as_mut().poll(cx) {
            Poll::Pending => return Poll::Pending,
            Poll::Ready(stack) => stack,
        };
        //TODO: map error
        stack.tcp_socket_send(self.socket_handle.clone(), buf);
        Poll::Ready(Ok(buf.len()))
    }
    // No userspace buffering beyond the stack itself: flush is a no-op.
    fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), std::io::Error>> {
        Poll::Ready(Ok(()))
    }
    // NOTE(review): completes immediately without closing the TCP socket.
    fn poll_shutdown(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Result<(), std::io::Error>> {
        Poll::Ready(Ok(()))
    }
}
// Alias kept so callers can name the "thread-safe" stack type explicitly.
pub type SafeSmolStackWithDevice = SmolStackWithDevice;
//TODO: instead, implement Send for the C types?
// SAFETY(review): this asserts the contained device/callback internals may
// be moved across threads. That is not proven anywhere in this file — the
// compiler declined to auto-derive `Send` for a reason. Needs an audit.
unsafe impl Send for SafeSmolStackWithDevice {}
#[cfg(feature = "async")]
impl AsyncSocket for SmolSocket {
    /// Queues `data` on the TCP socket via the shared stack lock.
    fn tcp_socket_send<'d>(
        &'d mut self,
        data: &'d [u8],
    ) -> Pin<Box<dyn Future<Output = Result<usize, SocketSendError>> + Send + 'd>> {
        async move {
            self.stack
                .lock()
                .await
                .tcp_socket_send(self.socket_handle, data)
                .map_err(|e| e.into())
        }
        .boxed()
    }
    /// Starts a TCP connection from local `src_port` to `addr`.
    fn tcp_connect<'d>(
        &'d mut self,
        addr: SocketAddr,
        src_port: u16,
    ) -> Pin<Box<dyn Future<Output = Result<(), SocketConnectionError>> + Send + 'd>> {
        async move {
            self.stack
                .lock()
                .await
                .tcp_connect(self.socket_handle, addr, src_port)
                .map_err(|e| e.into())
        }
        .boxed()
    }
    /// Unsupported: reads go through `AsyncRead::poll_read` instead.
    fn tcp_receive<'d>(
        &'d mut self,
        f: &'d dyn Fn(&[u8]),
    ) -> Pin<Box<dyn Future<Output = Result<usize, SocketReceiveError>> + Send + 'd>> {
        unimplemented!("would block arc and then leave it blocked for too long");
    }
    /// Sends one UDP datagram to `addr` via the shared stack lock.
    fn udp_socket_send<'d>(
        &'d mut self,
        data: &'d [u8],
        addr: SocketAddr,
    ) -> Pin<Box<dyn Future<Output = Result<usize, SocketSendError>> + Send + 'd>> {
        async move {
            self.stack
                .lock()
                .await
                .udp_socket_send(self.socket_handle, data, addr)
                .map_err(|e| e.into())
        }
        .boxed()
    }
    /// Unsupported; see `tcp_receive`.
    fn udp_receive<'d>(
        &'d mut self,
        f: &'d dyn Fn(&[u8], SocketAddr, u16),
    ) -> Pin<Box<dyn Future<Output = Result<usize, SocketReceiveError>> + Send + 'd>> {
        unimplemented!("would block arc and then leave it blocked for too long");
    }
}
impl From<SmoltcpError> for SocketSendError {
    /// Maps smoltcp's error into the socket-send error space: `Exhausted`
    /// keeps its identity, everything else is stringified.
    fn from(e: SmoltcpError) -> SocketSendError {
        if let SmoltcpError::Exhausted = e {
            SocketSendError::Exhausted
        } else {
            SocketSendError::Unknown(format!("{}", e))
        }
    }
}
impl From<SmoltcpError> for SocketConnectionError {
    /// Maps smoltcp's error into the connection error space: `Exhausted`
    /// keeps its identity, everything else is stringified.
    fn from(e: SmoltcpError) -> SocketConnectionError {
        if let SmoltcpError::Exhausted = e {
            SocketConnectionError::Exhausted
        } else {
            SocketConnectionError::Unknown(format!("{}", e))
        }
    }
}
|
use crate::{Context, Error};
use poise::serenity_prelude as serenity;
/// Fills `f` with the showcase embed: title, description, a "Links" field,
/// the author line (with avatar when one exists), and the project color.
fn create_embed<'a>(
    f: &'a mut serenity::CreateEmbed,
    author: &serenity::User,
    name: &str,
    description: &str,
    links: &str,
) -> &'a mut serenity::CreateEmbed {
    let embed = f.title(&name).description(&description);
    embed.field("Links", &links, false);
    embed.author(|a| {
        if let Some(avatar_url) = author.avatar_url() {
            a.icon_url(avatar_url);
        }
        a.name(&author.name)
    });
    embed.color(crate::EMBED_COLOR)
}
/// Asks details about your project and then posts it in the #showcase channel
///
/// Starts a prompt where you can enter information about a project you're working on. The bot \
/// will then post your project into the #showcase channel and open a thread to allow for \
/// discussion and feedback.
///
/// If you want to change the text later, edit your message and the bot will propagate the change.
/// You can also delete your message to delete the #showcase entry.
#[poise::command(prefix_command, slash_command)]
pub async fn showcase(ctx: Context<'_>) -> Result<(), Error> {
    // Prompt helper: asks for `query`, then waits up to 10 minutes for the
    // invoking user's next message in this channel. A set of cancel
    // keywords aborts the whole command.
    let ask_the_user = |query| async move {
        poise::say_reply(ctx, format!("Please enter {}:", query)).await?;
        let user_input = ctx
            .author()
            .await_reply(ctx.discord())
            .channel_id(ctx.channel_id())
            .timeout(std::time::Duration::from_secs(10 * 60))
            .await;
        // Timeout -> user-facing error asking them to restart.
        let user_input = user_input.ok_or_else(|| {
            Error::from(format!(
                "You didn't enter {}. Please run the command again to restart",
                query
            ))
        })?;
        match user_input.content.to_ascii_lowercase().trim() {
            "abort" | "stop" | "cancel" | "break" | "terminate" | "exit" | "quit" => {
                return Err(Error::from("Canceled the operation"))
            }
            _ => {}
        }
        Ok(user_input)
    };
    // Explain the flow up front: edits/deletes of the answers propagate.
    poise::say_reply(
        ctx,
        format!(
            "Answer the following prompts to generate a <#{0}> entry. If you change your mind \
            later, you can edit or delete your messages, and the <#{0}> entry will be edited \
            or deleted accordingly.",
            ctx.data().showcase_channel.0
        ),
    )
    .await?;
    // Collect the three pieces of the showcase entry in order.
    let name = ask_the_user("the name of your project").await?;
    let description = ask_the_user("a description of what the project is about").await?;
    let links =
        ask_the_user("URLs related to your project, like a crates.io or repository link").await?;
    // Post the embed into the showcase channel with mentions disabled.
    let showcase_msg = ctx
        .data()
        .showcase_channel
        .send_message(ctx.discord(), |f| {
            f.allowed_mentions(|f| f).embed(|f| {
                create_embed(
                    f,
                    ctx.author(),
                    &name.content,
                    &description.content,
                    &links.content,
                )
            })
        })
        .await?;
    // TODO: Use ChannelId::create_public_thread once that's available
    // Thread creation is best-effort: a failure only logs to stdout.
    if let Err(e) = ctx
        .discord()
        .http
        .create_public_thread(
            showcase_msg.channel_id.0,
            showcase_msg.id.0,
            &std::iter::FromIterator::from_iter(std::iter::once((
                String::from("name"),
                serde_json::Value::String(name.content.clone()),
            ))),
        )
        .await
    {
        println!(
            "Couldn't create associated thread for showcase entry: {}",
            e
        )
    }
    // Persist the input-message -> showcase-entry mapping so later edits or
    // deletions of the user's answers can be mirrored. Discord ids (u64)
    // are cast to i64 for SQLite storage.
    {
        let output_message = showcase_msg.id.0 as i64;
        let output_channel = showcase_msg.channel_id.0 as i64;
        let input_channel = ctx.channel_id().0 as i64;
        let name_input_message = name.id.0 as i64;
        let description_input_message = description.id.0 as i64;
        let links_input_message = links.id.0 as i64;
        sqlx::query!(
            "INSERT INTO showcase (
                output_message,
                output_channel,
                input_channel,
                name_input_message,
                description_input_message,
                links_input_message
            ) VALUES (?, ?, ?, ?, ?, ?)",
            output_message,
            output_channel,
            input_channel,
            name_input_message,
            description_input_message,
            links_input_message,
        )
        .execute(&ctx.data().database)
        .await?;
    }
    poise::say_reply(
        ctx,
        format!(
            "Your project was successfully posted in <#{}>",
            ctx.data().showcase_channel.0
        ),
    )
    .await?;
    Ok(())
}
/// If `updated_message_id` is one of the three input messages of a showcase
/// entry, re-reads all three inputs and rebuilds the showcase embed.
pub async fn try_update_showcase_message(
    ctx: &serenity::Context,
    data: &crate::Data,
    updated_message_id: serenity::MessageId,
) -> Result<(), Error> {
    // Discord id (u64) cast to i64 for the SQLite query parameter.
    let man = updated_message_id.0 as i64;
    if let Some(entry) = sqlx::query!(
        "SELECT
            output_message,
            output_channel,
            input_channel,
            name_input_message,
            description_input_message,
            links_input_message
        FROM showcase WHERE ? IN (name_input_message, description_input_message, links_input_message)",
        man
    )
    .fetch_optional(&data.database)
    .await?
    {
        let input_channel = serenity::ChannelId(entry.input_channel as u64);
        // Fetch the current contents of all three source messages.
        let name_msg = input_channel
            .message(ctx, entry.name_input_message as u64)
            .await?;
        let name = &name_msg.content;
        let description = input_channel
            .message(ctx, entry.description_input_message as u64)
            .await?
            .content;
        let links = input_channel
            .message(ctx, entry.links_input_message as u64)
            .await?
            .content;
        // Rebuild the embed in place on the existing showcase message.
        serenity::ChannelId(entry.output_channel as u64).edit_message(
            ctx,
            entry.output_message as u64,
            |f| f.embed(|f| create_embed(f, &name_msg.author, name, &description, &links)),
        ).await?;
    }
    Ok(())
}
/// If `deleted_message_id` was an input message of a showcase entry,
/// deletes the corresponding showcase post.
pub async fn try_delete_showcase_message(
    ctx: &serenity::Context,
    data: &crate::Data,
    deleted_message_id: serenity::MessageId,
) -> Result<(), Error> {
    // Discord id (u64) cast to i64 for the SQLite query parameter.
    let deleted_message_id = deleted_message_id.0 as i64;
    if let Some(entry) = sqlx::query!(
        "SELECT
            output_message,
            output_channel
        FROM showcase WHERE ? IN (name_input_message, description_input_message, links_input_message)",
        deleted_message_id
    )
    .fetch_optional(&data.database)
    .await?
    {
        // NOTE(review): only the Discord message is deleted here; the
        // showcase DB row is not removed — confirm that's intended.
        serenity::ChannelId(entry.output_channel as u64).delete_message(ctx, entry.output_message as u64).await?;
    }
    Ok(())
}
|
use surf::http::Method;
use crate::endpoints::load_balancing::LoadBalancer;
use crate::framework::endpoint::Endpoint;
use crate::framework::ApiResultTraits;
/// List Load Balancers
/// https://api.cloudflare.com/#load-balancers-list-load-balancers
// Request struct for GET `zones/:zone_identifier/load_balancers`.
#[derive(Debug)]
pub struct ListLoadBalancers<'a> {
    /// The Zone to list Load Balancers from.
    pub zone_identifier: &'a str,
}
impl<'a> Endpoint<Vec<LoadBalancer>, ()> for ListLoadBalancers<'a> {
    /// Listing is a read-only call.
    fn method(&self) -> Method {
        Method::Get
    }
    /// `zones/:zone_identifier/load_balancers`
    fn path(&self) -> String {
        let zone = self.zone_identifier;
        format!("zones/{}/load_balancers", zone)
    }
}
/// Lets `Vec<LoadBalancer>` be used directly as an API result payload.
impl ApiResultTraits for Vec<LoadBalancer> {}
|
// Copyright 2015-2016 Joe Neeman.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt::Debug;
//use dfa::{Dfa, PrefixPart, RetTrait};
use dfa::PrefixPart;
use itertools::Itertools;
use memchr::memchr;
use runner::Engine;
use runner::program::TableInsts;
/// Matches by running a forward DFA to find the match end, then a backward
/// DFA from there to recover the match start.
#[derive(Clone, Debug)]
pub struct ForwardBackwardEngine<Ret> {
    // Forward pass; its "return" is (backward start state, look-ahead bytes).
    forward: TableInsts<(usize, u8)>,
    // Backward pass; yields the caller's return value.
    backward: TableInsts<Ret>,
    // Fast scanner for candidate match start positions.
    prefix: Prefix,
}
impl<Ret: Copy + Debug> ForwardBackwardEngine<Ret> {
    pub fn new(forward: TableInsts<(usize, u8)>, prefix: Prefix, backward: TableInsts<Ret>) -> Self {
        ForwardBackwardEngine {
            forward: forward,
            backward: backward,
            prefix: prefix,
        }
    }
    /// Repeatedly: ask `search` for the next candidate start at or after
    /// `pos`, run the forward DFA from it, and on a hit run the backward
    /// DFA to locate the true match start. Returns (start, end, ret).
    fn find_with_searcher<SearchFn>(&self, input: &[u8], search: SearchFn)
    -> Option<(usize, usize, Ret)>
    where SearchFn: Fn(&[u8], usize) -> Option<usize> {
        let mut pos = 0;
        while let Some(start) = search(input, pos) {
            match self.forward.find_from(input, start, 0) {
                Ok((end, (rev_state, look_ahead))) => {
                    // The forward pass may have read `look_ahead` bytes past
                    // the real match end; rewind before running backward.
                    let rev_pos = end.saturating_sub(look_ahead as usize);
                    let (start_pos, ret) = self.backward
                        .longest_backward_find_from(input, rev_pos, rev_state)
                        .expect("BUG: matched forward but failed to match backward");
                    return Some((start_pos, rev_pos, ret));
                },
                Err(end) => {
                    // No match starting here; resume searching after `end`.
                    pos = end + 1;
                },
            }
        }
        None
    }
}
impl<Ret: Copy + Debug + 'static> Engine<Ret> for ForwardBackwardEngine<Ret> {
    /// Finds the leftmost match in `s`, picking the candidate-scanning
    /// strategy that matches the compiled `Prefix`.
    fn find(&self, s: &str) -> Option<(usize, usize, Ret)> {
        let input = s.as_bytes();
        if self.forward.is_empty() {
            return None;
        }
        match self.prefix {
            // No usable prefix: try every position in turn.
            Prefix::Empty => self.find_with_searcher(
                input,
                |s, pos| if pos <= s.len() { Some(pos) } else { None }
            ),
            // Scan for any byte in `bytes`, `offset` bytes into the match.
            Prefix::ByteSet { ref bytes, offset } => self.find_with_searcher(
                input,
                |s, pos| if pos + offset <= s.len() {
                    s[(pos + offset)..].iter().position(|c| bytes[*c as usize]).map(|x| x + pos)
                } else {
                    None
                }
            ),
            // Scan for one specific byte using memchr.
            Prefix::Byte { byte, offset } => self.find_with_searcher(
                input,
                |s, pos| if pos + offset <= s.len() {
                    memchr(byte, &input[(pos + offset)..]).map(|x| x + pos)
                } else {
                    None
                }
            ),
            //Prefix::ByteBackwards { .. } => unimplemented!(),
        }
    }
    fn clone_box(&self) -> Box<Engine<Ret>> {
        Box::new(self.clone())
    }
}
/// A `Prefix` is the first part of a DFA. Anything matching the DFA should start with
/// something matching the `Prefix`.
///
/// The purpose of a `Prefix` is that scanning through the input looking for the `Prefix` should be
/// much faster than running the DFA naively.
#[derive(Clone, Debug)]
pub enum Prefix {
    /// Matches every position.
    Empty,
    /// Matches a single byte in a particular set and then rewinds some number of bytes.
    ByteSet { bytes: Vec<bool>, offset: usize },
    /// Matches a specific byte and then rewinds some number of bytes.
    Byte { byte: u8, offset: usize },
    // Matches a specific byte and then runs a DFA backwards.
    //ByteBackwards { byte: u8, rev: Dfa<()> },
}
// How big we allow the byte sets to be. In order for byte sets to be a performance win, finding a
// byte in the set needs to be sufficiently rare; therefore, we only use small sets. There might be
// room for a better heuristic, though: we could use large sets that only have rare bytes.
const MAX_BYTE_SET_SIZE: usize = 16;
impl Prefix {
    /// If all parts share a nonempty common prefix, returns a `Prefix::Byte`
    /// that searches for the last byte of that prefix's first full codepoint.
    fn byte_prefix(parts: &[PrefixPart]) -> Option<Prefix> {
        // Longest common prefix of two byte strings.
        fn common_prefix<'a>(s1: &'a [u8], s2: &'a [u8]) -> &'a [u8] {
            let prefix_len = s1.iter().zip(s2.iter())
                .take_while(|pair| pair.0 == pair.1)
                .count();
            &s1[0..prefix_len]
        }
        let mut parts = parts.iter();
        if let Some(first) = parts.next() {
            let lit = parts.fold(&first.0[..], |acc, p| common_prefix(acc, &p.0));
            if !lit.is_empty() {
                // See if the common prefix contains a full codepoint. If it does, search for the last
                // byte of that codepoint.
                // (The count of leading one-bits in a UTF-8 lead byte gives
                // the sequence length; minus one indexes its last byte.)
                let cp_last_byte = ((!lit[0]).leading_zeros() as usize).saturating_sub(1);
                if cp_last_byte < lit.len() {
                    return Some(Prefix::Byte { byte: lit[cp_last_byte], offset: cp_last_byte });
                }
            }
        }
        None
    }
    /// If every part's critical byte sits at the same offset and the set of
    /// critical bytes is small enough, returns a `Prefix::ByteSet`.
    fn byte_set_prefix(parts: &[PrefixPart]) -> Option<Prefix> {
        let crit_byte_pos = |p: &PrefixPart| ((!p.0[0]).leading_zeros() as usize).saturating_sub(1);
        let crit_byte_posns: Vec<usize> = parts.iter().map(crit_byte_pos).dedup().collect();
        if crit_byte_posns.len() == 1 {
            let crit_byte = crit_byte_posns[0];
            if parts.iter().all(|x| x.0.len() > crit_byte) {
                let mut crit_bytes: Vec<u8> = parts.iter().map(|x| x.0[crit_byte]).collect();
                crit_bytes.sort();
                crit_bytes.dedup();
                if crit_bytes.len() <= MAX_BYTE_SET_SIZE {
                    // 256-entry membership table indexed by byte value.
                    let mut ret = vec![false; 256];
                    for &b in &crit_bytes {
                        ret[b as usize] = true;
                    }
                    return Some(Prefix::ByteSet { bytes: ret, offset: crit_byte });
                }
            }
        }
        None
    }
    /*
    pub fn from_dfa<Ret: RetTrait>(dfa: &Dfa<Ret>) -> Prefix {
        let parts = dfa.prefix_strings();
        let first_try = Prefix::from_parts(parts);
        /*
        match first_try {
            Prefix::Byte {..} => first_try,
            _ => {
                let crit_strings = dfa.critical_strings();
                unimplemented!();
                first_try
            },
        }
        */
        unimplemented!();
    }
    */
    /// Converts a set of `PrefixParts` into a `Prefix` that matches any of the strings.
    /// Empty parts are dropped; `Byte` is preferred over `ByteSet`, which is
    /// preferred over `Empty`.
    pub fn from_parts(mut parts: Vec<PrefixPart>) -> Prefix {
        parts.retain(|x| !x.0.is_empty());
        if let Some(pref) = Prefix::byte_prefix(&parts) {
            pref
        } else if let Some(pref) = Prefix::byte_set_prefix(&parts) {
            pref
        } else {
            Prefix::Empty
        }
    }
}
#[cfg(test)]
mod tests {
    use dfa::PrefixPart;
    use super::*;
    // Convenience constructor: builds a `Prefix` from string literals, tagging each
    // part with its index in the input vector.
    fn pref(strs: Vec<&str>) -> Prefix {
        Prefix::from_parts(
            strs.into_iter()
                .enumerate()
                .map(|(i, s)| PrefixPart(s.as_bytes().to_vec(), i))
                .collect())
    }
    #[test]
    fn test_prefix_choice() {
        use super::Prefix::*;
        // No (non-empty) parts: nothing to search for.
        assert!(matches!(pref(vec![]), Empty));
        assert!(matches!(pref(vec![""]), Empty));
        // A shared literal prefix yields a single-byte search.
        assert!(matches!(pref(vec!["a"]), Byte {..}));
        assert!(matches!(pref(vec!["", "a", ""]), Byte {..}));
        assert!(matches!(pref(vec!["abc"]), Byte {..}));
        assert!(matches!(pref(vec!["abc", ""]), Byte {..}));
        // Distinct first bytes fall back to a byte-set search.
        assert!(matches!(pref(vec!["a", "b", "c"]), ByteSet {..}));
        assert!(matches!(pref(vec!["a", "b", "", "c"]), ByteSet {..}));
        assert!(matches!(pref(vec!["a", "baa", "", "c"]), ByteSet {..}));
        assert!(matches!(pref(vec!["ab", "baa", "", "cb"]), ByteSet {..}));
        // A common first byte is preferred over a byte set.
        assert!(matches!(pref(vec!["ab", "aaa", "", "acb"]), Byte {..}));
        assert!(matches!(pref(vec!["ab", "abc", "abd"]), Byte {..}));
    }
}
|
#[doc = "Reader of register SCAN_PARAM"]
pub type R = crate::R<u32, super::SCAN_PARAM>;
#[doc = "Writer for register SCAN_PARAM"]
pub type W = crate::W<u32, super::SCAN_PARAM>;
#[doc = "Register SCAN_PARAM `reset()`'s with value 0"]
impl crate::ResetValue for super::SCAN_PARAM {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Every field of SCAN_PARAM comes out of reset cleared.
        0
    }
}
/// Reader of field `TX_ADDR`
pub type TX_ADDR_R = crate::R<bool, bool>;
/// Write proxy for field `TX_ADDR`
pub struct TX_ADDR_W<'a> {
    w: &'a mut W,
}
impl<'a> TX_ADDR_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // TX_ADDR occupies bit 0 of SCAN_PARAM.
        const MASK: u32 = 0x01;
        self.w.bits &= !MASK;
        self.w.bits |= u32::from(value) & MASK;
        self.w
    }
}
/// Reader of field `SCAN_TYPE`
pub type SCAN_TYPE_R = crate::R<u8, u8>;
/// Write proxy for field `SCAN_TYPE`
pub struct SCAN_TYPE_W<'a> {
    w: &'a mut W,
}
impl<'a> SCAN_TYPE_W<'a> {
    /// Writes raw bits to the field
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // SCAN_TYPE occupies bits 1:2 of SCAN_PARAM.
        const MASK: u32 = 0x03;
        const OFFSET: u32 = 1;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= (u32::from(value) & MASK) << OFFSET;
        self.w
    }
}
/// Reader of field `SCAN_FILT_POLICY`
pub type SCAN_FILT_POLICY_R = crate::R<u8, u8>;
/// Write proxy for field `SCAN_FILT_POLICY`
pub struct SCAN_FILT_POLICY_W<'a> {
    w: &'a mut W,
}
impl<'a> SCAN_FILT_POLICY_W<'a> {
    /// Writes raw bits to the field
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // SCAN_FILT_POLICY occupies bits 3:4 of SCAN_PARAM.
        const MASK: u32 = 0x03;
        const OFFSET: u32 = 3;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= (u32::from(value) & MASK) << OFFSET;
        self.w
    }
}
/// Reader of field `DUP_FILT_EN`
pub type DUP_FILT_EN_R = crate::R<bool, bool>;
/// Write proxy for field `DUP_FILT_EN`
pub struct DUP_FILT_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> DUP_FILT_EN_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // DUP_FILT_EN occupies bit 5 of SCAN_PARAM.
        const MASK: u32 = 0x01;
        const OFFSET: u32 = 5;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= (u32::from(value) & MASK) << OFFSET;
        self.w
    }
}
/// Reader of field `DUP_FILT_CHK_ADV_DIR`
pub type DUP_FILT_CHK_ADV_DIR_R = crate::R<bool, bool>;
/// Write proxy for field `DUP_FILT_CHK_ADV_DIR`
pub struct DUP_FILT_CHK_ADV_DIR_W<'a> {
    w: &'a mut W,
}
impl<'a> DUP_FILT_CHK_ADV_DIR_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // DUP_FILT_CHK_ADV_DIR occupies bit 6 of SCAN_PARAM.
        const MASK: u32 = 0x01;
        const OFFSET: u32 = 6;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= (u32::from(value) & MASK) << OFFSET;
        self.w
    }
}
/// Reader of field `SCAN_RSP_ADVA_CHECK`
pub type SCAN_RSP_ADVA_CHECK_R = crate::R<bool, bool>;
/// Write proxy for field `SCAN_RSP_ADVA_CHECK`
pub struct SCAN_RSP_ADVA_CHECK_W<'a> {
    w: &'a mut W,
}
impl<'a> SCAN_RSP_ADVA_CHECK_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // SCAN_RSP_ADVA_CHECK occupies bit 7 of SCAN_PARAM.
        const MASK: u32 = 0x01;
        const OFFSET: u32 = 7;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= (u32::from(value) & MASK) << OFFSET;
        self.w
    }
}
/// Reader of field `SCAN_RCV_IA_IN_PRIV`
pub type SCAN_RCV_IA_IN_PRIV_R = crate::R<bool, bool>;
/// Write proxy for field `SCAN_RCV_IA_IN_PRIV`
pub struct SCAN_RCV_IA_IN_PRIV_W<'a> {
    w: &'a mut W,
}
impl<'a> SCAN_RCV_IA_IN_PRIV_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // SCAN_RCV_IA_IN_PRIV occupies bit 8 of SCAN_PARAM.
        const MASK: u32 = 0x01;
        const OFFSET: u32 = 8;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= (u32::from(value) & MASK) << OFFSET;
        self.w
    }
}
/// Reader of field `SCAN_RPT_PEER_NRPA_ADDR_IN_PRIV`
pub type SCAN_RPT_PEER_NRPA_ADDR_IN_PRIV_R = crate::R<bool, bool>;
/// Write proxy for field `SCAN_RPT_PEER_NRPA_ADDR_IN_PRIV`
pub struct SCAN_RPT_PEER_NRPA_ADDR_IN_PRIV_W<'a> {
    w: &'a mut W,
}
impl<'a> SCAN_RPT_PEER_NRPA_ADDR_IN_PRIV_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // SCAN_RPT_PEER_NRPA_ADDR_IN_PRIV occupies bit 9 of SCAN_PARAM.
        const MASK: u32 = 0x01;
        const OFFSET: u32 = 9;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= (u32::from(value) & MASK) << OFFSET;
        self.w
    }
}
// Read accessors: one zero-cost getter per SCAN_PARAM bit-field, each returning a
// typed field-reader over the captured register value.
impl R {
    #[doc = "Bit 0 - Device's own address type. 1 - addr type is random. 0 - addr type is public."]
    #[inline(always)]
    pub fn tx_addr(&self) -> TX_ADDR_R {
        TX_ADDR_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bits 1:2 - 0x00 - passive scanning.(default) 0x01 - active scanning. 0x10 - RFU 0x11 - RFU"]
    #[inline(always)]
    pub fn scan_type(&self) -> SCAN_TYPE_R {
        SCAN_TYPE_R::new(((self.bits >> 1) & 0x03) as u8)
    }
    #[doc = "Bits 3:4 - The scanner filter policy determines how the scanner processes advertising packets. 0x00 - Accept advertising packets from any device. 0x01 - Accept advertising packets from only devices in the whitelist. In the above 2 policies, the directed advertising packets which are not addressed to this device are ignored. 0x10 - Accept all undirected advertising packets and directed advertising packet addressed to this device. 0x11 - Accept undirected advertising packets from devices in the whitelist and directed advertising packet addressed to this device In the above 2 policies, the directed advertising packets where the initiator address is a resolvable private address are accepted. The above 2 policies are extended scanner filter policies."]
    #[inline(always)]
    pub fn scan_filt_policy(&self) -> SCAN_FILT_POLICY_R {
        SCAN_FILT_POLICY_R::new(((self.bits >> 3) & 0x03) as u8)
    }
    #[doc = "Bit 5 - Filter duplicate packets. 1- Duplicate filtering enabled. 0- Duplicate filtering not enabled. This field is derived from the LE_set_scan_enable command."]
    #[inline(always)]
    pub fn dup_filt_en(&self) -> DUP_FILT_EN_R {
        DUP_FILT_EN_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - This bit field is used to specify the Scanner duplicate filter behavior for ADV_DIRECT_IND packet when duplicate DUP_FILT_EN is set. This bit is valid only if PRIV_1_2 and PRIV_1_2_SCAN are set. 0 - Do not filter ADV_DIRECT_IND duplicate packets. 1 - Filter ADV_DIRECT_IND duplicate packets"]
    #[inline(always)]
    pub fn dup_filt_chk_adv_dir(&self) -> DUP_FILT_CHK_ADV_DIR_R {
        DUP_FILT_CHK_ADV_DIR_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - This bit field is used to specify the Scanner behavior with respect to ADVA while receiving a SCAN_RSP packet. This bit is valid only if PRIV_1_2 and PRIV_1_2_SCAN are set. 0 - The ADVA in SCAN_RSP packets are not verified 1 - The ADVA in SCAN_RSP packets are verified against ADVA received in ADV packet . If it fails, then abort the packet."]
    #[inline(always)]
    pub fn scan_rsp_adva_check(&self) -> SCAN_RSP_ADVA_CHECK_R {
        SCAN_RSP_ADVA_CHECK_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8 - Scanner behavior when a peer Identity address is received in privacy mode. This bit is valid only if PRIV_1_2 and PRIV_1_2_SCAN are set. 1 - Accept packets with peer identity address not in the Resolving list in privacy mode 0 - Reject packets with peer identity address not in the Resolving list in privacy mode"]
    #[inline(always)]
    pub fn scan_rcv_ia_in_priv(&self) -> SCAN_RCV_IA_IN_PRIV_R {
        SCAN_RCV_IA_IN_PRIV_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - Scanner behavior when a peer Non Resolvable Private Address is received in privacy mode. This bit is valid only if PRIV_1_2 and PRIV_1_2_SCAN are set. This is applicable when whitelist is disabled. 1 - Only report packets with peer NRPA address in privacy mode 0 - Respond packets with peer NRPA address in privacy mode"]
    #[inline(always)]
    pub fn scan_rpt_peer_nrpa_addr_in_priv(&self) -> SCAN_RPT_PEER_NRPA_ADDR_IN_PRIV_R {
        SCAN_RPT_PEER_NRPA_ADDR_IN_PRIV_R::new(((self.bits >> 9) & 0x01) != 0)
    }
}
// Write accessors: each method returns the matching field's write proxy, which
// mutates only that field's bits within the pending register value.
impl W {
    #[doc = "Bit 0 - Device's own address type. 1 - addr type is random. 0 - addr type is public."]
    #[inline(always)]
    pub fn tx_addr(&mut self) -> TX_ADDR_W {
        TX_ADDR_W { w: self }
    }
    #[doc = "Bits 1:2 - 0x00 - passive scanning.(default) 0x01 - active scanning. 0x10 - RFU 0x11 - RFU"]
    #[inline(always)]
    pub fn scan_type(&mut self) -> SCAN_TYPE_W {
        SCAN_TYPE_W { w: self }
    }
    #[doc = "Bits 3:4 - The scanner filter policy determines how the scanner processes advertising packets. 0x00 - Accept advertising packets from any device. 0x01 - Accept advertising packets from only devices in the whitelist. In the above 2 policies, the directed advertising packets which are not addressed to this device are ignored. 0x10 - Accept all undirected advertising packets and directed advertising packet addressed to this device. 0x11 - Accept undirected advertising packets from devices in the whitelist and directed advertising packet addressed to this device In the above 2 policies, the directed advertising packets where the initiator address is a resolvable private address are accepted. The above 2 policies are extended scanner filter policies."]
    #[inline(always)]
    pub fn scan_filt_policy(&mut self) -> SCAN_FILT_POLICY_W {
        SCAN_FILT_POLICY_W { w: self }
    }
    #[doc = "Bit 5 - Filter duplicate packets. 1- Duplicate filtering enabled. 0- Duplicate filtering not enabled. This field is derived from the LE_set_scan_enable command."]
    #[inline(always)]
    pub fn dup_filt_en(&mut self) -> DUP_FILT_EN_W {
        DUP_FILT_EN_W { w: self }
    }
    #[doc = "Bit 6 - This bit field is used to specify the Scanner duplicate filter behavior for ADV_DIRECT_IND packet when duplicate DUP_FILT_EN is set. This bit is valid only if PRIV_1_2 and PRIV_1_2_SCAN are set. 0 - Do not filter ADV_DIRECT_IND duplicate packets. 1 - Filter ADV_DIRECT_IND duplicate packets"]
    #[inline(always)]
    pub fn dup_filt_chk_adv_dir(&mut self) -> DUP_FILT_CHK_ADV_DIR_W {
        DUP_FILT_CHK_ADV_DIR_W { w: self }
    }
    #[doc = "Bit 7 - This bit field is used to specify the Scanner behavior with respect to ADVA while receiving a SCAN_RSP packet. This bit is valid only if PRIV_1_2 and PRIV_1_2_SCAN are set. 0 - The ADVA in SCAN_RSP packets are not verified 1 - The ADVA in SCAN_RSP packets are verified against ADVA received in ADV packet . If it fails, then abort the packet."]
    #[inline(always)]
    pub fn scan_rsp_adva_check(&mut self) -> SCAN_RSP_ADVA_CHECK_W {
        SCAN_RSP_ADVA_CHECK_W { w: self }
    }
    #[doc = "Bit 8 - Scanner behavior when a peer Identity address is received in privacy mode. This bit is valid only if PRIV_1_2 and PRIV_1_2_SCAN are set. 1 - Accept packets with peer identity address not in the Resolving list in privacy mode 0 - Reject packets with peer identity address not in the Resolving list in privacy mode"]
    #[inline(always)]
    pub fn scan_rcv_ia_in_priv(&mut self) -> SCAN_RCV_IA_IN_PRIV_W {
        SCAN_RCV_IA_IN_PRIV_W { w: self }
    }
    #[doc = "Bit 9 - Scanner behavior when a peer Non Resolvable Private Address is received in privacy mode. This bit is valid only if PRIV_1_2 and PRIV_1_2_SCAN are set. This is applicable when whitelist is disabled. 1 - Only report packets with peer NRPA address in privacy mode 0 - Respond packets with peer NRPA address in privacy mode"]
    #[inline(always)]
    pub fn scan_rpt_peer_nrpa_addr_in_priv(&mut self) -> SCAN_RPT_PEER_NRPA_ADDR_IN_PRIV_W {
        SCAN_RPT_PEER_NRPA_ADDR_IN_PRIV_W { w: self }
    }
}
|
pub mod cr1 {
    //! Read-modify-write accessors for the control register at `0x4000_3800`.
    //! NOTE(review): the address and field names/offsets look like an STM32-style
    //! SPI CR1 register — confirm against the target's reference manual.
    const ADDR: u32 = 0x4000_3800;
    pub mod bidimode {
        /// Read the BIDIMODE field (bit 15).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 15) & 0x1 }
        }
        /// Write the BIDIMODE field (bit 15); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 15)) | ((val & 0x1) << 15);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod bidioe {
        /// Read the BIDIOE field (bit 14).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 14) & 0x1 }
        }
        /// Write the BIDIOE field (bit 14); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 14)) | ((val & 0x1) << 14);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod crcen {
        /// Read the CRCEN field (bit 13).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 13) & 0x1 }
        }
        /// Write the CRCEN field (bit 13); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 13)) | ((val & 0x1) << 13);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod crcnext {
        /// Read the CRCNEXT field (bit 12).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 12) & 0x1 }
        }
        /// Write the CRCNEXT field (bit 12); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 12)) | ((val & 0x1) << 12);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod dff {
        /// Read the DFF field (bit 11).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 11) & 0x1 }
        }
        /// Write the DFF field (bit 11); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 11)) | ((val & 0x1) << 11);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod rxonly {
        /// Read the RXONLY field (bit 10).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 10) & 0x1 }
        }
        /// Write the RXONLY field (bit 10); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 10)) | ((val & 0x1) << 10);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod ssm {
        /// Read the SSM field (bit 9).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 9) & 0x1 }
        }
        /// Write the SSM field (bit 9); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 9)) | ((val & 0x1) << 9);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod ssi {
        /// Read the SSI field (bit 8).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 8) & 0x1 }
        }
        /// Write the SSI field (bit 8); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 8)) | ((val & 0x1) << 8);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod lsbfirst {
        /// Read the LSBFIRST field (bit 7).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 7) & 0x1 }
        }
        /// Write the LSBFIRST field (bit 7); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 7)) | ((val & 0x1) << 7);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod spe {
        /// Read the SPE field (bit 6).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 6) & 0x1 }
        }
        /// Write the SPE field (bit 6); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 6)) | ((val & 0x1) << 6);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod br {
        /// Read the BR field (bits 3..=5).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 3) & 0x7 }
        }
        /// Write the BR field (bits 3..=5); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x7u32 << 3)) | ((val & 0x7) << 3);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod mstr {
        /// Read the MSTR field (bit 2).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 2) & 0x1 }
        }
        /// Write the MSTR field (bit 2); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 2)) | ((val & 0x1) << 2);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod cpol {
        /// Read the CPOL field (bit 1).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 1) & 0x1 }
        }
        /// Write the CPOL field (bit 1); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 1)) | ((val & 0x1) << 1);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod cpha {
        /// Read the CPHA field (bit 0).
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0x1 }
        }
        /// Write the CPHA field (bit 0); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !0x1u32) | (val & 0x1);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
pub mod cr2 {
    //! Read-modify-write accessors for the control register at `0x4000_3804`
    //! (interrupt enables, SS output and DMA enables).
    const ADDR: u32 = 0x4000_3804;
    pub mod txeie {
        /// Read the TXEIE field (bit 7).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 7) & 0x1 }
        }
        /// Write the TXEIE field (bit 7); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 7)) | ((val & 0x1) << 7);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod rxneie {
        /// Read the RXNEIE field (bit 6).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 6) & 0x1 }
        }
        /// Write the RXNEIE field (bit 6); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 6)) | ((val & 0x1) << 6);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod errie {
        /// Read the ERRIE field (bit 5).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 5) & 0x1 }
        }
        /// Write the ERRIE field (bit 5); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 5)) | ((val & 0x1) << 5);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod ssoe {
        /// Read the SSOE field (bit 2).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 2) & 0x1 }
        }
        /// Write the SSOE field (bit 2); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 2)) | ((val & 0x1) << 2);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod txdmaen {
        /// Read the TXDMAEN field (bit 1).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 1) & 0x1 }
        }
        /// Write the TXDMAEN field (bit 1); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 1)) | ((val & 0x1) << 1);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod rxdmaen {
        /// Read the RXDMAEN field (bit 0).
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0x1 }
        }
        /// Write the RXDMAEN field (bit 0); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !0x1u32) | (val & 0x1);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
pub mod sr {
    //! Read-modify-write accessors for the status register at `0x4000_3808`.
    const ADDR: u32 = 0x4000_3808;
    pub mod bsy {
        /// Read the BSY field (bit 7).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 7) & 0x1 }
        }
        /// Write the BSY field (bit 7); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 7)) | ((val & 0x1) << 7);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod ovr {
        /// Read the OVR field (bit 6).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 6) & 0x1 }
        }
        /// Write the OVR field (bit 6); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 6)) | ((val & 0x1) << 6);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod modf {
        /// Read the MODF field (bit 5).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 5) & 0x1 }
        }
        /// Write the MODF field (bit 5); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 5)) | ((val & 0x1) << 5);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod crcerr {
        /// Read the CRCERR field (bit 4).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 4) & 0x1 }
        }
        /// Write the CRCERR field (bit 4); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 4)) | ((val & 0x1) << 4);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod udr {
        /// Read the UDR field (bit 3).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 3) & 0x1 }
        }
        /// Write the UDR field (bit 3); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 3)) | ((val & 0x1) << 3);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod chside {
        /// Read the CHSIDE field (bit 2).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 2) & 0x1 }
        }
        /// Write the CHSIDE field (bit 2); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 2)) | ((val & 0x1) << 2);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod txe {
        /// Read the TXE field (bit 1).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 1) & 0x1 }
        }
        /// Write the TXE field (bit 1); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 1)) | ((val & 0x1) << 1);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod rxne {
        /// Read the RXNE field (bit 0).
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0x1 }
        }
        /// Write the RXNE field (bit 0); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !0x1u32) | (val & 0x1);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
pub mod dr {
    //! Accessors for the data register at `0x4000_380C`.
    const ADDR: u32 = 0x4000_380C;
    pub mod dr {
        /// Read the 16-bit data field (bits 0..=15).
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0xFFFF }
        }
        /// Write the 16-bit data field; the upper half-word is preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !0xFFFFu32) | (val & 0xFFFF);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
pub mod crcpr {
    //! Accessors for the CRC polynomial register at `0x4000_3810`.
    const ADDR: u32 = 0x4000_3810;
    pub mod crcpoly {
        /// Read the 16-bit CRC polynomial (bits 0..=15).
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0xFFFF }
        }
        /// Write the 16-bit CRC polynomial; the upper half-word is preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !0xFFFFu32) | (val & 0xFFFF);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
pub mod rxcrcr {
    //! Accessor for the RX CRC register at `0x4000_3814` (no setter: the
    //! original code exposes this field as read-only).
    const ADDR: u32 = 0x4000_3814;
    pub mod rxcrc {
        /// Read the 16-bit RX CRC value (bits 0..=15).
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0xFFFF }
        }
    }
}
pub mod txcrcr {
    //! Accessor for the TX CRC register at `0x4000_3818` (no setter: the
    //! original code exposes this field as read-only).
    const ADDR: u32 = 0x4000_3818;
    pub mod txcrc {
        /// Read the 16-bit TX CRC value (bits 0..=15).
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0xFFFF }
        }
    }
}
pub mod i2scfgr {
    //! Read-modify-write accessors for the I2S configuration register at `0x4000_381C`.
    const ADDR: u32 = 0x4000_381C;
    pub mod i2smod {
        /// Read the I2SMOD field (bit 11).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 11) & 0x1 }
        }
        /// Write the I2SMOD field (bit 11); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 11)) | ((val & 0x1) << 11);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod i2se {
        /// Read the I2SE field (bit 10).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 10) & 0x1 }
        }
        /// Write the I2SE field (bit 10); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 10)) | ((val & 0x1) << 10);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod i2scfg {
        /// Read the I2SCFG field (bits 8..=9).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 8) & 0x3 }
        }
        /// Write the I2SCFG field (bits 8..=9); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x3u32 << 8)) | ((val & 0x3) << 8);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod pcmsync {
        /// Read the PCMSYNC field (bit 7).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 7) & 0x1 }
        }
        /// Write the PCMSYNC field (bit 7); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 7)) | ((val & 0x1) << 7);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod i2sstd {
        /// Read the I2SSTD field (bits 4..=5).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 4) & 0x3 }
        }
        /// Write the I2SSTD field (bits 4..=5); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x3u32 << 4)) | ((val & 0x3) << 4);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod ckpol {
        /// Read the CKPOL field (bit 3).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 3) & 0x1 }
        }
        /// Write the CKPOL field (bit 3); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 3)) | ((val & 0x1) << 3);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod datlen {
        /// Read the DATLEN field (bits 1..=2).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 1) & 0x3 }
        }
        /// Write the DATLEN field (bits 1..=2); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x3u32 << 1)) | ((val & 0x3) << 1);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod chlen {
        /// Read the CHLEN field (bit 0).
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0x1 }
        }
        /// Write the CHLEN field (bit 0); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !0x1u32) | (val & 0x1);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
pub mod i2spr {
    //! Read-modify-write accessors for the I2S prescaler register at `0x4000_3820`.
    const ADDR: u32 = 0x4000_3820;
    pub mod mckoe {
        /// Read the MCKOE field (bit 9).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 9) & 0x1 }
        }
        /// Write the MCKOE field (bit 9); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 9)) | ((val & 0x1) << 9);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod odd {
        /// Read the ODD field (bit 8).
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 8) & 0x1 }
        }
        /// Write the ODD field (bit 8); all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !(0x1u32 << 8)) | ((val & 0x1) << 8);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    pub mod i2sdiv {
        /// Read the 8-bit I2SDIV field (bits 0..=7).
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0xFF }
        }
        /// Write the 8-bit I2SDIV field; all other bits are preserved.
        pub fn set(val: u32) {
            unsafe {
                let reg = core::ptr::read_volatile(super::ADDR as *const u32);
                let reg = (reg & !0xFFu32) | (val & 0xFF);
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
|
//! An attribute-like macro to implement traits for `Either`
//! (defined in [`either`](https://crates.io/crates/either) crate).
//! If your trait is implemented for both type `A` and `B`,
//! then it is automatically implemented for `Either<A, B>`.
//!
//! # Usage
//!
//! When defining a trait, wrap it with the macro `either_trait`.
//!
//! # Example
//!
//! ```rust
//! use either::Either;
//! use either_trait_macro::either_trait;
//!
//! #[either_trait]
//! /// Apply a function `n` times.
//! trait Apply {
//! fn times<T, F>(&self, t: T, f: F) -> T
//! where
//! F: Fn(T) -> T;
//! }
//!
//! struct Once;
//!
//! impl Apply for Once {
//! fn times<T, F>(&self, t: T, f: F) -> T
//! where
//! F: Fn(T) -> T,
//! {
//! f(t)
//! }
//! }
//!
//! impl Apply for u32 {
//! fn times<T, F>(&self, t: T, f: F) -> T
//! where
//! F: Fn(T) -> T,
//! {
//! let mut t = t;
//! for _ in 0..*self {
//! t = f(t);
//! }
//! t
//! }
//! }
//!
//! let either: Either<Once, u32> = Either::Left(Once);
//! assert_eq!(either.times(1, |x| x + 2), 3);
//! ```
//!
//! # Limitations
//!
//! This macro only supports traits without any associated
//! constant or associated type.
//! The first parameter of a trait method must be `self`,
//! `&self` or `&mut self`.
//! The types of other parameters and the return type
//! must not contain `Self`.
//!
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{
parse_macro_input, parse_quote, FnArg, Generics, Ident, ItemTrait, TraitItem, TraitItemMethod,
};
/// Generates the `Either` forwarding implementation for one trait method:
/// a `match` that delegates the call (with all non-receiver arguments) to the
/// `Left` or `Right` inner value.
///
/// # Panics
///
/// Panics with a diagnostic if the method's first parameter is not a receiver.
/// The previous version indexed `sig.inputs[0]` directly, so an associated
/// function with *no* parameters panicked with an unhelpful out-of-bounds
/// error instead of the intended message; `first()` handles both cases.
fn either_method(method: &TraitItemMethod) -> proc_macro2::TokenStream {
    let sig = &method.sig;
    let name = &sig.ident;
    match sig.inputs.first() {
        Some(FnArg::Receiver(_)) => {
            // Forward every non-receiver argument by its pattern name.
            let args_left = sig.inputs.iter().skip(1).map(|arg| {
                if let FnArg::Typed(arg) = arg {
                    &arg.pat
                } else {
                    // A receiver is only syntactically valid in first position.
                    unreachable!()
                }
            });
            let args_right = args_left.clone();
            quote! {
                #sig {
                    match self {
                        ::either::Either::Left(left) => left.#name(#(#args_left),*),
                        ::either::Either::Right(right) => right.#name(#(#args_right),*),
                    }
                }
            }
        }
        _ => panic!("The first parameter of a trait method must be `self`, `&self` or `&mut self`."),
    }
}
/// Builds the header `impl<…, __L: Trait, __R: Trait> Trait<…> for Either<__L, __R>`
/// for the given trait name and generics (the body is appended by the caller).
fn impl_item(name: &Ident, generics: &Generics) -> proc_macro2::TokenStream {
    let (_impl_generics, ty_generics, where_clause) = generics.split_for_impl();
    // `__L`/`__R` are reserved for the two `Either` type parameters we add below.
    let clashes = generics.type_params().any(|param| {
        let ident = param.ident.to_string();
        ident == "__L" || ident == "__R"
    });
    assert!(!clashes, "Generic type parameters must not be `__L` or `__R`.");
    let mut extended_generics = generics.clone();
    extended_generics
        .params
        .push(parse_quote!(__L: #name #ty_generics));
    extended_generics
        .params
        .push(parse_quote!(__R: #name #ty_generics));
    quote! {
        impl #extended_generics #name #ty_generics for ::either::Either<__L, __R> #where_clause
    }
}
#[proc_macro_attribute]
pub fn either_trait(_args: TokenStream, input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as ItemTrait);
let name = &input.ident;
let items = &input.items;
let impl_item = impl_item(&name, &input.generics);
let impl_methods = items.iter().map(|item| match item {
TraitItem::Method(method) => either_method(method),
_ => panic!("The trait must be without associated constants or associated types."),
});
let expand = quote! {
#input
#impl_item
{
#(#impl_methods)*
}
};
TokenStream::from(expand)
}
|
use git2::Repository;
use autorel_chlg::ChangeLog;
/// Data describing a release: the version change, its changelog, and the
/// repository it was derived from.
pub struct Release<V> {
    /// Version of the previous release, if any (`None` when no prior release exists).
    pub prev_version: Option<V>,
    /// Version of the release being described.
    pub version: V,
    /// Changelog associated with this release.
    pub changelog: ChangeLog,
    /// Git repository this release refers to.
    pub repo: Repository,
}
|
use crate::error::Error;
use crate::{CoordDimensions, CoordSeq, Geometry as GGeometry};
use geo_types::{
Coordinate, LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon,
};
use std;
use std::borrow::Borrow;
use std::convert::{TryFrom, TryInto};
/// Builds a 2-D coordinate sequence from a slice of coordinates.
#[allow(clippy::needless_lifetimes)]
fn create_coord_seq_from_vec<'a, 'b>(coords: &'a [Coordinate<f64>]) -> Result<CoordSeq<'b>, Error> {
    // Delegate to the iterator-based builder; the length is known up front.
    let n = coords.len();
    create_coord_seq(coords.iter(), n)
}
/// Builds a 2-D `CoordSeq` holding `len` points drawn from `points`.
///
/// # Errors
///
/// Returns an error if the sequence cannot be allocated or if writing an
/// ordinate fails. (The previous version panicked via `expect` when
/// allocation failed, even though this function already returns `Result`
/// with the same error type; the error is now propagated with `?`.)
#[allow(clippy::needless_lifetimes)]
fn create_coord_seq<'a, 'b, It>(points: It, len: usize) -> Result<CoordSeq<'b>, Error>
where
    It: Iterator<Item = &'a Coordinate<f64>>,
{
    // NOTE(review): `len as u32` silently truncates for sequences longer than
    // u32::MAX points — presumably unreachable in practice, but worth confirming.
    let mut coord_seq = CoordSeq::new(len as u32, CoordDimensions::TwoD)?;
    for (i, p) in points.enumerate() {
        coord_seq.set_x(i, p.x)?;
        coord_seq.set_y(i, p.y)?;
    }
    Ok(coord_seq)
}
impl<'a, 'b> TryFrom<&'a Point<f64>> for GGeometry<'b> {
type Error = Error;
fn try_from(other: &'a Point<f64>) -> Result<GGeometry<'b>, Self::Error> {
let coord_seq = create_coord_seq(std::iter::once(&other.0), 1)?;
GGeometry::create_point(coord_seq)
}
}
impl<'a> TryFrom<Point<f64>> for GGeometry<'a> {
type Error = Error;
fn try_from(other: Point<f64>) -> Result<GGeometry<'a>, Self::Error> {
GGeometry::try_from(&other)
}
}
impl<'a, T: Borrow<Point<f64>>> TryFrom<&'a [T]> for GGeometry<'a> {
type Error = Error;
fn try_from(other: &'a [T]) -> Result<GGeometry<'a>, Self::Error> {
let geom_points = other
.iter()
.map(|p| p.borrow().try_into())
.collect::<Result<Vec<_>, _>>()?;
GGeometry::create_multipoint(geom_points)
}
}
impl<'a, 'b> TryFrom<&'a MultiPoint<f64>> for GGeometry<'b> {
type Error = Error;
fn try_from(other: &'a MultiPoint<f64>) -> Result<GGeometry<'b>, Self::Error> {
let points: Vec<_> = other
.0
.iter()
.map(|p| p.try_into())
.collect::<Result<Vec<_>, _>>()?;
GGeometry::create_multipoint(points)
}
}
impl<'a> TryFrom<MultiPoint<f64>> for GGeometry<'a> {
type Error = Error;
fn try_from(other: MultiPoint<f64>) -> Result<GGeometry<'a>, Self::Error> {
GGeometry::try_from(&other)
}
}
impl<'a, 'b> TryFrom<&'a LineString<f64>> for GGeometry<'b> {
type Error = Error;
fn try_from(other: &'a LineString<f64>) -> Result<GGeometry<'b>, Self::Error> {
let coord_seq = create_coord_seq_from_vec(other.0.as_slice())?;
GGeometry::create_line_string(coord_seq)
}
}
impl<'a> TryFrom<LineString<f64>> for GGeometry<'a> {
type Error = Error;
fn try_from(other: LineString<f64>) -> Result<GGeometry<'a>, Self::Error> {
GGeometry::try_from(&other)
}
}
impl<'a, 'b> TryFrom<&'a MultiLineString<f64>> for GGeometry<'b> {
type Error = Error;
fn try_from(other: &'a MultiLineString<f64>) -> Result<GGeometry<'b>, Self::Error> {
let lines: Vec<_> = other
.0
.iter()
.map(|p| p.try_into())
.collect::<Result<Vec<_>, _>>()?;
GGeometry::create_multiline_string(lines)
}
}
impl<'a> TryFrom<MultiLineString<f64>> for GGeometry<'a> {
type Error = Error;
fn try_from(other: MultiLineString<f64>) -> Result<GGeometry<'a>, Self::Error> {
GGeometry::try_from(&other)
}
}
// rust geo does not have the distinction LineString/LineRing, so we create a wrapper
/// Marker wrapper: a `LineString` that must be converted into a (closed)
/// GEOS linear ring instead of an open linestring.
struct LineRing<'a>(&'a LineString<f64>);
/// Convert a geo_types::LineString to a geos LinearRing.
/// A LinearRing should be closed, so close the geometry if needed.
impl<'a, 'b> TryFrom<LineRing<'a>> for GGeometry<'b> {
    type Error = Error;
    fn try_from(other: LineRing<'a>) -> Result<GGeometry<'b>, Self::Error> {
        let points = &(other.0).0;
        let nb_points = points.len();
        // 1 or 2 points can never form a ring; 0 points is an allowed empty ring.
        if nb_points > 0 && nb_points < 3 {
            return Err(Error::InvalidGeometry(
                "impossible to create a LinearRing, A LinearRing must have at least 3 coordinates"
                    .into(),
            ));
        }
        // if the geom is not closed we close it
        let is_closed = nb_points > 0 && points.first() == points.last();
        // Note: we also need to close a 2 points closed linearring, cf test closed_2_points_linear_ring
        let need_closing = nb_points > 0 && (!is_closed || nb_points == 3);
        let coord_seq = if need_closing {
            // Append the first point again so GEOS sees an explicitly closed ring.
            create_coord_seq(
                points.iter().chain(std::iter::once(&points[0])),
                nb_points + 1,
            )?
        } else {
            create_coord_seq(points.iter(), nb_points)?
        };
        GGeometry::create_linear_ring(coord_seq)
    }
}
impl<'a, 'b> TryFrom<&'a Polygon<f64>> for GGeometry<'b> {
type Error = Error;
fn try_from(other: &'a Polygon<f64>) -> Result<GGeometry<'b>, Self::Error> {
let ring = LineRing(other.exterior());
let geom_exterior: GGeometry = ring.try_into()?;
let interiors: Vec<_> = other
.interiors()
.iter()
.map(|i| LineRing(i).try_into())
.collect::<Result<Vec<_>, _>>()?;
GGeometry::create_polygon(geom_exterior, interiors)
}
}
impl<'a> TryFrom<Polygon<f64>> for GGeometry<'a> {
type Error = Error;
fn try_from(other: Polygon<f64>) -> Result<GGeometry<'a>, Self::Error> {
GGeometry::try_from(&other)
}
}
impl<'a, 'b> TryFrom<&'a MultiPolygon<f64>> for GGeometry<'b> {
type Error = Error;
fn try_from(other: &'a MultiPolygon<f64>) -> Result<GGeometry<'b>, Self::Error> {
let polygons: Vec<_> = other
.0
.iter()
.map(|p| p.try_into())
.collect::<Result<Vec<_>, _>>()?;
GGeometry::create_multipolygon(polygons)
}
}
impl<'a> TryFrom<MultiPolygon<f64>> for GGeometry<'a> {
type Error = Error;
fn try_from(other: MultiPolygon<f64>) -> Result<GGeometry<'a>, Self::Error> {
GGeometry::try_from(&other)
}
}
// Tests for the geo-types -> GEOS conversions above, with particular
// attention to the LinearRing closing rules.
#[cfg(test)]
mod test {
    use super::LineRing;
    use crate::{Geom, Geometry as GGeometry};
    use geo_types::{
        Coordinate, LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon,
    };
    use std::convert::TryInto;
    // Helper: build a coordinate vector from (x, y) tuples.
    fn coords(tuples: Vec<(f64, f64)>) -> Vec<Coordinate<f64>> {
        tuples.into_iter().map(Coordinate::from).collect()
    }
    #[test]
    fn polygon_contains_test() {
        let exterior = LineString(coords(vec![
            (0., 0.),
            (0., 1.),
            (1., 1.),
            (1., 0.),
            (0., 0.),
        ]));
        let interiors = vec![LineString(coords(vec![
            (0.1, 0.1),
            (0.1, 0.9),
            (0.9, 0.9),
            (0.9, 0.1),
            (0.1, 0.1),
        ]))];
        let p = Polygon::new(exterior.clone(), interiors.clone());
        assert_eq!(p.exterior(), &exterior);
        assert_eq!(p.interiors(), interiors.as_slice());
        let geom: GGeometry = p.try_into().unwrap();
        assert!(geom.contains(&geom).unwrap());
        // The exterior converts to a linestring (the boundary), which the
        // polygon covers and touches but does not contain.
        let tmp: GGeometry = exterior.try_into().unwrap();
        assert!(!geom.contains(&tmp).unwrap());
        assert!(geom.covers(&tmp).unwrap());
        assert!(geom.touches(&tmp).unwrap());
    }
    #[test]
    fn multipolygon_contains_test() {
        let exterior = LineString(coords(vec![
            (0., 0.),
            (0., 1.),
            (1., 1.),
            (1., 0.),
            (0., 0.),
        ]));
        let interiors = vec![LineString(coords(vec![
            (0.1, 0.1),
            (0.1, 0.9),
            (0.9, 0.9),
            (0.9, 0.1),
            (0.1, 0.1),
        ]))];
        let p = Polygon::new(exterior, interiors);
        let mp = MultiPolygon(vec![p.clone()]);
        let geom: GGeometry = (&mp).try_into().unwrap();
        assert!(geom.contains(&geom).unwrap());
        assert!(geom
            .contains::<GGeometry>(&(&p).try_into().unwrap())
            .unwrap());
    }
    #[test]
    fn incorrect_multipolygon_test() {
        // A single-point exterior cannot form a ring, so conversion must fail.
        let exterior = LineString(coords(vec![(0., 0.)]));
        let interiors = vec![];
        let p = Polygon::new(exterior, interiors);
        let mp = MultiPolygon(vec![p.clone()]);
        let geom: Result<GGeometry, _> = mp.try_into();
        assert!(geom.is_err());
    }
    #[test]
    fn incorrect_polygon_not_closed() {
        // even if the polygon is not closed we can convert it to geos (we close it)
        let exterior = LineString(coords(vec![
            (0., 0.),
            (0., 2.),
            (2., 2.),
            (2., 0.),
            (0., 0.),
        ]));
        let interiors = vec![LineString(coords(vec![
            (0., 0.),
            (0., 1.),
            (1., 1.),
            (1., 0.),
            (0., 10.),
        ]))];
        let p = Polygon::new(exterior, interiors);
        let mp = MultiPolygon(vec![p]);
        let _g: GGeometry = mp.try_into().unwrap(); // no error
    }
    /// a linear ring can be empty
    #[test]
    fn empty_linear_ring() {
        let ls = LineString(vec![]);
        let geom: GGeometry = LineRing(&ls).try_into().unwrap();
        assert!(geom.is_valid());
        assert!(geom.is_ring().unwrap());
        assert_eq!(geom.get_coord_seq().unwrap().size().unwrap(), 0);
    }
    /// a linear ring should have at least 3 elements
    #[test]
    fn one_elt_linear_ring() {
        let ls = LineString(coords(vec![(0., 0.)]));
        let geom: Result<GGeometry, _> = LineRing(&ls).try_into();
        let error = geom.err().unwrap();
        assert_eq!(format!("{}", error), "Invalid geometry, impossible to create a LinearRing, A LinearRing must have at least 3 coordinates".to_string());
    }
    /// a linear ring should have at least 3 elements
    #[test]
    fn two_elt_linear_ring() {
        let ls = LineString(coords(vec![(0., 0.), (0., 1.)]));
        let geom: Result<GGeometry, _> = LineRing(&ls).try_into();
        let error = geom.err().unwrap();
        assert_eq!(format!("{}", error), "Invalid geometry, impossible to create a LinearRing, A LinearRing must have at least 3 coordinates".to_string());
    }
    /// an unclosed linearring is valid since we close it before giving it to geos
    #[test]
    fn unclosed_linear_ring() {
        let ls = LineString(coords(vec![(0., 0.), (0., 1.), (1., 2.)]));
        let geom: GGeometry = LineRing(&ls).try_into().unwrap();
        assert!(geom.is_valid());
        assert!(geom.is_ring().unwrap());
        // One closing point was appended: 3 inputs -> 4 stored coordinates.
        assert_eq!(geom.get_coord_seq().unwrap().size().unwrap(), 4);
    }
    /// a bit tricky
    /// a ring should have at least 3 points.
    /// in the case of a closed ring with only element eg:
    ///
    /// let's take a point list: [p1, p2, p1]
    ///
    /// p1 ----- p2
    /// ^-------|
    ///
    /// we consider it like a 3 points not closed ring (with the 2 last elements being equals...)
    ///
    /// shapely (the python geos wrapper) considers that too
    #[test]
    fn closed_2_points_linear_ring() {
        let ls = LineString(coords(vec![(0., 0.), (0., 1.), (1., 1.)]));
        let geom: GGeometry = LineRing(&ls).try_into().unwrap();
        assert!(geom.is_valid());
        assert!(geom.is_ring().expect("is_ring failed"));
        assert_eq!(geom.get_coord_seq().unwrap().size().unwrap(), 4);
    }
    /// a linear ring can be empty
    #[test]
    fn good_linear_ring() {
        let ls = LineString(coords(vec![(0., 0.), (0., 1.), (1., 2.), (0., 0.)]));
        let geom: GGeometry = LineRing(&ls).try_into().unwrap();
        assert!(geom.is_valid());
        assert!(geom.is_ring().unwrap());
        assert_eq!(geom.get_coord_seq().unwrap().size().unwrap(), 4);
    }
    #[test]
    fn test_conversion_multilinestring() {
        let ls1 = LineString(coords(vec![(0., 0.), (0., 1.), (1., 2.)]));
        let ls2 = LineString(coords(vec![(2., 2.), (3., 3.), (3., 2.)]));
        let geom: GGeometry = MultiLineString(vec![ls1, ls2]).try_into().unwrap();
        assert!(geom.is_valid());
    }
    #[test]
    fn test_conversion_multipoint() {
        let p1 = Point::new(0., 0.);
        let p2 = Point::new(0., 1.);
        let p3 = Point::new(1., 2.);
        let geom: GGeometry = MultiPoint(vec![p1, p2, p3]).try_into().unwrap();
        assert!(geom.is_valid());
    }
}
|
use std::process;
/// Finds two entries summing to 2020 and returns their product as a string.
///
/// Exits the process with status 1 when no such pair exists (matching the
/// original CLI behavior).
pub fn part_one(input: &str) -> String {
    let numbers = parse_input(input);
    // Return as soon as a pair is found instead of scanning the remaining
    // quadratic search space. `skip(i)` avoids re-checking mirrored pairs
    // while still permitting the (i, i) case the original allowed.
    for (i, n) in numbers.iter().enumerate() {
        for m in numbers.iter().skip(i) {
            if n + m == 2020 {
                return (n * m).to_string();
            }
        }
    }
    println!("No value found!");
    process::exit(1)
}
/// Finds three entries summing to 2020 and returns their product as a string.
///
/// Exits the process with status 1 when no such triple exists (matching the
/// original CLI behavior).
pub fn part_two(input: &str) -> String {
    let numbers = parse_input(input);
    // Return on the first match instead of exhausting the cubic search
    // space; `skip` prunes permutations of already-considered triples.
    for (i, n) in numbers.iter().enumerate() {
        for (j, m) in numbers.iter().enumerate().skip(i) {
            for l in numbers.iter().skip(j) {
                if n + m + l == 2020 {
                    return (n * m * l).to_string();
                }
            }
        }
    }
    println!("No value found!");
    process::exit(1)
}
/// Parses one integer per line, exiting the process with status 1 on the
/// first malformed entry (matching the original CLI behavior).
fn parse_input(input: &str) -> Vec<i32> {
    // `lines()` (rather than `split("\n")`) tolerates a trailing newline and
    // CRLF line endings, both of which previously produced an empty/dirty
    // token that failed to parse and aborted the program.
    input
        .lines()
        .map(|line| {
            line.trim().parse::<i32>().unwrap_or_else(|err| {
                println!("error parsing input: {}", err);
                process::exit(1)
            })
        })
        .collect()
}
// Tests use the official Advent of Code 2020 day 1 sample input and the
// published expected answers.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn parsing_works() {
        let input = "1721\n979\n366\n299\n675\n1456";
        let numbers = parse_input(input);
        assert_eq!(numbers, vec![1721, 979, 366, 299, 675, 1456])
    }
    #[test]
    fn test_part_one() {
        let input = "1721\n979\n366\n299\n675\n1456";
        let output = part_one(input);
        // 1721 * 299
        assert_eq!(output, "514579")
    }
    #[test]
    fn test_part_two() {
        let input = "1721\n979\n366\n299\n675\n1456";
        let output = part_two(input);
        // 979 * 366 * 675
        assert_eq!(output, "241861950")
    }
}
// Copyright (C) 2017 1aim GmbH
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use either::*;
use std::fmt;
use std::ops::Deref;
use std::str::FromStr;
use crate::carrier::Carrier;
use crate::country;
use crate::error;
use crate::extension::Extension;
use crate::formatter;
use crate::metadata::{Database, Metadata, DATABASE};
use crate::national_number::NationalNumber;
use crate::parser;
use crate::validator;
/// A phone number.
#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Hash, Debug)]
pub struct PhoneNumber {
    /// The country calling code for this number, as defined by the International
    /// Telecommunication Union (ITU). For example, this would be 1 for NANPA
    /// countries, and 33 for France.
    pub(crate) code: country::Code,
    /// The National (significant) Number, as defined in International
    /// Telecommunication Union (ITU) Recommendation E.164, without any leading
    /// zero. The leading-zero is stored separately if required, since this is an
    /// uint64 and hence cannot store such information. Do not use this field
    /// directly: if you want the national significant number, call the
    /// getNationalSignificantNumber method of PhoneNumberUtil.
    ///
    /// For countries which have the concept of an "area code" or "national
    /// destination code", this is included in the National (significant) Number.
    /// Although the ITU says the maximum length should be 15, we have found
    /// longer numbers in some countries e.g. Germany. Note that the National
    /// (significant) Number does not contain the National (trunk) prefix.
    /// Obviously, as a uint64, it will never contain any formatting (hyphens,
    /// spaces, parentheses), nor any alphanumeric spellings.
    pub(crate) national: NationalNumber,
    /// Extension is not standardized in ITU recommendations, except for being
    /// defined as a series of numbers with a maximum length of 40 digits. It is
    /// defined as a string here to accommodate for the possible use of a leading
    /// zero in the extension (organizations have complete freedom to do so, as
    /// there is no standard defined). Other than digits, some other dialling
    /// characters such as "," (indicating a wait) may be stored here.
    pub(crate) extension: Option<Extension>,
    /// The carrier selection code that is preferred when calling this phone
    /// number domestically. This also includes codes that need to be dialed in
    /// some countries when calling from landlines to mobiles or vice versa. For
    /// example, in Colombia, a "3" needs to be dialed before the phone number
    /// itself when calling from a mobile phone to a domestic landline phone and
    /// vice versa.
    ///
    /// Note this is the "preferred" code, which means other codes may work as
    /// well.
    pub(crate) carrier: Option<Carrier>,
}
/// Wrapper to make it easier to access information about the country of a
/// phone number.
///
/// Obtained via [`PhoneNumber::country`]; borrows the underlying number.
pub struct Country<'a>(&'a PhoneNumber);
/// The phone number type.
#[derive(Copy, Clone, Eq, PartialEq, Serialize, Deserialize, Hash, Debug)]
#[serde(rename_all = "snake_case")]
pub enum Type {
    /// Fixed-line (landline) numbers.
    FixedLine,
    /// Mobile numbers.
    Mobile,
    /// In some regions (e.g. the USA), it is impossible to distinguish between
    /// fixed-line and mobile numbers by looking at the phone number itself.
    FixedLineOrMobile,
    /// Freephone lines.
    TollFree,
    /// Premium rate numbers.
    PremiumRate,
    /// The cost of this call is shared between the caller and the recipient, and
    /// is hence typically less than PREMIUM_RATE calls. See
    /// http://en.wikipedia.org/wiki/Shared_Cost_Service for more information.
    SharedCost,
    /// A personal number is associated with a particular person, and may be
    /// routed to either a MOBILE or FIXED_LINE number. Some more information can
    /// be found here: http://en.wikipedia.org/wiki/Personal_Numbers
    PersonalNumber,
    /// Voice over IP numbers. This includes TSoIP (Telephony Service over IP).
    Voip,
    /// Pager numbers.
    Pager,
    /// Used for "Universal Access Numbers" or "Company Numbers". They may be
    /// further routed to specific offices, but allow one number to be used for a
    /// company.
    Uan,
    /// Emergency numbers.
    Emergency,
    /// Used for "Voice Mail Access Numbers".
    Voicemail,
    /// Short codes.
    ShortCode,
    /// Standard rate numbers.
    StandardRate,
    /// Carrier-specific numbers.
    Carrier,
    /// Numbers that are not diallable from outside their region.
    NoInternational,
    /// A phone number is of type UNKNOWN when it does not fit any of the known
    /// patterns for a specific region.
    Unknown,
}
/// Parses a phone number from a string with no default region hint.
impl FromStr for PhoneNumber {
    type Err = error::Parse;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        parser::parse(None, s)
    }
}
impl fmt::Display for PhoneNumber {
    /// Renders the number using the default formatter configuration.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self.format(), f)
    }
}
impl PhoneNumber {
    /// Get information about the country for the phone number.
    pub fn country(&self) -> Country {
        Country(self)
    }
    /// Get the country code.
    pub fn code(&self) -> &country::Code {
        &self.code
    }
    /// Get the national number.
    pub fn national(&self) -> &NationalNumber {
        &self.national
    }
    /// Get the extension.
    pub fn extension(&self) -> Option<&Extension> {
        self.extension.as_ref()
    }
    /// Get the carrier.
    pub fn carrier(&self) -> Option<&Carrier> {
        self.carrier.as_ref()
    }
    /// Prepare a formatter for this `PhoneNumber`.
    ///
    /// # Example
    ///
    /// ```
    /// use phonenumber::{self, country, Mode};
    ///
    /// let number = phonenumber::parse(Some(country::DE), "301/23456").unwrap()
    ///     .format().mode(Mode::National).to_string();
    ///
    /// assert_eq!("030 123456", number);
    /// ```
    pub fn format(&self) -> formatter::Formatter<'_, 'static, 'static> {
        formatter::format(self)
    }
    /// Prepare a formatter for this `PhoneNumber` with the given `Database`.
    pub fn format_with<'n, 'd>(
        &'n self,
        database: &'d Database,
    ) -> formatter::Formatter<'n, 'd, 'static> {
        formatter::format_with(database, self)
    }
    /// Get the metadata that applies to this phone number from the given
    /// database.
    ///
    /// Returns `None` when no source region or country code entry matches.
    pub fn metadata<'a>(&self, database: &'a Database) -> Option<&'a Metadata> {
        match validator::source_for(database, self.code.value(), &self.national.to_string())? {
            Left(region) => database.by_id(region.as_ref()),
            Right(code) => database.by_code(&code).and_then(|m| m.into_iter().next()),
        }
    }
    /// Check if the phone number is valid.
    pub fn is_valid(&self) -> bool {
        validator::is_valid(self)
    }
    /// Check if the phone number is valid with the given `Database`.
    pub fn is_valid_with(&self, database: &Database) -> bool {
        validator::is_valid_with(database, self)
    }
    /// Determine the [`Type`] of the phone number.
    pub fn number_type(&self, database: &Database) -> Type {
        match self.metadata(database) {
            // NOTE(review): this stringifies `self.national.value`, while
            // `metadata` above stringifies `self.national` — confirm the two
            // representations agree (e.g. leading-zero handling).
            Some(metadata) => validator::number_type(metadata, &self.national.value.to_string()),
            None => Type::Unknown,
        }
    }
}
impl<'a> Country<'a> {
    /// The country calling code of the wrapped number (e.g. 1 for NANPA,
    /// 33 for France).
    pub fn code(&self) -> u16 {
        self.0.code.value()
    }
    /// The country identifier, if it can be determined from the bundled
    /// metadata database.
    pub fn id(&self) -> Option<country::Id> {
        self.0.metadata(&DATABASE).and_then(|m| m.id().parse().ok())
    }
}
/// `Country` dereferences to the underlying `country::Code` of the wrapped
/// phone number, so code-level methods can be called directly on it.
impl<'a> Deref for Country<'a> {
    type Target = country::Code;
    fn deref(&self) -> &Self::Target {
        self.0.code()
    }
}
// Parameterized tests driven by a shared rstest template of known numbers
// with their expected country and type.
#[cfg(test)]
mod test {
    use crate::country::{self, *};
    use crate::metadata::DATABASE;
    use crate::Type;
    use crate::{parser, Mode, PhoneNumber};
    use anyhow::Context;
    use rstest::rstest;
    use rstest_reuse::{self, *};
    // Helper: parse with no region hint, panicking with the offending input.
    fn parsed(number: &str) -> PhoneNumber {
        parser::parse(None, number)
            .with_context(|| format!("parsing {number}"))
            .unwrap()
    }
    // Template applied by each test below; the body is intentionally empty.
    #[template]
    #[rstest]
    #[case(parsed("+80012340000"), None, Type::TollFree)]
    #[case(parsed("+61406823897"), Some(AU), Type::Mobile)]
    #[case(parsed("+611900123456"), Some(AU), Type::PremiumRate)]
    #[case(parsed("+32474091150"), Some(BE), Type::Mobile)]
    #[case(parsed("+34666777888"), Some(ES), Type::Mobile)]
    #[case(parsed("+34612345678"), Some(ES), Type::Mobile)]
    #[case(parsed("+441212345678"), Some(GB), Type::FixedLine)]
    #[case(parsed("+13459492311"), Some(KY), Type::FixedLine)]
    #[case(parsed("+16137827274"), Some(CA), Type::FixedLineOrMobile)]
    #[case(parsed("+1 520 878 2491"), Some(US), Type::FixedLineOrMobile)]
    #[case(parsed("+1-520-878-2491"), Some(US), Type::FixedLineOrMobile)]
    // Case for issues
    // https://github.com/whisperfish/rust-phonenumber/issues/46 and
    // https://github.com/whisperfish/rust-phonenumber/issues/47
    // #[case(parsed("+1 520-878-2491"), US)]
    fn phone_numbers(
        #[case] number: PhoneNumber,
        #[case] country: Option<country::Id>,
        #[case] r#type: Type,
    ) {
    }
    #[apply(phone_numbers)]
    fn country_id(
        #[case] number: PhoneNumber,
        #[case] country: Option<country::Id>,
        #[case] _type: Type,
    ) -> anyhow::Result<()> {
        assert_eq!(country, number.country().id());
        Ok(())
    }
    #[apply(phone_numbers)]
    #[ignore]
    // Format-parse roundtrip
    fn round_trip_parsing(
        #[case] number: PhoneNumber,
        #[case] country: Option<country::Id>,
        #[case] _type: Type,
        #[values(Mode::International, Mode::E164, Mode::Rfc3966, Mode::National)] mode: Mode,
    ) -> anyhow::Result<()> {
        // National formatting drops the country code, so only that mode needs
        // a region hint to parse back.
        let country_hint = if mode == Mode::National {
            country
        } else {
            None
        };
        let formatted = number.format().mode(mode).to_string();
        let parsed = parser::parse(country_hint, &formatted).with_context(|| {
            format!("parsing {number} after formatting in {mode:?} mode as {formatted}")
        })?;
        // impl Eq for PhoneNumber does not consider differently parsed phone numbers to be equal.
        // E.g., parsing 047409110 with BE country hint is the same phone number as +32474091150,
        // but Eq considers them different.
        assert_eq!(number, parsed);
        Ok(())
    }
    #[apply(phone_numbers)]
    fn number_type(
        #[case] number: PhoneNumber,
        #[case] _country: Option<country::Id>,
        #[case] r#type: Type,
    ) {
        assert_eq!(r#type, number.number_type(&DATABASE));
    }
}
|
#![allow(missing_docs)] // TODO(mingwei)
use std::collections::HashMap;
pub use hydroflow_cli_integration::*;
use crate::scheduled::graph::Hydroflow;
/// Runs `flow` until either it finishes on its own or a line starting with
/// "stop" arrives on stdin.
pub async fn launch_flow(mut flow: Hydroflow) {
    let stop = tokio::sync::oneshot::channel();
    // Reading stdin is blocking, so it runs on a dedicated blocking task;
    // it signals the oneshot channel when the stop line arrives.
    tokio::task::spawn_blocking(|| {
        let mut line = String::new();
        std::io::stdin().read_line(&mut line).unwrap();
        assert!(line.starts_with("stop"));
        stop.0.send(()).unwrap();
    });
    // Whichever completes first wins; the other future is dropped.
    tokio::select! {
        _ = stop.1 => {},
        _ = flow.run_async() => {}
    }
}
/// Named network ports negotiated during [`init`]'s handshake, to be claimed
/// once each via [`HydroCLI::port`].
pub struct HydroCLI {
    ports: HashMap<String, ServerOrBound>,
}
impl HydroCLI {
    /// Removes and returns the port registered under `name`.
    ///
    /// Each port can be claimed only once, since it is moved out of the map.
    ///
    /// # Panics
    ///
    /// Panics (with the offending name, instead of a bare `unwrap` on `None`)
    /// when the port is unknown or was already claimed.
    pub fn port(&mut self, name: &str) -> ServerOrBound {
        self.ports
            .remove(name)
            .unwrap_or_else(|| panic!("no unclaimed port named {:?}", name))
    }
}
/// Performs the launcher handshake over stdin/stdout:
/// 1. reads the JSON bind configuration from stdin,
/// 2. binds each configured port and reports the results via `ready: ...`,
/// 3. waits for the `start: ...` line carrying the peers' ports,
/// 4. returns every named connection (peer-defined and locally bound).
pub async fn init() -> HydroCLI {
    let mut input = String::new();
    std::io::stdin().read_line(&mut input).unwrap();
    let trimmed = input.trim();
    let bind_config = serde_json::from_str::<HashMap<String, ServerBindConfig>>(trimmed).unwrap();
    // config telling other services how to connect to me
    let mut bind_results: HashMap<String, ServerPort> = HashMap::new();
    let mut binds = HashMap::new();
    for (name, config) in bind_config {
        let bound = config.bind().await;
        bind_results.insert(name.clone(), bound.sink_port());
        binds.insert(name.clone(), bound);
    }
    let bind_serialized = serde_json::to_string(&bind_results).unwrap();
    println!("ready: {bind_serialized}");
    let mut start_buf = String::new();
    std::io::stdin().read_line(&mut start_buf).unwrap();
    let connection_defns = if start_buf.starts_with("start: ") {
        serde_json::from_str::<HashMap<String, ServerPort>>(
            start_buf.trim_start_matches("start: ").trim(),
        )
        .unwrap()
    } else {
        panic!("expected start");
    };
    // NOTE(review): assumes peer-connection names and locally bound names are
    // disjoint — a duplicate name would silently overwrite the peer entry.
    let mut all_connected = HashMap::new();
    for (name, defn) in connection_defns {
        all_connected.insert(name, ServerOrBound::Server((&defn).into()));
    }
    for (name, defn) in binds {
        all_connected.insert(name, ServerOrBound::Bound(defn));
    }
    HydroCLI {
        ports: all_connected,
    }
}
|
// NOTE(review): this file appears to be machine-generated (svd2rust-style)
// register accessors; changes are usually better made in the SVD source.
#[doc = "Reader of register RCC_APB5RSTCLRR"]
pub type R = crate::R<u32, super::RCC_APB5RSTCLRR>;
#[doc = "Writer for register RCC_APB5RSTCLRR"]
pub type W = crate::W<u32, super::RCC_APB5RSTCLRR>;
#[doc = "Register RCC_APB5RSTCLRR `reset()`'s with value 0"]
impl crate::ResetValue for super::RCC_APB5RSTCLRR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Documented hardware reset value of the register.
        0
    }
}
// --- SPI6RST field (bit 0): enumerated values, reader, and write proxy ---
#[doc = "SPI6RST\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SPI6RST_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the block reset is released"]
    B_0X0 = 0,
    #[doc = "1: Writing releases the block reset,\r\n reading means that the block reset is\r\n asserted"]
    B_0X1 = 1,
}
impl From<SPI6RST_A> for bool {
    #[inline(always)]
    fn from(variant: SPI6RST_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `SPI6RST`"]
pub type SPI6RST_R = crate::R<bool, SPI6RST_A>;
impl SPI6RST_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SPI6RST_A {
        match self.bits {
            false => SPI6RST_A::B_0X0,
            true => SPI6RST_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == SPI6RST_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == SPI6RST_A::B_0X1
    }
}
#[doc = "Write proxy for field `SPI6RST`"]
pub struct SPI6RST_W<'a> {
    w: &'a mut W,
}
impl<'a> SPI6RST_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SPI6RST_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the block reset is released"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(SPI6RST_A::B_0X0)
    }
    #[doc = "Writing releases the block reset, reading means that the block reset is asserted"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(SPI6RST_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 0 (mask 0x01, no shift).
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// --- I2C4RST field (bit 2): enumerated values, reader, and write proxy ---
#[doc = "I2C4RST\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum I2C4RST_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the block reset is released"]
    B_0X0 = 0,
    #[doc = "1: Writing releases the block reset,\r\n reading means that the block reset is\r\n asserted"]
    B_0X1 = 1,
}
impl From<I2C4RST_A> for bool {
    #[inline(always)]
    fn from(variant: I2C4RST_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `I2C4RST`"]
pub type I2C4RST_R = crate::R<bool, I2C4RST_A>;
impl I2C4RST_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C4RST_A {
        match self.bits {
            false => I2C4RST_A::B_0X0,
            true => I2C4RST_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == I2C4RST_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == I2C4RST_A::B_0X1
    }
}
#[doc = "Write proxy for field `I2C4RST`"]
pub struct I2C4RST_W<'a> {
    w: &'a mut W,
}
impl<'a> I2C4RST_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: I2C4RST_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the block reset is released"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(I2C4RST_A::B_0X0)
    }
    #[doc = "Writing releases the block reset, reading means that the block reset is asserted"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(I2C4RST_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 2.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
// --- I2C6RST field (bit 3): enumerated values, reader, and write proxy ---
#[doc = "I2C6RST\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum I2C6RST_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the block reset is released"]
    B_0X0 = 0,
    #[doc = "1: Writing releases the block reset,\r\n reading means that the block reset is\r\n asserted"]
    B_0X1 = 1,
}
impl From<I2C6RST_A> for bool {
    #[inline(always)]
    fn from(variant: I2C6RST_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `I2C6RST`"]
pub type I2C6RST_R = crate::R<bool, I2C6RST_A>;
impl I2C6RST_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C6RST_A {
        match self.bits {
            false => I2C6RST_A::B_0X0,
            true => I2C6RST_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == I2C6RST_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == I2C6RST_A::B_0X1
    }
}
#[doc = "Write proxy for field `I2C6RST`"]
pub struct I2C6RST_W<'a> {
    w: &'a mut W,
}
impl<'a> I2C6RST_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: I2C6RST_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the block reset is released"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(I2C6RST_A::B_0X0)
    }
    #[doc = "Writing releases the block reset, reading means that the block reset is asserted"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(I2C6RST_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 3.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
// --- USART1RST field (bit 4): enumerated values, reader, and write proxy ---
#[doc = "USART1RST\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USART1RST_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the block reset is released"]
    B_0X0 = 0,
    #[doc = "1: Writing releases the block reset,\r\n reading means that the block reset is\r\n asserted"]
    B_0X1 = 1,
}
impl From<USART1RST_A> for bool {
    #[inline(always)]
    fn from(variant: USART1RST_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `USART1RST`"]
pub type USART1RST_R = crate::R<bool, USART1RST_A>;
impl USART1RST_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> USART1RST_A {
        match self.bits {
            false => USART1RST_A::B_0X0,
            true => USART1RST_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == USART1RST_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == USART1RST_A::B_0X1
    }
}
#[doc = "Write proxy for field `USART1RST`"]
pub struct USART1RST_W<'a> {
    w: &'a mut W,
}
impl<'a> USART1RST_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: USART1RST_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the block reset is released"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(USART1RST_A::B_0X0)
    }
    #[doc = "Writing releases the block reset, reading means that the block reset is asserted"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(USART1RST_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 4.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "STGENRST\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum STGENRST_A {
#[doc = "0: Writing has no effect, reading means\r\n that the block reset is released"]
B_0X0 = 0,
#[doc = "1: Writing releases the block reset,\r\n reading means that the block reset is\r\n asserted"]
B_0X1 = 1,
}
impl From<STGENRST_A> for bool {
#[inline(always)]
fn from(variant: STGENRST_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `STGENRST`"]
pub type STGENRST_R = crate::R<bool, STGENRST_A>;
impl STGENRST_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> STGENRST_A {
        // Single-bit field: true maps to B_0X1, false to B_0X0.
        if self.bits {
            STGENRST_A::B_0X1
        } else {
            STGENRST_A::B_0X0
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        // B_0X0 corresponds to the bit being clear.
        !self.bits
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        // B_0X1 corresponds to the bit being set.
        self.bits
    }
}
#[doc = "Write proxy for field `STGENRST`"]
pub struct STGENRST_W<'a> {
    // Mutable borrow of the register writer; every proxy method ends by
    // mutating `w.bits` and handing the writer back for call chaining.
    w: &'a mut W,
}
impl<'a> STGENRST_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: STGENRST_A) -> &'a mut W {
        self.bit(variant.into())
    }
    #[doc = "Writing has no effect, reading means that the block reset is released"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(STGENRST_A::B_0X0)
    }
    #[doc = "Writing releases the block reset, reading means that the block reset is asserted"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(STGENRST_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        // Setting the bit is the same as writing the B_0X1 variant.
        self.variant(STGENRST_A::B_0X1)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        // Clearing the bit is the same as writing the B_0X0 variant.
        self.variant(STGENRST_A::B_0X0)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // STGENRST occupies bit 20 of this register.
        const OFFSET: u32 = 20;
        const MASK: u32 = 0x01;
        let cleared = self.w.bits & !(MASK << OFFSET);
        self.w.bits = cleared | (((value as u32) & MASK) << OFFSET);
        self.w
    }
}
// Read accessors for the reset-control register: each method isolates one
// field bit of the captured register value and wraps it in its typed reader.
impl R {
    #[doc = "Bit 0 - SPI6RST"]
    #[inline(always)]
    pub fn spi6rst(&self) -> SPI6RST_R {
        SPI6RST_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 2 - I2C4RST"]
    #[inline(always)]
    pub fn i2c4rst(&self) -> I2C4RST_R {
        I2C4RST_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - I2C6RST"]
    #[inline(always)]
    pub fn i2c6rst(&self) -> I2C6RST_R {
        I2C6RST_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - USART1RST"]
    #[inline(always)]
    pub fn usart1rst(&self) -> USART1RST_R {
        USART1RST_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 20 - STGENRST"]
    #[inline(always)]
    pub fn stgenrst(&self) -> STGENRST_R {
        STGENRST_R::new(((self.bits >> 20) & 0x01) != 0)
    }
}
// Write accessors for the reset-control register: each method hands out a
// write proxy that edits its field's bit in `self.bits`.
impl W {
    #[doc = "Bit 0 - SPI6RST"]
    #[inline(always)]
    pub fn spi6rst(&mut self) -> SPI6RST_W {
        SPI6RST_W { w: self }
    }
    #[doc = "Bit 2 - I2C4RST"]
    #[inline(always)]
    pub fn i2c4rst(&mut self) -> I2C4RST_W {
        I2C4RST_W { w: self }
    }
    #[doc = "Bit 3 - I2C6RST"]
    #[inline(always)]
    pub fn i2c6rst(&mut self) -> I2C6RST_W {
        I2C6RST_W { w: self }
    }
    #[doc = "Bit 4 - USART1RST"]
    #[inline(always)]
    pub fn usart1rst(&mut self) -> USART1RST_W {
        USART1RST_W { w: self }
    }
    #[doc = "Bit 20 - STGENRST"]
    #[inline(always)]
    pub fn stgenrst(&mut self) -> STGENRST_W {
        STGENRST_W { w: self }
    }
}
|
// --- CKGAENR register: reader/writer types and per-field bit proxies ---
// One `_R`/`_W` alias pair per single-bit clock-gating field; the const
// generic `O` on each writer alias fixes the field's bit offset at the
// use site (see the `impl W` accessors below).
#[doc = "Register `CKGAENR` reader"]
pub type R = crate::R<CKGAENR_SPEC>;
#[doc = "Register `CKGAENR` writer"]
pub type W = crate::W<CKGAENR_SPEC>;
#[doc = "Field `AXICKG` reader - AXI interconnect matrix clock gating This bit is set and reset by software."]
pub type AXICKG_R = crate::BitReader;
#[doc = "Field `AXICKG` writer - AXI interconnect matrix clock gating This bit is set and reset by software."]
pub type AXICKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AHBCKG` reader - AXI master AHB clock gating This bit is set and reset by software."]
pub type AHBCKG_R = crate::BitReader;
#[doc = "Field `AHBCKG` writer - AXI master AHB clock gating This bit is set and reset by software."]
pub type AHBCKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CPUCKG` reader - AXI master CPU clock gating This bit is set and reset by software."]
pub type CPUCKG_R = crate::BitReader;
#[doc = "Field `CPUCKG` writer - AXI master CPU clock gating This bit is set and reset by software."]
pub type CPUCKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SDMMCCKG` reader - AXI master SDMMC clock gating This bit is set and reset by software."]
pub type SDMMCCKG_R = crate::BitReader;
#[doc = "Field `SDMMCCKG` writer - AXI master SDMMC clock gating This bit is set and reset by software."]
pub type SDMMCCKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MDMACKG` reader - AXI master MDMA clock gating This bit is set and reset by software."]
pub type MDMACKG_R = crate::BitReader;
#[doc = "Field `MDMACKG` writer - AXI master MDMA clock gating This bit is set and reset by software."]
pub type MDMACKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DMA2DCKG` reader - AXI master DMA2D clock gating This bit is set and reset by software."]
pub type DMA2DCKG_R = crate::BitReader;
#[doc = "Field `DMA2DCKG` writer - AXI master DMA2D clock gating This bit is set and reset by software."]
pub type DMA2DCKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LTDCCKG` reader - AXI master LTDC clock gating This bit is set and reset by software."]
pub type LTDCCKG_R = crate::BitReader;
#[doc = "Field `LTDCCKG` writer - AXI master LTDC clock gating This bit is set and reset by software."]
pub type LTDCCKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `GFXMMUMCKG` reader - AXI master GFXMMU clock gating This bit is set and reset by software."]
pub type GFXMMUMCKG_R = crate::BitReader;
#[doc = "Field `GFXMMUMCKG` writer - AXI master GFXMMU clock gating This bit is set and reset by software."]
pub type GFXMMUMCKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AHB12CKG` reader - AXI slave AHB12 clock gating This bit is set and reset by software."]
pub type AHB12CKG_R = crate::BitReader;
#[doc = "Field `AHB12CKG` writer - AXI slave AHB12 clock gating This bit is set and reset by software."]
pub type AHB12CKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AHB34CKG` reader - AXI slave AHB34 clock gating This bit is set and reset by software."]
pub type AHB34CKG_R = crate::BitReader;
#[doc = "Field `AHB34CKG` writer - AXI slave AHB34 clock gating This bit is set and reset by software."]
pub type AHB34CKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLIFTCKG` reader - AXI slave Flash interface (FLIFT) clock gating This bit is set and reset by software."]
pub type FLIFTCKG_R = crate::BitReader;
#[doc = "Field `FLIFTCKG` writer - AXI slave Flash interface (FLIFT) clock gating This bit is set and reset by software."]
pub type FLIFTCKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OCTOSPI2CKG` reader - AXI slave OCTOSPI2 clock gating This bit is set and reset by software."]
pub type OCTOSPI2CKG_R = crate::BitReader;
#[doc = "Field `OCTOSPI2CKG` writer - AXI slave OCTOSPI2 clock gating This bit is set and reset by software."]
pub type OCTOSPI2CKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FMCCKG` reader - AXI slave FMC clock gating This bit is set and reset by software."]
pub type FMCCKG_R = crate::BitReader;
#[doc = "Field `FMCCKG` writer - AXI slave FMC clock gating This bit is set and reset by software."]
pub type FMCCKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OCTOSPI1CKG` reader - AXI slave OCTOSPI1 clock gating This bit is set and reset by software."]
pub type OCTOSPI1CKG_R = crate::BitReader;
#[doc = "Field `OCTOSPI1CKG` writer - AXI slave OCTOSPI1 clock gating This bit is set and reset by software."]
pub type OCTOSPI1CKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AXIRAM1CKG` reader - AXI slave SRAM1 clock gating This bit is set and reset by software."]
pub type AXIRAM1CKG_R = crate::BitReader;
#[doc = "Field `AXIRAM1CKG` writer - AXI slave SRAM1 clock gating This bit is set and reset by software."]
pub type AXIRAM1CKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AXIRAM2CKG` reader - AXI matrix slave SRAM2 clock gating This bit is set and reset by software."]
pub type AXIRAM2CKG_R = crate::BitReader;
#[doc = "Field `AXIRAM2CKG` writer - AXI matrix slave SRAM2 clock gating This bit is set and reset by software."]
pub type AXIRAM2CKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AXIRAM3CKG` reader - AXI matrix slave SRAM3 clock gating This bit is set and reset by software."]
pub type AXIRAM3CKG_R = crate::BitReader;
#[doc = "Field `AXIRAM3CKG` writer - AXI matrix slave SRAM3 clock gating This bit is set and reset by software."]
pub type AXIRAM3CKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `GFXMMUSCKG` reader - AXI matrix slave GFXMMU clock gating This bit is set and reset by software."]
pub type GFXMMUSCKG_R = crate::BitReader;
#[doc = "Field `GFXMMUSCKG` writer - AXI matrix slave GFXMMU clock gating This bit is set and reset by software."]
pub type GFXMMUSCKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ECCRAMCKG` reader - RAM error code correction (ECC) clock gating This bit is set and reset by software."]
pub type ECCRAMCKG_R = crate::BitReader;
#[doc = "Field `ECCRAMCKG` writer - RAM error code correction (ECC) clock gating This bit is set and reset by software."]
pub type ECCRAMCKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EXTICKG` reader - EXTI clock gating This bit is set and reset by software."]
pub type EXTICKG_R = crate::BitReader;
#[doc = "Field `EXTICKG` writer - EXTI clock gating This bit is set and reset by software."]
pub type EXTICKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `JTAGCKG` reader - JTAG automatic clock gating This bit is set and reset by software."]
pub type JTAGCKG_R = crate::BitReader;
#[doc = "Field `JTAGCKG` writer - JTAG automatic clock gating This bit is set and reset by software."]
pub type JTAGCKG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors for CKGAENR: each method shifts the named clock-gating bit
// down to position 0 and wraps it in a `BitReader`. Note the bit map is
// sparse — fields occupy bits 0-17 and 29-31; bits 18-28 are unused here.
impl R {
    #[doc = "Bit 0 - AXI interconnect matrix clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn axickg(&self) -> AXICKG_R {
        AXICKG_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - AXI master AHB clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn ahbckg(&self) -> AHBCKG_R {
        AHBCKG_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - AXI master CPU clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn cpuckg(&self) -> CPUCKG_R {
        CPUCKG_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - AXI master SDMMC clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn sdmmcckg(&self) -> SDMMCCKG_R {
        SDMMCCKG_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - AXI master MDMA clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn mdmackg(&self) -> MDMACKG_R {
        MDMACKG_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - AXI master DMA2D clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn dma2dckg(&self) -> DMA2DCKG_R {
        DMA2DCKG_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - AXI master LTDC clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn ltdcckg(&self) -> LTDCCKG_R {
        LTDCCKG_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - AXI master GFXMMU clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn gfxmmumckg(&self) -> GFXMMUMCKG_R {
        GFXMMUMCKG_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - AXI slave AHB12 clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn ahb12ckg(&self) -> AHB12CKG_R {
        AHB12CKG_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - AXI slave AHB34 clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn ahb34ckg(&self) -> AHB34CKG_R {
        AHB34CKG_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - AXI slave Flash interface (FLIFT) clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn fliftckg(&self) -> FLIFTCKG_R {
        FLIFTCKG_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - AXI slave OCTOSPI2 clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn octospi2ckg(&self) -> OCTOSPI2CKG_R {
        OCTOSPI2CKG_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - AXI slave FMC clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn fmcckg(&self) -> FMCCKG_R {
        FMCCKG_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - AXI slave OCTOSPI1 clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn octospi1ckg(&self) -> OCTOSPI1CKG_R {
        OCTOSPI1CKG_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - AXI slave SRAM1 clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn axiram1ckg(&self) -> AXIRAM1CKG_R {
        AXIRAM1CKG_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - AXI matrix slave SRAM2 clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn axiram2ckg(&self) -> AXIRAM2CKG_R {
        AXIRAM2CKG_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - AXI matrix slave SRAM3 clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn axiram3ckg(&self) -> AXIRAM3CKG_R {
        AXIRAM3CKG_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - AXI matrix slave GFXMMU clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn gfxmmusckg(&self) -> GFXMMUSCKG_R {
        GFXMMUSCKG_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 29 - RAM error code correction (ECC) clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn eccramckg(&self) -> ECCRAMCKG_R {
        ECCRAMCKG_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - EXTI clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn extickg(&self) -> EXTICKG_R {
        EXTICKG_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - JTAG automatic clock gating This bit is set and reset by software."]
    #[inline(always)]
    pub fn jtagckg(&self) -> JTAGCKG_R {
        JTAGCKG_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Write accessors for CKGAENR: each method returns a `BitWriter` whose const
// generic parameter pins the field's bit offset; the proxies are `#[must_use]`
// because dropping one without writing discards the intended field update.
impl W {
    #[doc = "Bit 0 - AXI interconnect matrix clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn axickg(&mut self) -> AXICKG_W<CKGAENR_SPEC, 0> {
        AXICKG_W::new(self)
    }
    #[doc = "Bit 1 - AXI master AHB clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn ahbckg(&mut self) -> AHBCKG_W<CKGAENR_SPEC, 1> {
        AHBCKG_W::new(self)
    }
    #[doc = "Bit 2 - AXI master CPU clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn cpuckg(&mut self) -> CPUCKG_W<CKGAENR_SPEC, 2> {
        CPUCKG_W::new(self)
    }
    #[doc = "Bit 3 - AXI master SDMMC clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn sdmmcckg(&mut self) -> SDMMCCKG_W<CKGAENR_SPEC, 3> {
        SDMMCCKG_W::new(self)
    }
    #[doc = "Bit 4 - AXI master MDMA clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn mdmackg(&mut self) -> MDMACKG_W<CKGAENR_SPEC, 4> {
        MDMACKG_W::new(self)
    }
    #[doc = "Bit 5 - AXI master DMA2D clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn dma2dckg(&mut self) -> DMA2DCKG_W<CKGAENR_SPEC, 5> {
        DMA2DCKG_W::new(self)
    }
    #[doc = "Bit 6 - AXI master LTDC clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn ltdcckg(&mut self) -> LTDCCKG_W<CKGAENR_SPEC, 6> {
        LTDCCKG_W::new(self)
    }
    #[doc = "Bit 7 - AXI master GFXMMU clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn gfxmmumckg(&mut self) -> GFXMMUMCKG_W<CKGAENR_SPEC, 7> {
        GFXMMUMCKG_W::new(self)
    }
    #[doc = "Bit 8 - AXI slave AHB12 clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn ahb12ckg(&mut self) -> AHB12CKG_W<CKGAENR_SPEC, 8> {
        AHB12CKG_W::new(self)
    }
    #[doc = "Bit 9 - AXI slave AHB34 clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn ahb34ckg(&mut self) -> AHB34CKG_W<CKGAENR_SPEC, 9> {
        AHB34CKG_W::new(self)
    }
    #[doc = "Bit 10 - AXI slave Flash interface (FLIFT) clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn fliftckg(&mut self) -> FLIFTCKG_W<CKGAENR_SPEC, 10> {
        FLIFTCKG_W::new(self)
    }
    #[doc = "Bit 11 - AXI slave OCTOSPI2 clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn octospi2ckg(&mut self) -> OCTOSPI2CKG_W<CKGAENR_SPEC, 11> {
        OCTOSPI2CKG_W::new(self)
    }
    #[doc = "Bit 12 - AXI slave FMC clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn fmcckg(&mut self) -> FMCCKG_W<CKGAENR_SPEC, 12> {
        FMCCKG_W::new(self)
    }
    #[doc = "Bit 13 - AXI slave OCTOSPI1 clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn octospi1ckg(&mut self) -> OCTOSPI1CKG_W<CKGAENR_SPEC, 13> {
        OCTOSPI1CKG_W::new(self)
    }
    #[doc = "Bit 14 - AXI slave SRAM1 clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn axiram1ckg(&mut self) -> AXIRAM1CKG_W<CKGAENR_SPEC, 14> {
        AXIRAM1CKG_W::new(self)
    }
    #[doc = "Bit 15 - AXI matrix slave SRAM2 clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn axiram2ckg(&mut self) -> AXIRAM2CKG_W<CKGAENR_SPEC, 15> {
        AXIRAM2CKG_W::new(self)
    }
    #[doc = "Bit 16 - AXI matrix slave SRAM3 clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn axiram3ckg(&mut self) -> AXIRAM3CKG_W<CKGAENR_SPEC, 16> {
        AXIRAM3CKG_W::new(self)
    }
    #[doc = "Bit 17 - AXI matrix slave GFXMMU clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn gfxmmusckg(&mut self) -> GFXMMUSCKG_W<CKGAENR_SPEC, 17> {
        GFXMMUSCKG_W::new(self)
    }
    #[doc = "Bit 29 - RAM error code correction (ECC) clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn eccramckg(&mut self) -> ECCRAMCKG_W<CKGAENR_SPEC, 29> {
        ECCRAMCKG_W::new(self)
    }
    #[doc = "Bit 30 - EXTI clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn extickg(&mut self) -> EXTICKG_W<CKGAENR_SPEC, 30> {
        EXTICKG_W::new(self)
    }
    #[doc = "Bit 31 - JTAG automatic clock gating This bit is set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn jtagckg(&mut self) -> JTAGCKG_W<CKGAENR_SPEC, 31> {
        JTAGCKG_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // `unsafe` because this bypasses the per-field proxies: the caller is
    // responsible for writing only values valid for this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "RCC AXI clocks gating enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ckgaenr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ckgaenr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Zero-sized marker type describing the CKGAENR register to the generic
// register machinery.
pub struct CKGAENR_SPEC;
impl crate::RegisterSpec for CKGAENR_SPEC {
    // 32-bit underlying storage.
    type Ux = u32;
}
#[doc = "`read()` method returns [`ckgaenr::R`](R) reader structure"]
impl crate::Readable for CKGAENR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ckgaenr::W`](W) writer structure"]
impl crate::Writable for CKGAENR_SPEC {
    // Both bitmaps are zero: no field has write-one/write-zero-to-modify
    // semantics, so plain read-modify-write applies to every field.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CKGAENR to value 0"]
impl crate::Resettable for CKGAENR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.