text stringlengths 8 4.13M |
|---|
use std::fs;
use std::io::{stdin, stdout, ErrorKind, Write};
/*
** SHOPLIST :
**
** First Rust project, practice only.
** This code is free to use, have fun :)
*/
/// Prompts on stdin for a new entry and appends it to `file_content`,
/// then re-displays the list.
fn add_to_list(file_content: &mut String) {
    let mut entry = String::new();
    print!("What do you want to add to your list ? : ");
    stdout().flush().unwrap();
    stdin().read_line(&mut entry).expect("Error when reading.");
    // Guard against a file that does not end with a newline: without this,
    // the new entry would be glued onto the last existing line.
    if !file_content.is_empty() && !file_content.ends_with('\n') {
        file_content.push('\n');
    }
    // `read_line` keeps the trailing '\n', so the entry terminates its line.
    file_content.push_str(&entry);
    display_list(file_content);
}
/// Removes one entry from `file_content`, selected by the 0-based line number
/// parsed from `readed` (raw user input, trailing newline tolerated).
///
/// Invalid input (not a number, negative, or out of range) prints a message
/// and leaves `file_content` untouched.
fn remove_line_in_content(file_content: &mut String, readed: String) {
    // `retain` drops the empty fragments produced by split('\n') without the
    // index-shifting bug of the old remove-while-looping-by-index approach
    // (which could skip entries and index out of bounds after removals).
    let mut lines: Vec<&str> = file_content.split('\n').collect();
    lines.retain(|line| !line.is_empty() && *line != "\n");
    let selected_line: usize = match readed.trim_end().parse::<i32>() {
        Err(e) => {
            println!("There was an error : {}.", e);
            return;
        }
        Ok(n) => {
            if n < 0 || n as usize >= lines.len() {
                println!("/!\\ Invalid line option.");
                return;
            }
            n as usize
        }
    };
    lines.remove(selected_line);
    let joined = lines.join("\n");
    *file_content = joined;
    // Keep the conventional trailing newline so the saved file stays well-formed.
    if !file_content.is_empty() {
        file_content.push('\n');
    }
}
/// Shows the numbered list, asks which entry to delete, and applies the
/// deletion before re-displaying the result.
fn delete_from_list(file_content: &mut String) {
    display_deletion_list(file_content);
    print!("What do you want to remove from your list ? : ");
    stdout().flush().unwrap();
    let mut answer = String::new();
    stdin().read_line(&mut answer).expect("Error when reading.");
    remove_line_in_content(file_content, answer);
    display_list(file_content);
}
/// Reads the whole shopping-list file into a `String`.
///
/// If the file does not exist yet, it is created empty and an empty string is
/// returned so later saves succeed; any other I/O failure aborts the program.
/// (The old version opened the file once just to probe existence and then
/// opened it a second time via `read_to_string`.)
fn get_file_content(filepath: &str) -> String {
    match fs::read_to_string(filepath) {
        Ok(content) => content,
        Err(e) if e.kind() == ErrorKind::NotFound => {
            fs::File::create(filepath).expect("Error when creating the file.");
            String::new()
        }
        Err(e) => panic!("Problem with file opening action : {:?}", e),
    }
}
/// Prints the list with a 0-based index in front of each entry, matching the
/// numbering the deletion prompt expects.
fn display_deletion_list(file_content: &String) {
    println!("=== Your actual list =================\n");
    let entries = file_content
        .split('\n')
        .filter(|entry| !entry.is_empty() && *entry != "\n");
    for (index, entry) in entries.enumerate() {
        println!("{}\t{}", index, entry);
    }
    println!("\n======================================\n");
}
/// Prints every non-empty entry of the list as a bullet line.
fn display_list(file_content: &String) {
    println!("=== Your actual list =================\n");
    file_content
        .split('\n')
        .filter(|entry| !entry.is_empty() && *entry != "\n")
        .for_each(|entry| println!("- {}", entry));
    println!("\n======================================\n");
}
/// Interactive shopping-list editor: loops on a small menu and writes the
/// list back to `shoplist.txt` on quit.
fn main() {
    let filepath = "shoplist.txt";
    let mut file_content = get_file_content(filepath);
    let mut choice = String::new();
    display_list(&file_content);
    println!("What do you want to do ?");
    loop {
        print!("[A]dd, [D]elete, [Q]uit : ");
        stdout().flush().unwrap();
        stdin().read_line(&mut choice).expect("Error when reading.");
        // Match on the trimmed input so the menu also works with CRLF line
        // endings (Windows); the old code only matched the exact "X\n".
        match choice.trim_end() {
            "A" => add_to_list(&mut file_content),
            "D" => delete_from_list(&mut file_content),
            "Q" => {
                // Persist the list only on quit, overwriting the file.
                fs::File::create(filepath)
                    .unwrap()
                    .write_all(file_content.as_bytes())
                    .expect("Error when updating the file.");
                break;
            }
            "" => {}
            _ => println!("Not a valid input, please retry.\n"),
        }
        choice.clear();
    }
}
|
use crate::cpu::cpu::CPU;
impl CPU {
pub fn add(&mut self, target: u8, use_carry: bool) -> u8 {
let c_flag = if use_carry && self.reg.f.c { 1 } else { 0 };
let (_val, c1) = self.reg.a.overflowing_add(target);
let (val, c2) = _val.overflowing_add(c_flag);
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = c1 || c2;
// half carry might need fixing
self.reg.f.h = (self.reg.a & 0xF) + (val & 0xF) + c_flag > 0xF;
val
}
pub fn add_hl(&mut self, target: u16) -> u16 {
let (val, c) = self.reg.hl().overflowing_add(target);
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = c;
// half carry might need fixing
self.reg.f.h = (self.reg.hl() & 0x07FF) + (target & 0x07FF) > 0x07FF;
val
}
pub fn sub(&mut self, target: u8, use_carry: bool) -> u8 {
let c_flag = if use_carry && self.reg.f.c { 1 } else { 0 };
let (_val, c1) = self.reg.a.overflowing_sub(target);
let (val, c2) = _val.overflowing_sub(c_flag);
self.reg.f.z = val == 0;
self.reg.f.n = true;
self.reg.f.c = c1 || c2;
self.reg.f.h = (self.reg.a & 0xF) < (val & 0xF) + c_flag;
val
}
pub fn and(&mut self, target: u8) -> u8 {
let val = self.reg.a & target;
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = false;
self.reg.f.h = true;
val
}
pub fn or(&mut self, target: u8) -> u8 {
let val = self.reg.a | target;
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = false;
self.reg.f.h = false;
val
}
pub fn xor(&mut self, target: u8) -> u8 {
let val = self.reg.a ^ target;
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = false;
self.reg.f.h = false;
val
}
pub fn cp(&mut self, target: u8) {
let _compare = self.sub(target, false);
}
pub fn inc(&mut self, target: u8) -> u8 {
let val = target.wrapping_add(1);
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.h = (target & 0xF) == 0xF;
val
}
pub fn dec(&mut self, target: u8) -> u8 {
let val = target.wrapping_sub(1);
self.reg.f.z = val == 0;
self.reg.f.n = true;
self.reg.f.h = (target & 0xF) == 0;
val
}
pub fn rr(&mut self, target: u8) -> u8 {
let val = (target >> 1) | (if self.reg.f.c { 0x80 } else { 0 });
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = target & 0x01 != 0;
self.reg.f.h = false;
val
}
pub fn rl(&mut self, target: u8) -> u8 {
let val = (target << 1) | (if self.reg.f.c { 1 } else { 0 });
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = target & 0x80 != 0;
self.reg.f.h = false;
val
}
pub fn rrc(&mut self, target: u8) -> u8 {
let val = target.rotate_right(1);
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = target & 0x01 != 0;
self.reg.f.h = false;
val
}
pub fn rlc(&mut self, target: u8) -> u8 {
let val = target.rotate_left(1);
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = target & 0x80 != 0;
self.reg.f.h = false;
val
}
pub fn bit(&mut self, target: u8, index: u8) {
self.reg.f.z = target as u32 & (1 << (index) as u32) == 0;
self.reg.f.n = false;
self.reg.f.h = true;
}
pub fn srl(&mut self, target: u8) -> u8 {
let val = target >> 1;
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = target & 0x01 != 0;
self.reg.f.h = false;
val
}
pub fn sra(&mut self, target: u8) -> u8 {
let val = (target >> 1) | (target & 0x80);
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = target & 0x01 != 0;
self.reg.f.h = false;
val
}
pub fn sla(&mut self, target: u8) -> u8 {
let val = target << 1;
self.reg.f.z = val == 0;
self.reg.f.n = false;
self.reg.f.c = target & 0x80 != 0;
self.reg.f.h = false;
val
}
pub fn swap(&mut self, target: u8) -> u8 {
self.reg.f.z = target == 0;
self.reg.f.n = false;
self.reg.f.c = target & 0x80 != 0;
self.reg.f.h = false;
(target >> 4) | (target << 4)
}
pub fn ccf(&mut self) {
self.reg.f.n = false;
self.reg.f.c = !self.reg.f.c;
self.reg.f.h = false;
}
pub fn scf(&mut self) {
self.reg.f.n = false;
self.reg.f.c = true;
self.reg.f.h = false;
}
pub fn cpl(&mut self) {
self.reg.a = !self.reg.a;
self.reg.f.n = true;
self.reg.f.h = false;
}
} |
use geometry::Point;
use models::Mouse;
use std::hash::Hash;
use std::hash::Hasher;
/// A grid dot: `point` is the pixel-space position derived from the grid
/// coordinates `(i, j)` (see `Dot::new`).
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct Dot {
    // Pixel position; computed as ((i + 1) * 100, (j + 1) * 100) in `new`.
    pub point: Point,
    // Grid coordinate along the first axis.
    pub i: usize,
    // Grid coordinate along the second axis.
    pub j: usize,
}
impl Dot {
    /// Builds the dot for grid cell `(i, j)`; dots are spaced 100 pixels
    /// apart with a 100-pixel offset from the origin.
    pub fn new(i: usize, j: usize) -> Dot {
        let point = Point {
            x: (100 * (i + 1)) as f64,
            y: (100 * (j + 1)) as f64,
        };
        Dot { point, i, j }
    }
    /// True when the mouse position lies within a radius of 10.0 of this dot.
    pub fn collides_with(&self, mouse: &Mouse) -> bool {
        self.point.intersect_circle(&mouse.point, 10.0)
    }
    /// True when `other` sits directly next to this dot on the grid
    /// (one step along exactly one axis — diagonals do not count).
    pub fn is_contiguous(&self, other: Dot) -> bool {
        if self.i == other.i {
            other.j + 1 == self.j || self.j + 1 == other.j
        } else if self.j == other.j {
            other.i + 1 == self.i || self.i + 1 == other.i
        } else {
            false
        }
    }
}
impl Hash for Dot {
fn hash<H: Hasher>(&self, state: &'_ mut H) {
// TODO this assumes i and j will always be consistent
self.i.hash(state);
self.j.hash(state);
}
} |
use std::cmp::Reverse;
use proconio::input;
// Selection problem: accept x entrants by score `a`, then y of the remainder
// by score `b`, then z of the rest by combined score; print all accepted
// indices in ascending 1-based order.
fn main() {
    input! {
        n: usize,
        x: usize,
        y: usize,
        z: usize,
        a: [u8; n],
        b: [u8; n],
    };
    // Candidate indices, initially 0..n.
    let mut math: Vec<usize> = (0..n).collect();
    // stable
    // Highest `a` first; the stable sort keeps smaller indices ahead on ties.
    math.sort_by_key(|&i| Reverse(a[i]));
    // `math` keeps the top x; everyone else moves on to the next round.
    let mut eng = math.split_off(x);
    // Pre-sort by index so the following stable sort breaks ties by index.
    eng.sort();
    eng.sort_by_key(|&i| Reverse(b[i]));
    let mut math_eng = eng.split_off(y);
    math_eng.sort();
    // NOTE(review): a[i] + b[i] is u8 arithmetic and would overflow if the two
    // scores can sum past 255 — confirm the problem constraints rule that out.
    math_eng.sort_by_key(|&i| Reverse(a[i] + b[i]));
    // Drop everyone below the top z of the final round.
    let _rest = math_eng.split_off(z);
    let mut ans = Vec::new();
    ans.extend(math);
    ans.extend(eng);
    ans.extend(math_eng);
    ans.sort();
    for ans in ans {
        println!("{}", ans + 1);
    }
}
|
pub struct Solution;
impl Solution {
    /// LeetCode 319 "Bulb Switcher": bulb k ends up on iff k is a perfect
    /// square (it is toggled once per divisor), so the count of lit bulbs
    /// is floor(sqrt(n)).
    pub fn bulb_switch(n: i32) -> i32 {
        let root = (n as f64).sqrt();
        root as i32
    }
}
#[test]
fn test0319() {
    // Regression case: n = 3 leaves exactly one bulb lit.
    assert_eq!(Solution::bulb_switch(3), 1);
}
|
use P31::Primes;
/// Small demo: checks two candidates against the shared `Primes` cache.
pub fn main() {
    let mut primes = Primes::new();
    let first = primes.is_prime(53);
    println!("Is {} a prime number? : {}", 53, first);
    let second = primes.is_prime(1957);
    println!("Is {} a prime number? : {}", 1957, second);
}
|
//! Contains the various decision tree learning algorithms.
use std::{
collections::{BTreeMap, HashMap},
hash::Hash,
iter::IntoIterator,
marker::PhantomData,
slice::Iter as SliceIter,
vec::IntoIter as VecIter,
};
pub mod c45;
pub mod id3;
pub mod id4;
/// A collection of attributes.
pub trait Attributes<L, V> {
    /// Returns the value stored under `key`.
    ///
    /// # Panics
    /// The provided implementations panic if `key` is absent.
    fn get_attribute(&self, key: &L) -> &V;
}
impl<L, V> Attributes<L, V> for BTreeMap<L, V>
where
    L: Ord,
{
    fn get_attribute(&self, key: &L) -> &V {
        // `expect` instead of bare `unwrap` so a missing key panics with a
        // message that points at the cause.
        self.get(key).expect("attribute key not present in BTreeMap")
    }
}
impl<L, V> Attributes<L, V> for HashMap<L, V>
where
    L: Hash + Eq,
{
    fn get_attribute(&self, key: &L) -> &V {
        self.get(key).expect("attribute key not present in HashMap")
    }
}
/// A set of data. Each entry contains two things: An [`Attributes`] instance and a result.
pub struct DataSet<A, R, L, V>
where
    A: Attributes<L, V>,
{
    // (attributes, observed result) pairs, in insertion order.
    data: Vec<(A, R)>,
    // Attribute labels shared by every entry.
    labels: Vec<L>,
    // Every result value an entry may take.
    possible_results: Vec<R>,
    // Ties the otherwise-unused attribute value type `V` to the struct.
    _marker: PhantomData<V>,
}
impl<A: Attributes<L, V>, R, L, V> DataSet<A, R, L, V> {
    /// Creates an empty data set over the given attribute labels and the
    /// set of result values entries may take.
    pub fn new(labels: Vec<L>, possible_results: Vec<R>) -> Self {
        Self {
            labels,
            possible_results,
            data: Vec::new(),
            _marker: PhantomData,
        }
    }
    /// Appends one (attributes, result) observation.
    pub fn add_entry(&mut self, attributes: A, result: R) {
        self.data.push((attributes, result));
    }
    /// Iterates over the stored (attributes, result) pairs by reference.
    pub fn iter(&self) -> SliceIter<(A, R)> {
        self.data.iter()
    }
    /// The attribute labels shared by every entry.
    pub fn labels(&self) -> &[L] {
        self.labels.as_slice()
    }
    /// Every result value an entry may take.
    pub fn possible_results(&self) -> &[R] {
        self.possible_results.as_slice()
    }
}
// Consuming iteration yields the owned (attributes, result) pairs in
// insertion order.
impl<A: Attributes<L, V>, R, L, V> IntoIterator for DataSet<A, R, L, V> {
    type Item = (A, R);
    type IntoIter = VecIter<(A, R)>;
    fn into_iter(self) -> Self::IntoIter {
        self.data.into_iter()
    }
}
|
//! Module with all structs & functions charged of writing .dbf file content
use std::fs::File;
use std::io::{BufWriter, Write};
use std::path::Path;
use byteorder::WriteBytesExt;
use header::Header;
use reading::TERMINATOR_VALUE;
use record::RecordFieldInfo;
use {Error, Record};
/// A dbase file ends with this byte
const FILE_TERMINATOR: u8 = 0x1A;
/// Struct that handles the writing of records to any destination
/// that supports the `Write` trait
pub struct Writer<T: Write> {
    // Destination that receives the encoded .dbf bytes.
    dest: T,
}
impl<T: Write> Writer<T> {
/// Creates a new Writer
///
/// # Examples
///
/// ```
/// use std::io::Cursor;
/// let writer = dbase::Writer::new(Cursor::new(Vec::<u8>::new()));
/// ```
pub fn new(dest: T) -> Self {
Self { dest }
}
/// Writes the collection of records
///
/// # Returns
/// Returns the `dest` provided when constructing the writer, in case you need it.
///
/// # Examples
///
/// ```
/// use std::io::Cursor;
///
/// let mut fst = dbase::Record::new();
/// fst.insert("Name".to_string(), dbase::FieldValue::from("Fallujah"));
/// let records = vec![fst];
///
/// let writer = dbase::Writer::new(Cursor::new(Vec::<u8>::new()));
/// let cursor = writer.write(&records).unwrap();
/// ```
pub fn write(mut self, records: &Vec<Record>) -> Result<(T), Error> {
if records.is_empty() {
return Ok(self.dest);
}
let fields_name: Vec<&String> = records[0].keys().collect();
let mut fields_info = Vec::<RecordFieldInfo>::with_capacity(fields_name.len());
for (field_name, field_value) in &records[0] {
let field_length = field_value.size_in_bytes();
if field_length > std::u8::MAX as usize {
return Err(Error::FieldLengthTooLong);
}
fields_info.push(RecordFieldInfo::new(
field_name.to_owned(),
field_value.field_type(),
field_length as u8,
));
}
// TODO check that for the same field, the field type is the same
for record in &records[1..records.len()] {
for (field_name, record_info) in fields_name.iter().zip(&mut fields_info) {
let field_value = record.get(*field_name).unwrap(); // TODO: Should return an Err()
let field_length = field_value.size_in_bytes();
if field_length > std::u8::MAX as usize {
return Err(Error::FieldLengthTooLong);
}
record_info.field_length =
std::cmp::max(record_info.field_length, field_length as u8);
}
}
let offset_to_first_record =
Header::SIZE + (fields_info.len() * RecordFieldInfo::SIZE) + std::mem::size_of::<u8>();
let size_of_record = fields_info
.iter()
.fold(0u16, |s, ref info| s + info.field_length as u16);
let hdr = Header::new(
records.len() as u32,
offset_to_first_record as u16,
size_of_record,
);
hdr.write_to(&mut self.dest)?;
for record_info in &fields_info {
record_info.write_to(&mut self.dest)?;
}
self.dest.write_u8(TERMINATOR_VALUE)?;
let value_buffer = [' ' as u8; std::u8::MAX as usize];
for record in records {
self.dest.write_u8(' ' as u8)?; // DeletionFlag
for (field_name, record_info) in fields_name.iter().zip(&fields_info) {
let value = record.get(*field_name).unwrap();
let bytes_written = value.write_to(&mut self.dest)? as u8;
if bytes_written > record_info.field_length {
panic!("record length was miscalculated");
}
let bytes_to_pad = record_info.field_length - bytes_written;
self.dest
.write_all(&value_buffer[0..bytes_to_pad as usize])?;
}
}
self.dest.write_u8(FILE_TERMINATOR)?;
Ok(self.dest)
}
}
impl Writer<BufWriter<File>> {
    /// Creates a new writer that will write to a newly created file at `path`.
    /// # Examples
    /// ```
    /// let writer = dbase::Writer::from_path("new_records.dbf").unwrap();
    /// ```
    pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self, std::io::Error> {
        Ok(Writer::new(BufWriter::new(File::create(path)?)))
    }
}
/// Writes the records to the dest
///
/// # Examples
///
/// ```
/// use std::io::Cursor;
///
/// let mut fst = dbase::Record::new();
/// fst.insert("Name".to_string(), dbase::FieldValue::from("The Flesh Prevails"));
/// fst.insert("Price".to_string(), dbase::FieldValue::Numeric(Some(9.99)));
/// let records = vec![fst];
///
/// let cursor = Cursor::new(Vec::<u8>::new());
/// let cursor = dbase::write_to(&records, cursor).unwrap();
/// ```
pub fn write_to<T: Write>(records: &Vec<Record>, dest: T) -> Result<T, Error> {
    let writer = Writer::new(dest);
    // `records` is already a reference; the old `&records` added a needless
    // extra borrow that only compiled thanks to deref coercion.
    writer.write(records)
}
/// Writes all the records to the a new file at path
///
/// # Examples
///
/// ```
/// let mut fst = dbase::Record::new();
/// fst.insert("Name".to_string(), dbase::FieldValue::from("The Flesh Prevails"));
/// fst.insert("Price".to_string(), dbase::FieldValue::Numeric(Some(9.99)));
/// let records = vec![fst];
///
/// dbase::write_to_path(&records, "albums.dbf").unwrap();
/// ```
pub fn write_to_path<P: AsRef<Path>>(records: &Vec<Record>, path: P) -> Result<(), Error> {
    let writer = Writer::from_path(path)?;
    // `records` is already a reference; drop the needless extra borrow.
    writer.write(records)?;
    Ok(())
}
|
//! # BAPS3 Protocol Library for Rust
//!
//! This library is organised as follows:
//!
//! ## Basics
//! - The `util` module contains miscellaneous utility functions and macros;
//! - The `proto` module contains low-level machinery for encoding and decoding
//! BAPS3 messages.
//!
//! ## Clients and Servers
//! - The `client` module provides a mid-level, basic, channel/task-based
//! implementation of a BAPS3 client;
//! - The `server` module provides a mid-level, basic, channel/task-based
//! implementation of a BAPS3 server.
#[macro_use] pub mod util;
#[macro_use] pub mod proto;
pub mod client;
pub mod server;
|
use crate::context::*;
use crate::types::*;
use crate::util::*;
use lsp_types::request::ExecuteCommand;
use lsp_types::*;
/// Asks the language server to organise imports for the current buffer.
///
/// The Eclipse JDT server exposes this as a workspace `executeCommand`
/// rather than a standard LSP request.
pub fn organize_imports(meta: EditorMeta, ctx: &mut Context) {
    let uri = Url::from_file_path(&meta.buffile).unwrap();
    let params = ExecuteCommandParams {
        command: "java.edit.organizeImports".to_string(),
        arguments: vec![serde_json::json!(uri.into_string())],
        ..ExecuteCommandParams::default()
    };
    // The command's result, when present, is the workspace edit to apply.
    ctx.call::<ExecuteCommand, _>(meta, params, move |ctx: &mut Context, meta, response| {
        if let Some(response) = response {
            organize_imports_response(meta, serde_json::from_value(response).unwrap(), ctx);
        }
    });
}
/// Applies the workspace edit returned by the organise-imports command,
/// if any, by forwarding it to the editor.
pub fn organize_imports_response(
    meta: EditorMeta,
    result: Option<WorkspaceEdit>,
    ctx: &mut Context,
) {
    let edit = match result {
        Some(edit) => edit,
        None => return,
    };
    // Double JSON serialization is performed to prevent parsing args as a TOML
    // structure when they are passed back via lsp-apply-workspace-edit.
    let serialized = serde_json::to_string(&edit).unwrap();
    let quoted = editor_quote(&serde_json::to_string(&serialized).unwrap());
    ctx.exec(meta, format!("lsp-apply-workspace-edit {}", quoted));
}
|
use std::unimplemented;
use parser::mir::{Declaration, Expression, Module};
/// Evaluator for a parsed `Module`.
///
/// NOTE(review): "Interpeter" is a typo for "Interpreter"; kept as-is since
/// renaming the public type would break callers.
pub struct Interpeter<'module> {
    // The module whose declarations are evaluated.
    module: &'module Module,
}
/// The machine state: a call list reduced one step at a time.
pub struct State<'module> {
    module: &'module Module,
    // Head is the value being applied; the remaining entries are its arguments.
    call: Vec<Value<'module>>,
}
/// A runtime value.
#[derive(Clone, PartialEq, Debug)]
pub enum Value<'module> {
    // Name of a built-in operation ("print", "add", "divmod", ...).
    Builtin(String),
    Closure(Closure<'module>),
    String(String),
    Number(u64),
}
/// A declaration paired with the captured values for its closure symbols.
#[derive(Clone, PartialEq, Debug)]
struct Closure<'module> {
    declaration: &'module Declaration,
    closure: Vec<Value<'module>>,
}
impl<'module> Interpeter<'module> {
    /// Wraps a parsed module for evaluation.
    pub fn new(module: &'module Module) -> Self {
        // The previous version ran `dbg!(module)` here, dumping the whole
        // module to stderr on every construction; debug leftover removed.
        Self { module }
    }
    /// Looks up the declaration named `name` and evaluates it with
    /// `arguments` until the machine halts.
    ///
    /// # Panics
    /// Panics when `name` is unknown or does not refer to a proper name.
    pub fn eval_by_name(&self, name: &str, arguments: &[Value<'module>]) {
        // Find name
        let index = self
            .module
            .symbols
            .iter()
            .position(|item| item == name)
            .expect("Function not found");
        if !self.module.names[index] {
            panic!("Symbol is not a proper name");
        }
        // Set initial state: the named closure applied to the given arguments.
        let closure = Value::Closure(Closure {
            declaration: self
                .module
                .declaration(index)
                .expect("Symbol is not a proper name"),
            closure: vec![],
        });
        let mut state = State {
            module: self.module,
            call: std::iter::once(closure)
                .chain(arguments.iter().cloned())
                .collect(),
        };
        // Run till completion
        state.run();
    }
}
impl<'module> State<'module> {
    /// Reduces the call list step by step until evaluation halts.
    fn run(&mut self) {
        while self.step() {}
    }
    /// Performs one reduction step.
    ///
    /// Returns `true` while evaluation can continue: a builtin at the head
    /// is dispatched to its handler, a closure is expanded into a fresh call
    /// list; an empty call list (or a failed builtin) stops the machine.
    fn step(&mut self) -> bool {
        self.pretty_print();
        match self.call.first() {
            Some(Value::Builtin(s)) => {
                // Builtins return Some(()) on success; None halts the run.
                match s.as_ref() {
                    "print" => self.print().is_some(),
                    "exit" => self.exit().is_some(),
                    "isZero" => self.is_zero().is_some(),
                    "sub" => self.sub().is_some(),
                    "add" => self.add().is_some(),
                    "divmod" => self.divmod().is_some(),
                    "mul" => self.mul().is_some(),
                    _ => unimplemented!(),
                }
            }
            Some(Value::Closure(closure)) => {
                // Replace the whole call list with the closure body's call,
                // resolving every expression to a concrete value.
                self.call = closure
                    .declaration
                    .call
                    .iter()
                    .map(|expr| {
                        match expr {
                            Expression::Symbol(s) => {
                                self.resolve(*s).expect("Could not resolve symbol value")
                            }
                            Expression::Import(i) => {
                                Value::Builtin(self.module.imports[*i].clone())
                            }
                            Expression::Literal(i) => {
                                Value::String(self.module.strings[*i].clone())
                            }
                            Expression::Number(i) => Value::Number(self.module.numbers[*i]),
                        }
                    })
                    .collect();
                true
            }
            Some(_) => panic!("Can not execute"),
            None => false,
        }
    }
    /// Resolves `symbol` in the context of the closure at the head of the
    /// call list, trying in order: the closure's parameters (bound to the
    /// current call arguments), its captured values, and finally a fresh
    /// closure built from a module-level declaration.
    fn resolve(&self, symbol: usize) -> Option<Value<'module>> {
        // Resolve only works in a closure
        let closure = match self.call.first()? {
            Value::Closure(closure) => Some(closure),
            _ => None,
        }?;
        let decl = closure.declaration;
        // Check argument values
        let value = decl
            .procedure
            .iter()
            .zip(self.call.iter())
            .find(|(s, _)| **s == symbol)
            .map(|(_, v)| v);
        if value.is_some() {
            return value.cloned();
        }
        // Check closure values
        let value = decl
            .closure
            .iter()
            .zip(closure.closure.iter())
            .find(|(s, _)| **s == symbol)
            .map(|(_, v)| v);
        if value.is_some() {
            return value.cloned();
        }
        // Create new closure?
        if let Some(declaration) = self.module.declaration(symbol) {
            return declaration
                .closure
                .iter()
                .map(|s| self.resolve(*s))
                .collect::<Option<Vec<_>>>()
                .map(|closure| {
                    Value::Closure(Closure {
                        declaration,
                        closure,
                    })
                });
        }
        // Builtin?
        println!("Could not resolve symbol {}", self.module.symbols[symbol]);
        return None;
    }
    /// Dumps the current call list to stdout for tracing.
    pub fn pretty_print(&self) {
        print!("\n⇒ ");
        for value in &self.call {
            match value {
                Value::Builtin(name) => print!("{} ", name),
                Value::String(s) => print!("“{}” ", s),
                Value::Number(n) => print!("{} ", n),
                Value::Closure(c) => {
                    // Closures are shown by their first procedure symbol,
                    // or as an anonymous λ<id> when that symbol is unnamed.
                    let symbol = c.declaration.procedure[0];
                    let name = &self.module.symbols[symbol];
                    if name.is_empty() {
                        print!("λ{} ", symbol);
                    } else {
                        print!("{} ", name);
                    }
                }
            }
        }
        println!("");
    }
    /// `print str k`: writes `str` to stdout, continues with `k`.
    fn print(&mut self) -> Option<()> {
        assert_eq!(
            self.call.first(),
            Some(&Value::Builtin("print".to_string()))
        );
        assert_eq!(self.call.len(), 3);
        let string = match &self.call[1] {
            Value::String(s) => Some(s),
            _ => None,
        }?;
        print!("{}", string);
        self.call = vec![self.call[2].clone()];
        Some(())
    }
    /// `exit code`: reports the exit code and empties the call list,
    /// which halts the machine on the next step.
    fn exit(&mut self) -> Option<()> {
        assert_eq!(self.call.first(), Some(&Value::Builtin("exit".to_string())));
        assert_eq!(self.call.len(), 2);
        let code = match &self.call[1] {
            Value::Number(n) => Some(n),
            _ => None,
        }?;
        println!("[EXIT] {}", code);
        self.call = vec![];
        Some(())
    }
    /// `isZero n t f`: continues with `t` when n == 0, otherwise with `f`.
    fn is_zero(&mut self) -> Option<()> {
        assert_eq!(
            self.call.first(),
            Some(&Value::Builtin("isZero".to_string()))
        );
        assert_eq!(self.call.len(), 4);
        let n = match &self.call[1] {
            Value::Number(n) => Some(n),
            _ => None,
        }?;
        self.call = vec![self.call[if *n == 0 { 2 } else { 3 }].clone()];
        Some(())
    }
    /// `sub a b k`: continues with `k (a - b)`.
    fn sub(&mut self) -> Option<()> {
        assert_eq!(self.call.first(), Some(&Value::Builtin("sub".to_string())));
        assert_eq!(self.call.len(), 4);
        let a = match &self.call[1] {
            Value::Number(n) => Some(n),
            _ => None,
        }?;
        let b = match &self.call[2] {
            Value::Number(n) => Some(n),
            _ => None,
        }?;
        // NOTE(review): u64 subtraction underflows (and panics in debug
        // builds) when b > a — confirm inputs guarantee a >= b.
        self.call = vec![self.call[3].clone(), Value::Number(a - b)];
        Some(())
    }
    /// `add a b k`: continues with `k (a + b)`.
    fn add(&mut self) -> Option<()> {
        assert_eq!(self.call.first(), Some(&Value::Builtin("add".to_string())));
        assert_eq!(self.call.len(), 4);
        let a = match &self.call[1] {
            Value::Number(n) => Some(n),
            _ => None,
        }?;
        let b = match &self.call[2] {
            Value::Number(n) => Some(n),
            _ => None,
        }?;
        self.call = vec![self.call[3].clone(), Value::Number(a + b)];
        Some(())
    }
    /// `divmod a b k`: continues with `k (a / b) (a % b)`.
    fn divmod(&mut self) -> Option<()> {
        assert_eq!(
            self.call.first(),
            Some(&Value::Builtin("divmod".to_string()))
        );
        assert_eq!(self.call.len(), 4);
        let a = match &self.call[1] {
            Value::Number(n) => Some(n),
            _ => None,
        }?;
        let b = match &self.call[2] {
            Value::Number(n) => Some(n),
            _ => None,
        }?;
        // NOTE(review): division panics when b == 0 — confirm callers
        // guarantee a non-zero divisor.
        self.call = vec![
            self.call[3].clone(),
            Value::Number(a / b),
            Value::Number(a % b),
        ];
        Some(())
    }
    /// `mul a b k`: continues with `k (a * b)`.
    fn mul(&mut self) -> Option<()> {
        assert_eq!(self.call.first(), Some(&Value::Builtin("mul".to_string())));
        assert_eq!(self.call.len(), 4);
        let a = match &self.call[1] {
            Value::Number(n) => Some(n),
            _ => None,
        }?;
        let b = match &self.call[2] {
            Value::Number(n) => Some(n),
            _ => None,
        }?;
        self.call = vec![self.call[3].clone(), Value::Number(a * b)];
        Some(())
    }
}
|
use sevendays_crafting_calculator::do_the_thing;
// Thin entry point: all work lives in the library crate; `?` propagates
// any failure up as the process error.
fn main() -> Result<(), anyhow::Error> {
    do_the_thing()?;
    // do_the_thing_2()?;
    Ok(())
}
|
use core::cell::RefCell;
use core::mem;
use core::ptr::{self, NonNull};
use intrusive_collections::container_of;
use intrusive_collections::{LinkOps, RBTreeLink};
use liblumen_core::alloc::Layout;
use crate::blocks::{Block, BlockRef, FreeBlock, FreeBlockRef, FreeBlocks};
use crate::sorted::{Link, SortKey, SortOrder, Sortable};
/// This struct represents a carrier type which can contain
/// multiple blocks of variable size, and is designed specifically
/// for that case. For a carrier optimized for fixed size allocations,
/// see the documentation for `SlabCarrier`.
///
/// This type of multi-block carrier carries an intrusive red/black
/// tree for tracking free blocks available for use, making best fit
/// searches in `O(log N)`.
///
/// It also contains an intrusive link for use by a parent allocator
/// which wants to store carriers in a collection for optimal searches.
///
/// NOTE: This carrier type is designed to be created once and reused
/// indefinitely. While they can be freed when all blocks are free, the
/// current set of allocators do not ever free these carriers once allocated.
/// That will need to change eventually, but is not a high-priority issue
/// for now.
///
/// TODO: Support carrier migration
// repr(C) keeps the field order fixed: the header layout is relied on by the
// pointer arithmetic in `init`/`head`, which place the first block directly
// after this struct.
#[repr(C)]
pub struct MultiBlockCarrier<L: Link> {
    // The total size of this carrier
    pub(crate) size: usize,
    // Used to store the intrusive link to a size + address ordered tree,
    pub(crate) link: L,
    // This field stores an intrusive red/black tree where blocks are tracked
    pub(crate) blocks: RefCell<FreeBlocks>,
}
impl<L> MultiBlockCarrier<L>
where
    L: Link,
{
    /// Initializes a carrier header plus a single spanning free block in the
    /// region starting at `ptr`, returning a pointer to the carrier.
    ///
    /// # Safety
    /// `ptr` must point to a writable region of at least `size` bytes,
    /// suitably aligned for `MultiBlockCarrier<L>`, owned by the caller.
    #[inline]
    pub unsafe fn init(ptr: NonNull<u8>, size: usize) -> *mut Self {
        // Write carrier header to given memory region
        let carrier = ptr.as_ptr() as *mut MultiBlockCarrier<L>;
        ptr::write(
            carrier,
            MultiBlockCarrier {
                size,
                link: L::default(),
                blocks: RefCell::new(FreeBlocks::new(SortOrder::SizeAddressOrder)),
            },
        );
        // Get a mutable reference for later
        let this = &mut *carrier;
        // Write initial free block header (placed directly after the carrier
        // header, via pointer arithmetic on the carrier type).
        let block = carrier.add(1) as *mut FreeBlock;
        let usable = size - mem::size_of::<Block>() - mem::size_of::<MultiBlockCarrier<L>>();
        let mut free_block = FreeBlock::new(usable);
        free_block.set_last();
        ptr::write(block, free_block);
        // Add free block to internal free list
        let mut blocks = this.blocks.borrow_mut();
        blocks.insert(FreeBlockRef::from_raw(block));
        carrier
    }
    /// Calculates the usable size of this carrier, specifically the
    /// size available to be allocated to blocks. In practice, the
    /// usable size for user allocations is smaller, as block headers
    /// take up some space in the carrier
    #[inline]
    pub fn usable_size(&self) -> usize {
        self.size - mem::size_of::<Self>()
    }
    /// Gets a reference to the first block in this carrier.
    /// There is always at least one block, so there is no risk
    /// of this returning an invalid reference.
    ///
    /// NOTE: This is unsafe because a reference that outlives this
    /// carrier will become invalid, potentially allowing use-after-free.
    #[inline]
    fn head(&self) -> BlockRef {
        unsafe {
            // The first block starts immediately after the carrier header.
            let ptr = (self as *const Self).add(1) as *mut Block;
            BlockRef::from_raw(ptr)
        }
    }
    /// Tries to satisfy an allocation request using a block in this carrier.
    /// If successful, returns a raw pointer to the data region of that block.
    ///
    /// NOTE: This is unsafe because a raw pointer is returned, it is important
    /// that these pointers are not allowed to live beyond the life of both the
    /// block that owns them, and the carrier itself. If a pointer is double-freed
    /// then other references will be invalidated, resulting in undefined behavior,
    /// the worst of which is silent corruption of memory due to reuse of blocks.
    ///
    /// Futhermore, if the carrier itself is freed when there are still pointers
    /// to blocks in the carrier, the same undefined behavior is possible, though
    /// depending on how the underyling memory is allocated, it may actually produce
    /// SIGSEGV or equivalent.
    #[inline]
    pub unsafe fn alloc_block(&self, layout: &Layout) -> Option<NonNull<u8>> {
        // Try to find a block that will fit
        let mut blocks = self.blocks.borrow_mut();
        let result = blocks.find_best_fit(layout);
        // No fit, then we're done
        if result.is_none() {
            return None;
        }
        // We have a fit, so allocate the block and update relevant metadata
        let mut allocated = result.unwrap();
        let ptr = allocated
            .try_alloc(layout)
            .expect("find_best_fit and try_alloc disagreed!");
        blocks.remove(allocated);
        // Allocate this block
        // Check if we should split the block first
        if let Some(split_block) = allocated.try_split(layout) {
            // Add the newly split block to the free blocks tree
            blocks.insert(split_block);
            // We're done, return the userdata pointer
            return Some(ptr);
        }
        // There was no split, so check if the neighboring block
        // thinks we're free and fix that
        if let Some(mut neighbor) = allocated.next() {
            neighbor.as_mut().set_prev_allocated();
        }
        // Return the userdata pointer
        Some(ptr)
    }
    /// Reallocates the block containing `ptr` to fit `new_layout`.
    ///
    /// Grows or shrinks in place when possible; otherwise allocates a new
    /// block, copies the old data over, and frees the old block. Returns
    /// `None` when no block in this carrier can satisfy the new layout.
    ///
    /// # Safety
    /// `ptr` must be a data pointer previously returned by `alloc_block`/
    /// `realloc_block` on this carrier and not yet freed; the assertion
    /// below panics on an unowned pointer ("possible use-after-free").
    #[inline]
    pub unsafe fn realloc_block(
        &self,
        ptr: *mut u8,
        old_layout: &Layout,
        new_layout: &Layout,
    ) -> Option<NonNull<u8>> {
        // Locate the given block
        // The pointer given is for the aligned data region, so we need
        // to find the block which contains this pointer
        let old_size = old_layout.size();
        let mut result = Some(self.head());
        // Walk the block list until we find the owner of `ptr`; the loop
        // leaves `result` pointing at that block (break) or None (exhausted).
        loop {
            if result.is_none() {
                break;
            }
            let mut block = result.unwrap();
            let blk = block.as_mut();
            if blk.owns(ptr) {
                let new_size = new_layout.size();
                if old_size <= new_size {
                    // Try to grow in place, otherwise proceed to realloc
                    if blk.grow_in_place(new_size) {
                        return Some(NonNull::new_unchecked(ptr));
                    } else {
                        break;
                    }
                } else {
                    // Shrink in place, this always succeeds for now
                    blk.shrink_in_place(new_size);
                    return Some(NonNull::new_unchecked(ptr));
                }
            }
            result = blk.next();
        }
        // If current is None, this realloc call was given with an invalid pointer
        assert!(result.is_some(), "possible use-after-free");
        let mut block = result.unwrap();
        let blk = block.as_mut();
        // Unable to alloc in previous block, so this requires a new allocation
        let new_block = self.alloc_block(&new_layout)?;
        let new_ptr = new_block.as_ptr();
        // Copy old data into new block
        ptr::copy_nonoverlapping(ptr, new_ptr, old_size);
        // Free old block
        let free_block = blk.free();
        let mut blocks = self.blocks.borrow_mut();
        blocks.insert(free_block);
        // Return new block
        Some(new_block)
    }
    /// Frees a block in this carrier.
    ///
    /// The memory backing the block is not actually released to the operating system,
    /// instead the block is marked free and made available for new allocation requests.
    ///
    /// NOTE: This is unsafe:
    ///
    /// - It is critical to ensure frees occur when only one pointer/reference exists to the block,
    /// otherwise it is possible to double-free or corrupt new allocations in that block
    /// - Since blocks are reused, it is imperative that no pointers/references refer to the data
    /// region of the freed block after this function is called, or that memory can be corrupted,
    /// or at a minimum result in undefined behavior.
    #[inline]
    pub unsafe fn free_block(&self, ptr: *const u8, _layout: Layout) {
        // The pointer is for the start of the aligned data region
        // Locate the block indicated by the pointer
        let mut block = self.head();
        loop {
            if block.owns(ptr) {
                let blk = block.as_mut();
                // Free the block
                let mut blocks = self.blocks.borrow_mut();
                let freed = blk.free();
                // We don't add `freed` to the free blocks tree yet,
                // we rely on the coalesce operation to combine free
                // blocks first, and then the resulting block is added
                // to the tree here
                let _coalesced = FreeBlock::coalesce(freed, &mut blocks);
                // Done
                return;
            }
            // NOTE(review): an unowned pointer silently falls off the end of
            // the block list here (returns without freeing) — confirm callers
            // never pass a pointer from another carrier.
            match block.next() {
                None => return,
                Some(next) => {
                    block = next;
                }
            }
        }
    }
    // Test-only: number of blocks currently in the free tree.
    #[cfg(test)]
    #[inline]
    pub(crate) fn num_blocks_free(&self) -> usize {
        self.blocks.borrow().count()
    }
    // Test-only: total number of blocks (free or allocated) in the carrier.
    #[cfg(test)]
    #[inline]
    pub(crate) fn num_blocks(&self) -> usize {
        let mut block = self.head();
        let mut count = 1;
        loop {
            if let Some(next_block) = block.next() {
                block = next_block;
                count += 1;
                continue;
            }
            break;
        }
        count
    }
}
impl Sortable for MultiBlockCarrier<RBTreeLink> {
    type Link = RBTreeLink;
    /// Recovers the carrier pointer from a pointer to its intrusive link field.
    fn get_value(
        link: <<Self::Link as Link>::LinkOps as LinkOps>::LinkPtr,
        _order: SortOrder,
    ) -> *const Self {
        // the second `link` is the name of the link field in the carrier struct
        unsafe { container_of!(link.as_ptr(), Self, link) }
    }
    /// Returns a pointer to this carrier's intrusive link field.
    fn get_link(
        value: *const Self,
        _order: SortOrder,
    ) -> <<Self::Link as Link>::LinkOps as LinkOps>::LinkPtr {
        NonNull::new(unsafe { &(*value).link as *const _ as *mut RBTreeLink }).unwrap()
    }
    /// Sort key: usable size first, then the carrier's address as tie-breaker.
    fn sort_key(&self, order: SortOrder) -> SortKey {
        SortKey::new(order, self.usable_size(), self as *const _ as usize)
    }
}
#[cfg(test)]
mod tests {
    use core::alloc::Allocator;
    use super::*;
    use intrusive_collections::RBTreeLink;
    use liblumen_core::alloc::SysAlloc;
    use crate::blocks::FreeBlockRef;
    use crate::carriers::SUPERALIGNED_CARRIER_SIZE;
    // End-to-end exercise: build a carrier by hand, allocate one block
    // (forcing a split), then free it (forcing a coalesce).
    #[test]
    fn multi_block_carrier_test() {
        // Use super-aligned size from std_alloc
        let size = SUPERALIGNED_CARRIER_SIZE;
        let carrier_layout = Layout::from_size_align(size, size).unwrap();
        // Allocate region
        let non_null_byte_slice = SysAlloc::get_mut().allocate(carrier_layout).unwrap();
        // Get pointer to carrier header location
        let carrier = non_null_byte_slice.as_mut_ptr().cast();
        // Write initial carrier header
        unsafe {
            ptr::write(
                carrier,
                MultiBlockCarrier {
                    size,
                    link: RBTreeLink::default(),
                    blocks: RefCell::new(FreeBlocks::new(SortOrder::SizeAddressOrder)),
                },
            );
        }
        let mbc = unsafe { &mut *carrier };
        // Write initial free block (mirrors MultiBlockCarrier::init by hand)
        let usable =
            size - mem::size_of::<Block>() - mem::size_of::<MultiBlockCarrier<RBTreeLink>>();
        let block = unsafe { carrier.add(1) as *const _ as *mut FreeBlock };
        unsafe {
            let mut header = Block::new(usable);
            header.set_free();
            header.set_last();
            ptr::write(block, FreeBlock::from(header));
            let mut blocks = mbc.blocks.borrow_mut();
            blocks.insert(FreeBlockRef::from_raw(block));
        }
        assert_eq!(mbc.num_blocks_free(), 1);
        assert_eq!(mbc.num_blocks(), 1);
        // Allocate 4k large, word-aligned block using newly allocated carrier
        // This should result in a split, and an extra block added
        let layout = Layout::from_size_align(4096, 8).unwrap();
        let block = unsafe { mbc.alloc_block(&layout) };
        assert!(block.is_some());
        assert_eq!(mbc.num_blocks_free(), 1);
        assert_eq!(mbc.num_blocks(), 2);
        // Freeing the allocated block will coalesce these blocks into one again
        let block_ref = block.unwrap();
        unsafe {
            mbc.free_block(block_ref.as_ptr(), layout);
        }
        assert_eq!(mbc.num_blocks_free(), 1);
        assert_eq!(mbc.num_blocks(), 1);
        // Cleanup
        // NOTE(review): `mbc` is a `&mut` reference, so this drop is a no-op;
        // the carrier's memory is released by the deallocate call below.
        drop(mbc);
        unsafe { SysAlloc::get_mut().deallocate(non_null_byte_slice.cast(), carrier_layout) };
    }
}
|
/// Origin of a message line — appears to model an IRC-style `<prefix>`
/// (server name, or nick with optional username and host).
#[derive(Debug, PartialEq)]
pub enum Prefix<'a> {
    /// Prefix naming a server.
    Server {
        host: &'a str,
    },
    /// Prefix naming a user.
    User {
        nick: &'a str,
        username: Option<&'a str>,
        host: &'a str,
    },
}
/// A parsed protocol line: optional prefix, command word, and its parameters.
/// Borrows from the original input buffer.
#[derive(Debug, PartialEq)]
pub struct RawEvent<'a> {
    pub prefix: Option<Prefix<'a>>,
    pub command: &'a str,
    pub params: Vec<&'a str>,
}
/// Destination of a message: a channel or a single user.
#[derive(Debug, PartialEq)]
pub enum MessageTarget {
    Channel(String),
    User(String),
}
/// High-level events decoded from raw protocol lines.
#[derive(Debug, PartialEq)]
pub enum Event {
    Ping { server: String, server2: Option<String> },
    Welcome { server_message: String, host: String, server_created: String, server_info: String },
    ChannelJoined { topic: String },
    NewMessage { target: MessageTarget, message: String },
    // Errors
    NickFailure(String),
    JoinFailure(String),
    /// Any line this client does not recognize.
    Unknown,
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use std::convert::TryInto;
use std::fmt::Debug;
use std::time::Duration;
use std::time::Instant;
use std::time::SystemTime;
use std::time::UNIX_EPOCH;
use common_meta_sled_store::get_sled_db;
use common_meta_sled_store::openraft::MessageSummary;
use common_meta_sled_store::AsKeySpace;
use common_meta_sled_store::SledKeySpace;
use common_meta_sled_store::SledTree;
use common_meta_sled_store::Store;
use common_meta_sled_store::TransactionSledTree;
use common_meta_stoerr::MetaStorageError;
use common_meta_types::protobuf as pb;
use common_meta_types::txn_condition;
use common_meta_types::txn_op;
use common_meta_types::txn_op_response;
use common_meta_types::AppliedState;
use common_meta_types::Change;
use common_meta_types::Cmd;
use common_meta_types::ConditionResult;
use common_meta_types::Entry;
use common_meta_types::EntryPayload;
use common_meta_types::KVMeta;
use common_meta_types::LogId;
use common_meta_types::MatchSeqExt;
use common_meta_types::Node;
use common_meta_types::NodeId;
use common_meta_types::Operation;
use common_meta_types::SeqV;
use common_meta_types::StoredMembership;
use common_meta_types::TxnCondition;
use common_meta_types::TxnDeleteByPrefixRequest;
use common_meta_types::TxnDeleteByPrefixResponse;
use common_meta_types::TxnDeleteRequest;
use common_meta_types::TxnDeleteResponse;
use common_meta_types::TxnGetRequest;
use common_meta_types::TxnGetResponse;
use common_meta_types::TxnOp;
use common_meta_types::TxnOpResponse;
use common_meta_types::TxnPutRequest;
use common_meta_types::TxnPutResponse;
use common_meta_types::TxnReply;
use common_meta_types::TxnRequest;
use common_meta_types::UpsertKV;
use common_meta_types::With;
use num::FromPrimitive;
use serde::Deserialize;
use serde::Serialize;
use tracing::debug;
use tracing::error;
use tracing::info;
use crate::config::RaftConfig;
use crate::key_spaces::ClientLastResps;
use crate::key_spaces::Expire;
use crate::key_spaces::GenericKV;
use crate::key_spaces::Nodes;
use crate::key_spaces::Sequences;
use crate::key_spaces::StateMachineMeta;
use crate::state_machine::ClientLastRespValue;
use crate::state_machine::ExpireKey;
use crate::state_machine::ExpireValue;
use crate::state_machine::MetaSnapshotId;
use crate::state_machine::StateMachineMetaKey;
use crate::state_machine::StateMachineMetaKey::Initialized;
use crate::state_machine::StateMachineMetaKey::LastApplied;
use crate::state_machine::StateMachineMetaKey::LastMembership;
use crate::state_machine::StateMachineMetaValue;
/// sled db tree name prefix for the state machine (see `StateMachine::tree_name`)
// const TREE_NODES: &str = "nodes";
// const TREE_META: &str = "meta";
const TREE_STATE_MACHINE: &str = "state_machine";
/// StateMachine subscriber trait
pub trait StateMachineSubscriber: Debug + Sync + Send {
    /// Called once for every key-value change the state machine applies.
    fn kv_changed(&self, change: Change<Vec<u8>, String>);
}
/// The state machine of the `MemStore`.
/// It includes user data and two raft-related information:
/// `last_applied_logs` and `client_serial_responses` to achieve idempotence.
#[derive(Debug)]
pub struct StateMachine {
    /// The internal sled::Tree to store everything about a state machine:
    /// - Store initialization state and last applied in keyspace `StateMachineMeta`.
    /// - Every other state is stored in its own keyspace such as `Nodes`.
    pub sm_tree: SledTree,
    /// Subscriber of state machine data; notified of kv changes after each `apply`.
    pub subscriber: Option<Box<dyn StateMachineSubscriber>>,
}
/// A key-value pair in a snapshot is a vec of two `Vec<u8>`: `[key, value]`.
pub type SnapshotKeyValue = Vec<Vec<u8>>;
/// Maps a delete-by-prefix request to the kv pairs its prefix matched at scan time.
type DeleteByPrefixKeyMap = BTreeMap<TxnDeleteByPrefixRequest, Vec<(String, SeqV)>>;
/// Snapshot data for serialization and for transport.
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct SerializableSnapshot {
    /// A list of kv pairs.
    pub kvs: Vec<SnapshotKeyValue>,
}
impl SerializableSnapshot {
    /// Convert the snapshot to a `Vec<(type, name, iter)>` format for sled to import.
    pub fn sled_importable(self) -> Vec<(Vec<u8>, Vec<u8>, impl Iterator<Item = Vec<Vec<u8>>>)> {
        // A single entry: the "tree" kind, the state-machine tree name, and
        // an iterator over all captured kv pairs.
        let export_kind = b"tree".to_vec();
        let tree_name = TREE_STATE_MACHINE.as_bytes().to_vec();
        vec![(export_kind, tree_name, self.kvs.into_iter())]
    }
}
impl StateMachine {
    /// Build the sled tree name for state machine `sm_id` under this config's prefix.
    #[tracing::instrument(level = "debug", skip(config), fields(config_id=%config.config_id, prefix=%config.sled_tree_prefix))]
    pub fn tree_name(config: &RaftConfig, sm_id: u64) -> String {
        config.tree_name(format!("{}/{}", TREE_STATE_MACHINE, sm_id))
    }
    /// Drop the sled tree backing state machine `sm_id`, deleting all its data.
    #[tracing::instrument(level = "debug", skip(config), fields(config_id=config.config_id.as_str()))]
    pub fn clean(config: &RaftConfig, sm_id: u64) -> Result<(), MetaStorageError> {
        let tree_name = StateMachine::tree_name(config, sm_id);
        let db = get_sled_db();
        // it blocks and slow
        db.drop_tree(tree_name)?;
        Ok(())
    }
#[tracing::instrument(level = "debug", skip(config), fields(config_id=config.config_id.as_str()))]
pub async fn open(config: &RaftConfig, sm_id: u64) -> Result<StateMachine, MetaStorageError> {
let db = get_sled_db();
let tree_name = StateMachine::tree_name(config, sm_id);
let sm_tree = SledTree::open(&db, &tree_name, config.is_sync())?;
let sm = StateMachine {
sm_tree,
subscriber: None,
};
let inited = {
let sm_meta = sm.sm_meta();
sm_meta.get(&Initialized)?
};
if inited.is_some() {
Ok(sm)
} else {
let sm_meta = sm.sm_meta();
sm_meta
.insert(&Initialized, &StateMachineMetaValue::Bool(true))
.await?;
Ok(sm)
}
}
    /// Install the subscriber that will be notified of kv changes after each `apply`.
    pub fn set_subscriber(&mut self, subscriber: Box<dyn StateMachineSubscriber>) {
        self.subscriber = Some(subscriber);
    }
    /// Create a snapshot.
    ///
    /// Returns:
    /// - all key values in state machine;
    /// - the last applied log id
    /// - and a snapshot id that uniquely identifies this snapshot.
    pub fn build_snapshot(
        &self,
    ) -> Result<
        (
            SerializableSnapshot,
            Option<LogId>,
            StoredMembership,
            MetaSnapshotId,
        ),
        MetaStorageError,
    > {
        let last_applied = self.get_last_applied()?;
        let last_membership = self.get_membership()?.unwrap_or_default();
        // Wall-clock seconds distinguish snapshots built at the same applied log id.
        let snapshot_idx = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs();
        let snapshot_id = MetaSnapshotId::new(last_applied, snapshot_idx);
        // Dump every raw kv of the underlying sled tree (all keyspaces).
        let view = self.sm_tree.tree.iter();
        let mut kvs = Vec::new();
        for rkv in view {
            let (k, v) = rkv?;
            kvs.push(vec![k.to_vec(), v.to_vec()]);
        }
        let snap = SerializableSnapshot { kvs };
        Ok((snap, last_applied, last_membership, snapshot_id))
    }
fn scan_prefix_if_needed(
&self,
entry: &Entry,
) -> Result<Option<(DeleteByPrefixKeyMap, DeleteByPrefixKeyMap)>, MetaStorageError> {
match entry.payload {
EntryPayload::Normal(ref data) => match &data.cmd {
Cmd::Transaction(txn) => {
let kvs = self.kvs();
let mut if_map = BTreeMap::new();
let mut else_map = BTreeMap::new();
for op in txn.if_then.iter() {
if let Some(txn_op::Request::DeleteByPrefix(delete_by_prefix)) = &op.request
{
if_map.insert(
delete_by_prefix.clone(),
kvs.scan_prefix(&delete_by_prefix.prefix)?,
);
}
}
for op in txn.else_then.iter() {
if let Some(txn_op::Request::DeleteByPrefix(delete_by_prefix)) = &op.request
{
else_map.insert(
delete_by_prefix.clone(),
kvs.scan_prefix(&delete_by_prefix.prefix)?,
);
}
}
Ok(Some((if_map, else_map)))
}
_ => Ok(None),
},
_ => Ok(None),
}
}
    /// Apply an log entry to state machine.
    ///
    /// If a duplicated log entry is detected by checking data.txid, no update
    /// will be made and the previous resp is returned. In this way a client is able to re-send a
    /// command safely in case of network failure etc.
    #[tracing::instrument(level = "debug", skip(self, entry), fields(log_id=%entry.log_id))]
    pub async fn apply(&self, entry: &Entry) -> Result<AppliedState, MetaStorageError> {
        info!("apply: summary: {}", entry.summary(),);
        debug!("sled tx start: {:?}", entry);
        let log_id = &entry.log_id;
        // Proposing time of the log; 0 means "no time" and disables expiration.
        let log_time_ms = Self::get_log_time(entry);
        // Expired keys are listed outside the txn (sled txns provide no listing),
        // then removed inside it.
        let expired = self.list_expired_kvs(log_time_ms)?;
        debug!("expired keys: {:?}", expired);
        // Delete-by-prefix scans must also happen before entering the txn.
        let kv_pairs = self.scan_prefix_if_needed(entry)?;
        let result = self.sm_tree.txn(true, move |mut txn_tree| {
            self.clean_expired_kvs(&mut txn_tree, &expired)?;
            let txn_sm_meta = txn_tree.key_space::<StateMachineMeta>();
            txn_sm_meta.insert(&LastApplied, &StateMachineMetaValue::LogId(*log_id))?;
            match entry.payload {
                EntryPayload::Blank => {
                    info!("apply: blank");
                }
                EntryPayload::Normal(ref data) => {
                    info!("apply: {}", data);
                    // Idempotence: if this client's txid serial was already
                    // applied, return the recorded response without re-applying.
                    if let Some(ref txid) = data.txid {
                        let (serial, resp) =
                            self.txn_get_client_last_resp(&txid.client, &txn_tree)?;
                        if serial == txid.serial {
                            return Ok((Some(resp), txn_tree.changes));
                        }
                    }
                    let res =
                        self.apply_cmd(&data.cmd, &mut txn_tree, kv_pairs.as_ref(), log_time_ms);
                    if let Ok(ok) = &res {
                        info!("apply_result: summary: {}; res ok: {}", entry.summary(), ok);
                    }
                    if let Err(err) = &res {
                        info!(
                            "apply_result: summary: {}; res err: {:?}",
                            entry.summary(),
                            err
                        );
                    }
                    let applied_state = res?;
                    // Record the response for this txid so a re-sent command is
                    // answered from the record above instead of re-applied.
                    if let Some(ref txid) = data.txid {
                        self.txn_client_last_resp_update(
                            &txid.client,
                            (txid.serial, applied_state.clone()),
                            &txn_tree,
                        )?;
                    }
                    return Ok((Some(applied_state), txn_tree.changes));
                }
                EntryPayload::Membership(ref mem) => {
                    info!("apply: membership: {:?}", mem);
                    txn_sm_meta.insert(
                        &LastMembership,
                        &StateMachineMetaValue::Membership(StoredMembership::new(
                            Some(*log_id),
                            mem.clone(),
                        )),
                    )?;
                    return Ok((Some(AppliedState::None), txn_tree.changes));
                }
            };
            Ok((None, txn_tree.changes))
        });
        let (opt_applied_state, changes) = result?;
        debug!("sled tx done: {:?}", entry);
        let applied_state = match opt_applied_state {
            Some(r) => r,
            None => AppliedState::None,
        };
        // Send queued change events to subscriber
        if let Some(subscriber) = &self.subscriber {
            for event in changes {
                subscriber.kv_changed(event);
            }
        }
        Ok(applied_state)
    }
/// Retrieve the proposing time from a raft-log.
///
/// Only `Normal` log has a time embedded.
#[tracing::instrument(level = "debug", skip_all)]
fn get_log_time(entry: &Entry) -> u64 {
match &entry.payload {
EntryPayload::Normal(data) => match data.time_ms {
None => {
error!(
"log has no time: {}, treat every record with non-none `expire` as timed out",
entry.summary()
);
0
}
Some(x) => {
let t = SystemTime::UNIX_EPOCH + Duration::from_millis(x);
info!("apply: raft-log time: {:?}", t);
x
}
},
_ => 0,
}
}
    /// Add a node record. An existing node is replaced only when `overriding`
    /// is true; otherwise the stored value is kept and reported unchanged.
    #[tracing::instrument(level = "debug", skip(self, txn_tree))]
    fn apply_add_node_cmd(
        &self,
        node_id: &u64,
        node: &Node,
        overriding: bool,
        txn_tree: &TransactionSledTree,
    ) -> Result<AppliedState, MetaStorageError> {
        let sm_nodes = txn_tree.key_space::<Nodes>();
        let prev = sm_nodes.get(node_id)?;
        if prev.is_none() {
            sm_nodes.insert(node_id, node)?;
            info!("applied AddNode(non-overriding): {}={:?}", node_id, node);
            return Ok((prev, Some(node.clone())).into());
        }
        if overriding {
            sm_nodes.insert(node_id, node)?;
            info!("applied AddNode(overriding): {}={:?}", node_id, node);
            Ok((prev, Some(node.clone())).into())
        } else {
            // No change: report (prev, prev).
            Ok((prev.clone(), prev).into())
        }
    }
    /// Remove a node record; a no-op if the node is absent.
    #[tracing::instrument(level = "debug", skip(self, txn_tree))]
    fn apply_remove_node_cmd(
        &self,
        node_id: &u64,
        txn_tree: &TransactionSledTree,
    ) -> Result<AppliedState, MetaStorageError> {
        let sm_nodes = txn_tree.key_space::<Nodes>();
        let prev = sm_nodes.get(node_id)?;
        if prev.is_some() {
            info!("applied RemoveNode: {}={:?}", node_id, prev);
            sm_nodes.remove(node_id)?;
        }
        Ok((prev, None).into())
    }
    /// Apply an `UpsertKV` command and push the resulting change events
    /// (including the implicit deletion of an expired record, if any).
    #[tracing::instrument(level = "debug", skip_all)]
    fn apply_update_kv_cmd(
        &self,
        upsert_kv: &UpsertKV,
        txn_tree: &mut TransactionSledTree,
        log_time_ms: u64,
    ) -> Result<AppliedState, MetaStorageError> {
        debug!(upsert_kv = debug(upsert_kv), "apply_update_kv_cmd");
        let (expired, prev, result) = Self::txn_upsert_kv(txn_tree, upsert_kv, log_time_ms)?;
        debug!("applied UpsertKV: {:?} {:?}", upsert_kv, result);
        // A record that had already expired is reported as a separate deletion event.
        if expired.is_some() {
            txn_tree.push_change(&upsert_kv.key, expired, None);
        }
        txn_tree.push_change(&upsert_kv.key, prev.clone(), result.clone());
        Ok(Change::new(prev, result).into())
    }
fn return_value_condition_result(
&self,
expected: i32,
target_value: &Vec<u8>,
value: &SeqV,
) -> bool {
match FromPrimitive::from_i32(expected) {
Some(ConditionResult::Eq) => value.data == *target_value,
Some(ConditionResult::Gt) => value.data > *target_value,
Some(ConditionResult::Lt) => value.data < *target_value,
Some(ConditionResult::Ne) => value.data != *target_value,
Some(ConditionResult::Ge) => value.data >= *target_value,
Some(ConditionResult::Le) => value.data <= *target_value,
_ => false,
}
}
    /// Compare a stored record's seq against `target_seq` with the
    /// `ConditionResult` operator encoded in `expected`.
    ///
    /// Returns `false` for an unrecognized `expected` code.
    pub fn return_seq_condition_result(
        &self,
        expected: i32,
        target_seq: &u64,
        value: &SeqV,
    ) -> bool {
        match FromPrimitive::from_i32(expected) {
            Some(ConditionResult::Eq) => value.seq == *target_seq,
            Some(ConditionResult::Gt) => value.seq > *target_seq,
            Some(ConditionResult::Lt) => value.seq < *target_seq,
            Some(ConditionResult::Ne) => value.seq != *target_seq,
            Some(ConditionResult::Ge) => value.seq >= *target_seq,
            Some(ConditionResult::Le) => value.seq <= *target_seq,
            _ => false,
        }
    }
    /// Evaluate one txn condition against the current `GenericKV` record.
    ///
    /// A seq comparison treats a missing record as seq 0; a value comparison
    /// on a missing record is `false`. A condition without a target is `false`.
    #[tracing::instrument(level = "debug", skip(self, txn_tree, cond))]
    fn txn_execute_one_condition(
        &self,
        txn_tree: &TransactionSledTree,
        cond: &TxnCondition,
    ) -> Result<bool, MetaStorageError> {
        debug!(cond = display(cond), "txn_execute_one_condition");
        let key = cond.key.clone();
        let sub_tree = txn_tree.key_space::<GenericKV>();
        let sv = sub_tree.get(&key)?;
        debug!("txn_execute_one_condition: {:?} {:?}", key, sv);
        if let Some(target) = &cond.target {
            match target {
                txn_condition::Target::Seq(target_seq) => {
                    return Ok(self.return_seq_condition_result(
                        cond.expected,
                        target_seq,
                        // seq is 0 if the record does not exist.
                        &sv.unwrap_or_default(),
                    ));
                }
                txn_condition::Target::Value(target_value) => {
                    if let Some(sv) = sv {
                        return Ok(self.return_value_condition_result(
                            cond.expected,
                            target_value,
                            &sv,
                        ));
                    } else {
                        // No record: a value condition can never hold.
                        return Ok(false);
                    }
                }
            }
        };
        Ok(false)
    }
#[tracing::instrument(level = "debug", skip(self, txn_tree, condition))]
fn txn_execute_condition(
&self,
txn_tree: &TransactionSledTree,
condition: &Vec<TxnCondition>,
) -> Result<bool, MetaStorageError> {
for cond in condition {
debug!(condition = display(cond), "txn_execute_condition");
if !self.txn_execute_one_condition(txn_tree, cond)? {
return Ok(false);
}
}
Ok(true)
}
    /// Execute a txn `Get`: read the key from `GenericKV` and append the
    /// response (value converted to protobuf `SeqV`) to `resp`.
    fn txn_execute_get_operation(
        &self,
        txn_tree: &TransactionSledTree,
        get: &TxnGetRequest,
        resp: &mut TxnReply,
    ) -> Result<(), MetaStorageError> {
        let sub_tree = txn_tree.key_space::<GenericKV>();
        let sv = sub_tree.get(&get.key)?;
        let value = sv.map(to_pb_seq_v);
        let get_resp = TxnGetResponse {
            key: get.key.clone(),
            value,
        };
        resp.responses.push(TxnOpResponse {
            response: Some(txn_op_response::Response::Get(get_resp)),
        });
        Ok(())
    }
    /// Execute a txn `Put`: upsert the key (with its expire meta), push change
    /// events, and append the response (optionally carrying the previous value).
    fn txn_execute_put_operation(
        &self,
        txn_tree: &mut TransactionSledTree,
        put: &TxnPutRequest,
        resp: &mut TxnReply,
        log_time_ms: u64,
    ) -> Result<(), MetaStorageError> {
        let (expired, prev, result) = Self::txn_upsert_kv(
            txn_tree,
            &UpsertKV::update(&put.key, &put.value).with(KVMeta {
                expire_at: put.expire_at,
            }),
            log_time_ms,
        )?;
        // An already-expired previous record is reported as a deletion event.
        if expired.is_some() {
            txn_tree.push_change(&put.key, expired, None);
        }
        txn_tree.push_change(&put.key, prev.clone(), result);
        let put_resp = TxnPutResponse {
            key: put.key.clone(),
            prev_value: if put.prev_value {
                prev.map(to_pb_seq_v)
            } else {
                None
            },
        };
        resp.responses.push(TxnOpResponse {
            response: Some(txn_op_response::Response::Put(put_resp)),
        });
        Ok(())
    }
    /// Execute a txn `Delete`: remove the key, push change events, and append
    /// the response (success flag plus optional previous value).
    fn txn_execute_delete_operation(
        &self,
        txn_tree: &mut TransactionSledTree,
        delete: &TxnDeleteRequest,
        resp: &mut TxnReply,
        log_time_ms: u64,
    ) -> Result<(), MetaStorageError> {
        let (expired, prev, result) =
            Self::txn_upsert_kv(txn_tree, &UpsertKV::delete(&delete.key), log_time_ms)?;
        if expired.is_some() {
            txn_tree.push_change(&delete.key, expired, None);
        }
        txn_tree.push_change(&delete.key, prev.clone(), result);
        let del_resp = TxnDeleteResponse {
            key: delete.key.clone(),
            // Success means a live (non-expired) record existed before the delete.
            success: prev.is_some(),
            prev_value: if delete.prev_value {
                prev.map(to_pb_seq_v)
            } else {
                None
            },
        };
        resp.responses.push(TxnOpResponse {
            response: Some(txn_op_response::Response::Delete(del_resp)),
        });
        Ok(())
    }
    /// Execute a txn `DeleteByPrefix`: delete every kv pair that was
    /// pre-scanned for this request (see `scan_prefix_if_needed`) and report
    /// the number of deleted records.
    fn txn_execute_delete_by_prefix_operation(
        &self,
        txn_tree: &mut TransactionSledTree,
        delete_by_prefix: &TxnDeleteByPrefixRequest,
        kv_pairs: Option<&DeleteByPrefixKeyMap>,
        resp: &mut TxnReply,
        log_time_ms: u64,
    ) -> Result<(), MetaStorageError> {
        let mut count: u32 = 0;
        if let Some(kv_pairs) = kv_pairs {
            if let Some(kv_pairs) = kv_pairs.get(delete_by_prefix) {
                for (key, _seq) in kv_pairs.iter() {
                    let (expired, prev, res) =
                        Self::txn_upsert_kv(txn_tree, &UpsertKV::delete(key), log_time_ms)?;
                    count += 1;
                    if expired.is_some() {
                        txn_tree.push_change(key, expired, None);
                    }
                    txn_tree.push_change(key, prev, res);
                }
            }
        }
        let del_resp = TxnDeleteByPrefixResponse {
            prefix: delete_by_prefix.prefix.clone(),
            count,
        };
        resp.responses.push(TxnOpResponse {
            response: Some(txn_op_response::Response::DeleteByPrefix(del_resp)),
        });
        Ok(())
    }
    /// Dispatch a single txn operation to its executor; a `None` request is a no-op.
    #[tracing::instrument(level = "debug", skip(self, txn_tree, op, resp))]
    fn txn_execute_operation(
        &self,
        txn_tree: &mut TransactionSledTree,
        op: &TxnOp,
        kv_pairs: Option<&DeleteByPrefixKeyMap>,
        resp: &mut TxnReply,
        log_time_ms: u64,
    ) -> Result<(), MetaStorageError> {
        debug!(op = display(op), "txn execute TxnOp");
        match &op.request {
            Some(txn_op::Request::Get(get)) => {
                self.txn_execute_get_operation(txn_tree, get, resp)?;
            }
            Some(txn_op::Request::Put(put)) => {
                self.txn_execute_put_operation(txn_tree, put, resp, log_time_ms)?;
            }
            Some(txn_op::Request::Delete(delete)) => {
                self.txn_execute_delete_operation(txn_tree, delete, resp, log_time_ms)?;
            }
            Some(txn_op::Request::DeleteByPrefix(delete_by_prefix)) => {
                self.txn_execute_delete_by_prefix_operation(
                    txn_tree,
                    delete_by_prefix,
                    kv_pairs,
                    resp,
                    log_time_ms,
                )?;
            }
            None => {}
        }
        Ok(())
    }
#[tracing::instrument(level = "debug", skip(self, txn_tree, req))]
fn apply_txn_cmd(
&self,
req: &TxnRequest,
txn_tree: &mut TransactionSledTree,
kv_pairs: Option<&(DeleteByPrefixKeyMap, DeleteByPrefixKeyMap)>,
log_time_ms: u64,
) -> Result<AppliedState, MetaStorageError> {
debug!(txn = display(req), "apply txn cmd");
let condition = &req.condition;
let ops: &Vec<TxnOp>;
let kv_op_pairs: Option<&DeleteByPrefixKeyMap>;
let success = if self.txn_execute_condition(txn_tree, condition)? {
ops = &req.if_then;
kv_op_pairs = if let Some(kv_pairs) = kv_pairs {
Some(&kv_pairs.0)
} else {
None
};
true
} else {
ops = &req.else_then;
kv_op_pairs = if let Some(kv_pairs) = kv_pairs {
Some(&kv_pairs.1)
} else {
None
};
false
};
let mut resp: TxnReply = TxnReply {
success,
error: "".to_string(),
responses: vec![],
};
for op in ops {
self.txn_execute_operation(txn_tree, op, kv_op_pairs, &mut resp, log_time_ms)?;
}
Ok(AppliedState::TxnReply(resp))
}
    /// Apply a `Cmd` to state machine.
    ///
    /// Already applied log should be filtered out before passing into this function.
    /// This is the only entry to modify state machine.
    /// The `cmd` is always committed by raft before applying.
    #[tracing::instrument(level = "debug", skip_all)]
    pub fn apply_cmd(
        &self,
        cmd: &Cmd,
        txn_tree: &mut TransactionSledTree,
        kv_pairs: Option<&(DeleteByPrefixKeyMap, DeleteByPrefixKeyMap)>,
        log_time_ms: u64,
    ) -> Result<AppliedState, MetaStorageError> {
        info!("apply_cmd: {}", cmd);
        let now = Instant::now();
        // Dispatch on the command variant; each handler returns the
        // `AppliedState` recorded as this log's response.
        let res = match cmd {
            Cmd::AddNode {
                ref node_id,
                ref node,
                overriding,
            } => self.apply_add_node_cmd(node_id, node, *overriding, txn_tree),
            Cmd::RemoveNode { ref node_id } => self.apply_remove_node_cmd(node_id, txn_tree),
            Cmd::UpsertKV(ref upsert_kv) => {
                self.apply_update_kv_cmd(upsert_kv, txn_tree, log_time_ms)
            }
            Cmd::Transaction(txn) => self.apply_txn_cmd(txn, txn_tree, kv_pairs, log_time_ms),
        };
        let elapsed = now.elapsed().as_micros();
        debug!("apply_cmd: elapsed: {}", elapsed);
        res
    }
    /// Before applying, list expired keys to clean.
    ///
    /// Apply is done in a sled-txn tree, which does not provide listing function.
    /// At most 32 expired keys are collected per call; any remainder is picked
    /// up by subsequent applies.
    #[tracing::instrument(level = "debug", skip_all)]
    pub fn list_expired_kvs(
        &self,
        log_time_ms: u64,
    ) -> Result<Vec<(String, ExpireKey)>, MetaStorageError> {
        // A zero log time means the log carries no time: expiration is disabled.
        if log_time_ms == 0 {
            return Ok(vec![]);
        }
        let at_most = 32;
        let mut to_clean = Vec::with_capacity(at_most);
        info!("list_expired_kv, log_time_ts: {}", log_time_ms);
        // Scan the secondary expiration index in key order.
        let expires = self.sm_tree.key_space::<Expire>();
        let it = expires.range(..)?.take(at_most);
        for item_res in it {
            let item = item_res?;
            let k: ExpireKey = item.key()?;
            // Strictly-before comparison: a key expiring exactly at
            // log_time_ms is not yet considered expired here.
            if log_time_ms > k.time_ms {
                let v: ExpireValue = item.value()?;
                to_clean.push((v.key, k))
            }
        }
        Ok(to_clean)
    }
    /// Remove expired key-values, and corresponding secondary expiration index record.
    ///
    /// This should be done inside a sled-transaction.
    #[tracing::instrument(level = "debug", skip_all)]
    fn clean_expired_kvs(
        &self,
        txn_tree: &mut TransactionSledTree,
        expired: &[(String, ExpireKey)],
    ) -> Result<(), MetaStorageError> {
        for (key, expire_key) in expired.iter() {
            let sv = txn_tree.key_space::<GenericKV>().get(key)?;
            if let Some(seq_v) = &sv {
                // Only remove the record if it is still the exact version
                // (same seq) that was listed as expired.
                if expire_key.seq == seq_v.seq {
                    info!("clean expired: {}, {}", key, expire_key);
                    txn_tree.key_space::<GenericKV>().remove(key)?;
                    txn_tree.key_space::<Expire>().remove(expire_key)?;
                    txn_tree.push_change(key, sv, None);
                    continue;
                }
            }
            // NOTE(review): reaching here means the kv changed between listing
            // and cleaning; the code treats this as impossible and panics —
            // presumably applies are strictly serialized; confirm.
            unreachable!(
                "trying to remove un-cleanable: {}, {}, kv-record: {:?}",
                key, expire_key, sv
            );
        }
        Ok(())
    }
    /// Increment and persist the named sequence, returning the new value.
    fn txn_incr_seq(key: &str, txn_tree: &TransactionSledTree) -> Result<u64, MetaStorageError> {
        let seqs = txn_tree.key_space::<Sequences>();
        let key = key.to_string();
        let curr = seqs.get(&key)?;
        // A missing sequence defaults to 0, so the first returned value is 1.
        let new_value = curr.unwrap_or_default() + 1;
        seqs.insert(&key, &new_value)?;
        debug!("txn_incr_seq: {}={}", key, new_value);
        Ok(new_value.0)
    }
    /// Execute an upsert-kv operation on a transactional sled tree.
    ///
    /// KV has two indexes:
    /// - The primary index: `key -> (seq, meta(expire_time), value)`,
    /// - and a secondary expiration index: `(expire_time, seq) -> key`.
    ///
    /// Thus upsert a kv record is done in two steps:
    /// update the primary index and optionally update the secondary index.
    ///
    /// It returns 3 SeqV:
    /// - `(None, None, x)`: upsert nonexistent key;
    /// - `(None, Some, x)`: upsert existent and non-expired key;
    /// - `(Some, None, x)`: upsert existent but expired key;
    #[allow(clippy::type_complexity)]
    fn txn_upsert_kv(
        txn_tree: &TransactionSledTree,
        upsert_kv: &UpsertKV,
        log_time_ms: u64,
    ) -> Result<(Option<SeqV>, Option<SeqV>, Option<SeqV>), MetaStorageError> {
        let (expired, prev, res) =
            Self::txn_upsert_kv_primary_index(txn_tree, upsert_kv, log_time_ms)?;
        let expires = txn_tree.key_space::<Expire>();
        // Drop the expiration-index entry of a record that just expired.
        // `expire_at` appears to be in seconds while the index key is in
        // milliseconds, hence the `* 1000`.
        if let Some(sv) = &expired {
            if let Some(m) = &sv.meta {
                if let Some(exp) = m.expire_at {
                    expires.remove(&ExpireKey::new(exp * 1000, sv.seq))?;
                }
            }
        }
        // No change, no need to update expiration index
        if prev == res {
            return Ok((expired, prev, res));
        }
        // Remove previous expiration index, add a new one.
        if let Some(sv) = &prev {
            if let Some(m) = &sv.meta {
                if let Some(exp) = m.expire_at {
                    expires.remove(&ExpireKey::new(exp * 1000, sv.seq))?;
                }
            }
        }
        if let Some(sv) = &res {
            if let Some(m) = &sv.meta {
                if let Some(exp) = m.expire_at {
                    let k = ExpireKey::new(exp * 1000, sv.seq);
                    let v = ExpireValue {
                        key: upsert_kv.key.clone(),
                    };
                    expires.insert(&k, &v)?;
                }
            }
        }
        Ok((expired, prev, res))
    }
    /// It returns 3 SeqV:
    /// - The first one is `Some` if an existent record expired.
    /// - The second and the third represent the change that is made by the upsert operation.
    ///
    /// Only one of the first and second can be `Some`.
    #[allow(clippy::type_complexity)]
    fn txn_upsert_kv_primary_index(
        txn_tree: &TransactionSledTree,
        upsert_kv: &UpsertKV,
        log_time_ms: u64,
    ) -> Result<(Option<SeqV>, Option<SeqV>, Option<SeqV>), MetaStorageError> {
        let kvs = txn_tree.key_space::<GenericKV>();
        let prev = kvs.get(&upsert_kv.key)?;
        // If prev is timed out, treat it as a None. But still keep the original value for cleaning up it.
        let (expired, prev) = Self::expire_seq_v(prev, log_time_ms);
        // Seq precondition failed: no-op; report the unchanged record as (prev, prev).
        if upsert_kv.seq.match_seq(&prev).is_err() {
            return Ok((expired, prev.clone(), prev));
        }
        let mut new_seq_v = match &upsert_kv.value {
            // Seq 0 is a placeholder; the real seq is assigned below.
            Operation::Update(v) => SeqV::with_meta(0, upsert_kv.value_meta.clone(), v.clone()),
            Operation::Delete => {
                kvs.remove(&upsert_kv.key)?;
                return Ok((expired, prev, None));
            }
            // AsIs: keep the previous value, only replace its meta.
            Operation::AsIs => match prev {
                None => return Ok((expired, prev, None)),
                Some(ref prev_kv_value) => {
                    prev_kv_value.clone().set_meta(upsert_kv.value_meta.clone())
                }
            },
        };
        new_seq_v.seq = Self::txn_incr_seq(GenericKV::NAME, txn_tree)?;
        kvs.insert(&upsert_kv.key, &new_seq_v)?;
        debug!("applied upsert: {:?} res: {:?}", upsert_kv, new_seq_v);
        Ok((expired, prev, Some(new_seq_v)))
    }
    /// Record the response for a client txid, enabling idempotent replay in `apply`.
    fn txn_client_last_resp_update(
        &self,
        key: &str,
        value: (u64, AppliedState),
        txn_tree: &TransactionSledTree,
    ) -> Result<AppliedState, MetaStorageError> {
        let v = ClientLastRespValue {
            req_serial_num: value.0,
            res: value.1.clone(),
        };
        let txn_ks = txn_tree.key_space::<ClientLastResps>();
        txn_ks.insert(&key.to_string(), &v)?;
        Ok(value.1)
    }
    /// Read the last stored raft membership, if any.
    pub fn get_membership(&self) -> Result<Option<StoredMembership>, MetaStorageError> {
        let sm_meta = self.sm_meta();
        let mem = sm_meta
            .get(&StateMachineMetaKey::LastMembership)?
            .map(|x| x.try_into().expect("Membership"));
        Ok(mem)
    }
    /// Read the id of the last applied log, if any.
    pub fn get_last_applied(&self) -> Result<Option<LogId>, MetaStorageError> {
        let sm_meta = self.sm_meta();
        let last_applied = sm_meta
            .get(&LastApplied)?
            .map(|x| x.try_into().expect("LogId"));
        Ok(last_applied)
    }
    /// Insert (or replace) a node record directly, outside of log application.
    pub async fn add_node(&self, node_id: u64, node: &Node) -> Result<(), MetaStorageError> {
        let sm_nodes = self.nodes();
        sm_nodes.insert(&node_id, node).await?;
        Ok(())
    }
    /// Look up the last recorded response for a client.
    ///
    /// Note: an unknown client yields `Some((0, AppliedState::None))`, never `None`.
    pub fn get_client_last_resp(
        &self,
        key: &str,
    ) -> Result<Option<(u64, AppliedState)>, MetaStorageError> {
        let client_last_resps = self.client_last_resps();
        let v: Option<ClientLastRespValue> = client_last_resps.get(&key.to_string())?;
        if let Some(resp) = v {
            return Ok(Some((resp.req_serial_num, resp.res)));
        }
        Ok(Some((0, AppliedState::None)))
    }
    /// Same as `get_client_last_resp` but inside a sled transaction;
    /// an unknown client yields `(0, AppliedState::None)`.
    pub fn txn_get_client_last_resp(
        &self,
        key: &str,
        txn_tree: &TransactionSledTree,
    ) -> Result<(u64, AppliedState), MetaStorageError> {
        let client_last_resps = txn_tree.key_space::<ClientLastResps>();
        let v = client_last_resps.get(&key.to_string())?;
        if let Some(resp) = v {
            return Ok((resp.req_serial_num, resp.res));
        }
        Ok((0, AppliedState::None))
    }
    /// Get a node record by id.
    pub fn get_node(&self, node_id: &NodeId) -> Result<Option<Node>, MetaStorageError> {
        let sm_nodes = self.nodes();
        sm_nodes.get(node_id)
    }
    /// List all node records.
    pub fn get_nodes(&self) -> Result<Vec<Node>, MetaStorageError> {
        let sm_nodes = self.nodes();
        sm_nodes.range_values(..)
    }
/// Expire an `SeqV` and returns the value discarded by expiration and the unexpired value:
/// - `(Some, None)` if it expires.
/// - `(None, Some)` if it does not.
/// - `(None, None)` if the input is None.
pub fn expire_seq_v<V>(
seq_value: Option<SeqV<V>>,
log_time_ms: u64,
) -> (Option<SeqV<V>>, Option<SeqV<V>>) {
if let Some(s) = &seq_value {
if s.get_expire_at() < log_time_ms {
(seq_value, None)
} else {
(None, seq_value)
}
} else {
(None, None)
}
}
}
/// Key space support
impl StateMachine {
    /// Keyspace of state-machine metadata: initialized flag, last applied log, membership.
    pub fn sm_meta(&self) -> AsKeySpace<StateMachineMeta> {
        self.sm_tree.key_space()
    }
    /// Keyspace of cluster node records.
    pub fn nodes(&self) -> AsKeySpace<Nodes> {
        self.sm_tree.key_space()
    }
    /// A kv store of all other general purpose information.
    /// The value is tuple of a monotonic sequence number and userdata value in string.
    /// The sequence number is guaranteed to increment(by some value greater than 0) everytime the record changes.
    pub fn kvs(&self) -> AsKeySpace<GenericKV> {
        self.sm_tree.key_space()
    }
    /// storage of auto-incremental number.
    pub fn sequences(&self) -> AsKeySpace<Sequences> {
        self.sm_tree.key_space()
    }
    /// storage of client last resp to keep idempotent.
    pub fn client_last_resps(&self) -> AsKeySpace<ClientLastResps> {
        self.sm_tree.key_space()
    }
}
/// Convert SeqV defined in rust types to SeqV defined in protobuf.
///
/// Note: only `seq` and `data` are copied; the kv meta is not carried over.
fn to_pb_seq_v(seq_v: SeqV) -> pb::SeqV {
    pb::SeqV {
        seq: seq_v.seq,
        data: seq_v.data,
    }
}
#[cfg(test)]
mod tests {
    use common_meta_types::KVMeta;
    use common_meta_types::SeqV;
    use crate::state_machine::StateMachine;
    // Exercises all documented (expired, unexpired) outcomes of expire_seq_v
    // at a fixed log time of 10_000 ms.
    #[test]
    fn test_expire_seq_v() -> anyhow::Result<()> {
        let sv = || SeqV::new(1, ());
        let expire_seq_v = StateMachine::expire_seq_v;
        // No input -> (None, None).
        assert_eq!((None, None), expire_seq_v(None, 10_000));
        // No meta -> never expires.
        assert_eq!((None, Some(sv())), expire_seq_v(Some(sv()), 10_000));
        // Meta without expire_at -> never expires.
        assert_eq!(
            (None, Some(sv().set_meta(Some(KVMeta { expire_at: None })))),
            expire_seq_v(
                Some(sv().set_meta(Some(KVMeta { expire_at: None }))),
                10_000
            )
        );
        // expire_at in the future (20s = 20_000 ms) -> not expired.
        assert_eq!(
            (
                None,
                Some(sv().set_meta(Some(KVMeta {
                    expire_at: Some(20)
                })))
            ),
            expire_seq_v(
                Some(sv().set_meta(Some(KVMeta {
                    expire_at: Some(20)
                }))),
                10_000
            )
        );
        // expire_at in the past (5s = 5_000 ms) -> expired.
        assert_eq!(
            (
                Some(sv().set_meta(Some(KVMeta { expire_at: Some(5) }))),
                None
            ),
            expire_seq_v(
                Some(sv().set_meta(Some(KVMeta { expire_at: Some(5) }))),
                10_000
            )
        );
        Ok(())
    }
}
|
#[doc = "Reader of register BOOT7_CURR"]
pub type R = crate::R<u32, super::BOOT7_CURR>;
#[doc = "Writer for register BOOT7_CURR"]
pub type W = crate::W<u32, super::BOOT7_CURR>;
#[doc = "Register BOOT7_CURR `reset()`'s with value 0"]
impl crate::ResetValue for super::BOOT7_CURR {
    type Type = u32;
    /// Value written by `reset()`.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `BOOT_CM7_ADD1`"]
pub type BOOT_CM7_ADD1_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `BOOT_CM7_ADD1`"]
pub struct BOOT_CM7_ADD1_W<'a> {
    w: &'a mut W,
}
impl<'a> BOOT_CM7_ADD1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Field occupies bits 16..=31: clear them, then splice in the new value.
        const MASK: u32 = 0xffff;
        const OFFSET: u32 = 16;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | ((u32::from(value) & MASK) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `BOOT_CM7_ADD0`"]
pub type BOOT_CM7_ADD0_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `BOOT_CM7_ADD0`"]
pub struct BOOT_CM7_ADD0_W<'a> {
    w: &'a mut W,
}
impl<'a> BOOT_CM7_ADD0_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Field occupies bits 0..=15: clear the low half-word, then insert the value.
        const MASK: u32 = 0xffff;
        self.w.bits = (self.w.bits & !MASK) | (u32::from(value) & MASK);
        self.w
    }
}
impl R {
    #[doc = "Bits 16:31 - Arm Cortex-M7 boot address 1"]
    #[inline(always)]
    pub fn boot_cm7_add1(&self) -> BOOT_CM7_ADD1_R {
        // The truncating cast to u16 keeps exactly the low 16 bits after the
        // shift, so an explicit `& 0xffff` mask is unnecessary.
        BOOT_CM7_ADD1_R::new((self.bits >> 16) as u16)
    }
    #[doc = "Bits 0:15 - Arm Cortex-M7 boot address 0"]
    #[inline(always)]
    pub fn boot_cm7_add0(&self) -> BOOT_CM7_ADD0_R {
        // Truncating cast selects bits 0..=15 directly.
        BOOT_CM7_ADD0_R::new(self.bits as u16)
    }
}
impl W {
#[doc = "Bits 16:31 - Arm Cortex-M7 boot address 1"]
#[inline(always)]
// Returns a write proxy borrowing `self`; the actual write happens in the proxy's `bits()`.
pub fn boot_cm7_add1(&mut self) -> BOOT_CM7_ADD1_W {
BOOT_CM7_ADD1_W { w: self }
}
#[doc = "Bits 0:15 - Arm Cortex-M7 boot address 0"]
#[inline(always)]
pub fn boot_cm7_add0(&mut self) -> BOOT_CM7_ADD0_W {
BOOT_CM7_ADD0_W { w: self }
}
}
|
use crate::S;
use wasm_bindgen::UnwrapThrowExt;
use web_sys as web;
#[derive(Debug)]
// A virtual DOM text node.
pub struct VText {
// The text content of this virtual node.
value: S,
// Backing DOM `Text` node; `None` until `create()` has run.
node: Option<web::Text>,
}
impl VText {
    /// Builds a virtual text node from anything convertible into the crate's
    /// string type; no DOM node is attached yet.
    pub fn new<V: Into<S>>(value: V) -> Self {
        Self {
            value: value.into(),
            node: None,
        }
    }
    /// Creates the backing DOM `Text` node, remembers it, and returns it.
    pub fn create(&mut self) -> web::Text {
        let document = web::window().unwrap_throw().document().unwrap_throw();
        let text_node = document.create_text_node(&self.value);
        self.node = Some(text_node.clone());
        text_node
    }
    /// Reuses the DOM node of `old`, updating its character data only when the
    /// text actually changed, and adopts it as this node's backing `Text`.
    pub fn patch(&mut self, old: &mut VText) -> web::Text {
        let text_node = old.node.clone().unwrap_throw();
        if old.value != self.value {
            text_node.set_data(&self.value);
        }
        self.node = Some(text_node.clone());
        text_node
    }
    /// Returns a clone of the backing DOM node, if one exists.
    pub fn node(&self) -> Option<web::Text> {
        self.node.clone()
    }
}
|
use std::fmt::Display;
use crate::query_adaptor::QueryAdaptor;
/// An opaque, monotonic generational identifier of a buffer in a
/// [`PartitionData`].
///
/// A [`BatchIdent`] is strictly greater than all those that were obtained
/// before it.
///
/// [`PartitionData`]: super::PartitionData
// NOTE(review): derives PartialOrd but not Ord — confirm a total order is not needed by callers.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd)]
pub(crate) struct BatchIdent(u64);
impl BatchIdent {
    /// Advance the counter and return the freshly minted identifier.
    pub(super) fn next(&mut self) -> Self {
        let advanced = self.0 + 1;
        self.0 = advanced;
        Self(advanced)
    }
    /// Test-only accessor exposing the raw counter so tests can assert the
    /// value changes between persist operations.
    #[cfg(test)]
    pub(super) fn get(&self) -> u64 {
        self.0
    }
}
impl Display for BatchIdent {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
/// A type wrapper over [`QueryAdaptor`] that statically ensures only batches of
/// data from [`PartitionData::mark_persisting()`] are given to
/// [`PartitionData::mark_persisted()`].
///
/// Cloning this type is relatively cheap.
///
/// [`PartitionData::mark_persisting()`]: super::PartitionData::mark_persisting
/// [`PartitionData::mark_persisted()`]: super::PartitionData::mark_persisted
#[derive(Debug, Clone)]
pub struct PersistingData {
// The buffered data handed to the persist subsystem.
data: QueryAdaptor,
// Identifier the batch was snapshotted under; see [`BatchIdent`].
batch_ident: BatchIdent,
}
impl PersistingData {
// Pairs a batch of data with the ident it was snapshotted under.
pub(super) fn new(data: QueryAdaptor, batch_ident: BatchIdent) -> Self {
Self { data, batch_ident }
}
// The generational identifier this batch was marked persisting with.
pub(super) fn batch_ident(&self) -> BatchIdent {
self.batch_ident
}
// Clone of the underlying adaptor (cheap — see type-level docs).
pub(crate) fn query_adaptor(&self) -> QueryAdaptor {
self.data.clone()
}
}
// Deref to the wrapped adaptor so read-only call sites need not unwrap it.
impl std::ops::Deref for PersistingData {
type Target = QueryAdaptor;
fn deref(&self) -> &Self::Target {
&self.data
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// The identifier starts at zero and strictly increases with every call
    /// to `next()`, with `get()` reflecting the latest value.
    #[test]
    fn test_batch_ident() {
        let mut ident = BatchIdent::default();
        assert_eq!(ident.get(), 0);
        for expected in 1..=2 {
            assert_eq!(ident.next().get(), expected);
            assert_eq!(ident.get(), expected);
        }
    }
}
|
use std::io;
/// Interactively reads a temperature value and a unit (C or F) from stdin,
/// then prints the value converted to the other unit.
fn main() {
    // BUG FIX: the old prompt told the user to append F or C to the number
    // (e.g. "100F"), but the whole line is parsed as f32, so complying input
    // crashed. Value and unit are read as two separate lines.
    println!("Please enter a temperature value (a number)");
    let mut temperature = String::new();
    io::stdin()
        .read_line(&mut temperature)
        .expect("Failed to read line");
    let temperature: f32 = temperature
        .trim()
        .parse()
        .expect("Temperature must be a number");
    println!("Please enter the unit you want to convert this from (F for Fahrenheit, C for Celsius)");
    let mut unit = String::new();
    io::stdin()
        .read_line(&mut unit)
        .expect("Failed to read line");
    let unit = unit.trim(); // shadowed: String -> trimmed &str
    println!(
        "Your entry of {}{} is {}",
        temperature,
        unit,
        convert(temperature, unit)
    );
}
/// Converts `temp` between Celsius and Fahrenheit.
///
/// `unit` names the unit `temp` is currently in (case-insensitive):
/// * `"c"` — Celsius in, Fahrenheit out.
/// * `"f"` — Fahrenheit in, Celsius out.
///
/// # Panics
/// Panics when `unit` is neither `"c"` nor `"f"`, reporting the rejected value.
fn convert(temp: f32, unit: &str) -> f32 {
    let unit = unit.to_lowercase();
    if unit == "c" {
        return (temp * 9.0 / 5.0) + 32.0;
    }
    if unit == "f" {
        return (temp - 32.0) * 5.0 / 9.0;
    }
    // Include the offending input so the error is actionable.
    panic!("invalid unit: {:?}", unit);
}
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:attribute-with-error.rs
// ignore-stage1
#![feature(use_extern_macros)]
extern crate attribute_with_error;
use attribute_with_error::foo;
#[foo]
fn test1() { // attribute macro applied to a free function
let a: i32 = "foo";
//~^ ERROR: mismatched types
let b: i32 = "f'oo";
//~^ ERROR: mismatched types
}
fn test2() { // inner-attribute form of the macro
#![foo]
// FIXME: should have a type error here and assert it works but it doesn't
}
trait A { // the default method body below carries the deliberate type error
// FIXME: should have a #[foo] attribute here and assert that it works
fn foo(&self) {
let a: i32 = "foo";
//~^ ERROR: mismatched types
}
}
struct B;
impl A for B { // attribute applied to a trait-impl method
#[foo]
fn foo(&self) {
let a: i32 = "foo";
//~^ ERROR: mismatched types
}
}
#[foo]
fn main() {
}
|
/// A single Enigma rotor: a 26-letter substitution `map` plus an optional
/// turnover `notch` letter that triggers stepping of the next rotor.
pub struct Rotor {
    map: String,
    notch: Option<char>,
}
impl Rotor {
    /// Builds a rotor from its wiring string and optional notch position.
    pub fn new(map: &str, notch: Option<char>) -> Rotor {
        Rotor {
            map: map.to_owned(),
            notch,
        }
    }
}
// An Enigma machine: rotors, reflector, plugboard and a step-by-step trace log.
pub struct Enigma {
// Rotors in forward signal order; `rotor_offsets[0]` is stepped on every character (see `encrypt`).
pub rotors: Vec<Rotor>,
// Reflector wiring. ("refrector" is a misspelling, but it is a public field name — keep for API compatibility.)
pub refrector: String,
// Current rotational offset of each rotor, kept in 0..26.
pub rotor_offsets: Vec<i32>,
// Plugboard substitution; the identity mapping is "ABCDEFGHIJKLMNOPQRSTUVWXYZ".
pub plugboard: String,
// Human-readable trace appended to by `encrypt`.
pub log: String,
}
impl Enigma {
/// Creates a machine with the given rotors and reflector wiring, zeroed
/// offsets for three rotors, an identity plugboard and an empty log.
pub fn new(rotors: Vec<Rotor>, refrector: &str) -> Enigma {
Enigma {
rotors, refrector: String::from(refrector), rotor_offsets: vec![0; 3], plugboard: String::from("ABCDEFGHIJKLMNOPQRSTUVWXYZ"), log: String::new()
}
}
/// Encrypts `data` (uppercased first); non-uppercase characters pass
/// through unchanged. Advances rotor offsets and appends a trace to
/// `self.log` as side effects.
pub fn encrypt(&mut self, data: &String) -> String {
// Maps 'A'..='Z' to 0..=25.
fn number_from_latin(character: char) -> i32 {
character as i32 - 'A' as i32
}
// Maps an i32 back into "A".."Z", wrapping into range first.
fn latin_from_number(number: i32) -> String {
let temp;
if number < 0 {
temp = 26 + (number % 26);
} else if number > 25 {
temp = number % 26
} else {
temp = number;
}
String::from_utf8(vec![temp as u8 + 'A' as u8]).unwrap()
}
let data = data.to_uppercase();
let mut result = String::new();
// One "ratchet armed" flag per rotor, set when a rotor sits on its notch.
let mut ratched = vec![false; self.rotor_offsets.len()];
let plugboard: Vec<char> = self.plugboard.chars().collect();
// NOTE(review): this header is appended without a trailing newline, so the
// first per-character trace runs on from it — confirm whether intended.
self.log += "IN P 1 2 3 R 3 2 1 Rotors";
for character in data.chars() {
if character.is_uppercase() {
let mut temp = number_from_latin(character);
self.log += &format!("{}: ", latin_from_number(temp));
// Plugboard substitution on the way in.
temp = number_from_latin(plugboard[temp as usize]);
self.log += &format!("{} ", latin_from_number(temp));
// The first (fast) rotor steps before every character.
self.rotor_offsets[0] = (self.rotor_offsets[0] + 1) % 26;
for (i, rotor) in (&self.rotors).into_iter().enumerate() {
if let Some(notch) = rotor.notch {
// A previously armed ratchet advances this rotor and the next one.
// NOTE(review): `rotor_offsets[i + 1]` is out of bounds if the LAST
// rotor has a notch and its ratchet fires — confirm rotor configs.
if ratched[i] {
if i != 0 { self.rotor_offsets[i] = (self.rotor_offsets[i] + 1) % 26 };
self.rotor_offsets[i + 1] = (self.rotor_offsets[i + 1] + 1) % 26;
ratched[i] = false;
}
// Arm the ratchet when this rotor reaches its notch position.
if self.rotor_offsets[i] == number_from_latin(notch) {
ratched[i] = true;
}
}
// Forward pass through rotor `i`, compensating for its offset.
temp = number_from_latin(rotor.map.chars().nth((temp + self.rotor_offsets[i]) as usize % 26).unwrap()) - self.rotor_offsets[i];
if temp < 0 {
temp = 26 + (temp % 26);
} else if temp > 25 {
temp = temp % 26;
}
self.log += &format!("{} ", latin_from_number(temp + self.rotor_offsets[i]));
}
// Reflector bounces the signal back into the rotor stack.
temp = number_from_latin(self.refrector.chars().nth(temp as usize).unwrap());
self.log += &format!("{} ", latin_from_number(temp));
// Return pass: rotors in reverse order, inverting each substitution.
for (i, rotor) in (&self.rotors).into_iter().rev().enumerate() {
temp = rotor.map.chars().into_iter().position(|x| { number_from_latin(x) == (temp + self.rotor_offsets[2 - i]) % 26 }).unwrap() as i32 - self.rotor_offsets[2 - i];
if temp < 0 {
temp = 26 + (temp % 26);
} else if temp > 25 {
temp = temp % 26;
}
// NOTE(review): the return pass computes with offset `[2 - i]` but this
// trace line reads `[i]` — looks like a logging bug; compare forward pass.
self.log += &format!("{} ", latin_from_number(temp + self.rotor_offsets[i]));
}
self.log += &format!("[{}, {}, {}]", latin_from_number(self.rotor_offsets[0]), latin_from_number(self.rotor_offsets[1]), latin_from_number(self.rotor_offsets[2]));
result += &latin_from_number(temp);
} else {
// Non-letters (spaces, punctuation, digits) are copied through verbatim.
result += &character.to_string();
}
}
result
}
}
|
use encoding::Encoding;
use fontmetrics::FontMetrics;
use std::fmt;
use std::sync::Arc;
use units::{LengthUnit, UserSpace};
/// A font ready to be used in a TextObject.
///
/// The way to get FontRef is to call
/// [Canvas::get_font](struct.Canvas.html#method.get_font) with a
/// [FontSource](trait.FontSource.html). In PDF terms, a FontSource is
/// everything needed to build a font dictionary, while a FontRef is the name
/// that can be used in a page stream to use a font. Calling Canvas::get_font
/// will make sure the font dictionary is created in the file, associate it
/// with a name in the page resources and return a FontRef representing that
/// name.
///
/// The `serif` variable in [the TextObject
/// example](struct.TextObject.html#example) is a FontRef.
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct FontRef {
// Index used to render the PDF resource name of this font (`/F{n}` — see Display).
n: usize,
// Encoding used when laying out text in this font.
encoding: Encoding,
// Shared glyph metrics; Arc because many FontRefs may reference one font.
metrics: Arc<FontMetrics>,
}
impl FontRef {
    // Crate-internal constructor; user code obtains a FontRef via Canvas::get_font.
    pub(crate) fn new(
        n: usize,
        encoding: Encoding,
        metrics: Arc<FontMetrics>,
    ) -> Self {
        FontRef { n, encoding, metrics }
    }
    /// Get the encoding used by the referenced font.
    pub fn encoding(&self) -> &Encoding {
        &self.encoding
    }
    /// Get the width of the given text in this font at given size.
    pub fn text_width<T: LengthUnit>(
        &self,
        size: UserSpace<T>,
        text: &str,
    ) -> UserSpace<T> {
        // raw_text_width reports thousandths of the text size unit.
        size * self.raw_text_width(text) as f32 / 1000.0
    }
    /// Get the width of the given text in thousands of unit of text
    /// space.
    /// This unit is what is used in some places internally in pdf files
    /// and in some methods on a [TextObject](struct.TextObject.html).
    pub fn raw_text_width(&self, text: &str) -> u32 {
        text.chars()
            .map(|ch| {
                // Characters the encoding cannot represent, or whose metrics
                // are unknown, are assumed to be 100 units wide.
                self.encoding
                    .encode_char(ch)
                    .and_then(|encoded| self.metrics.get_width(encoded))
                    .map_or(100, u32::from)
            })
            .sum()
    }
}
impl fmt::Display for FontRef {
// Renders the PDF resource name of this font, e.g. `/F0`.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "/F{}", self.n)
}
}
|
#![crate_name = "uu_od"]
/*
* This file is part of the uutils coreutils package.
*
* (c) Ben Hirsch <benhirsch24@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
extern crate getopts;
use std::fs::File;
use std::io::Read;
use std::mem;
use std::path::Path;
#[derive(Debug)]
// Base used when printing file offsets in the dump (selected via -A/--address-radix).
enum Radix { Decimal, Hexadecimal, Octal, Binary }
/// Entry point for the `od` utility: parses the command line and dumps the
/// named file. Returns the process exit code.
pub fn uumain(args: Vec<String>) -> i32 {
    let mut opts = getopts::Options::new();
    opts.optopt("A", "address-radix",
                "Select the base in which file offsets are printed.", "RADIX");
    opts.optopt("j", "skip-bytes",
                "Skip bytes input bytes before formatting and writing.", "BYTES");
    opts.optopt("N", "read-bytes",
                "limit dump to BYTES input bytes", "BYTES");
    opts.optopt("S", "strings",
                ("output strings of at least BYTES graphic chars. 3 is assumed when \
                 BYTES is not specified."),
                "BYTES");
    opts.optopt("t", "format", "select output format or formats", "TYPE");
    opts.optflag("v", "output-duplicates", "do not use * to mark line suppression");
    opts.optopt("w", "width",
                ("output BYTES bytes per output line. 32 is implied when BYTES is not \
                 specified."),
                "BYTES");
    opts.optflag("h", "help", "display this help and exit.");
    opts.optflag("", "version", "output version information and exit.");
    // NOTE(review): only -A is acted upon below; the remaining options are
    // declared but not yet implemented.
    let matches = match opts.parse(&args[1..]) {
        Ok(m) => m,
        Err(f) => panic!("Invalid options\n{}", f)
    };
    let input_offset_base = match parse_radix(matches.opt_str("A")) {
        Ok(r) => r,
        Err(f) => { panic!("Invalid -A/--address-radix\n{}", f) }
    };
    // BUG FIX: take the file name from the positional (non-option) arguments.
    // The old `args.last()` picked up option values, so `od file -A x` would
    // treat "x" as the file name.
    let fname = match matches.free.last() {
        Some(n) => n,
        None => { panic!("Need fname for now") ; }
    };
    odfunc(&input_offset_base, &fname);
    0
}
/// Dumps the file `fname` as 16-byte lines of little-endian 16-bit words in
/// octal, each line prefixed with its offset printed in `input_offset_base`.
fn odfunc(input_offset_base: &Radix, fname: &str) {
    let mut f = match File::open(Path::new(fname)) {
        Ok(f) => f,
        Err(e) => panic!("file error: {}", e)
    };
    let mut addr = 0;
    let bytes = &mut [b'\x00'; 16];
    loop {
        match f.read(bytes) {
            // BUG FIX: `read` returning Ok(0) means end of file. The old code
            // fell into the generic Ok(n) arm and looped forever, printing
            // offset lines endlessly. Print the final offset and stop.
            Ok(0) => {
                print_with_radix(input_offset_base, addr);
                print!("\n");
                break;
            }
            Ok(n) => {
                print_with_radix(input_offset_base, addr);
                // Print the chunk as little-endian 16-bit words in octal.
                for b in 0 .. n / mem::size_of::<u16>() {
                    let bs = &bytes[(2 * b) .. (2 * b + 2)];
                    let p: u16 = (bs[1] as u16) << 8 | bs[0] as u16;
                    print!(" {:06o}", p);
                }
                // An odd trailing byte is printed on its own.
                if n % mem::size_of::<u16>() == 1 {
                    print!(" {:06o}", bytes[n - 1]);
                }
                print!("\n");
                addr += n;
            },
            Err(_) => {
                print_with_radix(input_offset_base, addr);
                break;
            }
        };
    };
}
/// Parses the -A/--address-radix argument: one of `d`, `o`, `b`, `x`.
/// A missing argument defaults to octal, matching od's behaviour.
fn parse_radix(radix_str: Option<String>) -> Result<Radix, &'static str> {
    let s = match radix_str {
        None => return Ok(Radix::Octal),
        Some(s) => s,
    };
    let bytes = s.into_bytes();
    // Exactly one byte is accepted; longer strings (including multi-byte
    // UTF-8 characters) are rejected.
    if bytes.len() != 1 {
        return Err("Radix must be one of [d, o, b, x]\n");
    }
    match bytes[0] as char {
        'd' => Ok(Radix::Decimal),
        'x' => Ok(Radix::Hexadecimal),
        'o' => Ok(Radix::Octal),
        'b' => Ok(Radix::Binary),
        _ => Err("Radix must be one of [d, o, b, x]\n")
    }
}
/// Prints the offset `x` in the chosen radix, zero-padded to 7 digits,
/// with no trailing newline or separator.
fn print_with_radix(r: &Radix, x: usize) {
    // TODO(keunwoo): field widths should be based on sizeof(x), or chosen dynamically based on the
    // expected range of address values. Binary in particular is not great here.
    match r {
        &Radix::Decimal => print!("{:07}", x),
        &Radix::Hexadecimal => print!("{:07X}", x),
        &Radix::Octal => print!("{:07o}", x),
        &Radix::Binary => print!("{:07b}", x),
    }
}
|
use crate::{integer::Integer, rational::Rational};
use core::ops::SubAssign;
// SubAssign The subtraction assignment operator -=.
// NOTE: the bracketed comments below appear to be machine-generated specs
// recording, per impl: lhs type, rhs type, delegated function, and rhs
// transformations (e.g. 'ref' = pass by reference) — keep them with the code.
// ['Rational', 'Rational', 'Rational::subtract_assign', 'no', [], ['ref']]
impl SubAssign<Rational> for Rational {
fn sub_assign(&mut self, rhs: Rational) {
Rational::subtract_assign(self, &rhs)
}
}
// ['Rational', '&Rational', 'Rational::subtract_assign', 'no', [], []]
impl SubAssign<&Rational> for Rational {
fn sub_assign(&mut self, rhs: &Rational) {
Rational::subtract_assign(self, rhs)
}
}
// ['Rational', 'Integer', 'Rational::subtract_assign_integer', 'no', [],
// ['ref']]
impl SubAssign<Integer> for Rational {
fn sub_assign(&mut self, rhs: Integer) {
Rational::subtract_assign_integer(self, &rhs)
}
}
// ['Rational', '&Integer', 'Rational::subtract_assign_integer', 'no', [], []]
impl SubAssign<&Integer> for Rational {
fn sub_assign(&mut self, rhs: &Integer) {
Rational::subtract_assign_integer(self, rhs)
}
}
// Primitive integer right-hand sides are first lifted into `Integer`, then
// delegated to `Rational::subtract_assign_integer`; the by-reference variants
// additionally dereference before the conversion.
// ['Rational', 'i8', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}]]
impl SubAssign<i8> for Rational {
fn sub_assign(&mut self, rhs: i8) {
Rational::subtract_assign_integer(self, &Integer::from(rhs))
}
}
// ['Rational', '&i8', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}, 'deref']]
impl SubAssign<&i8> for Rational {
fn sub_assign(&mut self, rhs: &i8) {
Rational::subtract_assign_integer(self, &Integer::from(*rhs))
}
}
// ['Rational', 'u8', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}]]
impl SubAssign<u8> for Rational {
fn sub_assign(&mut self, rhs: u8) {
Rational::subtract_assign_integer(self, &Integer::from(rhs))
}
}
// ['Rational', '&u8', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}, 'deref']]
impl SubAssign<&u8> for Rational {
fn sub_assign(&mut self, rhs: &u8) {
Rational::subtract_assign_integer(self, &Integer::from(*rhs))
}
}
// ['Rational', 'i16', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}]]
impl SubAssign<i16> for Rational {
fn sub_assign(&mut self, rhs: i16) {
Rational::subtract_assign_integer(self, &Integer::from(rhs))
}
}
// ['Rational', '&i16', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}, 'deref']]
impl SubAssign<&i16> for Rational {
fn sub_assign(&mut self, rhs: &i16) {
Rational::subtract_assign_integer(self, &Integer::from(*rhs))
}
}
// ['Rational', 'u16', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}]]
impl SubAssign<u16> for Rational {
fn sub_assign(&mut self, rhs: u16) {
Rational::subtract_assign_integer(self, &Integer::from(rhs))
}
}
// ['Rational', '&u16', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}, 'deref']]
impl SubAssign<&u16> for Rational {
fn sub_assign(&mut self, rhs: &u16) {
Rational::subtract_assign_integer(self, &Integer::from(*rhs))
}
}
// ['Rational', 'i32', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}]]
impl SubAssign<i32> for Rational {
fn sub_assign(&mut self, rhs: i32) {
Rational::subtract_assign_integer(self, &Integer::from(rhs))
}
}
// ['Rational', '&i32', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}, 'deref']]
impl SubAssign<&i32> for Rational {
fn sub_assign(&mut self, rhs: &i32) {
Rational::subtract_assign_integer(self, &Integer::from(*rhs))
}
}
// Same delegation pattern for the wider primitives (u32 through u128).
// ['Rational', 'u32', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}]]
impl SubAssign<u32> for Rational {
fn sub_assign(&mut self, rhs: u32) {
Rational::subtract_assign_integer(self, &Integer::from(rhs))
}
}
// ['Rational', '&u32', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}, 'deref']]
impl SubAssign<&u32> for Rational {
fn sub_assign(&mut self, rhs: &u32) {
Rational::subtract_assign_integer(self, &Integer::from(*rhs))
}
}
// ['Rational', 'i64', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}]]
impl SubAssign<i64> for Rational {
fn sub_assign(&mut self, rhs: i64) {
Rational::subtract_assign_integer(self, &Integer::from(rhs))
}
}
// ['Rational', '&i64', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}, 'deref']]
impl SubAssign<&i64> for Rational {
fn sub_assign(&mut self, rhs: &i64) {
Rational::subtract_assign_integer(self, &Integer::from(*rhs))
}
}
// ['Rational', 'u64', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}]]
impl SubAssign<u64> for Rational {
fn sub_assign(&mut self, rhs: u64) {
Rational::subtract_assign_integer(self, &Integer::from(rhs))
}
}
// ['Rational', '&u64', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}, 'deref']]
impl SubAssign<&u64> for Rational {
fn sub_assign(&mut self, rhs: &u64) {
Rational::subtract_assign_integer(self, &Integer::from(*rhs))
}
}
// ['Rational', 'i128', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}]]
impl SubAssign<i128> for Rational {
fn sub_assign(&mut self, rhs: i128) {
Rational::subtract_assign_integer(self, &Integer::from(rhs))
}
}
// ['Rational', '&i128', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}, 'deref']]
impl SubAssign<&i128> for Rational {
fn sub_assign(&mut self, rhs: &i128) {
Rational::subtract_assign_integer(self, &Integer::from(*rhs))
}
}
// ['Rational', 'u128', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}]]
impl SubAssign<u128> for Rational {
fn sub_assign(&mut self, rhs: u128) {
Rational::subtract_assign_integer(self, &Integer::from(rhs))
}
}
// ['Rational', '&u128', 'Rational::subtract_assign_integer', 'no', [], ['ref',
// {'convert': 'Integer'}, 'deref']]
impl SubAssign<&u128> for Rational {
fn sub_assign(&mut self, rhs: &u128) {
Rational::subtract_assign_integer(self, &Integer::from(*rhs))
}
}
|
use super::prelude::*;
/// Joins the numbers with commas and no spaces; empty input yields "".
fn join(nums: &[i32]) -> String {
    use std::fmt::Write;
    let mut iter = nums.iter();
    let mut out = String::new();
    if let Some(first) = iter.next() {
        let _ = write!(out, "{}", first);
        for num in iter {
            let _ = write!(out, ",{}", num);
        }
    }
    out
}
/// Parses the message arguments as integers, sorts them ascending and replies
/// with the joined result.
#[command]
pub async fn sort(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {
    // NOTE(review): unparsable arguments silently become 0 via
    // `unwrap_or_default` — confirm this is preferred over reporting an error.
    let mut nums = args.iter::<i32>().map(|num| num.unwrap_or_default()).collect::<Vec<_>>();
    // Unstable sort: faster for primitive ints, and stability is irrelevant here.
    nums.sort_unstable();
    msg.channel_id.say(&ctx.http, format!("Result: {}", join(&nums))).await?;
    Ok(())
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// [START import_declarations]
use failure::{Error, ResultExt};
use fidl_fidl_examples_echo::{EchoRequest, EchoRequestStream, EchoServiceRequest};
use fuchsia_async as fasync;
use fuchsia_component::server::ServiceFs;
use futures::prelude::*;
// [END import_declarations]
// [START run_echo_server]
/// Serves one Echo client connection: answers every EchoString request with
/// the received value, optionally prefixed, until the stream ends.
async fn run_echo_server(
    mut stream: EchoRequestStream,
    quiet: bool,
    prefix: Option<&str>,
) -> Result<(), Error> {
    while let Some(EchoRequest::EchoString { value, responder }) =
        stream.try_next().await.context("error running echo server")?
    {
        if !quiet {
            println!("Received echo request for string {:?}", value);
        }
        // Prepend the prefix only when both a prefix and a value are present;
        // otherwise echo the (possibly absent) value untouched.
        let response = if let (Some(prefix), Some(value)) = (&prefix, value.as_ref()) {
            Some(format!("{}: {}", prefix, value))
        } else {
            value
        };
        responder.send(response.as_ref().map(|s| s.as_str())).context("error sending response")?;
        if !quiet {
            println!("echo response sent successfully");
        }
    }
    Ok(())
}
// [END run_echo_server]
// The set of protocols this component exposes in its outgoing directory.
enum IncomingService {
// Host a legacy service (protocol).
Echo(EchoRequestStream),
// Host a unified service.
Svc(EchoServiceRequest),
// ... more services here
}
// [START main]
#[fasync::run_singlethreaded]
async fn main() -> Result<(), Error> {
// `-q` suppresses the per-request logging inside run_echo_server.
let quiet = std::env::args().any(|arg| arg == "-q");
let mut fs = ServiceFs::new_local();
// Publish both the legacy protocol and the unified service under svc/.
fs.dir("svc").add_fidl_service(IncomingService::Echo).add_unified_service(IncomingService::Svc);
fs.take_and_serve_directory_handle()?;
// Cap on the number of connections served concurrently.
const MAX_CONCURRENT: usize = 10_000;
let fut = fs.for_each_concurrent(MAX_CONCURRENT, |request| {
match request {
IncomingService::Echo(stream) => run_echo_server(stream, quiet, None),
IncomingService::Svc(EchoServiceRequest::Foo(stream)) => {
run_echo_server(stream, quiet, Some("foo"))
}
IncomingService::Svc(EchoServiceRequest::Bar(stream)) => {
run_echo_server(stream, quiet, Some("bar"))
}
}
// Per-connection errors are logged and swallowed so one bad client
// cannot take down the whole server.
.unwrap_or_else(|e| println!("{:?}", e))
});
fut.await;
Ok(())
}
|
use std::num::NonZeroU8;
use std::collections::HashMap;
/// A multiset of characters: each key maps to how many times it is available.
#[derive(Clone)]
pub struct Letters {
    inner: HashMap<char, NonZeroU8>,
}
impl Letters {
    /// Builds the multiset from `string`, counting every character.
    ///
    /// Returns `Err(())` when any single character occurs more than 255
    /// times, since counts are stored in a `NonZeroU8`. (The old code instead
    /// panicked on the 256th occurrence, had a dead `i >= usize::MAX` guard,
    /// and leaked a debug `eprintln!` to stderr.)
    pub fn from_string(string: &str) -> Result<Letters, ()> {
        let mut result = Letters::empty();
        for letter in string.chars() {
            result.add(letter)?;
        }
        Ok(result)
    }
    /// Returns true when `word` can be spelled from the available letters,
    /// consuming one count per occurrence.
    pub fn has(&self, word: &str) -> bool {
        let mut allowed: Letters = self.clone();
        for letter in word.chars() {
            allowed = match allowed.subtract(letter) {
                Ok(a) => a,
                Err(()) => return false,
            };
        }
        true
    }
    /// Increments the count for `new`; `Err(())` on u8 overflow.
    fn add(&mut self, new: char) -> Result<(), ()> {
        let bumped = match self.inner.get(&new) {
            Some(count) => count.get().checked_add(1).ok_or(())?,
            None => 1,
        };
        // `bumped` is at least 1, so the NonZeroU8 construction cannot fail.
        self.inner.insert(new, NonZeroU8::new(bumped).expect("count is at least 1"));
        Ok(())
    }
    /// Consumes one occurrence of `key`; `Err(())` when none is available.
    fn subtract(mut self, key: char) -> Result<Self, ()> {
        let old_count = self.inner.remove(&key).ok_or(())?;
        // Re-insert only while a positive count remains.
        if let Some(updated_count) = NonZeroU8::new(old_count.get() - 1) {
            self.inner.insert(key, updated_count);
        }
        Ok(self)
    }
    /// An empty multiset.
    fn empty() -> Letters {
        Letters {
            inner: HashMap::new(),
        }
    }
}
|
use std::{
io,
sync::{Arc, Mutex, RwLock},
thread,
};
/// Utility flag to help threads indicate to other threads that they should
/// stop doing work.
#[derive(Clone)]
pub struct KeepGoing(Arc<RwLock<bool>>);
impl KeepGoing {
    /// Creates a new `KeepGoing` with a default state of `true`.
    pub fn new() -> Self {
        Self(Arc::new(RwLock::new(true)))
    }
    /// Returns false as long as the thread should keep going.
    ///
    /// A poisoned lock is treated as a stop signal, erring toward shutdown.
    pub fn should_stop(&self) -> bool {
        if let Ok(flag) = self.0.read() {
            !*flag
        } else {
            true
        }
    }
    /// Changes the flag to `false` so other threads know to stop going.
    pub fn stop(&self) {
        if let Ok(mut flag) = self.0.write() {
            *flag = false;
        }
    }
}
/// Conventional `Default` mirroring `new()`: the flag starts `true`.
impl Default for KeepGoing {
    fn default() -> Self {
        Self::new()
    }
}
/// Utility struct for aggregating background threads.
///
/// Note: this is not a thread pool.
#[derive(Clone)]
pub struct WorkerThreads {
    keep_going: KeepGoing,
    threads: Arc<Mutex<Vec<thread::JoinHandle<()>>>>,
}
impl WorkerThreads {
    /// Creates a new struct with its associated `KeepGoing`.
    pub fn new() -> (KeepGoing, Self) {
        let keep_going = KeepGoing::new();
        let workers = Self {
            keep_going: keep_going.clone(),
            threads: Arc::new(Mutex::new(vec![])),
        };
        (keep_going, workers)
    }
    /// Spawns a thread and keeps track of it.
    ///
    /// On spawn failure every tracked thread is stopped and joined, and the
    /// combined failure is reported as an `io::Error`.
    pub fn spawn<F>(&self, thread_name: String, f: F) -> io::Result<()>
    where
        F: FnOnce(),
        F: Send + 'static,
    {
        match thread::Builder::new().name(thread_name.clone()).spawn(f) {
            Ok(thread) => {
                self.threads.lock().unwrap().push(thread);
                Ok(())
            }
            Err(_) => {
                let mut message = format!("Failed to spawn thread {}", thread_name);
                if let Err(e) = self.stop() {
                    message = format!("{}; and also: {}", message, e);
                }
                Err(io::Error::new(io::ErrorKind::Other, message))
            }
        }
    }
    /// Stops and joins all threads.
    pub fn stop(&self) -> io::Result<()> {
        self.keep_going.stop();
        let mut threads = self.threads.lock().unwrap();
        let mut failed_threads = vec![];
        // Drain in place: the previous `remove(0)` loop was O(n^2) because it
        // shifted every remaining handle on each removal.
        for join_handle in threads.drain(..) {
            let thread_name = join_handle.thread().name().map(|n| n.to_string());
            if join_handle.join().is_err() {
                // Lazily build the default so "unknown" is not allocated for
                // every successfully named thread.
                failed_threads.push(thread_name.unwrap_or_else(|| "unknown".to_string()));
            }
        }
        if failed_threads.is_empty() {
            Ok(())
        } else {
            Err(io::Error::new(
                io::ErrorKind::Other,
                format!(
                    "Failed to join {} threads: [{}]",
                    failed_threads.len(),
                    failed_threads.join(", "),
                ),
            ))
        }
    }
}
|
mod ast;
#[macro_use]
mod error;
pub mod evaluate;
#[allow(dead_code)]
pub mod internal;
pub mod parser;
pub mod traits;
pub use ast::AST;
pub use error::{SMError, SMResult};
pub use evaluate::{Context, Runner};
pub use traits::{ToTex, ToWolfram};
#[test]
// Smoke test: merely proves the crate's modules compile and link.
fn it_works() {}
|
use crate::entities::weapons;
use crate::helpers::Logger;
use crate::utils::{output, readable::Readable, texthash::TextHash};
use std::str::FromStr;
use strum::AsStaticRef;
#[derive(AsStaticStr)]
// Kinds of game entities the parser can process.
pub enum EntityType {
Weapon,
}
// Weapon is the fallback entity type (see `Parse::entity`).
impl Default for EntityType {
fn default() -> EntityType {
EntityType::Weapon
}
}
impl FromStr for EntityType {
    type Err = ();
    /// Parses an entity-type name, ignoring ASCII case; unknown names yield `Err(())`.
    fn from_str(input: &str) -> Result<EntityType, Self::Err> {
        if input.eq_ignore_ascii_case("weapon") {
            Ok(EntityType::Weapon)
        } else {
            Err(())
        }
    }
}
#[derive(Default)]
// Builder-style configuration for a parse run.
pub struct Parse {
// Which entity type to parse; defaults to `EntityType::Weapon`.
entity: EntityType,
// When true, extra progress messages are logged.
verbose: bool,
}
impl Parse {
    /// Starts a parse configuration with all defaults.
    pub fn new() -> Self {
        Default::default()
    }
    /// Selects the entity type by name; unknown names log an error and keep
    /// the default (Weapon).
    pub fn entity(mut self, entity: &str) -> Self {
        let logger = Logger::new("gdp:parse");
        if let Ok(parsed) = EntityType::from_str(entity) {
            self.entity = parsed;
        } else {
            logger.error(format!(
                "Entity type not supported: {}, Defaulting to Weapon",
                entity
            ));
        }
        self
    }
    /// Toggles verbose progress logging.
    pub fn verbose(mut self, verbose: bool) -> Self {
        self.verbose = verbose;
        self
    }
    /// Runs the parse, then serializes and saves the result.
    pub fn run(self, texthash: TextHash, readable: Readable) -> crate::Result<()> {
        let logger = Logger::new("gdp:parse");
        if self.verbose {
            logger.log("Starting new parse")
        }
        logger.log(format!("Parsing: {}", self.entity.as_static()));
        let parsed = match self.entity {
            EntityType::Weapon => weapons::parse(texthash, readable),
        };
        logger.log(format!("Saving: {}", self.entity.as_static()));
        if let Ok(parsed) = parsed {
            output::save(parsed, self.entity.as_static());
        } else {
            logger.error("Entity is not serializable");
        }
        Ok(())
    }
}
|
#[derive(Debug)]
struct Rectangle {
    width: u32,
    height: u32,
}
impl Rectangle {
    /// Area in square units (width × height).
    fn area(&self) -> u32 {
        self.height * self.width
    }
    /// True when `target` fits strictly inside this rectangle
    /// (both dimensions smaller; rotation is not considered).
    fn can_hold(&self, target: &Rectangle) -> bool {
        target.width < self.width && target.height < self.height
    }
}
/// Demonstrates struct debug-printing and the Rectangle methods.
fn main() {
    let big = Rectangle {
        width: 640,
        height: 480,
    };
    println!("Rectangle: {:#?}", big);
    println!("Area of rectangle: {}", big.area());
    let small = Rectangle {
        width: 40,
        height: 30,
    };
    println!(
        "Rectangle 1 can hold rectangle 2: {}",
        big.can_hold(&small)
    );
}
|
use crate::{types::*, utils::regex::SerializeRegex};
use onig::FindCaptures;
use serde::{Deserialize, Serialize};
pub mod composition;
use composition::{Composition, Group, MatchGraph};
use self::composition::GraphId;
#[derive(Serialize, Deserialize, Debug)]
// Token-based matcher: a main pattern plus antipatterns that can veto a match.
pub struct TokenEngine {
pub(crate) composition: Composition,
pub(crate) antipatterns: Vec<Composition>,
}
impl TokenEngine {
/// Tries to match the main composition at token index `i`; the match is
/// discarded when any antipattern match overlaps it by character span.
fn get_match<'t>(&'t self, tokens: &'t [Token], i: usize) -> Option<MatchGraph<'t>> {
if let Some(graph) = self.composition.apply(tokens, i) {
let mut blocked = false;
// TODO: cache / move to outer loop
// Antipatterns are re-applied at every token position for every
// candidate match, which is quadratic; the TODO above tracks hoisting.
// NOTE(review): this loop variable shadows the parameter `i`, so the
// scan covers all positions rather than just the match site — confirm intended.
for i in 0..tokens.len() {
for antipattern in &self.antipatterns {
if let Some(anti_graph) = antipattern.apply(tokens, i) {
// Character span covered by the antipattern match...
let anti_start = anti_graph.by_index(0).char_span.0;
let anti_end = anti_graph
.by_index(anti_graph.groups().len() - 1)
.char_span
.1;
// ...and by the candidate match.
let rule_start = graph.by_index(0).char_span.0;
let rule_end = graph.by_index(graph.groups().len() - 1).char_span.1;
// Standard interval-overlap test.
if anti_start <= rule_end && rule_start <= anti_end {
blocked = true;
break;
}
}
}
if blocked {
break;
}
}
if !blocked {
return Some(graph);
}
}
None
}
}
#[derive(Serialize, Deserialize, Debug)]
// Either a token-pattern engine or a plain text-regex engine.
pub enum Engine {
Token(TokenEngine),
Text(SerializeRegex, DefaultHashMap<GraphId, usize>),
}
// Iteration state for the token engine: next start index plus a per-character
// mask marking spans already claimed by earlier matches.
struct TokenMatches<'a> {
engine: &'a TokenEngine,
index: usize,
mask: Vec<bool>,
}
// Iteration state for the regex engine: capture iterator plus a lookup from
// byte offsets to char offsets (the regex reports byte positions).
struct TextMatches<'a, 't> {
byte_idx_to_char_idx: DefaultHashMap<usize, usize>,
id_to_idx: &'a DefaultHashMap<GraphId, usize>,
captures: FindCaptures<'a, 't>,
}
enum InnerMatches<'a: 't, 't> {
Token(TokenMatches<'a>),
Text(TextMatches<'a, 't>),
}
// Iterator over the matches an `Engine` finds in a token sequence.
pub struct EngineMatches<'a, 't> {
tokens: &'t [Token<'t>],
start: GraphId,
end: GraphId,
inner: InnerMatches<'a, 't>,
}
impl<'a, 't> Iterator for EngineMatches<'a, 't> {
type Item = MatchGraph<'t>;
fn next(&mut self) -> Option<Self::Item> {
let tokens = self.tokens;
let start_id = self.start;
let end_id = self.end;
match &mut self.inner {
// Token engine: scan forward from the saved index, skipping candidates
// whose [start, end) char range touches an already-claimed span.
InnerMatches::Token(inner) => (inner.index..tokens.len()).find_map(|i| {
inner.engine.get_match(tokens, i).and_then(|graph| {
let start_group = graph.by_id(start_id);
let end_group = graph.by_id(end_id);
let start = start_group.char_span.0;
let end = end_group.char_span.1;
if inner.mask[start..end].iter().all(|x| !x) {
// Claim the span so later overlapping matches are suppressed.
inner.mask[start..end].iter_mut().for_each(|x| *x = true);
// NOTE(review): the resume index advances by only one even when the
// match was found at a later `i` — confirm the re-scan is intended.
inner.index += 1;
Some(graph)
} else {
None
}
})
}),
// Regex engine: translate each capture's byte offsets into char offsets
// and build a MatchGraph; absent groups become empty (0, 0) spans.
InnerMatches::Text(inner) => inner.captures.next().map(|captures| {
let bi_to_ci = &inner.byte_idx_to_char_idx;
let mut groups = Vec::new();
for group in captures.iter_pos() {
if let Some(group) = group {
let start = *bi_to_ci
.get(&group.0)
.expect("byte index is at char boundary");
let end = *bi_to_ci
.get(&group.1)
.expect("byte index is at char boundary");
groups.push(Group::new((start, end)));
} else {
groups.push(Group::new((0, 0)));
}
}
MatchGraph::new(groups, inner.id_to_idx, tokens)
}),
}
}
}
impl Engine {
/// Builds an iterator over all matches in `tokens`, where `start`/`end`
/// name the graph groups delimiting each reported span.
// NOTE(review): assumes `tokens` is non-empty — `tokens[0]` panics otherwise.
pub fn get_matches<'a, 't>(
&'a self,
tokens: &'t [Token],
start: GraphId,
end: GraphId,
) -> EngineMatches<'a, 't> {
EngineMatches {
tokens,
start,
end,
inner: match &self {
Engine::Token(engine) => InnerMatches::Token(TokenMatches {
engine,
index: 0,
// One mask slot per character of the sentence.
mask: vec![false; tokens[0].sentence.chars().count()],
}),
Engine::Text(regex, id_to_idx) => {
let sentence = tokens[0].sentence;
// Regex match positions are byte offsets; build a byte->char table.
let mut bi_to_ci: DefaultHashMap<usize, usize> = sentence
.char_indices()
.enumerate()
.map(|(ci, (bi, _))| (bi, ci))
.collect();
// Also map the one-past-the-end byte offset to the char count.
bi_to_ci.insert(sentence.len(), bi_to_ci.len());
InnerMatches::Text(TextMatches {
byte_idx_to_char_idx: bi_to_ci,
id_to_idx,
captures: regex.captures_iter(sentence),
})
}
},
}
}
}
|
#[macro_use]
extern crate rskafka_wire_format_derive;
pub mod apis;
mod data;
mod error;
mod request;
mod response;
pub use data::{
api_key::ApiKey,
error::ErrorCode,
record::{Record, RecordBatch},
BrokerId,
};
pub use request::KafkaRequest;
pub use response::KafkaResponse;
#[cfg(test)]
mod test_utils {
    /// Decodes a string of hex digit pairs (e.g. "0aff") into raw bytes.
    /// A trailing odd digit is ignored; invalid digits panic (test-only code).
    pub fn hex_bytes(hex_str: &str) -> Vec<u8> {
        (0..hex_str.len() / 2)
            .map(|i| {
                let pair = &hex_str[i * 2..=i * 2 + 1];
                u8::from_str_radix(pair, 16).unwrap()
            })
            .collect()
    }
    /// Prints `data` to stderr as a hex dump (16 bytes per row, grouped in
    /// halves of 8) and passes the slice back through unchanged.
    pub fn hex_dump(data: &[u8]) -> &[u8] {
        for row in data.chunks(16) {
            for half in row.chunks(8) {
                for byte in half {
                    eprint!("{:02x} ", byte);
                }
                eprint!(" ")
            }
            eprint!("\n")
        }
        data
    }
}
|
//! General actions
#![allow(unused_imports)]
#![allow(dead_code)]
use chrono::*;
use bill::Currency;
use icalendar::Calendar;
use std::{env,fs};
use std::time;
use std::fmt::Write;
use std::path::{Path,PathBuf};
use util;
use super::BillType;
use storage::{Storage,StorageDir,Storable,StorageResult};
use project::Project;
use project::spec::IsProject;
use project::spec::IsClient;
use project::spec::Invoicable;
use project::spec::ProvidesData;
use project::spec::events::HasEvents;
#[cfg(feature="document_export")]
use fill_docs::fill_template;
pub mod error;
use self::error::*;
/// Sets up an instance of `Storage`.
///
/// Reads the three directory names from the global config; a missing key is
/// reported as a faulty-config error. The storage is health-checked before
/// being handed out.
pub fn setup_luigi() -> Result<Storage<Project>> {
    trace!("setup_luigi()");
    let working_dir = ::CONFIG.get_str("dirs/working").ok_or("Faulty config: dirs/working does not contain a value")?;
    let archive_dir = ::CONFIG.get_str("dirs/archive").ok_or("Faulty config: dirs/archive does not contain a value")?;
    let template_dir = ::CONFIG.get_str("dirs/templates").ok_or("Faulty config: dirs/templates does not contain a value")?;
    let storage = Storage::new(util::get_storage_path(), working_dir, archive_dir, template_dir)?;
    storage.health_check()?;
    Ok(storage)
}
/// Sets up an instance of `Storage`, with git turned on.
///
/// Same configuration lookups as `setup_luigi`, but constructs the storage
/// via `Storage::new_with_git`.
pub fn setup_luigi_with_git() -> Result<Storage<Project>> {
    // Fixed: the trace line used to say "setup_luigi()" (copy-pasted from the
    // non-git variant), making traces from the two setups indistinguishable.
    trace!("setup_luigi_with_git()");
    let working = ::CONFIG.get_str("dirs/working").ok_or("Faulty config: dirs/working does not contain a value")?;
    let archive = ::CONFIG.get_str("dirs/archive").ok_or("Faulty config: dirs/archive does not contain a value")?;
    let templates = ::CONFIG.get_str("dirs/templates").ok_or("Faulty config: dirs/templates does not contain a value")?;
    let storage = Storage::new_with_git(util::get_storage_path(), working, archive, templates)?;
    storage.health_check()?;
    Ok(storage)
}
pub fn simple_with_projects<F>(dir:StorageDir, search_terms:&[&str], f:F)
where F:Fn(&Project)
{
match with_projects(dir, search_terms, |p| {f(p);Ok(())}){
Ok(_) => {},
Err(e) => error!("{}",e)
}
}
/// Helper method that passes projects matching the `search_terms` to the passt closure `f`
/// TODO Really move this to `Storage`
///
/// Fails when nothing matches or when `f` fails for any project.
pub fn with_projects<F>(dir: StorageDir, search_terms: &[&str], f: F) -> Result<()>
    where F: Fn(&Project) -> Result<()>
{
    trace!("with_projects({:?})", search_terms);
    let luigi = setup_luigi()?;
    let projects = luigi.search_projects_any(dir, search_terms)?;
    if projects.is_empty() {
        return Err(format!("Nothing found for {:?}", search_terms).into());
    }
    for project in projects.iter() {
        f(project)?;
    }
    Ok(())
}
/// Produces a csv sheet of all projects of a given `year`, sorted by index.
pub fn csv(year: i32) -> Result<String> {
    let luigi = setup_luigi()?;
    let mut projects = luigi.open_projects(StorageDir::Year(year))?;
    // Projects without an index sort last ("zzzz"). Both fallbacks are lazy
    // now — the old `unwrap_or("zzzz".to_owned())` allocated on every
    // comparison even when an index existed.
    projects.sort_by(|pa, pb| {
        pa.index()
            .unwrap_or_else(|| "zzzz".to_owned())
            .cmp(&pb.index().unwrap_or_else(|| "zzzz".to_owned()))
    });
    projects_to_csv(&projects)
}
/// Produces a csv string from a list of `Project`s
/// TODO this still contains german terms
///
/// One row per project, `;`-separated; missing fields are rendered as an
/// empty quoted cell (`""`).
pub fn projects_to_csv(projects: &[Project]) -> Result<String> {
    let mut string = String::new();
    let splitter = ";";
    // Header row (mixed German/English column names — see TODO above).
    writeln!(&mut string, "{}", ["Rnum", "Bezeichnung", "Datum", "Rechnungsdatum", "Betreuer", "Verantwortlich", "Bezahlt am", "Betrag", "Canceled"].join(splitter))?;
    for project in projects {
        writeln!(&mut string, "{}", [
            project.get("InvoiceNumber").unwrap_or_else(|| String::from(r#""""#)),
            project.get("Name").unwrap_or_else(|| String::from(r#""""#)),
            project.get("event/dates/0/begin").unwrap_or_else(|| String::from(r#""""#)),
            project.get("invoice/date").unwrap_or_else(|| String::from(r#""""#)),
            project.get("Employees").unwrap_or_else(|| String::from(r#""""#)),
            project.get("Responsible").unwrap_or_else(|| String::from(r#""""#)),
            project.get("invoice/payed_date").unwrap_or_else(|| String::from(r#""""#)),
            // The sum is formatted as a bare number; formatting errors fall
            // back to the empty quoted cell like missing fields.
            project.sum_sold().map(|c| c.value().to_string()).unwrap_or_else(|_| String::from(r#""""#)),
            project.canceled_string().to_owned()
        ].join(splitter))?;
    }
    Ok(string)
}
/// Creates the latex files within each projects directory, either for Invoice or Offer.
#[cfg(feature="document_export")]
pub fn project_to_doc(project: &Project, template_name:&str, bill_type:&Option<BillType>, dry_run:bool, force:bool) -> Result<()> {
// init_export_config()
let template_ext = ::CONFIG.get_str("extensions/output_template").expect("Faulty default config");
let output_ext = ::CONFIG.get_str("extensions/output_file").expect("Faulty default config");
let convert_ext = ::CONFIG.get_str("convert/output_extension").expect("Faulty default config");
let convert_tool = ::CONFIG.get_str("convert/tool");
let output_folder = ::CONFIG.get_str("output_path").and_then(util::get_valid_path).expect("Faulty config \"output_path\"");
let trash_exts = ::CONFIG.get("convert/trash_extensions") .expect("Faulty default config")
.as_vec().expect("Faulty default config")
.into_iter()
.map(|v|v.as_str()).collect::<Vec<_>>();
// construct_template_path(&template_name) {
let mut template_path = PathBuf::new();
template_path.push(util::get_storage_path());
template_path.push(::CONFIG.get_str("dirs/templates").expect("Faulty config: dirs/templates does not contain a value"));
template_path.push(template_name);
template_path.set_extension(template_ext);
// }
// check stays here
debug!("template file={:?} exists={}", template_path, template_path.exists());
if !template_path.exists() {
return Err(format!("Template not found at {}", template_path.display()).into())
}
// project_readyness(&project) {
let ready_for_offer = project.is_ready_for_offer();
let ready_for_invoice = project.is_ready_for_invoice();
let project_file = project.file();
// tiny little helper
let to_local_file = |file:&Path, ext| {
let mut _tmpfile = file.to_owned();
_tmpfile.set_extension(ext);
Path::new(_tmpfile.file_name().unwrap().into()).to_owned()
};
use BillType::*;
let (dyn_bill_type, outfile_tex):
(Option<BillType>, Option<PathBuf>) =
match (bill_type, ready_for_offer, ready_for_invoice)
{
(&Some(Offer), Ok(_), _ ) |
(&None, Ok(_), Err(_)) => (Some(Offer), Some(project.dir().join(project.offer_file_name(output_ext).expect("this should have been cought by ready_for_offer()")))),
(&Some(Invoice), _, Ok(_)) |
(&None, _, Ok(_)) => (Some(Invoice), Some(project.dir().join(project.invoice_file_name(output_ext).expect("this should have been cought by ready_for_invoice()")))),
(&Some(Offer), Err(e), _ ) => {error!("cannot create an offer, check out:{}",e);(None,None)},
(&Some(Invoice), _, Err(e)) => {error!("cannot create an invoice, check out:{}",e);(None,None)},
(_, Err(e), Err(_)) => {error!("Neither an Offer nor an Invoice can be created from this project\n please check out {}", e);(None,None)}
};
// }
//debug!("{:?} -> {:?}",(bill_type, project.is_ready_for_offer(), project.is_ready_for_invoice()), (dyn_bill_type, outfile_tex));
if let (Some(outfile), Some(dyn_bill)) = (outfile_tex, dyn_bill_type) {
let filled = fill_template(project, &dyn_bill, &template_path)?;
let pdffile = to_local_file(&outfile, convert_ext);
let target = output_folder.join(&pdffile);
// ok, so apparently we can create a tex file, so lets do it
if !force && target.exists() && file_age(&target)? < file_age(&project_file)? {
// no wait, nothing has changed, so lets save ourselves the work
info!("nothing to be done, {} is younger than {}
use --force if you don't agree
use --pdf to only rebuild the pdf",
target.display(),
project_file.display());
unimplemented!();
} else {
// \o/ we created a tex file
if dry_run{
warn!("Dry run! This does not produce any output:\n * {}\n * {}", outfile.display(), pdffile.display());
} else {
let outfileb = project.write_to_file(&filled,&dyn_bill,output_ext)?;
debug!("{} vs\n {}", outfile.display(), outfileb.display());
util::pass_to_command(&convert_tool, &[&outfileb]);
}
// clean up expected trash files
for trash_ext in trash_exts.iter().filter_map(|x|*x){
let trash_file = to_local_file(&outfile, trash_ext);
if trash_file.exists() {
fs::remove_file(&trash_file)?;
debug!("just deleted: {}", trash_file.display())
}
else {
debug!("I expected there to be a {}, but there wasn't any ?", trash_file.display())
}
}
if pdffile.exists(){
debug!("now there is be a {:?} -> {:?}", pdffile, target);
fs::rename(&pdffile, &target)?;
}
}
}
Ok(())
}
/// Creates the latex files within each projects directory, either for Invoice or Offer.
#[cfg(feature="document_export")]
pub fn projects_to_doc(dir:StorageDir, search_term:&str, template_name:&str, bill_type:&Option<BillType>, dry_run:bool, force:bool) -> Result<()> {
with_projects(dir, &[search_term], |p| project_to_doc(p, template_name, bill_type, dry_run, force) )
}
/// Age of a file, measured from its last *modification* time.
///
/// Used by `project_to_doc` to decide whether a generated document is newer
/// than its project file. The previous implementation used `accessed()`, but
/// the access time is updated by merely reading a file (and is unreliable on
/// `noatime` mounts), which made the freshness comparison meaningless —
/// `modified()` is the correct timestamp for "has this changed since".
fn file_age(path: &Path) -> Result<time::Duration> {
    let metadata = fs::metadata(path)?;
    let modified = metadata.modified()?;
    Ok(modified.elapsed()?)
}
/// Command DUES
///
/// Sums the wages of every running project in the working directory, i.e.
/// projects that are not canceled and have already begun.
pub fn open_wages() -> Result<Currency> {
    let luigi = setup_luigi()?;
    let projects = luigi.open_projects(StorageDir::Working)?;
    let mut total = Currency::default();
    for project in &projects {
        if project.canceled() || project.age().unwrap_or(0) <= 0 {
            continue;
        }
        if let Some(wages) = project.wages() {
            total = total + wages;
        }
    }
    Ok(total)
}
/// Command DUES
///
/// Sums the sold amounts of every running project that the client has not
/// payed yet.
pub fn open_payments() -> Result<Currency> {
    let luigi = setup_luigi()?;
    let projects = luigi.open_projects(StorageDir::Working)?;
    let mut total = Currency::default();
    for project in &projects {
        if project.canceled() || project.payed_by_client() || project.age().unwrap_or(0) <= 0 {
            continue;
        }
        if let Ok(sum) = project.sum_sold() {
            total = total + sum;
        }
    }
    Ok(total)
}
/// Testing only, tries to run complete spec on all projects.
/// TODO make this not panic :D
/// TODO move this to `spec::all_the_things`
pub fn spec() -> Result<()> {
    use project::spec::*;
    let luigi = setup_luigi()?;
    //let projects = super::execute(||luigi.open_projects(StorageDir::All));
    let projects = luigi.open_projects(StorageDir::Working)?;
    for project in projects {
        info!("{}", project.dir().display());
        // Exercise every accessor once; the `unwrap()`s below panic on a
        // malformed project — intentional for this smoke test (see TODO).
        project.client().validate().map_err(|errors| println!("{}", errors)).unwrap();
        project.client().full_name();
        project.client().first_name();
        project.client().title();
        project.client().email();
        project.hours().employees_string();
        project.invoice().number_long_str();
        project.invoice().number_str();
        project.offer().number();
        project.age().map(|a| format!("{} days", a)).unwrap();
        project.modified_date().map(|d| d.year().to_string()).unwrap();
        project.sum_sold().map(|c| util::currency_to_string(&c)).unwrap();
        project.responsible().map(|s| s.to_owned()).unwrap();
        project.name().map(|s| s.to_owned()).unwrap();
    }
    Ok(())
}
/// Deletes all projects matching `search_terms`, but only after the user
/// confirms each deletion twice.
pub fn delete_project_confirmation(dir: StorageDir, search_terms: &[&str]) -> Result<()> {
    let luigi = setup_luigi_with_git()?;
    let matching = luigi.search_projects_any(dir, search_terms)?;
    for project in matching {
        luigi.delete_project_if(&project, || {
            util::really(&format!("you want me to delete {:?} [y/N]", project.dir()))
                && util::really("really? [y/N]")
        })?
    }
    Ok(())
}
/// Archives projects matching `search_terms` into `manual_year` (or their
/// own year); `force` archives them even when not ready. Returns the moved
/// paths.
pub fn archive_projects(search_terms: &[&str], manual_year: Option<i32>, force: bool) -> Result<Vec<PathBuf>> {
    trace!("archive_projects matching ({:?},{:?},{:?})", search_terms, manual_year, force);
    let luigi = setup_luigi_with_git()?;
    let moved_files = luigi.archive_projects_if(search_terms, manual_year, || force)?;
    Ok(moved_files)
}
/// Archives every project in the working directory that is ready for it.
/// Returns the list of moved paths.
pub fn archive_all_projects() -> Result<Vec<PathBuf>> {
    let luigi = setup_luigi_with_git()?;
    let mut moved_files = Vec::new();
    for project in luigi.open_projects(StorageDir::Working)?.iter() {
        // Skip anything that does not pass the archive readiness check.
        if project.is_ready_for_archive().is_err() {
            continue;
        }
        println!(" we could get rid of: {}", project.name().unwrap_or(""));
        moved_files.push(project.dir());
        moved_files.append(&mut luigi.archive_project(&project, project.year().unwrap())?);
    }
    Ok(moved_files)
}
/// Command UNARCHIVE <YEAR> <NAME>
/// TODO: return a list of files that have to be updated in git
///
/// Moves the matching projects of `year` back into the working directory.
pub fn unarchive_projects(year: i32, search_terms: &[&str]) -> Result<Vec<PathBuf>> {
    let luigi = setup_luigi_with_git()?;
    Ok(luigi.unarchive_projects(year, search_terms)?)
}
/// Command CALENDAR
///
/// Renders the events of all projects in `dir` as one iCal calendar string.
pub fn calendar(dir: StorageDir) -> Result<String> {
    let luigi = setup_luigi()?;
    let mut cal = Calendar::new();
    for project in luigi.open_projects(dir)? {
        cal.append(&mut project.to_ical())
    }
    Ok(cal.to_string())
}
|
#![feature(proc_macro_hygiene)]
// Derive-macro smoke test: the derive must tolerate an inert `#[test]`
// attribute on a tuple field.
#[allow(unused)]
#[derive(macro_test::DeriveMacro)]
struct Test(#[test] u8);
fn main() {
    // Exercise the function-like proc macros from the `macro_test` crate.
    // NOTE(review): `answer()` is presumably introduced by one of these
    // expansions — confirm against the macro_test crate.
    macro_test::my_macro!();
    macro_test::my_macro1!();
    println!("{}", answer());
    // Attribute macro applied to a statement (requires proc_macro_hygiene).
    #[macro_test::attribute_macro_twice]
    println!("twice");
}
|
use bulletproofs::r1cs::{ConstraintSystem, R1CSError};
use curve25519_dalek::scalar::Scalar;
use error::SpacesuitError;
use value::AllocatedQuantity;
/// Enforces that the quantity of v is in the range [0, 2^n).
///
/// Classic bit-decomposition range proof: allocates one bit per position
/// `i`, proves each is boolean, and constrains `v` to equal the weighted
/// sum of its bits.
pub fn fill_cs<CS: ConstraintSystem>(
    cs: &mut CS,
    v: AllocatedQuantity,
    n: usize,
) -> Result<(), SpacesuitError> {
    // Linear constraint accumulator for v = Sum(b_i * 2^i); starts with -v
    // so the final constraint sums to zero.
    let mut constraint = vec![(v.variable, -Scalar::one())];
    let mut exp_2 = Scalar::one();
    for i in 0..n {
        // Create low-level variables and add them to constraints
        let (a, b, o) = cs.allocate(|| {
            let q: u64 = v.assignment.ok_or(R1CSError::MissingAssignment)?;
            // b is the i-th bit of q, a its complement; their product o is 0.
            let bit: u64 = (q >> i) & 1;
            Ok(((1 - bit).into(), bit.into(), Scalar::zero()))
        })?;
        // Enforce a * b = 0, so one of (a,b) is zero
        cs.constrain(o.into());
        // Enforce that a = 1 - b, so they both are 1 or 0.
        cs.constrain(a + (b - 1u64));
        constraint.push((b, exp_2));
        // Next power of two.
        exp_2 = exp_2 + exp_2;
    }
    // Enforce that v = Sum(b_i * 2^i, i = 0..n-1)
    cs.constrain(constraint.iter().collect());
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use bulletproofs::r1cs::{ProverCS, VerifierCS};
    use bulletproofs::{BulletproofGens, PedersenGens};
    use merlin::Transcript;
    // Proves and verifies random in-range values for several bit widths, and
    // checks that the first out-of-range value fails.
    #[test]
    fn range_proof_gadget() {
        use rand::rngs::OsRng;
        use rand::Rng;
        let mut rng = OsRng::new().unwrap();
        let m = 3; // number of values to test per `n`
        for n in [2, 10, 32, 63].iter() {
            let (min, max) = (0u64, ((1u128 << n) - 1) as u64);
            // NOTE(review): the two-argument `gen_range(min, max)` has an
            // exclusive upper bound, so `max` itself is never sampled as a
            // success case — confirm whether that is intended.
            let values: Vec<u64> = (0..m).map(|_| rng.gen_range(min, max)).collect();
            for v in values {
                assert!(range_proof_helper(v, *n).is_ok());
            }
            // One above the largest representable value must fail to verify.
            assert!(range_proof_helper(max + 1, *n).is_err());
        }
    }
    // Builds a proof for `v_val` on the prover side and verifies it on a
    // fresh verifier constraint system with `n`-bit range.
    fn range_proof_helper(v_val: u64, n: usize) -> Result<(), SpacesuitError> {
        // Common
        let pc_gens = PedersenGens::default();
        let bp_gens = BulletproofGens::new(128, 1);
        // Prover's scope
        let (proof, commitments) = {
            // Prover makes a `ConstraintSystem` instance representing a merge gadget
            // v and v_blinding emptpy because we are only testing low-level variable constraints
            let v: Vec<Scalar> = vec![v_val.into()];
            let v_blinding: Vec<Scalar> = vec![Scalar::random(&mut rand::thread_rng())];
            let mut prover_transcript = Transcript::new(b"RangeProofTest");
            let (mut prover_cs, variables, commitments) = ProverCS::new(
                &bp_gens,
                &pc_gens,
                &mut prover_transcript,
                v,
                v_blinding.clone(),
            );
            fill_cs(
                &mut prover_cs,
                AllocatedQuantity {
                    variable: variables[0],
                    assignment: Some(v_val),
                },
                n,
            )?;
            let proof = prover_cs.prove()?;
            (proof, commitments)
        };
        // Verifier makes a `ConstraintSystem` instance representing a merge gadget
        let mut verifier_transcript = Transcript::new(b"RangeProofTest");
        let (mut verifier_cs, variables) =
            VerifierCS::new(&bp_gens, &pc_gens, &mut verifier_transcript, commitments);
        // The verifier has no assignment — it only replays the constraints.
        let result = fill_cs(
            &mut verifier_cs,
            AllocatedQuantity {
                variable: variables[0],
                assignment: None,
            },
            n,
        );
        assert!(result.is_ok());
        Ok(verifier_cs.verify(&proof)?)
    }
}
|
use crate::resource::Resource;
use log::{error, info};
use std::io::{self, Error, ErrorKind, Read, Write};
use url::Url;
/// A write-only resource that forwards everything written to it to the
/// application log.
pub struct Log {}
impl Resource for Log {
    // The URL carries no information for this resource, so it is ignored.
    fn new(_: Url) -> Result<Log, crate::error::Error> {
        Ok(Log {})
    }
    // Nothing to release.
    fn close(&mut self) {}
}
impl Read for Log {
    /// Reading from the log resource is not supported; always fails.
    fn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> {
        Err(io::Error::new(io::ErrorKind::Other, "not implemented"))
    }
}
impl Write for Log {
    /// Logs the buffer as an info-level message.
    /// Fails (and logs an error) when the buffer is not valid UTF-8.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        let msg = std::str::from_utf8(buf).map_err(|why| {
            error!("log write error: {}", why);
            Error::new(ErrorKind::Other, why)
        })?;
        info!("log://: {}", msg);
        Ok(buf.len())
    }
    /// Log output is unbuffered; flushing is a no-op.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
|
use std::collections::HashMap;
use std::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard};
use std::time::{Duration, Instant};
use error::BannerError;
/// A thread-safe, time-expiring key/value cache keyed by `String`.
#[derive(Clone, Debug)]
pub struct HashCache<T> {
    // Each entry stores the value together with its insertion time.
    cache: Arc<RwLock<HashMap<String, (T, Instant)>>>,
    // How long entries stay valid; a zero duration means "never expire".
    duration: Duration,
}
impl<T> From<HashMap<String, (T, Instant)>> for HashCache<T> {
    /// Wraps an existing map in a cache. The zero duration means the
    /// resulting cache never expires its entries.
    fn from(map: HashMap<String, (T, Instant)>) -> HashCache<T> {
        HashCache {
            cache: Arc::new(RwLock::new(map)),
            duration: Duration::new(0, 0),
        }
    }
}
/// Result type for all cache operations.
pub type CacheResult<T> = Result<T, BannerError>;
impl<T> HashCache<T> {
    /// Creates an empty cache whose entries expire after `duration`.
    /// A zero duration disables expiry (see `ignore_dur`).
    pub fn new(duration: Duration) -> HashCache<T> {
        HashCache {
            cache: Arc::new(RwLock::new(HashMap::new())),
            duration: duration,
        }
    }
    /// Acquires the read lock, mapping a poisoned lock to `CachePoisonedError`.
    pub fn reader(&self) -> CacheResult<RwLockReadGuard<HashMap<String, (T, Instant)>>> {
        self.cache.read().map_err(|_| {
            error!("Failed to acquire read guard for cache failed due to poisoning");
            BannerError::CachePoisonedError
        })
    }
    /// Acquires the write lock, mapping a poisoned lock to `CachePoisonedError`.
    pub fn writer(&self) -> CacheResult<RwLockWriteGuard<HashMap<String, (T, Instant)>>> {
        self.cache.write().map_err(|_| {
            error!("Failed to acquire write guard for cache failed due to poisoning");
            BannerError::CachePoisonedError
        })
    }
    /// True when the configured duration is zero, i.e. entries never expire.
    fn ignore_dur(&self) -> bool {
        // Compare durations directly instead of the previous lossy float
        // arithmetic (`as_secs() as f64 + subsec_nanos() as f64 == 0.0`),
        // which compared floats for exact equality.
        self.duration == Duration::new(0, 0)
    }
}
impl<T: Clone> HashCache<T> {
    /// Returns a clone of the value stored under `key`, or `None` when the
    /// key is absent or its entry has outlived the cache duration.
    pub fn get<'a, S: Into<&'a str>>(&self, key: S) -> CacheResult<Option<T>> {
        self.reader().map(|reader| {
            let entry = reader.get(key.into());
            match entry {
                Some(&(ref val, created)) => {
                    // A zero duration disables expiry entirely.
                    if self.ignore_dur() || created.elapsed() <= self.duration {
                        Some(val.clone())
                    } else {
                        None
                    }
                }
                _ => None,
            }
        })
    }
    /// Returns clones of all non-expired entries.
    /// Expired entries are skipped but not removed from the cache.
    pub fn get_all(&self) -> CacheResult<HashMap<String, T>> {
        let mut res: HashMap<String, T> = HashMap::new();
        self.reader().map(|reader| {
            for (k, &(ref f, created)) in reader.iter() {
                if self.ignore_dur() || created.elapsed() <= self.duration {
                    res.insert(k.clone(), f.clone());
                }
            }
            res
        })
    }
    /// Inserts `val` under `key`, timestamped now; returns the previously
    /// stored value, if any.
    pub fn insert<S: Into<String>>(&self, key: S, val: &T) -> CacheResult<Option<T>> {
        self.writer().map(|mut writer| {
            writer
                .insert(key.into(), (val.clone(), Instant::now()))
                .map(|(v, _)| v)
        })
    }
    /// Removes the entry for `key`, returning its value if it existed.
    pub fn remove<'a, S: Into<&'a str>>(&self, key: S) -> CacheResult<Option<T>> {
        self.writer()
            .map(|mut writer| writer.remove(key.into()).map(|(v, _)| v))
    }
    /// Removes all entries.
    pub fn clear(&self) -> CacheResult<()> {
        self.writer().map(|mut writer| writer.clear())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Inserting then reading back within the 5-second window returns a clone
    // of the stored value.
    #[test]
    fn test_write_then_read() {
        let cache: HashCache<Vec<u8>> = HashCache::new(Duration::new(5, 0));
        let val = vec![1, 2, 3];
        let _ = cache.insert("3", &val);
        assert_eq!(Some(val), cache.get("3").unwrap());
    }
}
|
use std::io::{Read, Result as IOResult};
pub use crate::lump_data::brush_model::BrushModel;
pub use crate::lump_data::face::Face;
pub use crate::lump_data::vertex::Vertex;
mod brush_model;
mod face;
mod vertex;
/// Index of each lump kind within a BSP file's lump directory.
/// The discriminants match the on-disk order, so a `LumpType` can be used
/// directly as a directory index.
#[derive(Clone, Copy, Debug)]
#[repr(u8)]
pub enum LumpType {
    Entities = 0,
    Textures = 1,
    Planes = 2,
    Nodes = 3,
    Leafs = 4,
    LeafFaces = 5,
    LeafBrushes = 6,
    Models = 7,
    Brushes = 8,
    BrushSides = 9,
    Vertices = 10,
    MeshVerts = 11,
    Effects = 12,
    Faces = 13,
    Lightmaps = 14,
    LightVols = 15,
    VisData = 16
}
/// A typed view of one lump's elements.
/// Implementors declare which lump they live in, how large one serialized
/// element is, and how to deserialize a single element.
pub(crate) trait LumpData : Sized {
    fn lump_type() -> LumpType;
    // Size in bytes of one element; may depend on the BSP format version.
    fn element_size(version: i32) -> usize;
    // Reads exactly one element from `read`.
    fn read(read: &mut dyn Read, version: i32) -> IOResult<Self>;
}
|
use crate::topology::config::{
component::ExampleError, GlobalOptions, SinkDescription, SourceDescription,
TransformDescription,
};
use colored::*;
use indexmap::IndexMap;
use serde::Serialize;
use std::collections::BTreeMap;
use structopt::StructOpt;
use toml::Value;
// CLI options for the `generate` subcommand.
// NOTE(review): the `///` field docs below double as the StructOpt-generated
// CLI help text, so they are deliberately left byte-identical; only `//`
// comments are added here.
#[derive(StructOpt, Debug)]
#[structopt(rename_all = "kebab-case")]
pub struct Opts {
    /// Whether to skip the generation of global fields.
    #[structopt(short, long)]
    fragment: bool,
    /// Generate expression, e.g. 'stdin/json_parser,add_fields/console'
    ///
    /// Three comma-separated lists of sources, transforms and sinks, divided by
    /// forward slashes. If subsequent component types are not needed then
    /// their dividers can be omitted from the expression.
    ///
    /// For example:
    ///
    /// `/json_parser` prints a `json_parser` transform.
    ///
    /// `//file,http` prints a `file` and `http` sink.
    ///
    /// `stdin//http` prints a `stdin` source and an `http` sink.
    ///
    /// Generated components are given incremental names (`source1`, `source2`,
    /// etc) which should be replaced in order to provide better context. You
    /// can optionally specify the names of components by prefixing them with
    /// `<name>:`, e.g.:
    ///
    /// `foo:stdin/bar:regex_parser/baz:http` prints a `stdin` source called
    /// `foo`, a `regex_parser` transform called `bar`, and an `http` sink
    /// called `baz`.
    ///
    /// Vector makes a best attempt at constructing a sensible topology. The
    /// first transform generated will consume from all sources and subsequent
    /// transforms will consume from their predecessor. All sinks will consume
    /// from the last transform or, if none are specified, from all sources. It
    /// is then up to you to restructure the `inputs` of each component to build
    /// the topology you need.
    expression: String,
}
/// Serializable wrapper for a generated sink: the generated `inputs`,
/// `healthcheck` and `buffer` fields plus the example config flattened in.
#[derive(Serialize)]
pub struct SinkOuter {
    pub healthcheck: bool,
    pub inputs: Vec<String>,
    #[serde(flatten)]
    pub inner: Value,
    pub buffer: crate::buffers::BufferConfig,
}
/// Serializable wrapper for a generated transform: the generated `inputs`
/// plus the example config flattened in.
#[derive(Serialize)]
pub struct TransformOuter {
    pub inputs: Vec<String>,
    #[serde(flatten)]
    pub inner: Value,
}
/// Top-level shape of the generated config. Each section is `None` (and thus
/// omitted from the TOML) when no component of that type was requested.
#[derive(Serialize, Default)]
pub struct Config {
    pub sources: Option<IndexMap<String, Value>>,
    pub transforms: Option<IndexMap<String, TransformOuter>>,
    pub sinks: Option<IndexMap<String, SinkOuter>>,
}
/// Builds an example Vector config (TOML) from a generate `expression`.
///
/// The expression is up to three component lists (sources / transforms /
/// sinks) divided by `/` or `|`; see the `Opts::expression` help text.
/// Returns the rendered TOML on success, or every accumulated error message.
fn generate_example(include_globals: bool, expression: &str) -> Result<String, Vec<String>> {
    // Split into up to three comma-separated component lists.
    let components: Vec<Vec<_>> = expression
        .split(|c| c == '|' || c == '/')
        .map(|s| {
            s.split(',')
                .map(|s| s.trim().to_string())
                .filter(|s| !s.is_empty())
                .collect()
        })
        .collect();
    let globals = {
        let mut globals = GlobalOptions::default();
        globals.data_dir = crate::topology::config::default_data_dir();
        globals
    };
    let mut config = Config::default();
    let mut errs = Vec::new();
    // --- sources (first list) ---
    let mut source_names = Vec::new();
    if let Some(source_types) = components.get(0) {
        let mut sources = IndexMap::new();
        for (i, source_expr) in source_types.iter().enumerate() {
            // An optional `<name>:` prefix overrides the generated name.
            let (name, source_type) = if let Some(c_index) = source_expr.find(':') {
                if c_index == 0 {
                    errs.push(format!(
                        "failed to generate source '{}': empty name is not allowed",
                        source_expr
                    ));
                    continue;
                }
                let mut chopped_expr = source_expr.clone();
                // Drain the name, then skip the ':' and take the type.
                (
                    chopped_expr.drain(..c_index).collect(),
                    chopped_expr.drain(1..).collect(),
                )
            } else {
                (format!("source{}", i), source_expr.clone())
            };
            source_names.push(name.clone());
            // A missing example is fine (empty table); other errors are collected.
            let mut example = match SourceDescription::example(&source_type) {
                Ok(example) => example,
                Err(err) => {
                    if err != ExampleError::MissingExample {
                        errs.push(format!(
                            "failed to generate source '{}': {}",
                            source_type, err
                        ));
                    }
                    Value::Table(BTreeMap::new())
                }
            };
            example
                .as_table_mut()
                .expect("examples are always tables")
                .insert("type".into(), source_type.to_owned().into());
            sources.insert(name, example);
        }
        if !sources.is_empty() {
            config.sources = Some(sources);
        }
    }
    // --- transforms (second list) ---
    let mut transform_names = Vec::new();
    if let Some(transform_types) = components.get(1) {
        let mut transforms = IndexMap::new();
        for (i, transform_expr) in transform_types.iter().enumerate() {
            let (name, transform_type) = if let Some(c_index) = transform_expr.find(':') {
                if c_index == 0 {
                    errs.push(format!(
                        "failed to generate transform '{}': empty name is not allowed",
                        transform_expr
                    ));
                    continue;
                }
                let mut chopped_expr = transform_expr.clone();
                (
                    chopped_expr.drain(..c_index).collect(),
                    chopped_expr.drain(1..).collect(),
                )
            } else {
                (format!("transform{}", i), transform_expr.clone())
            };
            transform_names.push(name.clone());
            // The first transform consumes all sources; later ones chain onto
            // their predecessor.
            let targets = if i == 0 {
                source_names.clone()
            } else {
                vec![transform_names
                    .get(i - 1)
                    .unwrap_or(&"TODO".to_owned())
                    .to_owned()]
            };
            let mut example = match TransformDescription::example(&transform_type) {
                Ok(example) => example,
                Err(err) => {
                    if err != ExampleError::MissingExample {
                        errs.push(format!(
                            "failed to generate transform '{}': {}",
                            transform_type, err
                        ));
                    }
                    Value::Table(BTreeMap::new())
                }
            };
            example
                .as_table_mut()
                .expect("examples are always tables")
                .insert("type".into(), transform_type.to_owned().into());
            transforms.insert(
                name,
                TransformOuter {
                    inputs: targets,
                    inner: example,
                },
            );
        }
        if !transforms.is_empty() {
            config.transforms = Some(transforms);
        }
    }
    // --- sinks (third list) ---
    if let Some(sink_types) = components.get(2) {
        let mut sinks = IndexMap::new();
        for (i, sink_expr) in sink_types.iter().enumerate() {
            let (name, sink_type) = if let Some(c_index) = sink_expr.find(':') {
                if c_index == 0 {
                    errs.push(format!(
                        "failed to generate sink '{}': empty name is not allowed",
                        sink_expr
                    ));
                    continue;
                }
                let mut chopped_expr = sink_expr.clone();
                (
                    chopped_expr.drain(..c_index).collect(),
                    chopped_expr.drain(1..).collect(),
                )
            } else {
                (format!("sink{}", i), sink_expr.clone())
            };
            let mut example = match SinkDescription::example(&sink_type) {
                Ok(example) => example,
                Err(err) => {
                    if err != ExampleError::MissingExample {
                        errs.push(format!("failed to generate sink '{}': {}", sink_type, err));
                    }
                    Value::Table(BTreeMap::new())
                }
            };
            example
                .as_table_mut()
                .expect("examples are always tables")
                .insert("type".into(), sink_type.to_owned().into());
            sinks.insert(
                name,
                SinkOuter {
                    // Sinks consume the last transform, else all sources,
                    // else a "TODO" placeholder the user must fill in.
                    inputs: transform_names
                        .last()
                        .map(|s| vec![s.to_owned()])
                        .or_else(|| {
                            if !source_names.is_empty() {
                                Some(source_names.clone())
                            } else {
                                None
                            }
                        })
                        .unwrap_or_else(|| vec!["TODO".to_owned()]),
                    buffer: crate::buffers::BufferConfig::default(),
                    healthcheck: true,
                    inner: example,
                },
            );
        }
        if !sinks.is_empty() {
            config.sinks = Some(sinks);
        }
    }
    if !errs.is_empty() {
        return Err(errs);
    }
    // Render each populated section separately so empty sections are omitted
    // from the output entirely.
    let mut builder = if include_globals {
        match toml::to_string(&globals) {
            Ok(s) => s,
            Err(err) => {
                errs.push(format!("failed to marshal globals: {}", err));
                return Err(errs);
            }
        }
    } else {
        String::new()
    };
    if let Some(sources) = config.sources {
        match toml::to_string(&{
            let mut sub = Config::default();
            sub.sources = Some(sources);
            sub
        }) {
            Ok(v) => builder = [builder, v].join("\n"),
            Err(e) => errs.push(format!("failed to marshal sources: {}", e)),
        }
    }
    if let Some(transforms) = config.transforms {
        match toml::to_string(&{
            let mut sub = Config::default();
            sub.transforms = Some(transforms);
            sub
        }) {
            Ok(v) => builder = [builder, v].join("\n"),
            Err(e) => errs.push(format!("failed to marshal transforms: {}", e)),
        }
    }
    if let Some(sinks) = config.sinks {
        match toml::to_string(&{
            let mut sub = Config::default();
            sub.sinks = Some(sinks);
            sub
        }) {
            Ok(v) => builder = [builder, v].join("\n"),
            Err(e) => errs.push(format!("failed to marshal sinks: {}", e)),
        }
    }
    if !errs.is_empty() {
        Err(errs)
    } else {
        Ok(builder)
    }
}
/// Entry point for `vector generate`: prints the generated config to stdout,
/// or each error (in red) to stderr, and returns the matching exit code.
pub fn cmd(opts: &Opts) -> exitcode::ExitCode {
    match generate_example(!opts.fragment, &opts.expression) {
        Ok(config) => {
            println!("{}", config);
            exitcode::OK
        }
        Err(errors) => {
            for error in &errors {
                eprintln!("{}", error.red());
            }
            exitcode::SOFTWARE
        }
    }
}
#[cfg(all(test, feature = "transforms-json_parser", feature = "sinks-console"))]
mod tests {
    use super::*;
    // End-to-end checks of the rendered TOML for representative expressions.
    // Both '/' and '|' are accepted as section dividers, empty sections are
    // omitted, and sinks with no upstream fall back to the "TODO" input.
    #[test]
    fn generate_basic() {
        assert_eq!(
            generate_example(true, "stdin/json_parser/console"),
            Ok(r#"data_dir = "/var/lib/vector/"
[sources.source0]
max_length = 102400
type = "stdin"
[transforms.transform0]
inputs = ["source0"]
drop_field = true
drop_invalid = false
type = "json_parser"
[sinks.sink0]
healthcheck = true
inputs = ["transform0"]
type = "console"
[sinks.sink0.buffer]
type = "memory"
max_events = 500
when_full = "block"
"#
            .to_string())
        );
        // Same expression with '|' dividers must give identical output.
        assert_eq!(
            generate_example(true, "stdin|json_parser|console"),
            Ok(r#"data_dir = "/var/lib/vector/"
[sources.source0]
max_length = 102400
type = "stdin"
[transforms.transform0]
inputs = ["source0"]
drop_field = true
drop_invalid = false
type = "json_parser"
[sinks.sink0]
healthcheck = true
inputs = ["transform0"]
type = "console"
[sinks.sink0.buffer]
type = "memory"
max_events = 500
when_full = "block"
"#
            .to_string())
        );
        // No transforms: the sink consumes directly from the source.
        assert_eq!(
            generate_example(true, "stdin//console"),
            Ok(r#"data_dir = "/var/lib/vector/"
[sources.source0]
max_length = 102400
type = "stdin"
[sinks.sink0]
healthcheck = true
inputs = ["source0"]
type = "console"
[sinks.sink0.buffer]
type = "memory"
max_events = 500
when_full = "block"
"#
            .to_string())
        );
        // No sources at all: the sink gets the "TODO" placeholder input.
        assert_eq!(
            generate_example(true, "//console"),
            Ok(r#"data_dir = "/var/lib/vector/"
[sinks.sink0]
healthcheck = true
inputs = ["TODO"]
type = "console"
[sinks.sink0.buffer]
type = "memory"
max_events = 500
when_full = "block"
"#
            .to_string())
        );
        // Transforms only: they chain onto each other.
        assert_eq!(
            generate_example(true, "/add_fields,json_parser,remove_fields"),
            Ok(r#"data_dir = "/var/lib/vector/"
[transforms.transform0]
inputs = []
type = "add_fields"
[transforms.transform1]
inputs = ["transform0"]
drop_field = true
drop_invalid = false
type = "json_parser"
[transforms.transform2]
inputs = ["transform1"]
type = "remove_fields"
"#
            .to_string())
        );
        // include_globals = false drops the data_dir line.
        assert_eq!(
            generate_example(false, "/add_fields,json_parser,remove_fields"),
            Ok(r#"
[transforms.transform0]
inputs = []
type = "add_fields"
[transforms.transform1]
inputs = ["transform0"]
drop_field = true
drop_invalid = false
type = "json_parser"
[transforms.transform2]
inputs = ["transform1"]
type = "remove_fields"
"#
            .to_string())
        );
    }
}
|
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{find_macro_calls, is_expn_of, return_ty};
use rustc_hir as hir;
use rustc_hir::intravisit::FnKind;
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::{sym, Span};
declare_clippy_lint! {
    /// ### What it does
    /// Checks for usage of `panic!`, `unimplemented!`, `todo!`, `unreachable!` or assertions in a function of type result.
    ///
    /// ### Why is this bad?
    /// For some codebases, it is desirable for functions of type result to return an error instead of crashing. Hence panicking macros should be avoided.
    ///
    /// ### Known problems
    /// Functions called from a function returning a `Result` may invoke a panicking macro. This is not checked.
    ///
    /// ### Example
    /// ```rust
    /// fn result_with_panic() -> Result<bool, String>
    /// {
    ///     panic!("error");
    /// }
    /// ```
    /// Use instead:
    /// ```rust
    /// fn result_without_panic() -> Result<bool, String> {
    ///     Err(String::from("error"))
    /// }
    /// ```
    #[clippy::version = "1.48.0"]
    pub PANIC_IN_RESULT_FN,
    restriction,
    // Fixed: the description used to say `unreachable()`/`unimplemented()`
    // without `!`, inconsistent with the lint messages emitted below.
    "functions of type `Result<..>` that contain `panic!()`, `todo!()`, `unreachable!()`, `unimplemented!()` or assertion"
}
declare_lint_pass!(PanicInResultFn => [PANIC_IN_RESULT_FN]);
impl<'tcx> LateLintPass<'tcx> for PanicInResultFn {
    fn check_fn(
        &mut self,
        cx: &LateContext<'tcx>,
        fn_kind: FnKind<'tcx>,
        _: &'tcx hir::FnDecl<'tcx>,
        body: &'tcx hir::Body<'tcx>,
        span: Span,
        hir_id: hir::HirId,
    ) {
        // Skip closures and only check functions whose declared return type
        // is the `Result` diagnostic item.
        if !matches!(fn_kind, FnKind::Closure) && is_type_diagnostic_item(cx, return_ty(cx, hir_id), sym::Result) {
            lint_impl_body(cx, span, body);
        }
    }
}
/// Collects the spans of all panicking macro calls inside `body` and, if any
/// remain, emits one lint on the function with those spans attached as notes.
fn lint_impl_body<'tcx>(cx: &LateContext<'tcx>, impl_span: Span, body: &'tcx hir::Body<'tcx>) {
    let mut panics = find_macro_calls(
        &[
            "unimplemented",
            "unreachable",
            "panic",
            "todo",
            "assert",
            "assert_eq",
            "assert_ne",
        ],
        body,
    );
    // `debug_assert*` expands to `assert*`; those are allowed, so drop any
    // span originating from a `debug_assert` expansion.
    panics.retain(|span| is_expn_of(*span, "debug_assert").is_none());
    if !panics.is_empty() {
        span_lint_and_then(
            cx,
            PANIC_IN_RESULT_FN,
            impl_span,
            "used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`",
            move |diag| {
                diag.help(
                    "`unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing",
                );
                diag.span_note(panics, "return Err() instead of panicking");
            },
        );
    }
}
|
use aoc2020_rust::read_input;
fn main() {
    // Read the puzzle input once and feed it to both parts.
    let input = read_input();
    part1(&input);
    part2(&input);
}
/// Day 1, part 1: prints the product of the two entries that sum to 2020,
/// or a failure message if no such pair exists.
///
/// Takes `&str` instead of `&String` (callers passing `&String` still work
/// via deref coercion). Panics if any input line is not a valid `i32`.
fn part1(input: &str) {
    let nums: Vec<i32> = input
        .lines()
        .map(|line| line.parse().expect("Input lines should all be integers."))
        .collect();
    for (i, x) in nums.iter().enumerate() {
        // Only look at later entries so each unordered pair is tried once.
        for y in &nums[i + 1..] {
            if x + y == 2020 {
                println!("{}", x * y);
                return;
            }
        }
    }
    println!("Failed to find a solution.");
}
/// Day 1, part 2: prints the product of the three entries that sum to 2020,
/// or a failure message if no such triple exists.
///
/// Panics if any input line is not a valid `i32`.
fn part2(input: &str) {
    let nums: Vec<i32> = input
        .lines()
        .map(|line| line.parse().expect("Input lines should all be integers."))
        .collect();
    for (i, x) in nums.iter().enumerate() {
        // BUG FIX: `j` must be an index into `nums` itself. The previous
        // version enumerated `nums[i + 1..]`, so `j` was slice-relative, yet
        // it was used to slice `nums[j + 1..]` — that could revisit earlier
        // entries and even pair an element with itself. Using `skip(i + 1)`
        // keeps `j` absolute, so `nums[j + 1..]` really starts after `y`.
        for (j, y) in nums.iter().enumerate().skip(i + 1) {
            for z in &nums[j + 1..] {
                if x + y + z == 2020 {
                    println!("{}", x * y * z);
                    return;
                }
            }
        }
    }
    println!("Failed to find a solution.");
}
|
// Authors: Matthew Bartlett & Arron Harman
// Major: (Software Development & Math) & (Software Development)
// Creation Date: October 27, 2020
// Due Date: November 24, 2020
// Course: CSC328
// Professor Name: Dr. Frye
// Assignment: Chat Server
// Filename: main.rs
// Purpose: Include libcs.a for libc
/// Cargo build script: tells rustc where to find and how to link `libcs.a`.
fn main() {
    // BUG FIX: `-l` takes the library *name*, to which the linker prepends
    // `lib` and appends the extension. The archive is `libcs.a`, so the link
    // name is `cs`; the old `-l libcs` would have searched for `liblibcs.a`.
    println!("cargo:rustc-flags=-L . -l cs")
}
|
use input_i_scanner::InputIScanner;
fn main() {
    let stdin = std::io::stdin();
    let mut _i_i = InputIScanner::from(stdin.lock());
    // Local input helper: `scan!(T)` reads one token, `scan!((A, B))` reads a
    // tuple, and the `; $n` forms read `$n` of them into a `Vec`.
    macro_rules! scan {
        (($($t: ty),+)) => {
            ($(scan!($t)),+)
        };
        ($t: ty) => {
            _i_i.scan::<$t>() as $t
        };
        (($($t: ty),+); $n: expr) => {
            std::iter::repeat_with(|| scan!(($($t),+))).take($n).collect::<Vec<_>>()
        };
        ($t: ty; $n: expr) => {
            std::iter::repeat_with(|| scan!($t)).take($n).collect::<Vec<_>>()
        };
    }
    let (n, k) = scan!((usize, u64));
    let mut a = scan!(u64; n);
    a.sort();
    // Feasibility predicate for the binary search below: `h` elements are
    // already >= proj_num; otherwise the sum of the smaller elements must
    // cover proj_num for each of the remaining k - h.
    // NOTE(review): the search relies on this predicate being monotone
    // (true up to some threshold, false beyond) — confirm against the
    // original problem statement.
    let f = |proj_num: u64| -> bool {
        let h = a.iter().filter(|&&a| a >= proj_num).count() as u64;
        if h >= k {
            return true;
        }
        let sum = a.iter().filter(|&&a| a < proj_num).sum::<u64>();
        sum >= proj_num * (k - h)
    };
    // Binary search for the largest feasible value. Invariant: f(ok) holds
    // (f(0) is trivially true) and ng is assumed infeasible at the 1e18 bound.
    let mut ok = 0;
    let mut ng = 1_000_000_000_000_000_000;
    while ng - ok > 1 {
        let mid = (ok + ng) / 2;
        if f(mid) {
            ok = mid;
        } else {
            ng = mid;
        }
    }
    println!("{}", ok);
}
|
/// Early-return helper for Mach kernel calls: evaluates `$expr` and, unless
/// it yields `KERN_SUCCESS`, converts the raw kern return code into a
/// `std::io::Error` and returns it (through `.into()`) from the enclosing
/// function.
#[macro_export]
macro_rules! r#kern_try {
    ($expr:expr) => {
        match $expr {
            mach::kern_return::KERN_SUCCESS => (),
            // Any other code is an error: surface it as an OS error.
            err_code => return ::std::result::Result::Err(::std::io::Error::from_raw_os_error(err_code).into()),
        }
    };
    // Trailing-comma form simply delegates to the main arm.
    ($expr:expr,) => {
        r#kern_try!($expr)
    };
}
|
/*
* Copyright © 2019-today Peter M. Stahl pemistahl@gmail.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either expressed or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::cluster::GraphemeCluster;
use crate::config::RegExpConfig;
use crate::grapheme::Grapheme;
use itertools::Itertools;
use petgraph::graph::NodeIndex;
use petgraph::stable_graph::{Edges, StableGraph};
use petgraph::visit::Dfs;
use petgraph::{Directed, Direction};
use std::cmp::{max, min};
use std::collections::{BTreeSet, HashMap, HashSet};
// A DFA state is a node in the petgraph graph.
type State = NodeIndex<u32>;
// Node weight (currently always the empty string).
type StateLabel = String;
// Edge weight: the grapheme consumed by the transition.
type EdgeLabel = Grapheme;
/// A deterministic finite automaton over grapheme clusters, built from the
/// input strings and later minimized (see `RegExpConfig` for the options it
/// threads through to new graphemes).
pub struct Dfa<'a> {
    // Every distinct grapheme inserted so far; iterated during minimization.
    alphabet: BTreeSet<Grapheme>,
    // States carry an (unused) string label; edges carry the consumed grapheme.
    graph: StableGraph<StateLabel, EdgeLabel>,
    // The entry state created in `new`.
    initial_state: State,
    // `NodeIndex::index()` values of the accepting states.
    final_state_indices: HashSet<usize>,
    // Options forwarded when constructing merged `Grapheme`s.
    config: &'a RegExpConfig,
}
impl<'a> Dfa<'a> {
    /// Builds a DFA accepting exactly the given grapheme clusters, optionally
    /// minimizing it afterwards.
    pub(crate) fn from(
        grapheme_clusters: &[GraphemeCluster],
        is_minimized: bool,
        config: &'a RegExpConfig,
    ) -> Self {
        let mut dfa = Self::new(config);
        for cluster in grapheme_clusters {
            dfa.insert(cluster);
        }
        if is_minimized {
            dfa.minimize();
        }
        dfa
    }
    /// Number of states (graph nodes) currently in the automaton.
    pub(crate) fn state_count(&self) -> usize {
        self.graph.node_count()
    }
    /// All states reachable from the initial state, in DFS visit order.
    pub(crate) fn states_in_depth_first_order(&self) -> Vec<State> {
        let mut depth_first_search = Dfs::new(&self.graph, self.initial_state);
        let mut states = vec![];
        while let Some(state) = depth_first_search.next(&self.graph) {
            states.push(state);
        }
        states
    }
    /// All transitions leaving `state`.
    pub(crate) fn outgoing_edges(&self, state: State) -> Edges<Grapheme, Directed> {
        self.graph.edges_directed(state, Direction::Outgoing)
    }
    /// Whether `state` is an accepting state.
    pub(crate) fn is_final_state(&self, state: State) -> bool {
        self.final_state_indices.contains(&state.index())
    }
    /// Creates an empty DFA containing only the initial state.
    fn new(config: &'a RegExpConfig) -> Self {
        let mut graph = StableGraph::new();
        let initial_state = graph.add_node("".to_string());
        Self {
            alphabet: BTreeSet::new(),
            graph,
            initial_state,
            final_state_indices: HashSet::new(),
            config,
        }
    }
    /// Adds one cluster: walks/extends a path from the initial state, one
    /// grapheme per transition, and marks the last state as accepting.
    fn insert(&mut self, cluster: &GraphemeCluster) {
        let mut current_state = self.initial_state;
        for grapheme in cluster.graphemes() {
            self.alphabet.insert(grapheme.clone());
            current_state = self.return_next_state(current_state, grapheme);
        }
        self.final_state_indices.insert(current_state.index());
    }
    /// Follows an existing matching transition or creates a new state + edge.
    fn return_next_state(&mut self, current_state: State, edge_label: &Grapheme) -> State {
        match self.find_next_state(current_state, edge_label) {
            Some(next_state) => next_state,
            None => self.add_new_state(current_state, edge_label),
        }
    }
    /// Looks for an outgoing edge of `current_state` matching `grapheme`.
    ///
    /// If an edge has the same grapheme value and a repetition maximum exactly
    /// one below the new grapheme's, the edge is widened to cover the merged
    /// min/max range and reused; an edge with an equal maximum is reused as-is.
    fn find_next_state(&mut self, current_state: State, grapheme: &Grapheme) -> Option<State> {
        for next_state in self.graph.neighbors(current_state) {
            let edge_idx = self.graph.find_edge(current_state, next_state).unwrap();
            let current_grapheme = self.graph.edge_weight(edge_idx).unwrap();
            if current_grapheme.value() != grapheme.value() {
                continue;
            }
            if current_grapheme.maximum() == grapheme.maximum() - 1 {
                // Merge the repetition ranges into a single widened edge.
                let min = min(current_grapheme.minimum(), grapheme.minimum());
                let max = max(current_grapheme.maximum(), grapheme.maximum());
                let new_grapheme = Grapheme::new(
                    grapheme.chars().clone(),
                    min,
                    max,
                    self.config.is_capturing_group_enabled,
                    self.config.is_output_colorized,
                );
                self.graph
                    .update_edge(current_state, next_state, new_grapheme);
                return Some(next_state);
            } else if current_grapheme.maximum() == grapheme.maximum() {
                return Some(next_state);
            }
        }
        None
    }
    /// Appends a fresh state connected from `current_state` via `edge_label`.
    fn add_new_state(&mut self, current_state: State, edge_label: &Grapheme) -> State {
        let next_state = self.graph.add_node("".to_string());
        self.graph
            .add_edge(current_state, next_state, edge_label.clone());
        next_state
    }
    /// Minimizes the automaton by partition refinement (Hopcroft-style):
    /// `p` is the current partition into equivalence classes, `w` the worklist
    /// of splitter sets. Classes are split against the predecessor set `x` of
    /// each worklist entry per alphabet symbol until stable, then the graph is
    /// rebuilt with one state per class.
    #[allow(clippy::many_single_char_names)]
    fn minimize(&mut self) {
        let mut p = self.get_initial_partition();
        let mut w = p.iter().cloned().collect_vec();
        while !w.is_empty() {
            let a = w.drain(0..1).next().unwrap();
            for edge_label in self.alphabet.iter() {
                // States with an `edge_label` transition into `a`.
                let x = self.get_parent_states(&a, edge_label);
                let mut replacements = vec![];
                let mut is_replacement_needed = true;
                let mut start_idx = 0;
                while is_replacement_needed {
                    for (idx, y) in p.iter().enumerate().skip(start_idx) {
                        // `y` is only split when it intersects `x` without
                        // being contained in it.
                        if x.intersection(y).count() == 0 || y.difference(&x).count() == 0 {
                            is_replacement_needed = false;
                            continue;
                        }
                        let i = x.intersection(y).copied().collect::<HashSet<State>>();
                        let d = y.difference(&x).copied().collect::<HashSet<State>>();
                        is_replacement_needed = true;
                        start_idx = idx;
                        replacements.push((y.clone(), i, d));
                        break;
                    }
                    if is_replacement_needed {
                        // Replace `y` in the partition by its two halves.
                        let (_, i, d) = replacements.last().unwrap();
                        p.remove(start_idx);
                        p.insert(start_idx, i.clone());
                        p.insert(start_idx + 1, d.clone());
                    }
                }
                for (y, i, d) in replacements {
                    if w.contains(&y) {
                        // A pending splitter that was split is replaced by both halves.
                        let idx = w.iter().position(|it| it == &y).unwrap();
                        w.remove(idx);
                        w.push(i);
                        w.push(d);
                    } else if i.len() <= d.len() {
                        // Otherwise only the smaller half needs to be queued.
                        w.push(i);
                    } else {
                        w.push(d);
                    }
                }
            }
        }
        self.recreate_graph(p.iter().filter(|&it| !it.is_empty()).collect_vec());
    }
    /// Initial partition: non-final states vs. final states.
    fn get_initial_partition(&self) -> Vec<HashSet<State>> {
        let (final_states, non_final_states): (HashSet<State>, HashSet<State>) = self
            .graph
            .node_indices()
            .partition(|&state| !self.final_state_indices.contains(&state.index()));
        vec![final_states, non_final_states]
    }
    /// Returns the states that have a transition labeled like `label` into
    /// some state of `a` (value must match; max or min must also match).
    fn get_parent_states(&self, a: &HashSet<State>, label: &Grapheme) -> HashSet<State> {
        let mut x = HashSet::new();
        for &state in a {
            let direct_parent_states = self.graph.neighbors_directed(state, Direction::Incoming);
            for parent_state in direct_parent_states {
                let edge = self.graph.find_edge(parent_state, state).unwrap();
                let grapheme = self.graph.edge_weight(edge).unwrap();
                if grapheme.value() == label.value()
                    && (grapheme.maximum() == label.maximum()
                        || grapheme.minimum() == label.minimum())
                {
                    x.insert(parent_state);
                    break;
                }
            }
        }
        x
    }
    /// Rebuilds the graph with one node per equivalence class, remapping
    /// edges/finality from an arbitrary representative of each class.
    fn recreate_graph(&mut self, p: Vec<&HashSet<State>>) {
        let mut graph = StableGraph::<StateLabel, EdgeLabel>::new();
        let mut final_state_indices = HashSet::new();
        let mut state_mappings = HashMap::new();
        let mut new_initial_state: Option<NodeIndex> = None;
        // First pass: one new node per class; remember old-state -> new-state.
        for equivalence_class in p.iter() {
            let new_state = graph.add_node("".to_string());
            for old_state in equivalence_class.iter() {
                if self.initial_state == *old_state {
                    new_initial_state = Some(new_state);
                }
                state_mappings.insert(*old_state, new_state);
            }
        }
        // Second pass: copy the outgoing edges of one representative per class.
        for equivalence_class in p.iter() {
            let old_source_state = *equivalence_class.iter().next().unwrap();
            let new_source_state = state_mappings.get(&old_source_state).unwrap();
            for old_target_state in self.graph.neighbors(old_source_state) {
                let edge = self
                    .graph
                    .find_edge(old_source_state, old_target_state)
                    .unwrap();
                let grapheme = self.graph.edge_weight(edge).unwrap().clone();
                let new_target_state = state_mappings.get(&old_target_state).unwrap();
                graph.add_edge(*new_source_state, *new_target_state, grapheme.clone());
                if self.final_state_indices.contains(&old_target_state.index()) {
                    final_state_indices.insert(new_target_state.index());
                }
            }
        }
        self.initial_state = new_initial_state.unwrap();
        self.final_state_indices = final_state_indices;
        self.graph = graph;
    }
}
// Unit tests for DFA construction, traversal, and minimization.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_state_count() {
        let config = RegExpConfig::new();
        let mut dfa = Dfa::new(&config);
        // A fresh DFA contains only the initial state.
        assert_eq!(dfa.state_count(), 1);
        dfa.insert(&GraphemeCluster::from("abcd", &RegExpConfig::new()));
        // One extra state per grapheme of "abcd".
        assert_eq!(dfa.state_count(), 5);
    }
    #[test]
    fn test_is_final_state() {
        let config = RegExpConfig::new();
        let dfa = Dfa::from(
            &[GraphemeCluster::from("abcd", &RegExpConfig::new())],
            true,
            &config,
        );
        let intermediate_state = State::new(3);
        assert_eq!(dfa.is_final_state(intermediate_state), false);
        // Only the state reached after consuming the whole cluster accepts.
        let final_state = State::new(4);
        assert_eq!(dfa.is_final_state(final_state), true);
    }
    #[test]
    fn test_outgoing_edges() {
        let config = RegExpConfig::new();
        let dfa = Dfa::from(
            &[
                GraphemeCluster::from("abcd", &RegExpConfig::new()),
                GraphemeCluster::from("abxd", &RegExpConfig::new()),
            ],
            true,
            &config,
        );
        // After the shared "ab" prefix the automaton branches on 'c' vs 'x'.
        let state = State::new(2);
        let mut edges = dfa.outgoing_edges(state);
        let first_edge = edges.next();
        assert!(first_edge.is_some());
        assert_eq!(
            first_edge.unwrap().weight(),
            &Grapheme::from("c", false, false)
        );
        let second_edge = edges.next();
        assert!(second_edge.is_some());
        assert_eq!(
            second_edge.unwrap().weight(),
            &Grapheme::from("x", false, false)
        );
        let third_edge = edges.next();
        assert!(third_edge.is_none());
    }
    #[test]
    fn test_states_in_depth_first_order() {
        let config = RegExpConfig::new();
        let dfa = Dfa::from(
            &[
                GraphemeCluster::from("abcd", &RegExpConfig::new()),
                GraphemeCluster::from("axyz", &RegExpConfig::new()),
            ],
            true,
            &config,
        );
        let states = dfa.states_in_depth_first_order();
        assert_eq!(states.len(), 7);
        let first_state = states.get(0).unwrap();
        let mut edges = dfa.outgoing_edges(*first_state);
        assert_eq!(
            edges.next().unwrap().weight(),
            &Grapheme::from("a", false, false)
        );
        assert!(edges.next().is_none());
        let second_state = states.get(1).unwrap();
        edges = dfa.outgoing_edges(*second_state);
        assert_eq!(
            edges.next().unwrap().weight(),
            &Grapheme::from("b", false, false)
        );
        assert_eq!(
            edges.next().unwrap().weight(),
            &Grapheme::from("x", false, false)
        );
        assert!(edges.next().is_none());
        let third_state = states.get(2).unwrap();
        edges = dfa.outgoing_edges(*third_state);
        assert_eq!(
            edges.next().unwrap().weight(),
            &Grapheme::from("y", false, false)
        );
        assert!(edges.next().is_none());
        let fourth_state = states.get(3).unwrap();
        edges = dfa.outgoing_edges(*fourth_state);
        assert_eq!(
            edges.next().unwrap().weight(),
            &Grapheme::from("z", false, false)
        );
        assert!(edges.next().is_none());
        // The fifth state is the accepting state with no outgoing edges.
        let fifth_state = states.get(4).unwrap();
        edges = dfa.outgoing_edges(*fifth_state);
        assert!(edges.next().is_none());
        let sixth_state = states.get(5).unwrap();
        edges = dfa.outgoing_edges(*sixth_state);
        assert_eq!(
            edges.next().unwrap().weight(),
            &Grapheme::from("c", false, false)
        );
        assert!(edges.next().is_none());
        let seventh_state = states.get(6).unwrap();
        edges = dfa.outgoing_edges(*seventh_state);
        assert_eq!(
            edges.next().unwrap().weight(),
            &Grapheme::from("d", false, false)
        );
        assert!(edges.next().is_none());
    }
    #[test]
    fn test_minimization_algorithm() {
        let config = RegExpConfig::new();
        let mut dfa = Dfa::new(&config);
        assert_eq!(dfa.graph.node_count(), 1);
        assert_eq!(dfa.graph.edge_count(), 0);
        dfa.insert(&GraphemeCluster::from("abcd", &RegExpConfig::new()));
        assert_eq!(dfa.graph.node_count(), 5);
        assert_eq!(dfa.graph.edge_count(), 4);
        dfa.insert(&GraphemeCluster::from("abxd", &RegExpConfig::new()));
        assert_eq!(dfa.graph.node_count(), 7);
        assert_eq!(dfa.graph.edge_count(), 6);
        // Minimization merges the equivalent suffix states after 'c'/'x'.
        dfa.minimize();
        assert_eq!(dfa.graph.node_count(), 5);
        assert_eq!(dfa.graph.edge_count(), 5);
    }
    #[test]
    fn test_dfa_constructor() {
        let config = RegExpConfig::new();
        let dfa = Dfa::from(
            &[
                GraphemeCluster::from("abcd", &RegExpConfig::new()),
                GraphemeCluster::from("abxd", &RegExpConfig::new()),
            ],
            true,
            &config,
        );
        assert_eq!(dfa.graph.node_count(), 5);
        assert_eq!(dfa.graph.edge_count(), 5);
    }
}
|
use std::convert::AsRef;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::mem;
use std::path::Path;
use std::slice;
/// An in-memory Windows bitmap (BMP) image parsed from a file or byte buffer.
#[derive(Debug, Clone)]
pub struct Bitmap {
    // Dimensions in pixels, taken from the file's info header.
    width: usize,
    height: usize,
    // How the pixel data is encoded in the file (see `Compression`).
    compression: Compression,
    // Bits per pixel, from the info header.
    bit_count: usize,
    // Decoded pixel data; the variant depends on the compression mode.
    data: BitmapData,
}
impl Bitmap {
    /// Loads and parses a bitmap file from disk.
    pub fn load<P: AsRef<Path>>(path: P) -> Result<Bitmap, Error> {
        // Open file and read all bytes.
        let bytes = {
            let mut file = File::open(path)?;
            let mut bytes = Vec::new();
            file.read_to_end(&mut bytes)?;
            bytes
        };
        Bitmap::from_bytes(&*bytes)
    }
    /// Parses a byte array representing a bitmap file.
    ///
    /// NOTE(review): this routine trusts its input. It reinterprets raw bytes
    /// as the packed header structs and as the `Compression` enum without any
    /// length or range validation — a truncated buffer can read out of
    /// bounds, and an out-of-range compression value is undefined behavior.
    /// Only feed it trusted bitmap data until validation is added.
    pub fn from_bytes(bytes: &[u8]) -> Result<Bitmap, Error> {
        // Extract the headers to get information about the bitmap.
        let file_header = {
            let ptr = bytes.as_ptr() as *const FileHeader;
            unsafe { &*ptr }
        };
        let info_header = {
            // The info header starts immediately after the file header.
            let offset = mem::size_of::<FileHeader>() as isize;
            let ptr = unsafe { bytes.as_ptr().offset(offset) } as *const InfoHeader;
            unsafe { &*ptr }
        };
        // // Extract the color masks.
        // let color_masks = {
        //     let offset = (mem::size_of::<FileHeader>() + mem::size_of::<InfoHeader>()) as isize;
        //     let ptr = unsafe { bytes.as_ptr().offset(offset) };
        //     unsafe { slice::from_raw_parts(ptr as *const RgbQuad, 5) }
        // };
        // Extract color data, located `data_offset` bytes into the file.
        let image_data = {
            let offset = file_header.data_offset as isize;
            let ptr = unsafe { bytes.as_ptr().offset(offset) };
            let byte_count = info_header.image_size as usize;
            unsafe { slice::from_raw_parts(ptr, byte_count) }
        };
        // Parse the raw data into a BitmapData structure.
        let data = match info_header.compression {
            Compression::Rgb => {
                // NOTE(review): this arm assumes 24-bpp pixel rows with no
                // 4-byte row padding, and relies on the memory layout of the
                // `(u8, u8, u8)` tuple, which Rust's default repr does not
                // guarantee — a `#[repr(C)]` pixel struct would be sound.
                // TODO confirm against real inputs.
                assert!(image_data.len() % 3 == 0, "Rgb image data must have a byte count multiple of 3");
                // Convert slice.
                let ptr = image_data.as_ptr() as *const (u8, u8, u8);
                let len = image_data.len() / 3;
                let data = unsafe { slice::from_raw_parts(ptr, len) };
                BitmapData::Bgr(data.into())
            },
            Compression::Rle8 => unimplemented!(),
            Compression::Rle4 => unimplemented!(),
            Compression::Bitfields => unimplemented!(),
            Compression::Jpeg => unimplemented!(),
            Compression::Png => unimplemented!(),
        };
        // Create the bitmap from the parsed data.
        Ok(Bitmap {
            width: info_header.width as usize,
            height: info_header.height as usize,
            compression: info_header.compression,
            bit_count: info_header.bit_count as usize,
            data: data,
        })
    }
    /// The width of the bitmap in pixels.
    pub fn width(&self) -> usize {
        self.width
    }
    /// The height of the bitmap in pixels.
    pub fn height(&self) -> usize {
        self.height
    }
    /// The raw bytes of the bitmap.
    ///
    /// The format of the data is defined by the compression of the file, which can be gotten
    /// using `compression()`.
    pub fn data(&self) -> &BitmapData {
        &self.data
    }
    /// The compression used by the bitmap.
    ///
    /// This determines the format of the data yielded by `data()`.
    pub fn compression(&self) -> Compression {
        self.compression
    }
}
/// Errors that can occur while loading or parsing a bitmap.
#[derive(Debug)]
pub enum Error {
    /// The underlying file could not be read.
    IoError(io::Error),
}
impl From<io::Error> for Error {
fn from(from: io::Error) -> Error {
Error::IoError(from)
}
}
/// Represents the possible data formats for a bitmap.
#[derive(Debug, Clone)]
pub enum BitmapData {
    /// 3-byte pixels in on-disk (blue, green, red) order.
    Bgr(Vec<(u8, u8, u8)>),
    /// 4-byte pixels in on-disk (blue, green, red, alpha) order.
    Bgra(Vec<(u8, u8, u8, u8)>),
}
/// The bitmap compression mode. Discriminants match the on-disk
/// `biCompression` values, which is why the enum is `#[repr(u32)]` and is
/// read directly out of the info header in `Bitmap::from_bytes`.
#[repr(u32)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Compression {
    /// Uncompressed format. The bits per pixel is determined by `bit_count`.
    Rgb = 0,
    /// Run-length encoded with 8 bits per pixel.
    ///
    /// The compression format is a 2-byte format consisting of a count byte followed by a byte
    /// containing a color index. For more information, see
    /// [Bitmap Compression](https://msdn.microsoft.com/en-us/library/dd183383(v=vs.85).aspx).
    Rle8 = 1,
    /// Run-length encoded with 4 bits per pixel.
    ///
    /// The compression format is a 2-byte format consisting of a count byte followed by a byte
    /// containing a color index. For more information, see
    /// [Bitmap Compression](https://msdn.microsoft.com/en-us/library/dd183383(v=vs.85).aspx).
    Rle4 = 2,
    /// Uncompressed using a color mask to define which bits specify which colors.
    ///
    /// Specifies that the bitmap is not compressed and that the color table consists of three
    /// 32 bit color masks that specify the red, green, and blue components, respectively, of each
    /// pixel. This is valid when used with 16- and 32-bpp bitmaps.
    Bitfields = 3,
    /// Indicates that the image is a JPEG image.
    Jpeg = 4,
    /// Indicates that the image is a PNG image.
    Png = 5,
}
#[repr(C)]
#[derive(Debug, Clone)]
pub struct RgbQuad {
pub blue: u8,
pub green: u8,
pub red: u8,
pub reserved: u8,
}
// TODO: Don't use #[repr(packed)] to load from the buffer, read members in a portable way.
/// On-disk BMP file header (`BITMAPFILEHEADER`), overlaid onto the raw bytes.
#[repr(C, packed)]
#[derive(Debug)]
struct FileHeader {
    /// The file type; must be the two ASCII bytes "BM".
    pub file_type: u16,
    /// The file size in bytes.
    pub file_size: u32,
    /// Reserved; must be zero.
    pub reserved_1: u16,
    /// Reserved; must be zero.
    pub reserved_2: u16,
    /// The offset in bytes from the beginning of the FileHeader (and consequently from the
    /// beginning of the file) to the bitmap bits.
    pub data_offset: u32,
}
/// On-disk BMP info header (`BITMAPINFOHEADER`), overlaid onto the raw bytes.
///
/// BUG FIX: `colors_used`/`colors_important` were previously declared as
/// `u16`, but `biClrUsed`/`biClrImportant` are 32-bit DWORDs — that made this
/// packed struct 36 bytes instead of the required 40 and misread both fields.
#[repr(C, packed)]
#[derive(Debug)]
struct InfoHeader {
    /// The number of bytes required by the structure (40 for this header version).
    pub size: u32,
    /// The width of the bitmap in pixels.
    ///
    /// If compression is used `width` specifies the width of the decompressed image file.
    pub width: i32,
    /// The height in pixels of the bitmap. If `height` is positive the bitmap is bottom-up and
    /// its origin is the lower-left corner. If `height` is negative the bitmap is top-down and
    /// the origin is the upper-left corner.
    ///
    /// Top-down images cannot be compressed so `compression` must be `Rgb` or `BitFields`.
    ///
    /// If the image is compressed then `height` specifies the height of the decompressed image.
    pub height: i32,
    /// The number of planes for the target device. This value must be set to 1.
    pub planes: u16,
    /// The number of bits-per-pixel.
    pub bit_count: u16,
    /// Specifies how the data is stored, e.g. whether it's uncompressed RGBA quads, RLE encoded,
    /// or one of the other supported formats.
    pub compression: Compression,
    /// The size in bytes of the image. May be set to zero for RGB bitmaps.
    ///
    /// If the image is compressed `image_size` represents the size of the compressed buffer.
    pub image_size: u32,
    /// The horizontal resolution, in pixels-per-meter, of the target device.
    pub x_pixels_per_meter: i32,
    /// The vertical resolution, in pixels-per-meter, of the target device.
    pub y_pixels_per_meter: i32,
    /// The number of color indexes in the color table that are actually used by the bitmap. If
    /// this value is zero the bitmap uses the maximum number of colors corresponding to the
    /// value of `bit_count` member for the compression mode specified by `compression`.
    ///
    /// If `colors_used` is nonzero and the `bit_count` is less than 16 then `colors_used`
    /// specifies the actual number of colors the graphics engine or device driver accesses. If
    /// `bit_count` equals 16 or greater the `colors_used` member specifies the size of the color
    /// table used to optimize performance of the system color palettes. If `bit_count` equals 16
    /// or 32 the optimal color palette starts immediately following the three masks.
    pub colors_used: u32,
    /// The number of color indexes that are required for displaying the bitmap. If this value is
    /// zero all colors are required.
    pub colors_important: u32,
}
|
use std::io::{Read, Write};
use std::net::TcpStream;
use serde::Serialize;
/// Success response marker from the server: the ASCII bytes `b"bok"`.
pub static OK_RESP: &[u8] = &[0x62, 0x6F, 0x6B];
/// Builds a framed "add log" request: a big-endian `u32` length prefix,
/// the opcode bytes `[0x00, 0x01]`, then a CBOR-encoded body with the log name.
pub fn new_log_add_req(name: &str) -> Vec<u8> {
    #[derive(Serialize)]
    struct Body {
        log_name: String,
    }
    let payload = serde_cbor::to_vec(&Body {
        log_name: name.into(),
    })
    .unwrap();
    let mut body = vec![0x00, 0x01];
    body.extend(payload);
    // Prepend the total body length as a 4-byte big-endian prefix.
    let mut framed = (body.len() as u32).to_be_bytes().to_vec();
    framed.extend(body);
    framed
}
/// Builds a framed "show log" request: a big-endian `u32` length prefix,
/// the opcode bytes `[0x00, 0x00]`, then a CBOR-encoded body with the log name.
pub fn new_log_show_req(name: &str) -> Vec<u8> {
    #[derive(Serialize)]
    struct Body {
        log_name: String,
    }
    let payload = serde_cbor::to_vec(&Body {
        log_name: name.into(),
    })
    .unwrap();
    let mut body = vec![0x00, 0x00];
    body.extend(payload);
    // Prepend the total body length as a 4-byte big-endian prefix.
    let mut framed = (body.len() as u32).to_be_bytes().to_vec();
    framed.extend(body);
    framed
}
/// Builds a framed "delete log" request: a big-endian `u32` length prefix,
/// the opcode bytes `[0x00, 0x02]`, then a CBOR-encoded body with the log name.
pub fn new_log_del_req(name: &str) -> Vec<u8> {
    #[derive(Serialize)]
    struct Body {
        log_name: String,
    }
    let payload = serde_cbor::to_vec(&Body {
        log_name: name.into(),
    })
    .unwrap();
    let mut body = vec![0x00, 0x02];
    body.extend(payload);
    // Prepend the total body length as a 4-byte big-endian prefix.
    let mut framed = (body.len() as u32).to_be_bytes().to_vec();
    framed.extend(body);
    framed
}
/// Builds a framed "add iterator" request (opcode `[0x00, 0x05]`) with the
/// log name, iterator name, iterator kind, and a pass-through iterator
/// function (`"return msg"`), CBOR-encoded after the opcode.
pub fn new_iterator_add_req(name: &str, iterator_name: &str, typ: &str) -> Vec<u8> {
    #[derive(Serialize)]
    struct Body {
        log_name: String,
        iterator_name: String,
        iterator_kind: String,
        iterator_func: String,
    }
    let payload = serde_cbor::to_vec(&Body {
        log_name: name.into(),
        iterator_name: iterator_name.into(),
        iterator_kind: typ.into(),
        iterator_func: "return msg".into(),
    })
    .unwrap();
    let mut body = vec![0x00, 0x05];
    body.extend(payload);
    // Prepend the total body length as a 4-byte big-endian prefix.
    let mut framed = (body.len() as u32).to_be_bytes().to_vec();
    framed.extend(body);
    framed
}
/// Builds a framed "add message" request (opcode `[0x00, 0x04]`) carrying the
/// log name and the raw message bytes as a CBOR byte string.
pub fn new_msg_add_req(name: &str, message: Vec<u8>) -> Vec<u8> {
    #[derive(Serialize)]
    struct Body {
        log_name: String,
        message: serde_cbor::Value,
    }
    let payload = serde_cbor::to_vec(&Body {
        log_name: name.into(),
        message: serde_cbor::Value::Bytes(message),
    })
    .unwrap();
    let mut body = vec![0x00, 0x04];
    body.extend(payload);
    // Prepend the total body length as a 4-byte big-endian prefix.
    let mut framed = (body.len() as u32).to_be_bytes().to_vec();
    framed.extend(body);
    framed
}
/// Builds a framed "list logs" request: opcode `[0x00, 0x03]` with an empty
/// body, preceded by the big-endian `u32` length prefix.
pub fn new_log_list_req() -> Vec<u8> {
    let body = [0x00u8, 0x03];
    let mut framed = Vec::with_capacity(4 + body.len());
    framed.extend_from_slice(&(body.len() as u32).to_be_bytes());
    framed.extend_from_slice(&body);
    framed
}
/// Builds a framed "list iterators" request: opcode `[0x00, 0x06]` with an
/// empty body, preceded by the big-endian `u32` length prefix.
pub fn new_iterator_list_req() -> Vec<u8> {
    let body = [0x00u8, 0x06];
    let mut framed = Vec::with_capacity(4 + body.len());
    framed.extend_from_slice(&(body.len() as u32).to_be_bytes());
    framed.extend_from_slice(&body);
    framed
}
/// Builds a framed "iterator next" request (opcode `[0x00, 0x07]`) asking for
/// up to `count` messages starting at `message_id`.
pub fn new_iterator_next_req(name: &str, message_id: usize, count: usize) -> Vec<u8> {
    #[derive(Serialize)]
    struct Body {
        iterator_name: String,
        message_id: usize,
        count: usize,
    }
    let payload = serde_cbor::to_vec(&Body {
        iterator_name: name.into(),
        message_id,
        count,
    })
    .unwrap();
    let mut body = vec![0x00, 0x07];
    body.extend(payload);
    // Prepend the total body length as a 4-byte big-endian prefix.
    let mut framed = (body.len() as u32).to_be_bytes().to_vec();
    framed.extend(body);
    framed
}
/// Sends a framed request to the remits server and returns the response,
/// split into its first two bytes and the remaining payload.
///
/// Panics on connection, write, or read failure, and if the response body is
/// shorter than two bytes.
pub fn send_req(bytes: Vec<u8>) -> (u8, u8, Vec<u8>) {
    let mut stream = connect_to_remits();
    stream.write_all(&bytes).expect("could not send command");
    // Responses are framed like requests: a 4-byte big-endian length prefix...
    let mut len_buf = [0u8; 4];
    stream.read_exact(&mut len_buf).unwrap();
    let size = u32::from_be_bytes(len_buf);
    // ...followed by exactly `size` bytes of body. (Previously this built the
    // buffer via a pointless `as_slice().to_owned()` round-trip and reported
    // read failures as "peek failed".)
    let mut output_buffer = vec![0u8; size as usize];
    stream
        .read_exact(&mut output_buffer)
        .expect("could not read response body");
    // NOTE(review): indexing below panics if the server sends fewer than two
    // body bytes — presumably the protocol guarantees at least two; confirm.
    (
        output_buffer[0],
        output_buffer[1],
        output_buffer[2..].to_vec(),
    )
}
/// Opens a TCP connection to the local remits server.
///
/// Panics when the server is unreachable; callers treat a missing server as
/// fatal.
pub fn connect_to_remits() -> TcpStream {
    match TcpStream::connect("localhost:4242") {
        Ok(stream) => stream,
        Err(e) => {
            // Keep the original user-facing message, but include the OS error
            // in the panic instead of the previous bare `panic!()`, which
            // discarded all diagnostic information.
            println!("Couldn't connect to server...");
            panic!("failed to connect to remits at localhost:4242: {}", e)
        }
    }
}
|
use crate::Asset;
/// How much someone is willing to exchange for an asset.
#[derive(
    Clone,
    Debug,
    Default,
    Eq,
    Ord,
    PartialEq,
    PartialOrd,
    parity_scale_codec::Decode,
    parity_scale_codec::Encode,
)]
pub struct OfferRate<B> {
    // The asset this offer refers to.
    asset: Asset,
    // How much of `B` is offered in exchange for `asset`.
    rate: B,
}
impl<B> OfferRate<B> {
    /// Creates a new instance from a given `asset` and `rate`.
    #[inline]
    pub fn new(asset: Asset, rate: B) -> Self {
        Self { asset, rate }
    }
    /// The asset this offer refers to.
    #[inline]
    pub const fn asset(&self) -> Asset {
        self.asset
    }
    /// The amount offered in exchange for [`Self::asset`].
    #[inline]
    pub const fn rate(&self) -> &B {
        &self.rate
    }
}
|
/* origin: FreeBSD /usr/src/lib/msun/src/e_acosf.c */
/*
* Conversion to float by Ian Lance Taylor, Cygnus Support, ian@cygnus.com.
*/
/*
* ====================================================
* Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved.
*
* Developed at SunPro, a Sun Microsystems, Inc. business.
* Permission to use, copy, modify, and distribute this
* software is freely granted, provided that this notice
* is preserved.
* ====================================================
*/
use super::sqrtf::sqrtf;
/* pi/2 split into a high part and a low correction term. */
const PIO2_HI: f32 = 1.5707962513e+00; /* 0x3fc90fda */
const PIO2_LO: f32 = 7.5497894159e-08; /* 0x33a22168 */
/* Coefficients of the rational approximation below (from FreeBSD's e_acosf.c). */
const P_S0: f32 = 1.6666586697e-01;
const P_S1: f32 = -4.2743422091e-02;
const P_S2: f32 = -8.6563630030e-03;
const Q_S1: f32 = -7.0662963390e-01;
/// Rational approximation R(z) used by the argument-reduction branches of
/// `acosf` (polynomial numerator over a linear denominator, Horner form).
fn r(z: f32) -> f32 {
    let p = z * (P_S0 + z * (P_S1 + z * P_S2));
    let q = 1. + z * Q_S1;
    p / q
}
/// Arccosine (f32)
///
/// Computes the inverse cosine (arc cosine) of the input value.
/// Arguments must be in the range -1 to 1.
/// Returns values in radians, in the range of 0 to pi.
#[cfg_attr(all(test, assert_no_panic), no_panic::no_panic)]
pub fn acosf(x: f32) -> f32 {
    let x1p_120 = f32::from_bits(0x03800000); // 0x1p-120 === 2 ^ (-120)
    let z: f32;
    let w: f32;
    let s: f32;
    let mut hx = x.to_bits();
    // `ix` is |x|'s bit pattern (sign bit cleared); used for range tests.
    let ix = hx & 0x7fffffff;
    /* |x| >= 1 or nan */
    if ix >= 0x3f800000 {
        if ix == 0x3f800000 {
            if (hx >> 31) != 0 {
                /* acos(-1) = pi (the tiny term keeps the result inexact) */
                return 2. * PIO2_HI + x1p_120;
            }
            /* acos(1) = 0 exactly */
            return 0.;
        }
        /* |x| > 1 or nan: 0/0 produces nan */
        return 0. / (x - x);
    }
    /* |x| < 0.5 */
    if ix < 0x3f000000 {
        if ix <= 0x32800000 {
            /* |x| < 2**-26: acos(x) ~= pi/2 to full precision */
            return PIO2_HI + x1p_120;
        }
        /* acos(x) = pi/2 - (x + x*z*R(z)), z = x*x */
        return PIO2_HI - (x - (PIO2_LO - x * r(x * x)));
    }
    /* x < -0.5 */
    if (hx >> 31) != 0 {
        z = (1. + x) * 0.5;
        s = sqrtf(z);
        w = r(z) * s - PIO2_LO;
        return 2. * (PIO2_HI - (s + w));
    }
    /* x > 0.5: split sqrt(z) into a truncated high part `df` (low 12 mantissa
       bits zeroed) plus correction `c` to limit rounding error when doubling */
    z = (1. - x) * 0.5;
    s = sqrtf(z);
    hx = s.to_bits();
    let df = f32::from_bits(hx & 0xfffff000);
    let c = (z - df * df) / (s + df);
    w = r(z) * s + c;
    2. * (df + w)
}
|
use indexmap::{IndexMap, IndexSet};
use crate::{
dynamic::{Field, SchemaError},
registry::{MetaField, MetaType, Registry},
};
/// A GraphQL object type
///
/// # Examples
///
/// ```
/// use async_graphql::{dynamic::*, value, Value};
///
/// let query = Object::new("Query").field(Field::new("value", TypeRef::named_nn(TypeRef::STRING), |ctx| {
/// FieldFuture::new(async move { Ok(Some(Value::from("abc"))) })
/// }));
///
/// # tokio::runtime::Runtime::new().unwrap().block_on(async move {
///
/// let schema = Schema::build(query.type_name(), None, None)
/// .register(query)
/// .finish()?;
///
/// assert_eq!(
/// schema
/// .execute("{ value }")
/// .await
/// .into_result()
/// .unwrap()
/// .data,
/// value!({ "value": "abc" })
/// );
///
/// # Ok::<_, SchemaError>(())
/// # }).unwrap();
/// ```
#[derive(Debug)]
pub struct Object {
    pub(crate) name: String,
    pub(crate) description: Option<String>,
    // Field name -> field definition, in insertion order.
    pub(crate) fields: IndexMap<String, Field>,
    // Names of the interfaces this object implements.
    pub(crate) implements: IndexSet<String>,
    // Entity keys added via `key()`; registered only when non-empty.
    keys: Vec<String>,
    extends: bool,
    shareable: bool,
    inaccessible: bool,
    tags: Vec<String>,
}
impl Object {
    /// Create a GraphQL object type
    #[inline]
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            description: None,
            fields: Default::default(),
            implements: Default::default(),
            keys: Vec::new(),
            extends: false,
            shareable: false,
            inaccessible: false,
            tags: Vec::new(),
        }
    }
    impl_set_description!();
    impl_set_extends!();
    impl_set_shareable!();
    impl_set_inaccessible!();
    impl_set_tags!();
    /// Add a field to the object
    ///
    /// # Panics
    ///
    /// Panics if a field with the same name already exists.
    #[inline]
    pub fn field(mut self, field: Field) -> Self {
        assert!(
            !self.fields.contains_key(&field.name),
            "Field `{}` already exists",
            field.name
        );
        self.fields.insert(field.name.clone(), field);
        self
    }
    /// Add an interface implementation to the object
    ///
    /// # Panics
    ///
    /// Panics if the interface was already added.
    #[inline]
    pub fn implement(mut self, interface: impl Into<String>) -> Self {
        let interface = interface.into();
        assert!(
            !self.implements.contains(&interface),
            "Implement `{}` already exists",
            interface
        );
        self.implements.insert(interface);
        self
    }
    /// Add an entity key
    ///
    /// # Examples
    ///
    /// ```
    /// use async_graphql::{dynamic::*, Value};
    ///
    /// let obj = Object::new("MyObj")
    /// .field(Field::new("a", TypeRef::named(TypeRef::INT), |_| {
    /// FieldFuture::new(async move { Ok(Some(Value::from(10))) })
    /// }))
    /// .field(Field::new("b", TypeRef::named(TypeRef::INT), |_| {
    /// FieldFuture::new(async move { Ok(Some(Value::from(20))) })
    /// }))
    /// .field(Field::new("c", TypeRef::named(TypeRef::INT), |_| {
    /// FieldFuture::new(async move { Ok(Some(Value::from(30))) })
    /// }))
    /// .key("a b")
    /// .key("c");
    /// ```
    pub fn key(mut self, fields: impl Into<String>) -> Self {
        self.keys.push(fields.into());
        self
    }
    /// Returns the type name
    #[inline]
    pub fn type_name(&self) -> &str {
        &self.name
    }
    /// Converts this object into registry metadata: one `MetaField` per field
    /// (including its arguments), a `MetaType::Object` entry, and the list of
    /// implemented interfaces.
    pub(crate) fn register(&self, registry: &mut Registry) -> Result<(), SchemaError> {
        let mut fields = IndexMap::new();
        for field in self.fields.values() {
            // Collect the field's arguments as meta input values.
            let mut args = IndexMap::new();
            for argument in field.arguments.values() {
                args.insert(argument.name.clone(), argument.to_meta_input_value());
            }
            fields.insert(
                field.name.clone(),
                MetaField {
                    name: field.name.clone(),
                    description: field.description.clone(),
                    args,
                    ty: field.ty.to_string(),
                    deprecation: field.deprecation.clone(),
                    cache_control: Default::default(),
                    external: field.external,
                    requires: field.requires.clone(),
                    provides: field.provides.clone(),
                    visible: None,
                    shareable: field.shareable,
                    inaccessible: field.inaccessible,
                    tags: field.tags.clone(),
                    override_from: field.override_from.clone(),
                    compute_complexity: None,
                },
            );
        }
        registry.types.insert(
            self.name.clone(),
            MetaType::Object {
                name: self.name.clone(),
                description: self.description.clone(),
                fields,
                cache_control: Default::default(),
                extends: self.extends,
                shareable: self.shareable,
                // Only emit `keys` when at least one entity key was declared.
                keys: if !self.keys.is_empty() {
                    Some(self.keys.clone())
                } else {
                    None
                },
                visible: None,
                inaccessible: self.inaccessible,
                tags: self.tags.clone(),
                is_subscription: false,
                rust_typename: None,
            },
        );
        for interface in &self.implements {
            registry.add_implements(&self.name, interface);
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use crate::{dynamic::*, value, Value};
    // Verifies that a resolver can downcast a borrowed parent value that was
    // handed out of the schema's shared data via `FieldValue::borrowed_any`.
    #[tokio::test]
    async fn borrow_context() {
        struct MyObjData {
            value: i32,
        }
        let my_obj =
            Object::new("MyObj").field(Field::new("value", TypeRef::named(TypeRef::INT), |ctx| {
                FieldFuture::new(async move {
                    Ok(Some(Value::from(
                        ctx.parent_value.try_downcast_ref::<MyObjData>()?.value,
                    )))
                })
            }));
        let query = Object::new("Query").field(Field::new(
            "obj",
            TypeRef::named_nn(my_obj.type_name()),
            |ctx| {
                FieldFuture::new(async move {
                    Ok(Some(FieldValue::borrowed_any(
                        ctx.data_unchecked::<MyObjData>(),
                    )))
                })
            },
        ));
        let schema = Schema::build("Query", None, None)
            .register(query)
            .register(my_obj)
            .data(MyObjData { value: 123 })
            .finish()
            .unwrap();
        assert_eq!(
            schema
                .execute("{ obj { value } }")
                .await
                .into_result()
                .unwrap()
                .data,
            value!({
                "obj": {
                    "value": 123,
                }
            })
        );
    }
}
|
use crate::{
ast::{
self, Assignment, Ast, AstKind, AstString, Call, Identifier, Int, List, MatchCase,
OrPattern, Struct, StructAccess, Symbol, Text, TextPart,
},
builtin_functions::{self, BuiltinFunction},
cst::{self, CstDb},
cst_to_ast::CstToAst,
error::{CompilerError, CompilerErrorPayload},
hir::{self, Body, Expression, Function, HirError, IdKey, Pattern, PatternIdentifierId},
id::IdGenerator,
module::{Module, Package},
position::Offset,
string_to_rcst::ModuleError,
utils::AdjustCasingOfFirstLetter,
};
use itertools::Itertools;
use rustc_hash::FxHashMap;
use std::{collections::hash_map::Entry, mem, ops::Range, sync::Arc};
// Salsa query group that lowers a module's ASTs to HIR and maps IDs between
// the CST, AST, and HIR layers. The `transparent` queries are cheap lookups
// that are recomputed on demand instead of cached; only `hir` is memoized.
#[salsa::query_group(AstToHirStorage)]
pub trait AstToHir: CstDb + CstToAst {
    #[salsa::transparent]
    fn hir_to_ast_id(&self, id: hir::Id) -> Option<ast::Id>;
    #[salsa::transparent]
    fn hir_to_cst_id(&self, id: hir::Id) -> Option<cst::Id>;
    #[salsa::transparent]
    fn hir_id_to_span(&self, id: hir::Id) -> Option<Range<Offset>>;
    #[salsa::transparent]
    fn hir_id_to_display_span(&self, id: hir::Id) -> Option<Range<Offset>>;
    #[salsa::transparent]
    fn ast_to_hir_id(&self, id: ast::Id) -> Vec<hir::Id>;
    #[salsa::transparent]
    fn cst_to_hir_id(&self, module: Module, id: cst::Id) -> Vec<hir::Id>;
    fn hir(&self, module: Module) -> HirResult;
}
pub type HirResult = Result<(Arc<Body>, Arc<FxHashMap<hir::Id, ast::Id>>), ModuleError>;
/// Looks up the AST node a HIR ID was lowered from, if the module compiles.
fn hir_to_ast_id(db: &dyn AstToHir, id: hir::Id) -> Option<ast::Id> {
    let mapping = db.hir(id.module.clone()).ok()?.1;
    mapping.get(&id).cloned()
}
/// Maps a HIR ID all the way back to its CST ID (via the AST).
fn hir_to_cst_id(db: &dyn AstToHir, id: hir::Id) -> Option<cst::Id> {
    let ast_id = db.hir_to_ast_id(id)?;
    db.ast_to_cst_id(ast_id)
}
/// Returns the source span of the AST node backing a HIR ID.
fn hir_id_to_span(db: &dyn AstToHir, id: hir::Id) -> Option<Range<Offset>> {
    let ast_id = db.hir_to_ast_id(id)?;
    db.ast_id_to_span(ast_id)
}
/// Returns the user-facing (display) span of the CST node behind a HIR ID.
fn hir_id_to_display_span(db: &dyn AstToHir, id: hir::Id) -> Option<Range<Offset>> {
    let cst_id = db.hir_to_cst_id(id.clone())?;
    let cst = db.find_cst(id.module, cst_id);
    Some(cst.display_span())
}
/// Finds every HIR ID that was lowered from the given AST node.
///
/// Returns an empty vector when the module fails to compile.
fn ast_to_hir_id(db: &dyn AstToHir, id: ast::Id) -> Vec<hir::Id> {
    match db.hir(id.module.clone()) {
        Ok((_, mapping)) => mapping
            .iter()
            .filter(|(_, ast_id)| *ast_id == &id)
            .map(|(hir_id, _)| hir_id.clone())
            .collect_vec(),
        Err(_) => vec![],
    }
}
/// Finds every HIR ID that originated from the given CST node, going through
/// the CST → AST → HIR mappings.
fn cst_to_hir_id(db: &dyn AstToHir, module: Module, id: cst::Id) -> Vec<hir::Id> {
    db.cst_to_ast_id(module, id)
        .into_iter()
        .flat_map(|ast_id| db.ast_to_hir_id(ast_id))
        .collect_vec()
}
/// Compiles a whole module's AST to HIR; the salsa-cached entry point.
fn hir(db: &dyn AstToHir, module: Module) -> HirResult {
    let (ast, _) = db.ast(module.clone())?;
    let (body, id_mapping) = compile_top_level(db, module, &ast);
    Ok((Arc::new(body), Arc::new(id_mapping)))
}
/// Lowers a module's top-level ASTs, adding the synthetic `use` function, the
/// sparkles struct (Builtins package only), and the exports struct.
fn compile_top_level(
    db: &dyn AstToHir,
    module: Module,
    ast: &[Ast],
) -> (Body, FxHashMap<hir::Id, ast::Id>) {
    let mut ctx = Context {
        module: module.clone(),
        id_mapping: FxHashMap::default(),
        db,
        public_identifiers: FxHashMap::default(),
        body: Body::default(),
        id_prefix: hir::Id::new(module.clone(), vec![]),
        identifiers: im::HashMap::new(),
        is_top_level: true,
        use_id: None,
    };
    // Only the Builtins package receives the "✨" struct of raw builtins.
    if module.package == Package::builtins() {
        ctx.generate_sparkles();
    }
    ctx.generate_use();
    ctx.compile(ast);
    ctx.generate_exports_struct();
    // Synthetic expressions carry `None`; keep only IDs with an AST origin.
    let mut id_mapping = FxHashMap::default();
    for (hir_id, ast_id) in ctx.id_mapping {
        if let Some(ast_id) = ast_id {
            id_mapping.insert(hir_id, ast_id);
        }
    }
    (ctx.body, id_mapping)
}
/// Mutable state shared while lowering one module's ASTs to HIR.
struct Context<'a> {
    module: Module,
    /// Every generated HIR ID, mapped to the AST node it came from (if any).
    id_mapping: FxHashMap<hir::Id, Option<ast::Id>>,
    db: &'a dyn AstToHir,
    /// Top-level assignments marked public; used to build the exports struct.
    public_identifiers: FxHashMap<String, hir::Id>,
    /// The body currently being filled (swapped out while inside a scope).
    body: Body,
    /// Prefix under which newly created hierarchical IDs are nested.
    id_prefix: hir::Id,
    /// Identifiers visible at this point, name → defining HIR ID.
    identifiers: im::HashMap<String, hir::Id>,
    /// Whether we're directly at the module's top level (public assignments
    /// are only legal there).
    is_top_level: bool,
    /// The module's synthetic `use` function, once generated.
    use_id: Option<hir::Id>,
}
impl Context<'_> {
fn start_non_top_level(&mut self) -> NonTopLevelResetState {
NonTopLevelResetState(mem::replace(&mut self.is_top_level, false))
}
fn end_non_top_level(&mut self, reset_state: NonTopLevelResetState) {
self.is_top_level = reset_state.0;
}
}
struct NonTopLevelResetState(bool);
impl Context<'_> {
    /// Opens a nested scope: swaps in an empty body and snapshots the ID
    /// prefix, visible identifiers, and the top-level flag.
    #[must_use]
    fn start_scope(&mut self) -> ScopeResetState {
        let non_top_level_reset_state = self.start_non_top_level();
        ScopeResetState {
            body: mem::take(&mut self.body),
            id_prefix: self.id_prefix.clone(),
            identifiers: self.identifiers.clone(),
            non_top_level_reset_state,
        }
    }
    /// Closes the scope opened by [`Self::start_scope`], restoring the saved
    /// state and returning the body that was built inside the scope.
    #[must_use]
    fn end_scope(&mut self, reset_state: ScopeResetState) -> Body {
        let ScopeResetState {
            body,
            id_prefix,
            identifiers,
            non_top_level_reset_state,
        } = reset_state;
        let inner_body = mem::replace(&mut self.body, body);
        self.id_prefix = id_prefix;
        self.identifiers = identifiers;
        self.end_non_top_level(non_top_level_reset_state);
        inner_body
    }
}
/// `Context` state captured by [`Context::start_scope`] and restored by
/// [`Context::end_scope`].
struct ScopeResetState {
    body: Body,
    id_prefix: hir::Id,
    identifiers: im::HashMap<String, hir::Id>,
    non_top_level_reset_state: NonTopLevelResetState,
}
impl Context<'_> {
fn compile(&mut self, asts: &[Ast]) -> hir::Id {
if asts.is_empty() {
self.push(None, Expression::nothing(), None)
} else {
let mut last_id = None;
for ast in asts {
last_id = Some(self.compile_single(ast));
}
last_id.unwrap()
}
}
    /// Lowers a single AST node, pushing expressions into the current body and
    /// returning the HIR ID representing the node's value.
    fn compile_single(&mut self, ast: &Ast) -> hir::Id {
        match &ast.kind {
            AstKind::Int(Int(int)) => {
                self.push(Some(ast.id.clone()), Expression::Int(int.to_owned()), None)
            }
            AstKind::Text(text) => self.lower_text(Some(ast.id.clone()), text),
            AstKind::TextPart(TextPart(string)) => self.push(
                Some(ast.id.clone()),
                Expression::Text(string.value.to_owned()),
                None,
            ),
            // An identifier becomes a reference to whatever it currently
            // resolves to; unknown names are lowered to error expressions.
            AstKind::Identifier(Identifier(name)) => {
                let reference = match self.identifiers.get(&name.value) {
                    Some(reference) => reference.to_owned(),
                    None => {
                        return self.push_error(
                            Some(name.id.clone()),
                            self.db.ast_id_to_display_span(ast.id.clone()).unwrap(),
                            HirError::UnknownReference {
                                name: name.value.clone(),
                            },
                        );
                    }
                };
                self.push(Some(ast.id.clone()), Expression::Reference(reference), None)
            }
            AstKind::Symbol(Symbol(symbol)) => self.push(
                Some(ast.id.clone()),
                Expression::Symbol(symbol.value.to_owned()),
                None,
            ),
            AstKind::List(List(items)) => {
                let hir_items = items
                    .iter()
                    .map(|item| self.compile_single(item))
                    .collect_vec();
                self.push(Some(ast.id.clone()), Expression::List(hir_items), None)
            }
            AstKind::Struct(Struct { fields }) => {
                // A missing key means shorthand syntax was used: the key is the
                // value's identifier, capitalized into a symbol.
                let fields = fields
                    .iter()
                    .map(|(key, value)| {
                        let key = key
                            .as_ref()
                            .map(|key| self.compile_single(key))
                            .unwrap_or_else(|| match &value.kind {
                                AstKind::Identifier(Identifier(name)) => self.push(
                                    Some(value.id.clone()),
                                    Expression::Symbol(name.value.uppercase_first_letter()),
                                    None,
                                ),
                                AstKind::Error { errors, .. } => self.push(
                                    Some(ast.id.clone()),
                                    Expression::Error {
                                        child: None,
                                        // TODO: These errors are already reported for the value itself.
                                        errors: errors.clone(),
                                    },
                                    None,
                                ),
                                _ => panic!(
                                    "Expected identifier in struct shorthand, got {value:?}."
                                ),
                            });
                        (key, self.compile_single(value))
                    })
                    .collect();
                self.push(Some(ast.id.clone()), Expression::Struct(fields), None)
            }
            AstKind::StructAccess(struct_access) => {
                self.lower_struct_access(Some(ast.id.clone()), struct_access)
            }
            AstKind::Function(function) => self.compile_function(ast.id.clone(), function, None),
            AstKind::Call(call) => self.lower_call(Some(ast.id.clone()), call),
            AstKind::Assignment(Assignment { is_public, body }) => {
                let (names, body) = match body {
                    // `foo a = …`: the assigned name becomes a reference to
                    // the compiled function.
                    ast::AssignmentBody::Function { name, function } => {
                        let name_string = name.value.to_owned();
                        let body =
                            self.compile_function(ast.id.clone(), function, Some(name_string));
                        let name_id = self.push(
                            Some(name.id.clone()),
                            Expression::Reference(body.clone()),
                            Some(name.value.to_owned()),
                        );
                        (vec![(name.value.to_owned(), name_id)], body)
                    }
                    // `pattern = …`: destructure the right-hand side and bind
                    // each identifier captured by the pattern.
                    ast::AssignmentBody::Body { pattern, body } => {
                        let reset_state = self.start_non_top_level();
                        let body = self.compile(body);
                        self.end_non_top_level(reset_state);
                        let (pattern, identifier_ids) = self.lower_pattern(pattern);
                        let body = self.push(
                            None,
                            Expression::Destructure {
                                expression: body,
                                pattern,
                            },
                            None,
                        );
                        let names = identifier_ids
                            .into_iter()
                            .sorted_by_key(|(_, (_, identifier_id))| identifier_id.0)
                            .map(|(name, (ast_id, identifier_id))| {
                                let id = self.push(
                                    Some(ast_id),
                                    Expression::PatternIdentifierReference(identifier_id),
                                    Some(name.to_owned()),
                                );
                                (name, id)
                            })
                            .collect_vec();
                        // The assignment expression itself evaluates to `Nothing`.
                        self.push(
                            Some(ast.id.clone()),
                            Expression::Symbol("Nothing".to_string()),
                            None,
                        );
                        (names, body)
                    }
                };
                // Public assignments are only legal at the top level and must
                // not clash with earlier public names.
                if *is_public {
                    if self.is_top_level {
                        for (name, id) in names {
                            if self.public_identifiers.contains_key(&name) {
                                self.push_error(
                                    None,
                                    self.db.ast_id_to_display_span(ast.id.clone()).unwrap(),
                                    HirError::PublicAssignmentWithSameName {
                                        name: name.to_owned(),
                                    },
                                );
                            }
                            self.public_identifiers.insert(name, id);
                        }
                    } else {
                        self.push_error(
                            None,
                            self.db.ast_id_to_display_span(ast.id.clone()).unwrap(),
                            HirError::PublicAssignmentInNotTopLevel,
                        );
                    }
                }
                body
            }
            AstKind::Match(ast::Match { expression, cases }) => {
                let expression = self.compile_single(expression);
                let reset_state = self.start_scope();
                let match_id = self.create_next_id(Some(ast.id.clone()), None);
                self.id_prefix = match_id.clone();
                let cases = cases
                    .iter()
                    .map(|case| match &case.kind {
                        AstKind::MatchCase(MatchCase { box pattern, body }) => {
                            let (pattern, pattern_identifiers) = self.lower_pattern(pattern);
                            let reset_state = self.start_scope();
                            // Bind the pattern's identifiers inside the case body.
                            for (name, (ast_id, identifier_id)) in pattern_identifiers {
                                self.push(
                                    Some(ast_id),
                                    Expression::PatternIdentifierReference(identifier_id),
                                    Some(name.to_owned()),
                                );
                            }
                            self.compile(body.as_ref());
                            let body = self.end_scope(reset_state);
                            (pattern, body)
                        }
                        AstKind::Error { errors, .. } => {
                            let pattern = Pattern::Error {
                                child: None,
                                errors: errors.to_owned(),
                            };
                            let reset_state = self.start_scope();
                            self.compile(&[]);
                            let body = self.end_scope(reset_state);
                            (pattern, body)
                        }
                        _ => unreachable!("Expected match case in match cases, got {case:?}."),
                    })
                    .collect_vec();
                // The scope is only for hierarchical IDs. The actual bodies are
                // inside the cases.
                let _ = self.end_scope(reset_state);
                self.push_with_existing_id(match_id, Expression::Match { expression, cases }, None)
            }
            AstKind::MatchCase(_) => {
                unreachable!("Match cases should be handled in match directly.")
            }
            AstKind::OrPattern(_) => {
                unreachable!("Or patterns should be handled in `PatternContext`.")
            }
            AstKind::Error { child, errors } => {
                let child = child.as_ref().map(|child| self.compile_single(child));
                self.push(
                    Some(ast.id.clone()),
                    Expression::Error {
                        child,
                        errors: errors.clone(),
                    },
                    None,
                )
            }
        }
    }
    /// Lowers a text literal with interpolation.
    ///
    /// Each non-text part is converted at runtime: `typeOf part` is compared
    /// against `Text`, and `toDebugText` is used as a fallback conversion.
    /// All parts are then folded with `textConcatenate`. An empty text
    /// becomes a single empty `Text` expression.
    fn lower_text(&mut self, id: Option<ast::Id>, text: &Text) -> hir::Id {
        let text_concatenate_function = self.push(
            None,
            Expression::Builtin(BuiltinFunction::TextConcatenate),
            None,
        );
        let type_of_function = self.push(None, Expression::Builtin(BuiltinFunction::TypeOf), None);
        let text_symbol = self.push(None, Expression::Symbol("Text".to_string()), None);
        let equals_function = self.push(None, Expression::Builtin(BuiltinFunction::Equals), None);
        let if_else_function = self.push(None, Expression::Builtin(BuiltinFunction::IfElse), None);
        let to_debug_text_function = self.push(
            None,
            Expression::Builtin(BuiltinFunction::ToDebugText),
            None,
        );
        let compiled_parts = text
            .0
            .iter()
            .map(|part| {
                let hir = self.compile_single(part);
                if part.kind.is_text_part() {
                    return hir;
                }
                // Convert the part to text if it is not already a text.
                let type_of = self.push(
                    None,
                    Expression::Call {
                        function: type_of_function.clone(),
                        arguments: vec![hir.clone()],
                    },
                    None,
                );
                let is_text = self.push(
                    None,
                    Expression::Call {
                        function: equals_function.clone(),
                        arguments: vec![type_of, text_symbol.clone()],
                    },
                    None,
                );
                // "Then" branch: the part is already a text; just reference it.
                let reset_state = self.start_scope();
                let then_function_id = self.create_next_id(None, None);
                self.id_prefix = then_function_id.clone();
                self.push(None, Expression::Reference(hir.clone()), None);
                let then_body = self.end_scope(reset_state);
                let then_function = self.push_with_existing_id(
                    then_function_id,
                    Expression::Function(Function {
                        parameters: vec![],
                        body: then_body,
                        fuzzable: false,
                    }),
                    None,
                );
                // "Else" branch: convert via `toDebugText`.
                let reset_state = self.start_scope();
                let else_function_id = self.create_next_id(None, None);
                self.id_prefix = else_function_id.clone();
                self.push(
                    None,
                    Expression::Call {
                        function: to_debug_text_function.clone(),
                        arguments: vec![hir],
                    },
                    None,
                );
                let else_body = self.end_scope(reset_state);
                let else_function = self.push_with_existing_id(
                    else_function_id,
                    Expression::Function(Function {
                        parameters: vec![],
                        body: else_body,
                        fuzzable: false,
                    }),
                    None,
                );
                self.push(
                    None,
                    Expression::Call {
                        function: if_else_function.clone(),
                        arguments: vec![is_text, then_function, else_function],
                    },
                    None,
                )
            })
            .collect_vec();
        compiled_parts
            .into_iter()
            .reduce(|left, right| {
                self.push(
                    None,
                    Expression::Call {
                        function: text_concatenate_function.clone(),
                        arguments: vec![left, right],
                    },
                    None,
                )
            })
            .unwrap_or_else(|| self.push(id, Expression::Text("".to_string()), None))
    }
    /// Lowers a function literal: opens a fresh scope, binds each parameter as
    /// an identifier, compiles the body, and emits a `Function` expression
    /// under a pre-created ID so parameters can be children of that ID.
    fn compile_function(
        &mut self,
        id: ast::Id,
        function: &ast::Function,
        identifier: Option<String>,
    ) -> hir::Id {
        let reset_state = self.start_scope();
        let function_id = self.create_next_id(Some(id), identifier);
        self.id_prefix = function_id.clone();
        for parameter in function.parameters.iter() {
            let name = parameter.value.to_string();
            let id = self.create_next_id(Some(parameter.id.clone()), Some(name.clone()));
            self.body.identifiers.insert(id.clone(), name.clone());
            self.identifiers.insert(name, id);
        }
        self.compile(&function.body);
        let inner_body = self.end_scope(reset_state);
        self.push_with_existing_id(
            function_id.clone(),
            Expression::Function(Function {
                // Parameter IDs are re-derived as children of the function ID.
                parameters: function
                    .parameters
                    .iter()
                    .map(|parameter| function_id.child(parameter.value.clone()))
                    .collect(),
                body: inner_body,
                fuzzable: function.fuzzable,
            }),
            None,
        )
    }
    /// Lowers `foo.bar` into a `structGet` call with the struct and the
    /// capitalized key symbol.
    fn lower_struct_access(
        &mut self,
        id: Option<ast::Id>,
        struct_access: &StructAccess,
    ) -> hir::Id {
        // We forward struct accesses to `(use "Builtins").structGet` to reuse
        // its validation logic. However, this only works outside the Builtins
        // package.
        let struct_get_id = if self.module.package == Package::builtins() {
            self.push(None, Expression::Builtin(BuiltinFunction::StructGet), None)
        } else {
            let builtins = self.push(None, Expression::Text("Builtins".to_string()), None);
            let builtins_id = self.push(
                None,
                Expression::Call {
                    function: self.use_id.clone().unwrap(),
                    arguments: vec![builtins],
                },
                None,
            );
            // Fetch the `StructGet` entry out of the Builtins exports struct.
            let struct_get_id =
                self.push(None, Expression::Builtin(BuiltinFunction::StructGet), None);
            let struct_get = self.push(None, Expression::Symbol("StructGet".to_string()), None);
            self.push(
                None,
                Expression::Call {
                    function: struct_get_id,
                    arguments: vec![builtins_id, struct_get],
                },
                None,
            )
        };
        let struct_ = self.compile_single(&struct_access.struct_);
        let key_id = self.push(
            Some(struct_access.key.id.clone()),
            Expression::Symbol(struct_access.key.value.uppercase_first_letter()),
            None,
        );
        self.push(
            id,
            Expression::Call {
                function: struct_get_id,
                arguments: vec![struct_, key_id],
            },
            None,
        )
    }
    /// Lowers a call. Calls to the identifier `needs` are special-cased into
    /// `Expression::Needs`; pipe-generated calls prepend the piped value as
    /// the first argument.
    fn lower_call(&mut self, id: Option<ast::Id>, call: &Call) -> hir::Id {
        let (mut arguments, uncompiled_arguments) = if call.is_from_pipe {
            let [first_argument, remaining @ ..] = &call.arguments[..] else {
                panic!("Calls that are generated from the pipe operator must have at least one argument");
            };
            (vec![(self.compile_single(first_argument))], remaining)
        } else {
            (vec![], &call.arguments[..])
        };
        let function = match &call.receiver.kind {
            AstKind::Identifier(Identifier(AstString {
                id: name_id,
                value: name,
            })) if name == "needs" => {
                // NOTE(review): for pipe-generated `needs` calls, the first
                // argument was already compiled above and is compiled again
                // here via `call.arguments` — confirm this is intended.
                let expression = match &self.lower_call_arguments(&call.arguments[..])[..] {
                    [condition, reason] => Expression::Needs {
                        condition: condition.clone(),
                        reason: reason.clone(),
                    },
                    // With one argument, derive the reason text from the
                    // condition's source code, if a span is available.
                    [condition] => Expression::Needs {
                        condition: condition.clone(),
                        reason: self.push(
                            None,
                            Expression::Text(
                                match self.db.ast_id_to_span(call.arguments[0].id.clone()) {
                                    Some(span) => format!(
                                        "`{}` was not satisfied",
                                        &self
                                            .db
                                            .get_module_content_as_string(
                                                call.arguments[0].id.module.clone()
                                            )
                                            .unwrap()
                                            [*span.start..*span.end],
                                    ),
                                    None => "the needs of a function were not met".to_string(),
                                },
                            ),
                            None,
                        ),
                    },
                    // Zero or more than two arguments is an error.
                    _ => {
                        return self.push_error(
                            id,
                            self.db.ast_id_to_span(name_id.to_owned()).unwrap(),
                            HirError::NeedsWithWrongNumberOfArguments {
                                num_args: call.arguments.len(),
                            },
                        );
                    }
                };
                return self.push(id, expression, None);
            }
            _ => self.compile_single(call.receiver.as_ref()),
        };
        arguments.extend(self.lower_call_arguments(uncompiled_arguments));
        self.push(
            id,
            Expression::Call {
                function,
                arguments,
            },
            None,
        )
    }
fn lower_call_arguments(&mut self, arguments: &[Ast]) -> Vec<hir::Id> {
arguments
.iter()
.map(|argument| self.compile_single(argument))
.collect_vec()
}
fn lower_pattern(&mut self, ast: &Ast) -> (Pattern, PatternIdentifierIds) {
let mut context = PatternContext {
db: self.db,
module: self.module.clone(),
identifier_id_generator: Default::default(),
identifier_ids: Default::default(),
};
let pattern = context.compile_pattern(ast);
(pattern, context.identifier_ids)
}
fn push(
&mut self,
ast_id: Option<ast::Id>,
expression: Expression,
identifier: Option<String>,
) -> hir::Id {
let id = self.create_next_id(ast_id, identifier.clone());
self.push_with_existing_id(id, expression, identifier)
}
fn push_with_existing_id(
&mut self,
id: hir::Id,
expression: Expression,
identifier: Option<String>,
) -> hir::Id {
self.body
.push(id.to_owned(), expression, identifier.clone());
if let Some(identifier) = identifier {
self.identifiers.insert(identifier, id.clone());
}
id
}
fn push_error(
&mut self,
ast_id: Option<ast::Id>,
span: Range<Offset>,
error: HirError,
) -> hir::Id {
self.push(
ast_id,
Expression::Error {
child: None,
errors: vec![CompilerError {
module: self.module.clone(),
span,
payload: error.into(),
}],
},
None,
)
}
    /// Creates a fresh HIR ID nested under `id_prefix` and records its AST
    /// origin in `id_mapping`.
    ///
    /// IDs are named (from `key`) or numeric; a growing disambiguator is
    /// appended until an unused ID is found.
    fn create_next_id(&mut self, ast_id: Option<ast::Id>, key: Option<String>) -> hir::Id {
        for disambiguator in 0.. {
            let last_part = if let Some(key) = &key {
                if disambiguator == 0 {
                    key.to_string().into()
                } else {
                    IdKey::Named {
                        name: key.to_string(),
                        disambiguator,
                    }
                }
            } else {
                disambiguator.into()
            };
            let id = self.id_prefix.child(last_part);
            if let Entry::Vacant(entry) = self.id_mapping.entry(id.clone()) {
                entry.insert(ast_id);
                return id;
            }
        }
        // The loop above can only exit by returning; `0..` never ends.
        unreachable!()
    }
fn generate_sparkles(&mut self) {
let mut sparkles_map = FxHashMap::default();
for builtin_function in builtin_functions::VALUES.iter() {
let symbol = self.push(
None,
Expression::Symbol(format!("{builtin_function:?}")),
None,
);
let builtin = self.push(None, Expression::Builtin(*builtin_function), None);
sparkles_map.insert(symbol, builtin);
}
let sparkles_map = Expression::Struct(sparkles_map);
self.push(None, sparkles_map, Some("✨".to_string()));
}
    /// Generates the synthetic `use` function every module gets; it wraps
    /// `Expression::UseModule` so user code can import other modules by
    /// relative path. Must run exactly once per context.
    fn generate_use(&mut self) {
        // HirId(~:test.candy:use) = function { HirId(~:test.candy:use:relativePath) ->
        //   HirId(~:test.candy:use:importedFileContent) = useModule
        //     currently in ~:test.candy:use:importedFileContent
        //     relative path: HirId(~:test.candy:use:relativePath)
        // }
        assert!(self.use_id.is_none());
        let reset_state = self.start_scope();
        let use_id = self.create_next_id(None, Some("use".to_string()));
        self.id_prefix = use_id.clone();
        let relative_path = use_id.child("relativePath");
        self.push(
            None,
            Expression::UseModule {
                current_module: self.module.clone(),
                relative_path: relative_path.clone(),
            },
            Some("importedModule".to_string()),
        );
        let inner_body = self.end_scope(reset_state);
        self.push_with_existing_id(
            use_id.clone(),
            Expression::Function(Function {
                parameters: vec![relative_path],
                body: inner_body,
                fuzzable: false,
            }),
            Some("use".to_string()),
        );
        self.use_id = Some(use_id);
    }
fn generate_exports_struct(&mut self) -> hir::Id {
// HirId(~:test.candy:100) = symbol Foo
// HirId(~:test.candy:102) = struct [
// HirId(~:test.candy:100): HirId(~:test.candy:101),
// ]
let mut exports = FxHashMap::default();
for (name, id) in self.public_identifiers.clone() {
exports.insert(
self.push(
None,
Expression::Symbol(name.uppercase_first_letter()),
None,
),
id,
);
}
self.push(None, Expression::Struct(exports), None)
}
}
/// Maps each identifier bound by a pattern to its originating AST ID and the
/// generated [`PatternIdentifierId`].
///
/// The `ast::Id` is the ID of the first occurrence of this identifier in the
/// AST.
type PatternIdentifierIds = FxHashMap<String, (ast::Id, PatternIdentifierId)>;
/// Helper for lowering pattern ASTs; collects the identifiers a pattern binds.
struct PatternContext<'a> {
    db: &'a dyn AstToHir,
    module: Module,
    /// Hands out fresh `PatternIdentifierId`s within this single pattern.
    identifier_id_generator: IdGenerator<PatternIdentifierId>,
    /// Bindings discovered so far, keyed by name.
    identifier_ids: PatternIdentifierIds,
}
impl<'a> PatternContext<'a> {
fn compile_pattern(&mut self, ast: &Ast) -> Pattern {
match &ast.kind {
AstKind::Int(Int(int)) => Pattern::Int(int.to_owned()),
AstKind::Text(Text(text)) => Pattern::Text(
text.iter()
.map(|part| match &part.kind {
AstKind::TextPart(TextPart(string)) => string.value.to_owned(),
_ => panic!("AST pattern can't contain text interpolations."),
})
.join(""),
),
AstKind::TextPart(_) => unreachable!("TextPart should not occur in AST patterns."),
AstKind::Identifier(Identifier(name)) => {
let (_, pattern_id) = self
.identifier_ids
.entry(name.value.to_owned())
.or_insert_with(|| {
(ast.id.to_owned(), self.identifier_id_generator.generate())
});
Pattern::NewIdentifier(pattern_id.to_owned())
}
AstKind::Symbol(Symbol(symbol)) => Pattern::Tag {
symbol: symbol.value.to_owned(),
value: None,
},
AstKind::List(List(items)) => {
let items = items
.iter()
.map(|item| self.compile_pattern(item))
.collect_vec();
Pattern::List(items)
}
AstKind::Struct(Struct { fields }) => {
let fields = fields
.iter()
.map(|(key, value)| {
let key = key
.as_ref()
.map(|key| self.compile_pattern(key))
.unwrap_or_else(|| match &value.kind {
AstKind::Identifier(Identifier(name)) => Pattern::Tag {
symbol: name.value.uppercase_first_letter(),
value: None,
},
AstKind::Error { errors, .. } => Pattern::Error {
child: None,
// TODO: These errors are already reported for the value itself.
errors: errors.to_owned(),
},
_ => panic!(
"Expected identifier in struct shorthand, got {value:?}."
),
});
(key, self.compile_pattern(value))
})
.collect();
Pattern::Struct(fields)
}
AstKind::Call(call) => {
let receiver = self.compile_pattern(&call.receiver);
let Pattern::Tag { symbol, value } = receiver else {
return self.error(ast, HirError::PatternContainsCall);
};
if value.is_some() {
return self.error(ast, HirError::PatternContainsCall);
}
if call.arguments.len() != 1 {
return self.error(ast, HirError::PatternContainsCall);
}
Pattern::Tag {
symbol,
value: Some(Box::new(self.compile_pattern(&call.arguments[0]))),
}
}
AstKind::StructAccess(_)
| AstKind::Function(_)
| AstKind::Assignment(_)
| AstKind::Match(_)
| AstKind::MatchCase(_) => {
panic!(
"AST pattern can't contain struct access, function, call, assignment, match, or match case, but found {ast:?}."
)
}
AstKind::OrPattern(OrPattern(patterns)) => {
let patterns = patterns
.iter()
.map(|pattern| self.compile_pattern(pattern))
.collect();
Pattern::Or(patterns)
}
AstKind::Error { child, errors, .. } => {
let child = child
.as_ref()
.map(|child| Box::new(self.compile_pattern(child)));
Pattern::Error {
child,
errors: errors.to_owned(),
}
}
}
}
fn error(&self, ast: &Ast, error: HirError) -> Pattern {
Pattern::Error {
child: None,
errors: vec![CompilerError {
module: self.module.clone(),
span: self.db.ast_id_to_span(ast.id.clone()).unwrap(),
payload: CompilerErrorPayload::Hir(error),
}],
}
}
}
|
//! Visual state container.
pub mod selection;
/// A group of mutually exclusive widget states occupying the bits of `MASK`.
pub trait StateGroup {
    const MASK: u32;
}
/// A single state inside a [`StateGroup`]. `VALUE` is expected to lie within
/// the group's `MASK` bits (see `WidgetState::has_state`).
pub trait State {
    type Group: StateGroup;
    const VALUE: u32;
}
// Declares state groups and their states in one go: for each `[Group: mask]`
// entry it emits a unit struct implementing `StateGroup`, and for each state
// a unit struct implementing `State` with the given bit value.
#[macro_export]
macro_rules! state_group {
    ($([$group:ident: $mask:literal] = {
        $($state:ident = $value:literal),+ $(,)?
    })+) => {
        $(
            pub struct $group;
            impl $crate::state::StateGroup for $group {
                const MASK: u32 = $mask;
            }
            $(
                pub struct $state;
                impl $crate::state::State for $state {
                    type Group = $group;
                    const VALUE: u32 = $value;
                }
            )+
        )+
    };
}
/// Packed widget state: each `StateGroup` claims a bit mask in the inner
/// `u32` (masks are expected to be disjoint — not enforced here).
#[derive(Copy, Clone, Default)]
pub struct WidgetState(u32);
impl WidgetState {
    /// Returns `true` when the bits of the state's group currently hold
    /// exactly the state's value. The `_state` argument only drives type
    /// inference.
    pub fn has_state<S: State>(self, _state: S) -> bool {
        (self.0 & S::Group::MASK) == S::VALUE
    }
    /// Installs the state within its group (clearing the group's other bits),
    /// returning whether the stored bits actually changed.
    pub fn set_state<S: State>(&mut self, _state: S) -> bool {
        let previous = self.0;
        let cleared = previous & !S::Group::MASK;
        self.0 = cleared | S::VALUE;
        self.0 != previous
    }
}
|
//! This module defines the CommonTokens type,
//! used to pass constants of type from `syn` to
//! many functions in the `abi_stable_derive_lib::stable_abi` module.
use proc_macro2::{Span, TokenStream as TokenStream2};
use std::{
cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd},
marker::PhantomData,
};
use crate::Arenas;
// Generates the `CommonTokens` struct: one field per declared entry, each
// constructed in `new` according to the section it was declared in
// (`with_new` via `::new(span)`, `token` via `Default`, the string sections
// via `syn` parsing/construction).
macro_rules! declare_common_tokens {
    (
        with_new[ $( $field_new:ident = $token_new:ty , )* ]
        token[ $( $field_token:ident = $token_token:ident , )* ]
        token_streams[ $( $field_ts:ident = $ts_str:expr , )* ]
        types[ $( $field_ty:ident = $ty_str:expr , )* ]
        idents[ $( $field_ident:ident = $ident_str:expr , )* ]
        lifetime[ $( $lifetime_ident:ident = $lifetime_str:expr , )* ]
        str_lits[ $( $strlit_ident:ident = $strlit_str:expr , )* ]
    ) => {
        #[derive(Debug)]
        pub(crate) struct CommonTokens<'a>{
            $( pub(crate) $field_new : $token_new , )*
            $( pub(crate) $field_token : ::syn::token::$token_token , )*
            $( pub(crate) $field_ts : TokenStream2 , )*
            $( pub(crate) $field_ty : ::syn::Type , )*
            $( pub(crate) $field_ident : ::syn::Ident , )*
            $( pub(crate) $lifetime_ident : ::syn::Lifetime , )*
            $( pub(crate) $strlit_ident : ::syn::LitStr , )*
            _marker: PhantomData<&'a ()>,
        }
        impl<'a> CommonTokens<'a>{
            // Everything is parsed/constructed at `Span::call_site()`.
            #[allow(unused_variables)]
            pub(crate) fn new(arenas:&'a Arenas)->Self{
                let span=Span::call_site();
                Self{
                    $( $field_new : < $token_new >::new(span) , )*
                    $( $field_token : Default::default() , )*
                    $( $field_ts : ::syn::parse_str($ts_str).expect("BUG") , )*
                    $( $field_ty : ::syn::parse_str($ty_str).expect("BUG") , )*
                    $( $field_ident : ::syn::Ident::new($ident_str,span) , )*
                    $( $lifetime_ident : ::syn::parse_str($lifetime_str).expect("BUG") , )*
                    $( $strlit_ident : ::syn::LitStr::new($strlit_str,span) , )*
                    _marker: PhantomData,
                }
            }
        }
        // `AsRef` impls allow borrowing the `with_new` tokens generically.
        $(
            impl<'a> AsRef<$token_new> for CommonTokens<'a>{
                fn as_ref(&self)->&$token_new{
                    &self.$field_new
                }
            }
        )*
    }
}
impl<'a> Eq for CommonTokens<'a> {}
impl<'a> PartialEq for CommonTokens<'a> {
    /// Equality ignores the contained tokens entirely: every instance is
    /// considered equal.
    fn eq(&self, _other: &Self) -> bool {
        true
    }
}
impl<'a> PartialOrd for CommonTokens<'a> {
    /// Delegates to [`Ord`] so the two orderings can never disagree
    /// (fixes clippy's `non_canonical_partial_ord_impl`).
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl<'a> Ord for CommonTokens<'a> {
    /// Consistent with `PartialEq`: all instances are considered equal.
    fn cmp(&self, _other: &Self) -> Ordering {
        Ordering::Equal
    }
}
// Instantiates `CommonTokens` with the concrete tokens, type strings, and
// identifier constants used throughout the `stable_abi` derive.
declare_common_tokens! {
    with_new[
        start_len_tokens=crate::common_tokens::StartLenTokens,
        fn_pointer_tokens=crate::common_tokens::FnPointerTokens,
    ]
    token[
        and_=And,
        comma=Comma,
        equal=Eq,
        colon2=Colon2,
        bracket=Bracket,
        paren=Paren,
        lt=Lt,
        gt=Gt,
    ]
    token_streams[
        und_storage="__Storage,",
    ]
    types[
        empty_tuple="()",
    ]
    idents[
        some="Some",
        none="None",
        new="new",
        comp_tl_fields="__CompTLFields",
        //layout="LAYOUT",
        static_equivalent="__GetStaticEquivalent",
        cap_opaque_field="OPAQUE_FIELD",
        cap_sabi_opaque_field="SABI_OPAQUE_FIELD",
    ]
    lifetime[
        static_lt="'static",
    ]
    str_lits[
    ]
}
|
use super::{checkout::Checkout, subscription::Subscription, virtual_currency::VirtualCurrency};
use serde::Deserialize;
/// Information about a purchase (translated from Russian: "Информация о покупке").
/// https://developers.xsolla.com/ru/api/v2/getting-started/#api_param_webhooks_payment_purchase
#[derive(Debug, Deserialize)]
pub struct PurchaseInfo {
    // Each optional section is present only when the webhook payload
    // includes it.
    pub virtual_currency: Option<VirtualCurrency>,
    pub checkout: Option<Checkout>,
    pub subscription: Option<Subscription>,
    pub merchant_id: i32,
}
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from ../gir-files
// DO NOT EDIT
use crate::Session;
use glib::object::Cast;
use glib::translate::*;
use std::fmt;
// GObject wrapper for `SoupSessionAsync`, extending `Session`.
// Generated by gir — the `match fn` block wires up the GType lookup.
glib::wrapper! {
    #[doc(alias = "SoupSessionAsync")]
    pub struct SessionAsync(Object<ffi::SoupSessionAsync, ffi::SoupSessionAsyncClass>) @extends Session;
    match fn {
        type_ => || ffi::soup_session_async_get_type(),
    }
}
impl SessionAsync {
    /// Creates a new asynchronous soup session.
    ///
    /// Panics (via `assert_initialized_main_thread!`) if GLib has not been
    /// initialized on the main thread.
    #[doc(alias = "soup_session_async_new")]
    pub fn new() -> SessionAsync {
        assert_initialized_main_thread!();
        // The FFI constructor hands back a full reference; `unsafe_cast`
        // assumes it really is a `SoupSessionAsync` (per the gir definition).
        unsafe {
            Session::from_glib_full(ffi::soup_session_async_new()).unsafe_cast()
        }
    }
    //#[doc(alias = "soup_session_async_new_with_options")]
    //#[doc(alias = "new_with_options")]
    //pub fn with_options(optname1: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> SessionAsync {
    //    unsafe { TODO: call ffi:soup_session_async_new_with_options() }
    //}
}
impl Default for SessionAsync {
    // A default session is simply a freshly constructed one.
    fn default() -> Self {
        Self::new()
    }
}
pub const NONE_SESSION_ASYNC: Option<&SessionAsync> = None;
impl fmt::Display for SessionAsync {
    /// Always displays the static type name `SessionAsync`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "SessionAsync")
    }
}
|
use super::{process, Config, SubCommand};
use alfred::ItemBuilder;
use chrono::prelude::Local;
/// Handles the `config` subcommand: merges the given flags into the saved
/// workflow configuration, persists it, and optionally prints it.
pub fn run(x: SubCommand) {
    debug!("Starting in run");
    let print_config;
    let mut config: Config;
    match x {
        SubCommand::Config {
            display,
            auth_token,
            number_pins,
            number_tags,
            shared,
            toread,
            fuzzy,
            tags_only,
            auto_update,
            suggest_tags,
            check_bookmarked_page,
            show_url_vs_tags,
        } => {
            print_config = display;
            // Auth tokens are expected to contain a ':' (user:secret);
            // anything else is rejected and the process exits.
            let mut token = String::new();
            if auth_token.is_some() {
                if auth_token.as_ref().unwrap().find(':').is_some() {
                    token = auth_token.as_ref().unwrap().clone();
                } else {
                    crate::show_error_alfred("Invalid Auth Token format!".to_string());
                    process::exit(1);
                }
            }
            // No saved config yet: start from defaults, but only if a token
            // was provided on the command line; otherwise exit with the error.
            config = Config::setup().unwrap_or_else(|err| {
                info!("{}", err.to_string());
                let mut config = Config::new();
                if auth_token.is_some() {
                    config.auth_token = token;
                } else {
                    crate::show_error_alfred(err.to_string());
                    process::exit(1);
                }
                config
            });
            debug!("{:?}", config);
            // Overwrite only the settings that were passed on the command
            // line (`update` is a no-op for `None`).
            config.auth_token.update(auth_token);
            config.pins_to_show.update(number_pins);
            config.tags_to_show.update(number_tags);
            // config.private_new_pin.update(!shared);
            // `shared` is the inverse of `private_new_pin`.
            config.private_new_pin = !shared.unwrap_or(!config.private_new_pin);
            config.toread_new_pin.update(toread);
            config.fuzzy_search.update(fuzzy);
            config.tag_only_search.update(tags_only);
            config.auto_update_cache.update(auto_update);
            config.suggest_tags.update(suggest_tags);
            config.page_is_bookmarked.update(check_bookmarked_page);
            config.show_url_vs_tags.update(show_url_vs_tags);
        }
        // `run` is only ever dispatched with `SubCommand::Config`.
        _ => unreachable!(),
    }
    if let Err(e) = config.save() {
        error!("Couldn't save config file: {:?}", e);
    } else {
        debug!(
            "Saved new configs to {} in: {}",
            crate::workflow_config::CONFIG_FILE_NAME,
            config.data_dir().to_string_lossy()
        );
    }
    if print_config {
        show_config(&config);
    }
}
/// Renders every configurable option (with its current value as subtitle) as
/// an Alfred item list; each item's `arg` is the command the workflow runs
/// when the item is selected.
fn show_config(config: &Config) {
    debug!("Starting in show_config");
    let items = vec![
        ItemBuilder::new("Only search tags")
            .subtitle(format!("{:?}", config.tag_only_search))
            .arg("pset tagonly")
            .icon_path("tagonly.png")
            .into_item(),
        ItemBuilder::new("Use fuzzy search")
            .subtitle(format!("{:?}", config.fuzzy_search))
            .arg("pset fuzzy")
            .icon_path("fuzzy.png")
            .into_item(),
        ItemBuilder::new("Automatically update cache")
            .subtitle(format!("{:?}", config.auto_update_cache))
            .arg("pset auto")
            .icon_path("auto_update_cache.png")
            .into_item(),
        ItemBuilder::new("Suggest popular tags for open browser tab")
            .subtitle(format!("{:?}", config.suggest_tags))
            .arg("pset suggest_tags")
            .icon_path("suggest.png")
            .into_item(),
        ItemBuilder::new("Mark new bookmarks as toread")
            .subtitle(format!("{:?}", config.toread_new_pin))
            .arg("pset toread")
            .icon_path("toread.png")
            .into_item(),
        ItemBuilder::new("Mark new bookmarks as private")
            .subtitle(format!("{:?}", config.private_new_pin))
            .arg("pset shared")
            .icon_path("private.png")
            .into_item(),
        ItemBuilder::new("Check if page is bookmarked")
            .subtitle(format!("{:?}", config.page_is_bookmarked))
            .arg("pset check_bookmarked")
            .icon_path("check_bookmarked_page.png")
            .into_item(),
        ItemBuilder::new("Show TAGs vs URLs in search results")
            .subtitle(format!("{:?}", config.show_url_vs_tags))
            .arg("pset url_tag")
            .icon_path("url.png")
            .into_item(),
        ItemBuilder::new("Number of tags to show")
            .subtitle(format!("{:?}", config.tags_to_show))
            .arg("pset tags")
            .icon_path("no_of_tags.png")
            .into_item(),
        ItemBuilder::new("Number of bookmarks to show")
            .subtitle(format!("{:?}", config.pins_to_show))
            .arg("pset bookmarks")
            .icon_path("no_of_pins.png")
            .into_item(),
        ItemBuilder::new("Click to check for Workflow updates.")
            .arg("pcheck")
            .icon_path("check_update.png")
            .into_item(),
        // The last item shows when the local cache was last refreshed, in the
        // user's local timezone.
        ItemBuilder::new(
            config
                .update_time
                .with_timezone(&Local)
                .format("%Y-%m-%d %H:%M:%S")
                .to_string(),
        )
        .subtitle("Latest cache update")
        .arg("pupdate")
        .icon_path("auto_update.png")
        .into_item(),
    ];
    let none: Option<Vec<(&str, &str)>> = None;
    crate::write_to_alfred(items, config.can_use_json(), none);
}
/// Overwrites a value from an `Option` of the same type, leaving it
/// untouched when the option is `None`.
trait OptionalUpdate: Sized {
    fn update(&mut self, opt: Option<Self>) {
        match opt {
            Some(new_value) => *self = new_value,
            None => {}
        }
    }
}
/// Every sized type gets the optional-update behavior for free.
impl<T> OptionalUpdate for T {}
|
/// Demonstrates modeling 2-D coordinates with plain local bindings before a
/// dedicated struct is introduced. The original test bound the values but
/// asserted nothing (and produced unused-variable warnings).
#[test]
fn test_struct_2d_members() {
    let origin_x = 0;
    let origin_y = 0;
    assert_eq!(origin_x, 0);
    assert_eq!(origin_y, 0);
}
struct Point {
    x: i32,
    y: i32,
}
/// Fields of a struct are readable through an immutable binding.
#[test]
fn test_immutable_struct() {
    let origin = Point { x: 0, y: 0 };
    let Point { x, y } = origin;
    println!("the origin is at ({}, {})", x, y);
}
/// A `mut` binding allows reassigning individual struct fields.
#[test]
fn test_mutable_struct() {
    let mut point = Point { x: 0, y: 0 };
    point.x = 5;
    let Point { x, y } = point;
    println!("The point is at ({},{})", x, y);
}
// won't compile. Mutability isn't supported on fields
//struct Point2 {
// mut x: i32,
// y: i32,
//}
// won't compile. point.y is immutable
//#[test]
//fn test_temp_mutability() {
// let mut point = Point { x: 0, y: 0 };
// point.x = 5;
// let point = point;
// point.y = 6;
//}
/// Holds mutable borrows of a point's coordinate fields, demonstrating
/// that struct fields can be mutably borrowed independently.
struct PointRef<'a> {
    x: &'a mut i32,
    y: &'a mut i32,
}
#[test]
fn test_struct_mut_pointers() {
    let mut point = Point { x: 0, y: 0 };
    // Borrow both fields mutably inside a nested scope so the borrows
    // end before the asserts read `point` directly again.
    {
        let handles = PointRef {
            x: &mut point.x,
            y: &mut point.y,
        };
        *handles.x = 5;
        *handles.y = 6;
    }
    assert_eq!(5, point.x);
    assert_eq!(6, point.y);
}
/// A 3-D point with integer coordinates, used for the struct-update
/// (`..`) syntax examples below.
struct Point3d {
    x: i32,
    y: i32,
    z: i32,
}
#[test]
fn test_struct_copy() {
    let mut point = Point3d { x: 0, y: 0, z: 0 };
    // Struct-update syntax: override `y`, take `x` and `z` from the old value.
    point = Point3d { y: 1, ..point };
    assert_eq!(0, point.x);
    assert_eq!(1, point.y);
    assert_eq!(0, point.z);
}
#[test]
fn test_struct_copy_2() {
    // Explicitly listed fields win; only `y` falls through from `origin`.
    let origin = Point3d { x: 0, y: 0, z: 0 };
    let moved = Point3d { z: 1, x: 2, ..origin };
    assert_eq!(2, moved.x);
    assert_eq!(0, moved.y);
    assert_eq!(1, moved.z);
}
/// Tuple struct: fields are positional (`.0`, `.1`, `.2`), not named.
struct ColorTuple(i32, i32, i32);
/// Same shape as `ColorTuple` but a distinct type — they are not interchangeable.
struct PointTuple(i32, i32, i32);
#[test]
fn test_struct_tuple() {
    // Construction looks like a function call; leading underscores
    // mark the bindings as intentionally unused.
    let _black = ColorTuple(0, 0, 0);
    let _origin = PointTuple(0, 0, 0);
}
// structs are almost always better than tuple structs
// struct values can be referenced by name.
// tuple struct values are referenced by positions
/// Named-field version of a color; fields are declared red, blue, green
/// (note the order) and referenced by name rather than position.
struct Color {
    red: i32,
    blue: i32,
    green: i32,
}
struct Inches(i32);
#[test]
fn test_newtype() {
    // The newtype pattern: destructure with a `let` pattern to get the
    // wrapped value back out.
    let length = Inches(10);
    let Inches(integer_length) = length;
    // Fixed typo in the printed message ("lehgth" -> "length").
    println!("length is {} inches", integer_length);
}
struct Electron;
#[test]
fn test_unit_like_structs() {
    // A unit struct is instantiated simply by naming it.
    let _electron = Electron;
}
|
//!
//!
//! Work Tock
//! ==========
//!
//! A command line work tracking program.
//!
//! Use --help to get basic usage on the command line
//!
//! Basic Usage
//! ----------
//!
//! You can use this program to clockin
//!
//! work_tock -i <JobName>
//!
//! clockout
//!
//! work_tock -o
//!
//! Or print a record of recent clock entries with flags
//!
//! work_tock --job_s dothing -p --since 03/04/2020
//!
//!
//! The program works with a single text file that is easy to edit if needed. The program will never overwrite your file, only read and append, so all data in that file will otherwise remain untouched.
//!
//! To set the location of the core file, edit the default config for your program, found at "$HOME/.config/work\_tock/init.toml".
//!
//! ```toml
//! [config]
//! # Set the path to the current working file
//! # anything within "{}" is read as an environment variable
//! file="{HOME}/<path>/<to>/<file>"
//!
//! #Optional require all job entries to be snake_case
//! snake=true
//!
//! #camel=true #if you prefer camelCase
//! ```
//!
//! A standard file looks like this :
//!
//! ```toml
//! $home_jobs[car_wash,eat]
//! 23/01/2019
//! car_wash,12:30-13:50
//! 15:00,#car_wash is implied by previous Job
//! programming,16:00,#Clockout for car_wash is implied by new Job
//! eat,17:00
//! -18:00,#Clockout
//!
//! 24/01/2019
//! 12:00,#Eating is implied as it was the last job
//! -13:00
//! programming,14:00
//! -16:00
//! ```
//!
//! * Commas and newlines begin a new item
//! * Whitespace is ignored next to either side or a break (",","\n")
//! * Jobs are Letters only
//! * Clockins are "hh:mm"
//! * Clockouts are "-hh:mm"
//! * Tags begin with an "\_" and can be cleared with "\_\_"
//! * Dates are dd/mm/yyyy, but if you set year=2019, dates can be dd/mm after that.
//! * Groups are defined by a ```$group_name[list,of,jobs,in,group]```
//!
//! Every Clockin will use the most recent Job,Date, and Tags for the clocking,
//!
//! So given the example file ```work_tock``` will produce:
//!
//! ```toml
//! {"car_wash": 02:20, "eat": 04:00, "programming": 01:00}
//!
//! Total Time = 07:20
//!
//! ```
//!
//! Printing and Filters
//! ------------------
//!
//! Using "-p" will print all entries, but if you want to be more specific you can apply a filter and -p will print only entries that pass that filter.
//!
//! > NOTE: -t for today used to be -d which is now used to specify a date to work on
//!
//! To get more relevant data you can use filters such as "-t" (Today), "--day 3/1/2019", or by job
//!
//! eg: ```work_tock -p --job car_wash``` will return
//!
//! ```toml
//! 23/01/2019
//! car_wash: 12:30-13:50 = 01:20 => 01:20
//! car_wash: 15:00-16:00 = 01:00 => 02:20
//!
//! {"car_wash": 02:20}
//!
//! Total Time = 02:20
//!
//! ```
//!
//! or ```work_tock -p --group home_jobs``` will produce:
//!
//! ```toml
//! Filtering by group home_jobs
//! 23/01/2019
//! car_wash: 12:30-13:50 = 01:20 => 01:20
//! car_wash: 15:00-16:00 = 01:00 => 02:20
//! eat: 17:00-18:00 = 01:00 => 03:20
//! 24/01/2019
//! eat: 12:00-13:00 = 01:00 => 04:20
//!
//! {"car_wash": 02:20, "eat": 02:00}
//!
//! Total Time = 04:20
//!
//! ```
//!
//!
//!
//!
//! For more information use ```work_tock --help```
//!
extern crate work_tock_lib;
use work_tock_lib::{
clockin, Clockin, ClockAction, STime, TokErr, gob
};
use gobble::Parser;
use std::collections::BTreeMap;
use std::io::Write;
use std::str::FromStr;
use chrono::naive::NaiveDate;
use chrono::offset::Local;
use chrono::{Datelike, Weekday};
use clap_conf::*;
/// Opens `fname` for appending, converting any I/O failure into the
/// crate-wide `failure::Error` type. The clock file is only ever
/// appended to, never rewritten.
fn append_to(fname: &str) -> Result<std::fs::File, failure::Error> {
    let file = std::fs::OpenOptions::new().append(true).open(fname)?;
    Ok(file)
}
fn main() -> Result<(), failure::Error> {
let clap = clap_app!(
work_tock=>
(version: crate_version!())
(author: "Matthew Stoodley")
(about: "Clock in and out of work")
(@arg config: -c "Config File") //allow clap_conf config loader to work
(@arg attime:-a +takes_value "perform activity at given time")
(@arg ondate:-d +takes_value "perform activity on given date")
(@arg file: -f --file +takes_value "Filename")
(@arg read: -r --read +takes_value #{1,40} "Read Only Files list")
(@arg week: --week +takes_value "Filter by Week.")
(@arg this_week: -w "Filter by this week")
//(@arg on_date: --date +takes_value "Filter by date.")
(@arg today: -t "Filter by Today")
(@arg month: --month +takes_value "Filter by Month 1--12.")
(@arg this_month: -m "Filter by this month")
(@arg print: -p "Print Filtered Results nicely")
(@arg clockin: -i --in +takes_value "Clock in to named job, (comma separate clockin time if not for now)")
(@arg quickin: -q "Clock in now to previous job")
(@arg clockout: -o --out "Clock out Now")
(@arg clockoutat: --outat +takes_value "Clock out at given time")
(@arg long_day: -l --long_day "Acknowledge working past midnight")
(@arg yesterday: -y --yesterday "go back one day equivilat to -d <the day before>")
(@arg same_day:-s --same_day "Clockout on the same day as the clockin")
(@arg since: --since +takes_value "Filter Since given date (inclusive)")
(@arg until: --until +takes_value "Filter until given date (inclusive)")
(@arg job: --job +takes_value "Filter by Job")
(@arg group: -g --group + takes_value "Filter by group")
(@arg jobstart: --job_s +takes_value "Filter by Job Starts with")
(@arg tag: --tag +takes_value "Filter by Tag")
(@arg camel: --camel "Force Camel Case on job input")
(@arg snake: --snake "Force Camel Case on job input")
(@subcommand complete => )
)
.get_matches();
let cfg = clap_conf::with_toml_env(&clap, &["{HOME}/.config/work_tock/init.toml"]);
//core options
let fname = cfg
.grab()
.arg("file")
.conf("config.file")
.rep_env()
.expect("No File given");
//Tab Complete list
if let Some(_sc) = clap.subcommand_matches("complete"){
let s = std::fs::read_to_string(&fname)?; //.map_err(|_| format!("Could not read file: {}", fname))?;
let mut mp = std::collections::BTreeSet::new();
let line_actions = gob::line_clock_actions().parse_s(&s).map_err(|e|e.strung())?;
for a in line_actions {
match a.action{
ClockAction::SetJob(d)=>{
mp.insert(d);
}
_=>{}
}
}
for k in mp {
print!(" {}",k);
}
println!("");
return Ok(()) ;
}
let s = std::fs::read_to_string(&fname)?; //.map_err(|_| format!("Could not read file: {}", fname))?;
let mut clock_data = match clockin::read_string(&s){
Ok(c)=>c,
Err(e)=> {
println!("\n\n Errs : \n");
return Err(e.into());
}
};
if let Some(rlist) =clap.values_of("read"){
for r in rlist{
let s2 = std::fs::read_to_string(&fname)?; //.map_err(|_| format!("Could not read file: {}", fname))?;
let cdata2 = match clockin::read_string(&s2) {
Ok(c)=>c,
Err(e)=>{
println!("\n\nErrs in File : {}" ,r );
return Err(e.into());
}
};
}
}
let mut curr = None;
let mut c_io = Vec::new();
//Get outs with ins so filter makes sense
//If currently clocked in leaves curr as an option to be added later
for c in clock_data.clocks {
match c {
Clockin::In(data) => {
if let Some(cin) = curr {
c_io.push((cin, data.time));
}
curr = Some(data);
}
Clockin::Out(cout) => {
match curr {
Some(data) => {
if cout < data.time {
return Err(TokErr::NegativeTime.on_line(data.line).into());
}
c_io.push((data, cout));
}
None => println!("Two Out's in a row"),
}
curr = None;
}
}
}
let today = match cfg.grab().arg("ondate").done(){
Some(s)=>clockin::read_date(&s)?,
None=>Local::now().date().naive_local(),
};
let today = match cfg.bool_flag("yesterday",Filter::Arg){
true => today- chrono::Duration::days(1),
false => today,
};
let now = match cfg.grab().arg("attime").done(){
Some(s)=>STime::from_str(&s)?,
None=>STime::now(),
};
if cfg.bool_flag("clockout", Filter::Arg) {
let c_data = curr
.take()
.ok_or(TokErr::from("Cannot clock out if not clocked in"))?;
if today > c_data.date && !cfg.bool_flag("long_day", Filter::Arg) {
return Err(TokErr::from(format!(
"Last Clockin was not today: {}. Please use -l to confirm long day",
c_data
))
.into());
}
let since = now.since(&today,c_data.time, &c_data.date);
if since < STime::new(0,0) {
return Err(TokErr::from(format!("Cannot clockout before clockin")).into());
}
let mut f = append_to(&fname)?;
let otime = since + c_data.time;
writeln!(f, " -{}", otime)?; //.map_err(|e| format!("{:?}", e))?;
println!("You are now clocking out from {} at {}", c_data, otime);
c_io.push((c_data, otime));
}
if let Some(_tm) = cfg.grab().arg("clockoutat").done() {
println!(r#""--outat <time>" has been replaced by "-o -a <time>""#)
}
let mut clockin = None;
if cfg.bool_flag("quickin", Filter::Arg) {
clockin = Some(c_io.get(c_io.len() - 1).map(|x|x.0.job.clone()).ok_or(TokErr::from("no previous job"))?);
}
if let Some(istr) = cfg.grab().arg("clockin").done() {
//camel case required
let f_camel = cfg.bool_flag("camel",Filter::Arg) || cfg.bool_flag("config.camel",Filter::Conf);
if f_camel && istr.contains("_"){
return Err(TokErr::from("You have required CamelCase job entires").into());
}
//snake case required{
let f_snake = cfg.bool_flag("snake",Filter::Arg) || cfg.bool_flag("config.snake",Filter::Conf);
if f_snake && istr.chars().find(|c|c.is_uppercase()).is_some(){
return Err(TokErr::from("You have required snake_case job entires").into());
}
clockin = Some(istr);
}
if let Some(job) = clockin{
//first check that we are not clockedin on a different date
if let Some(c_data) = curr.take() {
if c_data.date != today {
return Err(TokErr::from("You are currently clocked in from a different date, Please clockout from that before clocking in.").into());
}
let since = now.since(&today,c_data.time, &c_data.date);
if since < STime::new(0,0) {
return Err(TokErr::from("You are currently clocked in since after the given time. Cannot clockout before clocking in").into());
}
println!("You are now clocking out from {} at {} ({}hrs)", c_data, now,since);
c_io.push((c_data, now));
}
let real_today = Local::now().date().naive_local();
let date_str = if real_today != today {
today.format("on %d/%m/%Y").to_string()
} else {
"today".to_string()
};
println!(// message
"You are now clocking in {} at {} for \"{}\"",
date_str,
now,
job
);
let lastjob = c_io.get(c_io.len() - 1).map(|x| x.clone().0);//Option
let f_line = match lastjob {
Some(lj) =>{
let mut f = if lj.date != today{
today.format("%d/%m/%Y\n\t").to_string()
}else { "\t".to_string()};
if lj.job != job {
f.push_str(&format!("{},",do_quotes(&job)));
}
f.push_str(&now.to_string());
f
}
None=>{
format!("{}\n\t{},{}",today.format("%d/%m/%Y"),job,now)
}
};
let mut f = append_to(&fname)?;
writeln!(f, "{}", f_line)?
}
if let Some(c_data) = curr {
let since_last = now.since(&today,c_data.time, &c_data.date);
println!(
"You have been clocked in for {} for {} hours",
c_data, since_last,
);
let otime = since_last + c_data.time;
c_io.push((c_data, otime));
}
//filter.
if cfg.bool_flag("this_week", Filter::Arg) {
let dt = Local::today();
let wk = dt.iso_week().week();
let st = NaiveDate::from_isoywd(dt.year(), dt.iso_week().week(), Weekday::Mon);
let fin = NaiveDate::from_isoywd(dt.year(), dt.iso_week().week(), Weekday::Sun);
println!("Filtering by week {}", wk);
c_io.retain(|(ind, _)| ind.date >= st && ind.date <= fin);
}
if let Some(grp) = cfg.grab().arg("group").done(){
println!("Filtering by group {}",grp);
let group = clock_data.groups.get(&grp).ok_or(TokErr::Mess(format!("Group not defined \"{}\"",grp) ))?;
c_io.retain(|(ind,_)| group.contains(&ind.job));
}
if let Some(wks) = cfg.grab().arg("week").done() {
let dt = Local::today();
let wk = wks.parse::<u32>()?;
//.map_err(|_| "Could not parse week value")?;
let st = NaiveDate::from_isoywd(dt.year(), wk, Weekday::Mon);
let fin = NaiveDate::from_isoywd(dt.year(), wk, Weekday::Sun);
println!("Filtering by week {}", wk);
c_io.retain(|(ind, _)| ind.date >= st && ind.date <= fin);
}
//local closure for month filter
let month_s_fin = |yr, m| {
(
NaiveDate::from_ymd(yr, m, 1),
match m {
12 => NaiveDate::from_ymd(yr + 1, 1, 1),
_ => NaiveDate::from_ymd(yr, m + 1, 1),
},
)
};
if cfg.bool_flag("this_month", Filter::Arg) {
let dt = Local::today();
let (st, fin) = month_s_fin(dt.year(), dt.month());
c_io.retain(|(ind, _)| ind.date >= st && ind.date < fin);
}
if let Some(mth) = cfg.grab().arg("month").done() {
let dt = Local::today();
let (st, fin) = month_s_fin(dt.year(), mth.parse()?);
c_io.retain(|(ind, _)| ind.date >= st && ind.date < fin);
}
//TODO filter by given date
if cfg.bool_flag("today", Filter::Arg) {
let dt = Local::today().naive_local();
println!("Filtering by Today");
c_io.retain(|(ind, _)| ind.date == dt);
}
if let Some(d) = cfg.grab().arg("since").done() {
let dt = clockin::read_date(&d)?;
c_io.retain(|(ind, _)| ind.date >= dt);
}
if let Some(d) = cfg.grab().arg("until").done() {
let dt = clockin::read_date(&d)?;
c_io.retain(|(ind, _)| ind.date <= dt);
}
if let Some(jb) = cfg.grab().arg("job").done() {
c_io.retain(|(ind, _)| ind.job == jb);
}
if let Some(jbs) = cfg.grab().arg("jobstart").done() {
c_io.retain(|(ind, _)| ind.job.starts_with(&jbs));
}
if let Some(tg) = cfg.grab().arg("tag").done() {
c_io.retain(|(ind, _)| ind.tags.contains(&tg.to_string()));
}
//build report
let mut r_times: BTreeMap<String, STime> = BTreeMap::new();
let mut t_time = STime::new(0, 0);
let mut last_dat = NaiveDate::from_ymd(1, 1, 1);
for (idat, otime) in c_io {
let tt = r_times
.get(&idat.job)
.map(|x| *x)
.unwrap_or(STime::new(0, 0));
t_time += otime - idat.time;
if cfg.bool_flag("print", Filter::Arg) {
//maybe move out later
if last_dat != idat.date {
println!("{}", idat.date.format("%d/%m/%Y"));
last_dat = idat.date;
}
println!(
" {}: {}-{} = {} => {}",
idat.job,
idat.time,
otime,
otime - idat.time,
t_time
);
}
r_times.insert(idat.job, tt + otime - idat.time);
}
println!("\n{:?}\n", r_times);
println!("Total Time = {}", t_time);
Ok(())
}
/// Wraps `s` in double quotes when it contains a space, newline, or tab
/// (characters that would break the clock-file format); otherwise
/// returns it unchanged as an owned String.
fn do_quotes(s: &str) -> String {
    if s.chars().any(|c| matches!(c, ' ' | '\n' | '\t')) {
        format!("\"{}\"", s)
    } else {
        s.to_string()
    }
}
|
use std::collections::HashMap;
use logos::{Lexer, Logos};
use thiserror::Error;
use crate::direction::Direction;
use crate::op::Op;
/// Errors produced while assembling source text into bytecode.
#[derive(Debug, Error)]
pub enum AsmError {
    /// Token-level syntax error at `lineno`.
    #[error("line {lineno}: parse error: {msg}")]
    Parse { lineno: usize, msg: String },
    /// Assembled code grew past the 0x100-byte limit (checked in `asm`).
    #[error("line {lineno}: code size overflow")]
    Overflow { lineno: usize },
    /// A referenced label was never defined.
    #[error("line {lineno}: undefined label: {label}")]
    UndefinedLabel { lineno: usize, label: String },
    /// Address 0 is not a permitted jump-on-damage target (see `resolve_labels`).
    #[error("line {lineno}: set_jump_on_damage 0 is not permitted")]
    SetJumpOnDamageZero { lineno: usize },
    /// Underlying I/O failure while reading the source.
    #[error("I/O error: {0}")]
    Io(#[from] std::io::Error),
}
pub type AsmResult<T> = Result<T, AsmError>;
/// Lexer tokens for the assembler, generated by `logos`.
/// Mnemonics come first so they win over the generic label patterns;
/// whitespace is skipped, `;` comments are stripped before lexing
/// (see `trim_comment`).
#[derive(Debug, Logos)]
enum Token {
    #[regex(r"move")]
    MnemonicMove,
    #[regex(r"jump")]
    MnemonicJump,
    #[regex(r"set_sleep_timer")]
    MnemonicSetSleepTimer,
    #[regex(r"loop_begin")]
    MnemonicLoopBegin,
    #[regex(r"loop_end")]
    MnemonicLoopEnd,
    #[regex(r"shoot_direction")]
    MnemonicShootDirection,
    #[regex(r"set_sprite")]
    MnemonicSetSprite,
    #[regex(r"set_homing_timer")]
    MnemonicSetHomingTimer,
    #[regex(r"set_inversion")]
    MnemonicSetInversion,
    #[regex(r"set_position")]
    MnemonicSetPosition,
    #[regex(r"set_jump_on_damage")]
    MnemonicSetJumpOnDamage,
    #[regex(r"unset_jump_on_damage")]
    MnemonicUnsetJumpOnDamage,
    // The bytecode is identical to set_jump_on_damage.
    #[regex(r"set_health")]
    MnemonicSetHealth,
    #[regex(r"increment_sprite")]
    MnemonicIncrementSprite,
    #[regex(r"decrement_sprite")]
    MnemonicDecrementSprite,
    #[regex(r"set_part")]
    MnemonicSetPart,
    #[regex(r"randomize_x")]
    MnemonicRandomizeX,
    #[regex(r"randomize_y")]
    MnemonicRandomizeY,
    #[regex(r"bcc_x")]
    MnemonicBccX,
    #[regex(r"bcs_x")]
    MnemonicBcsX,
    #[regex(r"bcc_y")]
    MnemonicBccY,
    #[regex(r"bcs_y")]
    MnemonicBcsY,
    #[regex(r"shoot_aim")]
    MnemonicShootAim,
    #[regex(r"restore_music")]
    MnemonicRestoreMusic,
    #[regex(r"play_sound")]
    MnemonicPlaySound,
    // `name:` — the trailing colon is stripped from the captured text.
    #[regex(r"[A-Za-z_][[:word:]]*:", |lex| lex.slice()[0..lex.slice().len()-1].to_owned())]
    LabelDefinition(String),
    #[regex(r"[A-Za-z_][[:word:]]*", |lex| lex.slice().to_owned())]
    LabelReference(String),
    // Numeric literals in hex/octal/binary/decimal, all parsed to u8.
    #[regex(r"0x[A-Fa-f0-9]+", |lex| u8::from_str_radix(&lex.slice()[2..], 16))]
    #[regex(r"0o[0-7]+", |lex| u8::from_str_radix(&lex.slice()[2..], 8))]
    #[regex(r"0b[01]+", |lex| u8::from_str_radix(&lex.slice()[2..], 2))]
    #[regex(r"[0-9]+", |lex| u8::from_str_radix(lex.slice(), 10))]
    Number(u8),
    #[regex(r",")]
    Comma,
    #[error]
    #[regex(r"[[:space:]]+", logos::skip)]
    Error,
}
/// One assembled instruction plus the bookkeeping needed for label resolution.
#[derive(Debug)]
struct Statement {
    lineno: usize,         // source line (1-based), for error reporting
    addr: usize,           // address of this op in the output buffer
    op: Op,                // encoded operation (operand may be a placeholder)
    label: Option<String>, // unresolved label reference, if any
}
impl Statement {
    /// Statement whose operand is fully known.
    fn new(lineno: usize, addr: usize, op: Op) -> Self {
        Self::build(lineno, addr, op, None)
    }
    /// Statement whose operand will later be patched from `label`
    /// (see `resolve_labels`).
    fn with_label(lineno: usize, addr: usize, op: Op, label: String) -> Self {
        Self::build(lineno, addr, op, Some(label))
    }
    // Shared constructor body.
    fn build(lineno: usize, addr: usize, op: Op, label: Option<String>) -> Self {
        Self {
            lineno,
            addr,
            op,
            label,
        }
    }
}
/// Assembles the source read from `rdr` into bytecode.
///
/// Two passes: parse each line into statements while recording label
/// addresses, then resolve label references and emit the bytes.
/// Errors with `Overflow` if the code grows past 0x100 bytes.
pub fn asm<R: std::io::Read>(rdr: R) -> AsmResult<Vec<u8>> {
    use std::io::BufRead as _;
    let reader = std::io::BufReader::new(rdr);
    let mut stmts = Vec::new();
    let mut label_to_addr = HashMap::new();
    let mut addr = 0;
    for (idx, line) in reader.lines().enumerate() {
        let lineno = idx + 1;
        let raw = line?;
        let code = trim_comment(&raw);
        if code.trim().is_empty() {
            continue;
        }
        parse_line(lineno, code, &mut addr, &mut stmts, &mut label_to_addr)?;
        if addr > 0x100 {
            return Err(AsmError::Overflow { lineno });
        }
    }
    resolve_labels(&mut stmts, &label_to_addr)?;
    let mut buf = vec![0_u8; addr];
    emit_code(&mut buf, &stmts);
    Ok(buf)
}
/// Encodes every statement's op into `buf`, back to back.
/// Ops are contiguous, so the running offset matches each stmt's addr.
fn emit_code(buf: &mut [u8], stmts: &[Statement]) {
    let mut offset = 0;
    for stmt in stmts {
        stmt.op.encode(&mut buf[offset..]);
        offset += stmt.op.len();
    }
}
/// Patches every statement that still carries a label reference with the
/// label's resolved address.
///
/// Errors with `UndefinedLabel` if the label was never defined, and with
/// `SetJumpOnDamageZero` if a damage handler would target address 0.
fn resolve_labels(stmts: &mut [Statement], label_to_addr: &HashMap<String, u8>) -> AsmResult<()> {
    for stmt in stmts {
        // `take` moves the label out, leaving the statement resolved.
        if let Some(label) = stmt.label.take() {
            let addr = *label_to_addr
                .get(&label)
                .ok_or_else(|| AsmError::UndefinedLabel {
                    lineno: stmt.lineno,
                    label,
                })?;
            stmt.op = match stmt.op {
                Op::Jump(_) => Op::Jump(addr),
                Op::SetJumpOnDamage(_) => {
                    if addr == 0 {
                        return Err(AsmError::SetJumpOnDamageZero {
                            lineno: stmt.lineno,
                        });
                    }
                    Op::SetJumpOnDamage(addr)
                }
                Op::BccX(_) => Op::BccX(addr),
                Op::BcsX(_) => Op::BcsX(addr),
                Op::BccY(_) => Op::BccY(addr),
                Op::BcsY(_) => Op::BcsY(addr),
                // Only the label-taking ops above are ever built with a
                // label (see parse_line).
                _ => unreachable!(),
            };
        }
    }
    Ok(())
}
/// Parses one comment-stripped, non-empty source line.
///
/// A line is either a label definition (recorded into `labels` at the
/// current `addr`) or a single mnemonic with its operands (appended to
/// `stmts`, advancing `addr` by the op's encoded length). Ops that take
/// a label get a placeholder operand, patched later by `resolve_labels`.
fn parse_line(
    lineno: usize,
    line: &str,
    addr: &mut usize,
    stmts: &mut Vec<Statement>,
    labels: &mut HashMap<String, u8>,
) -> AsmResult<()> {
    use std::convert::TryFrom;
    let mut lex = Token::lexer(line);
    let lex = &mut lex;
    // Appends a label-free statement and advances the address.
    macro_rules! add_stmt {
        ($op:expr) => {{
            let op = $op;
            stmts.push(Statement::new(lineno, *addr, op));
            *addr += op.len();
        }};
    }
    // Same, but records a label to be resolved later.
    macro_rules! add_stmt_with_label {
        ($op:expr, $label:expr) => {{
            let op = $op;
            stmts.push(Statement::with_label(lineno, *addr, op, $label));
            *addr += op.len();
        }};
    }
    match lex.next() {
        Some(Token::LabelDefinition(label)) => {
            expect_end(lineno, lex)?;
            // addr is bounded by the 0x100 overflow check in `asm`,
            // so the u8 conversion cannot fail here.
            labels.insert(label, u8::try_from(*addr).unwrap());
        }
        Some(Token::MnemonicMove) => {
            let dir = expect_dir(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_move(dir));
        }
        Some(Token::MnemonicJump) => {
            let label = expect_label_reference(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt_with_label!(Op::new_jump(0), label);
        }
        Some(Token::MnemonicSetSleepTimer) => {
            let idx = expect_nibble(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_set_sleep_timer(idx));
        }
        Some(Token::MnemonicLoopBegin) => {
            let idx = expect_loop_idx(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_loop_begin(idx));
        }
        Some(Token::MnemonicLoopEnd) => {
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_loop_end());
        }
        Some(Token::MnemonicShootDirection) => {
            let dir = expect_dir_shoot(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_shoot_direction(dir));
        }
        Some(Token::MnemonicSetSprite) => {
            let idx = expect_nibble(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_set_sprite(idx));
        }
        Some(Token::MnemonicSetHomingTimer) => {
            let idx = expect_nibble(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_set_homing_timer(idx));
        }
        Some(Token::MnemonicSetInversion) => {
            let inv_x = expect_bool(lineno, lex)?;
            expect_comma(lineno, lex)?;
            let inv_y = expect_bool(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_set_inversion(inv_x, inv_y));
        }
        Some(Token::MnemonicSetPosition) => {
            let x = expect_number(lineno, lex)?;
            expect_comma(lineno, lex)?;
            let y = expect_number(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_set_position(x, y));
        }
        Some(Token::MnemonicSetJumpOnDamage) => {
            let label = expect_label_reference(lineno, lex)?;
            expect_end(lineno, lex)?;
            // 0xFF placeholder: 0 is rejected at resolve time.
            add_stmt_with_label!(Op::new_set_jump_on_damage(0xFF), label);
        }
        Some(Token::MnemonicUnsetJumpOnDamage) => {
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_unset_jump_on_damage());
        }
        Some(Token::MnemonicSetHealth) => {
            let health = expect_number(lineno, lex)?;
            expect_end(lineno, lex)?;
            // set_health shares the set_jump_on_damage bytecode
            // (see the note on the Token definition).
            add_stmt!(Op::new_set_jump_on_damage(health));
        }
        Some(Token::MnemonicIncrementSprite) => {
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_increment_sprite());
        }
        Some(Token::MnemonicDecrementSprite) => {
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_decrement_sprite());
        }
        Some(Token::MnemonicSetPart) => {
            let part = expect_number(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_set_part(part));
        }
        Some(Token::MnemonicRandomizeX) => {
            let mask = expect_number(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_randomize_x(mask));
        }
        Some(Token::MnemonicRandomizeY) => {
            let mask = expect_number(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_randomize_y(mask));
        }
        Some(Token::MnemonicBccX) => {
            let label = expect_label_reference(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt_with_label!(Op::new_bcc_x(0), label);
        }
        Some(Token::MnemonicBcsX) => {
            let label = expect_label_reference(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt_with_label!(Op::new_bcs_x(0), label);
        }
        Some(Token::MnemonicBccY) => {
            let label = expect_label_reference(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt_with_label!(Op::new_bcc_y(0), label);
        }
        Some(Token::MnemonicBcsY) => {
            let label = expect_label_reference(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt_with_label!(Op::new_bcs_y(0), label);
        }
        Some(Token::MnemonicShootAim) => {
            let unused = expect_nibble(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_shoot_aim(unused));
        }
        Some(Token::MnemonicRestoreMusic) => {
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_restore_music());
        }
        Some(Token::MnemonicPlaySound) => {
            let sound = expect_sound(lineno, lex)?;
            expect_end(lineno, lex)?;
            add_stmt!(Op::new_play_sound(sound));
        }
        _ => {
            return Err(AsmError::Parse {
                lineno,
                msg: format!("unexpected token: {}", lex.slice()),
            });
        }
    }
    Ok(())
}
/// Consumes the next token, which must be a label reference.
fn expect_label_reference(lineno: usize, lex: &mut Lexer<Token>) -> AsmResult<String> {
    match lex.next() {
        Some(Token::LabelReference(label)) => Ok(label),
        _ => Err(AsmError::Parse {
            lineno,
            msg: format!("expected label reference, but got: {}", lex.slice()),
        }),
    }
}
/// Consumes a number and validates it as a movement direction (0..=0x3F).
fn expect_dir(lineno: usize, lex: &mut Lexer<Token>) -> AsmResult<Direction> {
    let idx = expect_number(lineno, lex)?;
    match idx {
        0..=0x3F => Ok(Direction::new(idx)),
        _ => Err(AsmError::Parse {
            lineno,
            msg: format!("invalid direction: {}", idx),
        }),
    }
}
/// Consumes a number and validates it as a shooting direction (0..=0xF).
fn expect_dir_shoot(lineno: usize, lex: &mut Lexer<Token>) -> AsmResult<Direction> {
    let idx = expect_number(lineno, lex)?;
    match idx {
        0..=0xF => Ok(Direction::new(idx)),
        _ => Err(AsmError::Parse {
            lineno,
            msg: format!("invalid shooting direction: {}", idx),
        }),
    }
}
/// Consumes a number and validates it fits in a nibble (0..=0xF).
fn expect_nibble(lineno: usize, lex: &mut Lexer<Token>) -> AsmResult<u8> {
    const RANGE: std::ops::RangeInclusive<u8> = 0..=0xF;
    let idx = expect_number(lineno, lex)?;
    if RANGE.contains(&idx) {
        return Ok(idx);
    }
    Err(AsmError::Parse {
        lineno,
        msg: format!("operand must be within {:?}: {}", RANGE, idx),
    })
}
/// Consumes a number and validates it as a loop counter: a nibble, but
/// 1 is rejected (a one-iteration loop is not a valid encoding).
fn expect_loop_idx(lineno: usize, lex: &mut Lexer<Token>) -> AsmResult<u8> {
    let idx = expect_number(lineno, lex)?;
    if (0..=0xF).contains(&idx) && idx != 1 {
        Ok(idx)
    } else {
        Err(AsmError::Parse {
            lineno,
            msg: "invalid loop index".to_owned(),
        })
    }
}
/// Consumes a number and validates it as a sound id (1..=0xF; 0 is not a sound).
fn expect_sound(lineno: usize, lex: &mut Lexer<Token>) -> AsmResult<u8> {
    const RANGE: std::ops::RangeInclusive<u8> = 1..=0xF;
    let sound = expect_number(lineno, lex)?;
    if RANGE.contains(&sound) {
        return Ok(sound);
    }
    Err(AsmError::Parse {
        lineno,
        msg: format!("sound must be within {:?}: {}", RANGE, sound),
    })
}
/// Consumes a number and interprets 0/1 as false/true.
fn expect_bool(lineno: usize, lex: &mut Lexer<Token>) -> AsmResult<bool> {
    match expect_number(lineno, lex)? {
        0 => Ok(false),
        1 => Ok(true),
        _ => Err(AsmError::Parse {
            lineno,
            msg: format!("bool value must be 0 or 1: {}", lex.slice()),
        }),
    }
}
/// Consumes the next token, which must be a numeric literal.
fn expect_number(lineno: usize, lex: &mut Lexer<Token>) -> AsmResult<u8> {
    match lex.next() {
        Some(Token::Number(n)) => Ok(n),
        _ => Err(AsmError::Parse {
            lineno,
            msg: format!("expected number, but got: {}", lex.slice()),
        }),
    }
}
/// Consumes the next token, which must be the operand separator `,`.
fn expect_comma(lineno: usize, lex: &mut Lexer<Token>) -> AsmResult<()> {
    match lex.next() {
        Some(Token::Comma) => Ok(()),
        _ => Err(AsmError::Parse {
            lineno,
            msg: format!("expected comma, but got: {}", lex.slice()),
        }),
    }
}
/// Asserts that the line holds no further tokens.
fn expect_end(lineno: usize, lex: &mut Lexer<Token>) -> AsmResult<()> {
    match lex.next() {
        None => Ok(()),
        Some(_) => Err(AsmError::Parse {
            lineno,
            msg: format!("expected end, but got: {}", lex.slice()),
        }),
    }
}
/// Returns `s` with everything from the first `;` onward removed;
/// lines without a `;` are returned unchanged.
fn trim_comment(s: &str) -> &str {
    match s.find(';') {
        Some(pos) => &s[..pos],
        None => s,
    }
}
|
use crate::errors::AppError;
use crate::schema::*;
use diesel::prelude::*;
type Result<T> = std::result::Result<T, AppError>;
/// A row from the `users` table.
#[derive(Queryable, Identifiable, Serialize, Debug, PartialEq)]
pub struct User {
    pub id: i32,
    pub username: String,
}
// NOTE: Add bookmark for last page: 124
/// Inserts a new user and returns the freshly created row.
///
/// The insert and the read-back run inside one transaction so the row
/// returned is the one just inserted.
pub fn create_user(conn: &SqliteConnection, username: &str) -> Result<User> {
    conn.transaction(|| {
        diesel::insert_into(users::table)
            .values((users::username.eq(username)))
            .execute(conn)?;
        // Highest id = the most recently inserted row.
        users::table
            .order(users::id.desc())
            .select((users::id, users::username))
            .first(conn)
            .map_err(Into::into)
    })
}
/// The two ways a user can be looked up: by name or by numeric id.
enum UserKey<'a> {
    Username(&'a str), // NOTE: added lifetime, the Username value's lifetime lives as long as the struct's lifetime
    ID(i32),
}
|
// Fibonacci implementation using Rust's Iterator trait to find
// the nth fibonacci number
/// Iterator state: `c` holds the current value, `n` the next one.
struct Fib {
    c: u64,
    n: u64
}
impl Fib {
    /// Fresh iterator seeded at (c, n) = (0, 1); the first `next()`
    /// therefore yields fib(2) = 1.
    fn new() -> Fib {
        Fib { c: 0, n: 1}
    }
    /// returns the nth number of the Fibonacci sequence
    /// (0-indexed: fib(0) = 0, fib(1) = 1, fib(2) = 1, ...)
    #[allow(dead_code)]
    fn get_nth(index: usize) -> u64 {
        if index == 0 || index == 1 {return index as u64;}
        let f = Fib::new();
        // The iterator's first yielded value is fib(2), so taking
        // `index - 1` values makes the last one fib(index).
        // (The previous `take(index)` was off by one: it returned
        // fib(index + 1), e.g. get_nth(2) gave 2 instead of 1.)
        f.take(index - 1).last().unwrap()
    }
}
impl Iterator for Fib {
    type Item = u64;
    /// Advances the pair (c, n) -> (n, c + n) and yields the new `n`.
    /// Always returns `Some`: the sequence is unbounded.
    fn next(&mut self) -> Option<Self::Item> {
        // `replace` stores the new successor and hands back the old `n`,
        // which becomes the new current value.
        self.c = std::mem::replace(&mut self.n, self.c + self.n);
        Some(self.n)
    }
}
fn main() {
    let mut f = Fib::new();
    // `nth(25)` skips 25 yields and prints the iterator's 26th value
    // (the sequence of yields starts at fib(2)).
    dbg!( f.nth(25) );
    // print the first 51 values the iterator yields, numbered from 0
    let ff = Fib::new();
    for (i, n) in ff.enumerate().take(51) {
        println!("{i:3}: {n}");
    }
}
#![feature(box_syntax, slice_get_slice)]
use std::fs::File;
use std::io::{self, Read, Write};
use std::ops::{Index, IndexMut};
use std::path::Path;
use std::slice::SliceIndex;
// Sizes
pub const PAGE: usize = 0x100;              // 256 bytes
pub const BANK: usize = PAGE * 256;         // 64 KiB
pub const MEMORY: usize = BANK * 256;       // 16 MiB of addressable memory
pub const FULL_MEMORY: usize = MEMORY + 8;  // slack bytes past the end -- presumably so multi-byte reads at the top stay in bounds; confirm
pub const SAMPLE_RATE: usize = PAGE * 60;   // 15360 -- presumably audio samples/sec (256 per 60Hz frame); confirm
// Offsets (into memory; see Cpu methods for how each is used)
pub const INPUT: usize = 0;  // 2 bytes of input state (Cpu::process_input)
pub const PC: usize = 2;     // 3-byte address of the program counter (Memory::address_at)
pub const VIDEO: usize = 5;  // bank index of the video page (Cpu::get_video_slice)
pub const AUDIO: usize = 6;  // bank+page of the audio buffer (Cpu::get_audio_slice)
type CpuResult = io::Result<()>;
/// Flat byte memory backing the CPU, boxed to keep the ~16 MiB array
/// off the stack.
pub struct Memory {
    inner: Box<[u8; FULL_MEMORY]>,
}
impl Memory {
    /// Zero-filled memory.
    pub fn new() -> Self {
        Self { inner: box [0; FULL_MEMORY] }
    }
    /// Decodes the 3-byte big-endian address stored at `offset`:
    /// byte0 * BANK + byte1 * PAGE + byte2.
    pub fn address_at(&self, offset: usize) -> usize {
        self.inner[offset] as usize * BANK + self.inner[offset + 1] as usize * PAGE +
        self.inner[offset + 2] as usize
    }
    /// Resets all memory to zero (allocates a fresh array).
    pub fn clear(&mut self) {
        self.inner = box [0; FULL_MEMORY];
    }
    /// Writes `f(i)` into PAGE bytes starting at `offset << 16`.
    /// NOTE(review): the shift makes `offset` a BANK stride (0x10000)
    /// while only PAGE bytes are filled; `fill_bank` below pairs a PAGE
    /// stride with a BANK-sized fill. The two shifts look swapped --
    /// confirm against callers before changing.
    pub fn fill_page(&mut self, offset: u8, f: fn(usize) -> u8) {
        let offset = (offset as usize) << 16;
        for i in 0..PAGE {
            self.inner[offset + i] = f(i);
        }
    }
    /// Writes `f(i)` into BANK bytes starting at `offset << 8`.
    /// NOTE(review): see fill_page -- the stride/size pairing looks inverted.
    pub fn fill_bank(&mut self, offset: u16, f: fn(usize) -> u8) {
        let offset = (offset as usize) << 8;
        for i in 0..BANK {
            self.inner[offset + i] = f(i);
        }
    }
}
/// Read indexing delegates to the inner byte array, so `mem[i]` and
/// `mem[a..b]` work with any slice-index form.
impl<I: SliceIndex<[u8]>> Index<I> for Memory {
    type Output = I::Output;
    fn index(&self, index: I) -> &Self::Output {
        &self.inner[index]
    }
}
/// Mutable counterpart of the `Index` impl above.
impl<I: SliceIndex<[u8]>> IndexMut<I> for Memory {
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        &mut self.inner[index]
    }
}
/// The machine: its memory plus the program counter and the per-frame
/// cycle counter.
pub struct Cpu {
    pub memory: Memory,
    pc: u32,   // address of the current instruction
    step: u32, // cycles executed in the current frame (wraps at 65536)
}
impl Cpu {
    /// CPU with zeroed memory and registers.
    pub fn new() -> Self {
        Self {
            memory: Memory::new(),
            pc: 0,
            step: 0,
        }
    }
    /// Default palette: a 6x6x6 color cube over indices 0x00..=0xd7
    /// (steps of 0x33 per channel); remaining entries stay zero.
    pub fn default_palette() -> [[u8; 4]; 256] {
        let mut palette = [[0; 4]; 256];
        for index in 0..256 {
            match index {
                // `..=` replaces the deprecated `...` range-pattern syntax.
                0x00..=0xd7 => {
                    palette[index] = [
                        (index as u32 / 36 * 0x33) as u8,
                        (index as u32 / 6 % 6 * 0x33) as u8,
                        (index as u32 % 6 * 0x33) as u8,
                        0,
                    ]
                }
                _ => (),
            }
        }
        palette
    }
    /// Loads a program file into the start of memory, truncated to
    /// `MEMORY` bytes.
    ///
    /// Fix: a single `Read::read` call may legally return after reading
    /// only part of the file, so the file is now read to the end and
    /// copied in.
    pub fn load_file<P: AsRef<Path>>(&mut self, file: P) -> CpuResult {
        let mut file = File::open(file)?;
        let mut data = Vec::new();
        file.read_to_end(&mut data)?;
        let len = data.len().min(MEMORY);
        self.memory[..len].copy_from_slice(&data[..len]);
        Ok(())
    }
    /// Copies `data` into the start of memory (silently truncated to
    /// `MEMORY` bytes, matching `Write` for byte slices).
    pub fn load_data(&mut self, data: &[u8]) -> CpuResult {
        (&mut self.memory[..MEMORY]).write(data)?;
        Ok(())
    }
    /// Zeroes all memory.
    pub fn clear_memory(&mut self) {
        self.memory.clear();
    }
    /// Stores the two input bytes at the INPUT offset.
    pub fn process_input(&mut self, input: (u8, u8)) {
        self.memory[INPUT] = input.0;
        self.memory[INPUT + 1] = input.1;
    }
    /// Executes one instruction: copy the byte at `src` to `dst`, then
    /// jump. At the start of each frame (step 0) the PC is reloaded
    /// from the pointer stored at the PC offset.
    pub fn cycle(&mut self) {
        if self.step == 0 {
            self.pc = self.memory.address_at(PC) as u32;
        }
        self.step += 1;
        let pc = self.pc as usize;
        let src = self.memory.address_at(pc);
        let byte = self.memory[src];
        let dst = self.memory.address_at(pc + 3);
        self.memory[dst] = byte;
        self.pc = self.memory.address_at(pc + 6) as u32;
        if self.step > 65535 {
            self.step = 0;
        }
    }
    /// Runs cycles until the current 65536-cycle frame is complete.
    pub fn finish_frame(&mut self) {
        for _ in self.step..65536 {
            self.cycle();
        }
    }
    /// Current program counter.
    pub fn pc(&self) -> u32 { self.pc }
    /// Cycles executed in the current frame.
    pub fn step(&self) -> u32 { self.step }
    /// The BANK-sized video region selected by the byte at VIDEO.
    pub fn get_video_slice(&self) -> &[u8] {
        let offset = self.memory[VIDEO] as usize * BANK;
        &self.memory[offset..offset + BANK]
    }
    /// The PAGE-sized audio buffer addressed by the two bytes at AUDIO.
    pub fn get_audio_slice(&self) -> &[u8] {
        let offset = self.memory[AUDIO] as usize * BANK + self.memory[AUDIO + 1] as usize * PAGE;
        &self.memory[offset..offset + PAGE]
    }
}
|
use crate::auth;
use crate::handlers::types::*;
use crate::Pool;
use actix_web::{web, Error, HttpResponse};
use actix_web_httpauth::extractors::bearer::BearerAuth;
use crate::controllers::email_setting_controller::*;
/// Handler: validates the bearer token, then updates the email settings for
/// the space named in the path.
///
/// All outcomes are returned as HTTP 200; success vs. failure is conveyed in
/// the JSON body (`Response` for DB errors, `ResponseError` for auth
/// failures).
pub async fn update_email_details(
    db: web::Data<Pool>,
    auth: BearerAuth,
    space_name: web::Path<PathInfo>,
    item: web::Json<AddEmail>,
) -> Result<HttpResponse, Error> {
    match auth::validate_token(&auth.token().to_string()) {
        Ok(res) => {
            // `res` is already a bool; comparing against `true` was redundant.
            if res {
                Ok(web::block(move || {
                    update_email_details_db(db, auth.token().to_string(), space_name, item)
                })
                .await
                .map(|response| HttpResponse::Ok().json(response))
                .map_err(|_| {
                    HttpResponse::Ok().json(Response::new(
                        false,
                        "Error updating email details".to_string(),
                    ))
                })?)
            } else {
                Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string())))
            }
        }
        Err(_) => Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))),
    }
}
/// Handler: validates the bearer token, then deletes the email settings for
/// the space named in the path.
///
/// All outcomes are returned as HTTP 200; success vs. failure is conveyed in
/// the JSON body.
pub async fn delete_email_setting(
    db: web::Data<Pool>,
    auth: BearerAuth,
    space_name: web::Path<PathInfo>,
) -> Result<HttpResponse, Error> {
    match auth::validate_token(&auth.token().to_string()) {
        Ok(res) => {
            // `res` is already a bool; comparing against `true` was redundant.
            if res {
                Ok(web::block(move || {
                    delete_email_setting_db(db, auth.token().to_string(), space_name)
                })
                .await
                .map(|response| HttpResponse::Ok().json(response))
                .map_err(|_| {
                    HttpResponse::Ok().json(Response::new(
                        false,
                        "Error deleting email details".to_string(),
                    ))
                })?)
            } else {
                Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string())))
            }
        }
        Err(_) => Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))),
    }
}
/// Handler: validates the bearer token, then fetches the email settings for
/// the space named in the path.
///
/// All outcomes are returned as HTTP 200; success vs. failure is conveyed in
/// the JSON body.
pub async fn get_email_setting(
    db: web::Data<Pool>,
    auth: BearerAuth,
    space_name: web::Path<PathInfo>,
) -> Result<HttpResponse, Error> {
    match auth::validate_token(&auth.token().to_string()) {
        Ok(res) => {
            // `res` is already a bool; comparing against `true` was redundant.
            if res {
                Ok(web::block(move || {
                    get_email_setting_db(db, auth.token().to_string(), space_name)
                })
                .await
                .map(|response| HttpResponse::Ok().json(response))
                .map_err(|_| {
                    HttpResponse::Ok().json(Response::new(
                        false,
                        // Fixed copy-paste from the delete handler: this is
                        // the *get* path.
                        "Error getting email details".to_string(),
                    ))
                })?)
            } else {
                Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string())))
            }
        }
        Err(_) => Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))),
    }
}
|
use std::ops::RangeInclusive;
use crate::parser::errors::CustomError;
use crate::parser::prelude::*;
use crate::parser::trivia::from_utf8_unchecked;
use toml_datetime::*;
use winnow::combinator::alt;
use winnow::combinator::cut_err;
use winnow::combinator::opt;
use winnow::combinator::preceded;
use winnow::token::one_of;
use winnow::token::take_while;
use winnow::trace::trace;
// ;; Date and Time (as defined in RFC 3339)
// date-time = offset-date-time / local-date-time / local-date / local-time
// offset-date-time = full-date time-delim full-time
// local-date-time = full-date time-delim partial-time
// local-date = full-date
// local-time = partial-time
// full-time = partial-time time-offset
/// Parses any RFC 3339 / TOML date-time form into a `Datetime`.
///
/// Tries, in order: a full date optionally followed by a time (itself with an
/// optional offset), then a bare time. The outer `opt(...)` is what lets a
/// plain `full-date` succeed as a local date.
pub(crate) fn date_time(input: &mut Input<'_>) -> PResult<Datetime> {
    trace(
        "date-time",
        alt((
            (full_date, opt((time_delim, partial_time, opt(time_offset))))
                .map(|(date, opt)| {
                    match opt {
                        // Offset Date-Time
                        // (`offset` is itself an Option, so this arm also
                        // covers the local date-time case.)
                        Some((_, time, offset)) => Datetime {
                            date: Some(date),
                            time: Some(time),
                            offset,
                        },
                        // Local Date
                        None => Datetime {
                            date: Some(date),
                            time: None,
                            offset: None,
                        },
                    }
                })
                .context(StrContext::Label("date-time")),
            partial_time
                .map(|t| t.into())
                .context(StrContext::Label("time")),
        )),
    )
    .parse_next(input)
}
// full-date = date-fullyear "-" date-month "-" date-mday
/// Parses a `YYYY-MM-DD` calendar date.
///
/// After the year and first `-`, a malformed month/day is a hard error
/// (`cut_err`) rather than a backtrack.
pub(crate) fn full_date(input: &mut Input<'_>) -> PResult<Date> {
    let fields = (date_fullyear, b'-', cut_err((date_month, b'-', date_mday)));
    trace(
        "full-date",
        fields.map(|(y, _, (m, _, d))| Date {
            year: y,
            month: m,
            day: d,
        }),
    )
    .parse_next(input)
}
// partial-time = time-hour ":" time-minute ":" time-second [time-secfrac]
/// Parses `HH:MM:SS` with an optional fractional-second part.
///
/// Once the hour and `:` are consumed, the remainder is a hard error
/// (`cut_err`), so e.g. `12:` cannot backtrack into another alternative.
pub(crate) fn partial_time(input: &mut Input<'_>) -> PResult<Time> {
    trace(
        "partial-time",
        (
            time_hour,
            b':',
            cut_err((time_minute, b':', time_second, opt(time_secfrac))),
        )
            .map(|(hour, _, (minute, _, second, nanosecond))| Time {
                hour,
                minute,
                second,
                // No fraction parsed => zero nanoseconds.
                nanosecond: nanosecond.unwrap_or_default(),
            }),
    )
    .parse_next(input)
}
// time-offset = "Z" / time-numoffset
// time-numoffset = ( "+" / "-" ) time-hour ":" time-minute
/// Parses a UTC offset: either `Z`/`z` or a signed `HH:MM` pair.
///
/// The numeric form is converted to signed whole minutes and must lie within
/// +/-24 hours (inclusive).
pub(crate) fn time_offset(input: &mut Input<'_>) -> PResult<Offset> {
    trace(
        "time-offset",
        alt((
            one_of((b'Z', b'z')).value(Offset::Z),
            (
                one_of((b'+', b'-')),
                // Once a sign is seen, a malformed HH:MM is a hard error.
                cut_err((time_hour, b':', time_minute)),
            )
                .map(|(sign, (hours, _, minutes))| {
                    let sign = match sign {
                        b'+' => 1,
                        b'-' => -1,
                        _ => unreachable!("Parser prevents this"),
                    };
                    sign * (hours as i16 * 60 + minutes as i16)
                })
                .verify(|minutes| ((-24 * 60)..=(24 * 60)).contains(minutes))
                .map(|minutes| Offset::Custom { minutes }),
        ))
        .context(StrContext::Label("time offset")),
    )
    .parse_next(input)
}
// date-fullyear = 4DIGIT
/// Parses exactly four digits as a year.
///
/// `expect` is safe: four ASCII digits are at most 9999, which fits `u16`
/// (the old message incorrectly said `u8`).
pub(crate) fn date_fullyear(input: &mut Input<'_>) -> PResult<u16> {
    unsigned_digits::<4, 4>
        .map(|s: &str| s.parse::<u16>().expect("4DIGIT should match u16"))
        .parse_next(input)
}
// date-month = 2DIGIT ; 01-12
/// Parses a two-digit month and checks it falls in `01..=12`.
pub(crate) fn date_month(input: &mut Input<'_>) -> PResult<u8> {
    unsigned_digits::<2, 2>
        .try_map(|digits: &str| {
            let month = digits.parse::<u8>().expect("2DIGIT should match u8");
            (1..=12)
                .contains(&month)
                .then_some(month)
                .ok_or(CustomError::OutOfRange)
        })
        .parse_next(input)
}
// date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on month/year
/// Parses a two-digit day-of-month and checks it falls in `01..=31`
/// (per-month/leap-year validation is not done here).
pub(crate) fn date_mday(input: &mut Input<'_>) -> PResult<u8> {
    unsigned_digits::<2, 2>
        .try_map(|digits: &str| {
            let day = digits.parse::<u8>().expect("2DIGIT should match u8");
            (1..=31)
                .contains(&day)
                .then_some(day)
                .ok_or(CustomError::OutOfRange)
        })
        .parse_next(input)
}
// time-delim = "T" / %x20 ; T, t, or space
/// Consumes the single byte separating the date from the time.
pub(crate) fn time_delim(input: &mut Input<'_>) -> PResult<u8> {
    one_of(TIME_DELIM).parse_next(input)
}
// Accepted delimiter bytes, in the tuple form `one_of` takes.
const TIME_DELIM: (u8, u8, u8) = (b'T', b't', b' ');
// time-hour = 2DIGIT ; 00-23
/// Parses a two-digit hour and checks it falls in `00..=23`.
pub(crate) fn time_hour(input: &mut Input<'_>) -> PResult<u8> {
    unsigned_digits::<2, 2>
        .try_map(|digits: &str| {
            let hour = digits.parse::<u8>().expect("2DIGIT should match u8");
            (0..=23)
                .contains(&hour)
                .then_some(hour)
                .ok_or(CustomError::OutOfRange)
        })
        .parse_next(input)
}
// time-minute = 2DIGIT ; 00-59
/// Parses a two-digit minute and checks it falls in `00..=59`.
pub(crate) fn time_minute(input: &mut Input<'_>) -> PResult<u8> {
    unsigned_digits::<2, 2>
        .try_map(|digits: &str| {
            let minute = digits.parse::<u8>().expect("2DIGIT should match u8");
            (0..=59)
                .contains(&minute)
                .then_some(minute)
                .ok_or(CustomError::OutOfRange)
        })
        .parse_next(input)
}
// time-second = 2DIGIT ; 00-58, 00-59, 00-60 based on leap second rules
/// Parses a two-digit second; `60` is accepted to allow leap seconds.
pub(crate) fn time_second(input: &mut Input<'_>) -> PResult<u8> {
    unsigned_digits::<2, 2>
        .try_map(|digits: &str| {
            let second = digits.parse::<u8>().expect("2DIGIT should match u8");
            (0..=60)
                .contains(&second)
                .then_some(second)
                .ok_or(CustomError::OutOfRange)
        })
        .parse_next(input)
}
// time-secfrac = "." 1*DIGIT
/// Parses `.` plus at least one digit and returns the fraction in
/// nanoseconds.
///
/// At most 9 digits are significant; extra digits are truncated (not
/// rounded), per the spec text quoted inside.
pub(crate) fn time_secfrac(input: &mut Input<'_>) -> PResult<u32> {
    // SCALE[k] converts a k-digit fraction to nanoseconds (10^(9-k)); the
    // 0-digit entry is unreachable since at least one digit is required.
    static SCALE: [u32; 10] = [
        0,
        100_000_000,
        10_000_000,
        1_000_000,
        100_000,
        10_000,
        1_000,
        100,
        10,
        1,
    ];
    const INF: usize = usize::MAX;
    preceded(b'.', unsigned_digits::<1, INF>)
        .try_map(|mut repr: &str| -> Result<u32, CustomError> {
            let max_digits = SCALE.len() - 1;
            if max_digits < repr.len() {
                // Millisecond precision is required. Further precision of fractional seconds is
                // implementation-specific. If the value contains greater precision than the
                // implementation can support, the additional precision must be truncated, not rounded.
                repr = &repr[0..max_digits];
            }
            let v = repr.parse::<u32>().map_err(|_| CustomError::OutOfRange)?;
            let num_digits = repr.len();
            // scale the number accordingly.
            let scale = SCALE.get(num_digits).ok_or(CustomError::OutOfRange)?;
            let v = v.checked_mul(*scale).ok_or(CustomError::OutOfRange)?;
            Ok(v)
        })
        .parse_next(input)
}
/// Recognizes between `MIN` and `MAX` ASCII digits and returns them as a
/// `&str` borrowed from the input.
pub(crate) fn unsigned_digits<'i, const MIN: usize, const MAX: usize>(
    input: &mut Input<'i>,
) -> PResult<&'i str> {
    take_while(MIN..=MAX, DIGIT)
        // SAFETY: `DIGIT` only matches ASCII bytes, so the slice is valid
        // UTF-8. (Message typo fixed: "on-ASCII" -> "non-ASCII".)
        .map(|b: &[u8]| unsafe { from_utf8_unchecked(b, "`is_ascii_digit` filters out non-ASCII") })
        .parse_next(input)
}
// DIGIT = %x30-39 ; 0-9
const DIGIT: RangeInclusive<u8> = b'0'..=b'9';
#[cfg(test)]
mod test {
    use super::*;
    // Table-driven tests: each case pairs an input string with the expected
    // parsed `Datetime`.
    #[test]
    fn offset_date_time() {
        let inputs = [
            (
                "1979-05-27T07:32:00Z",
                Datetime {
                    date: Some(Date {
                        year: 1979,
                        month: 5,
                        day: 27,
                    }),
                    time: Some(Time {
                        hour: 7,
                        minute: 32,
                        second: 0,
                        nanosecond: 0,
                    }),
                    offset: Some(Offset::Z),
                },
            ),
            (
                "1979-05-27T00:32:00-07:00",
                Datetime {
                    date: Some(Date {
                        year: 1979,
                        month: 5,
                        day: 27,
                    }),
                    time: Some(Time {
                        hour: 0,
                        minute: 32,
                        second: 0,
                        nanosecond: 0,
                    }),
                    // Numeric offsets are stored as signed whole minutes.
                    offset: Some(Offset::Custom { minutes: -7 * 60 }),
                },
            ),
            (
                "1979-05-27T00:32:00-00:36",
                Datetime {
                    date: Some(Date {
                        year: 1979,
                        month: 5,
                        day: 27,
                    }),
                    time: Some(Time {
                        hour: 0,
                        minute: 32,
                        second: 0,
                        nanosecond: 0,
                    }),
                    offset: Some(Offset::Custom { minutes: -36 }),
                },
            ),
            (
                "1979-05-27T00:32:00.999999",
                Datetime {
                    date: Some(Date {
                        year: 1979,
                        month: 5,
                        day: 27,
                    }),
                    time: Some(Time {
                        hour: 0,
                        minute: 32,
                        second: 0,
                        // 6 fractional digits scaled up to nanoseconds.
                        nanosecond: 999999000,
                    }),
                    offset: None,
                },
            ),
        ];
        for (input, expected) in inputs {
            dbg!(input);
            let actual = date_time.parse(new_input(input)).unwrap();
            assert_eq!(expected, actual);
        }
    }
    // Date + time without an offset parses as a local date-time.
    #[test]
    fn local_date_time() {
        let inputs = [
            (
                "1979-05-27T07:32:00",
                Datetime {
                    date: Some(Date {
                        year: 1979,
                        month: 5,
                        day: 27,
                    }),
                    time: Some(Time {
                        hour: 7,
                        minute: 32,
                        second: 0,
                        nanosecond: 0,
                    }),
                    offset: None,
                },
            ),
            (
                "1979-05-27T00:32:00.999999",
                Datetime {
                    date: Some(Date {
                        year: 1979,
                        month: 5,
                        day: 27,
                    }),
                    time: Some(Time {
                        hour: 0,
                        minute: 32,
                        second: 0,
                        nanosecond: 999999000,
                    }),
                    offset: None,
                },
            ),
        ];
        for (input, expected) in inputs {
            dbg!(input);
            let actual = date_time.parse(new_input(input)).unwrap();
            assert_eq!(expected, actual);
        }
    }
    // A bare date parses with no time and no offset.
    #[test]
    fn local_date() {
        let inputs = [
            (
                "1979-05-27",
                Datetime {
                    date: Some(Date {
                        year: 1979,
                        month: 5,
                        day: 27,
                    }),
                    time: None,
                    offset: None,
                },
            ),
            (
                "2017-07-20",
                Datetime {
                    date: Some(Date {
                        year: 2017,
                        month: 7,
                        day: 20,
                    }),
                    time: None,
                    offset: None,
                },
            ),
        ];
        for (input, expected) in inputs {
            dbg!(input);
            let actual = date_time.parse(new_input(input)).unwrap();
            assert_eq!(expected, actual);
        }
    }
    // A bare time parses with no date and no offset.
    #[test]
    fn local_time() {
        let inputs = [
            (
                "07:32:00",
                Datetime {
                    date: None,
                    time: Some(Time {
                        hour: 7,
                        minute: 32,
                        second: 0,
                        nanosecond: 0,
                    }),
                    offset: None,
                },
            ),
            (
                "00:32:00.999999",
                Datetime {
                    date: None,
                    time: Some(Time {
                        hour: 0,
                        minute: 32,
                        second: 0,
                        nanosecond: 999999000,
                    }),
                    offset: None,
                },
            ),
        ];
        for (input, expected) in inputs {
            dbg!(input);
            let actual = date_time.parse(new_input(input)).unwrap();
            assert_eq!(expected, actual);
        }
    }
    // More than 9 fractional digits must be truncated, not rejected.
    #[test]
    fn time_fraction_truncated() {
        let input = "1987-07-05T17:45:00.123456789012345Z";
        date_time.parse(new_input(input)).unwrap();
    }
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
#[cfg(feature = "Graphics_DirectX_Direct3D11")]
pub mod Direct3D11;
// Generated WinRT binding: transparent i32 newtype enum with associated
// constants (matches Windows.Graphics.DirectX.DirectXAlphaMode).
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: marker :: Copy, :: core :: clone :: Clone, :: core :: default :: Default, :: core :: fmt :: Debug)]
#[repr(transparent)]
pub struct DirectXAlphaMode(pub i32);
impl DirectXAlphaMode {
    pub const Unspecified: DirectXAlphaMode = DirectXAlphaMode(0i32);
    pub const Premultiplied: DirectXAlphaMode = DirectXAlphaMode(1i32);
    pub const Straight: DirectXAlphaMode = DirectXAlphaMode(2i32);
    pub const Ignore: DirectXAlphaMode = DirectXAlphaMode(3i32);
}
impl ::core::convert::From<i32> for DirectXAlphaMode {
    fn from(value: i32) -> Self {
        Self(value)
    }
}
// `repr(transparent)` makes the type its own ABI representation.
unsafe impl ::windows::core::Abi for DirectXAlphaMode {
    type Abi = Self;
}
unsafe impl ::windows::core::RuntimeType for DirectXAlphaMode {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"enum(Windows.Graphics.DirectX.DirectXAlphaMode;i4)");
}
impl ::windows::core::DefaultType for DirectXAlphaMode {
    type DefaultType = Self;
}
// Generated WinRT binding: transparent i32 newtype enum with associated
// constants (matches Windows.Graphics.DirectX.DirectXColorSpace).
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: marker :: Copy, :: core :: clone :: Clone, :: core :: default :: Default, :: core :: fmt :: Debug)]
#[repr(transparent)]
pub struct DirectXColorSpace(pub i32);
impl DirectXColorSpace {
    pub const RgbFullG22NoneP709: DirectXColorSpace = DirectXColorSpace(0i32);
    pub const RgbFullG10NoneP709: DirectXColorSpace = DirectXColorSpace(1i32);
    pub const RgbStudioG22NoneP709: DirectXColorSpace = DirectXColorSpace(2i32);
    pub const RgbStudioG22NoneP2020: DirectXColorSpace = DirectXColorSpace(3i32);
    pub const Reserved: DirectXColorSpace = DirectXColorSpace(4i32);
    pub const YccFullG22NoneP709X601: DirectXColorSpace = DirectXColorSpace(5i32);
    pub const YccStudioG22LeftP601: DirectXColorSpace = DirectXColorSpace(6i32);
    pub const YccFullG22LeftP601: DirectXColorSpace = DirectXColorSpace(7i32);
    pub const YccStudioG22LeftP709: DirectXColorSpace = DirectXColorSpace(8i32);
    pub const YccFullG22LeftP709: DirectXColorSpace = DirectXColorSpace(9i32);
    pub const YccStudioG22LeftP2020: DirectXColorSpace = DirectXColorSpace(10i32);
    pub const YccFullG22LeftP2020: DirectXColorSpace = DirectXColorSpace(11i32);
    pub const RgbFullG2084NoneP2020: DirectXColorSpace = DirectXColorSpace(12i32);
    pub const YccStudioG2084LeftP2020: DirectXColorSpace = DirectXColorSpace(13i32);
    pub const RgbStudioG2084NoneP2020: DirectXColorSpace = DirectXColorSpace(14i32);
    pub const YccStudioG22TopLeftP2020: DirectXColorSpace = DirectXColorSpace(15i32);
    pub const YccStudioG2084TopLeftP2020: DirectXColorSpace = DirectXColorSpace(16i32);
    pub const RgbFullG22NoneP2020: DirectXColorSpace = DirectXColorSpace(17i32);
    pub const YccStudioGHlgTopLeftP2020: DirectXColorSpace = DirectXColorSpace(18i32);
    pub const YccFullGHlgTopLeftP2020: DirectXColorSpace = DirectXColorSpace(19i32);
    pub const RgbStudioG24NoneP709: DirectXColorSpace = DirectXColorSpace(20i32);
    pub const RgbStudioG24NoneP2020: DirectXColorSpace = DirectXColorSpace(21i32);
    pub const YccStudioG24LeftP709: DirectXColorSpace = DirectXColorSpace(22i32);
    pub const YccStudioG24LeftP2020: DirectXColorSpace = DirectXColorSpace(23i32);
    pub const YccStudioG24TopLeftP2020: DirectXColorSpace = DirectXColorSpace(24i32);
}
impl ::core::convert::From<i32> for DirectXColorSpace {
    fn from(value: i32) -> Self {
        Self(value)
    }
}
// `repr(transparent)` makes the type its own ABI representation.
unsafe impl ::windows::core::Abi for DirectXColorSpace {
    type Abi = Self;
}
unsafe impl ::windows::core::RuntimeType for DirectXColorSpace {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"enum(Windows.Graphics.DirectX.DirectXColorSpace;i4)");
}
impl ::windows::core::DefaultType for DirectXColorSpace {
    type DefaultType = Self;
}
// Generated WinRT binding: transparent i32 newtype enum with associated
// constants (matches Windows.Graphics.DirectX.DirectXPixelFormat). The
// numeric values are intentionally non-contiguous (gaps after 115 and 132).
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: marker :: Copy, :: core :: clone :: Clone, :: core :: default :: Default, :: core :: fmt :: Debug)]
#[repr(transparent)]
pub struct DirectXPixelFormat(pub i32);
impl DirectXPixelFormat {
    pub const Unknown: DirectXPixelFormat = DirectXPixelFormat(0i32);
    pub const R32G32B32A32Typeless: DirectXPixelFormat = DirectXPixelFormat(1i32);
    pub const R32G32B32A32Float: DirectXPixelFormat = DirectXPixelFormat(2i32);
    pub const R32G32B32A32UInt: DirectXPixelFormat = DirectXPixelFormat(3i32);
    pub const R32G32B32A32Int: DirectXPixelFormat = DirectXPixelFormat(4i32);
    pub const R32G32B32Typeless: DirectXPixelFormat = DirectXPixelFormat(5i32);
    pub const R32G32B32Float: DirectXPixelFormat = DirectXPixelFormat(6i32);
    pub const R32G32B32UInt: DirectXPixelFormat = DirectXPixelFormat(7i32);
    pub const R32G32B32Int: DirectXPixelFormat = DirectXPixelFormat(8i32);
    pub const R16G16B16A16Typeless: DirectXPixelFormat = DirectXPixelFormat(9i32);
    pub const R16G16B16A16Float: DirectXPixelFormat = DirectXPixelFormat(10i32);
    pub const R16G16B16A16UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(11i32);
    pub const R16G16B16A16UInt: DirectXPixelFormat = DirectXPixelFormat(12i32);
    pub const R16G16B16A16IntNormalized: DirectXPixelFormat = DirectXPixelFormat(13i32);
    pub const R16G16B16A16Int: DirectXPixelFormat = DirectXPixelFormat(14i32);
    pub const R32G32Typeless: DirectXPixelFormat = DirectXPixelFormat(15i32);
    pub const R32G32Float: DirectXPixelFormat = DirectXPixelFormat(16i32);
    pub const R32G32UInt: DirectXPixelFormat = DirectXPixelFormat(17i32);
    pub const R32G32Int: DirectXPixelFormat = DirectXPixelFormat(18i32);
    pub const R32G8X24Typeless: DirectXPixelFormat = DirectXPixelFormat(19i32);
    pub const D32FloatS8X24UInt: DirectXPixelFormat = DirectXPixelFormat(20i32);
    pub const R32FloatX8X24Typeless: DirectXPixelFormat = DirectXPixelFormat(21i32);
    pub const X32TypelessG8X24UInt: DirectXPixelFormat = DirectXPixelFormat(22i32);
    pub const R10G10B10A2Typeless: DirectXPixelFormat = DirectXPixelFormat(23i32);
    pub const R10G10B10A2UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(24i32);
    pub const R10G10B10A2UInt: DirectXPixelFormat = DirectXPixelFormat(25i32);
    pub const R11G11B10Float: DirectXPixelFormat = DirectXPixelFormat(26i32);
    pub const R8G8B8A8Typeless: DirectXPixelFormat = DirectXPixelFormat(27i32);
    pub const R8G8B8A8UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(28i32);
    pub const R8G8B8A8UIntNormalizedSrgb: DirectXPixelFormat = DirectXPixelFormat(29i32);
    pub const R8G8B8A8UInt: DirectXPixelFormat = DirectXPixelFormat(30i32);
    pub const R8G8B8A8IntNormalized: DirectXPixelFormat = DirectXPixelFormat(31i32);
    pub const R8G8B8A8Int: DirectXPixelFormat = DirectXPixelFormat(32i32);
    pub const R16G16Typeless: DirectXPixelFormat = DirectXPixelFormat(33i32);
    pub const R16G16Float: DirectXPixelFormat = DirectXPixelFormat(34i32);
    pub const R16G16UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(35i32);
    pub const R16G16UInt: DirectXPixelFormat = DirectXPixelFormat(36i32);
    pub const R16G16IntNormalized: DirectXPixelFormat = DirectXPixelFormat(37i32);
    pub const R16G16Int: DirectXPixelFormat = DirectXPixelFormat(38i32);
    pub const R32Typeless: DirectXPixelFormat = DirectXPixelFormat(39i32);
    pub const D32Float: DirectXPixelFormat = DirectXPixelFormat(40i32);
    pub const R32Float: DirectXPixelFormat = DirectXPixelFormat(41i32);
    pub const R32UInt: DirectXPixelFormat = DirectXPixelFormat(42i32);
    pub const R32Int: DirectXPixelFormat = DirectXPixelFormat(43i32);
    pub const R24G8Typeless: DirectXPixelFormat = DirectXPixelFormat(44i32);
    pub const D24UIntNormalizedS8UInt: DirectXPixelFormat = DirectXPixelFormat(45i32);
    pub const R24UIntNormalizedX8Typeless: DirectXPixelFormat = DirectXPixelFormat(46i32);
    pub const X24TypelessG8UInt: DirectXPixelFormat = DirectXPixelFormat(47i32);
    pub const R8G8Typeless: DirectXPixelFormat = DirectXPixelFormat(48i32);
    pub const R8G8UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(49i32);
    pub const R8G8UInt: DirectXPixelFormat = DirectXPixelFormat(50i32);
    pub const R8G8IntNormalized: DirectXPixelFormat = DirectXPixelFormat(51i32);
    pub const R8G8Int: DirectXPixelFormat = DirectXPixelFormat(52i32);
    pub const R16Typeless: DirectXPixelFormat = DirectXPixelFormat(53i32);
    pub const R16Float: DirectXPixelFormat = DirectXPixelFormat(54i32);
    pub const D16UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(55i32);
    pub const R16UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(56i32);
    pub const R16UInt: DirectXPixelFormat = DirectXPixelFormat(57i32);
    pub const R16IntNormalized: DirectXPixelFormat = DirectXPixelFormat(58i32);
    pub const R16Int: DirectXPixelFormat = DirectXPixelFormat(59i32);
    pub const R8Typeless: DirectXPixelFormat = DirectXPixelFormat(60i32);
    pub const R8UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(61i32);
    pub const R8UInt: DirectXPixelFormat = DirectXPixelFormat(62i32);
    pub const R8IntNormalized: DirectXPixelFormat = DirectXPixelFormat(63i32);
    pub const R8Int: DirectXPixelFormat = DirectXPixelFormat(64i32);
    pub const A8UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(65i32);
    pub const R1UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(66i32);
    pub const R9G9B9E5SharedExponent: DirectXPixelFormat = DirectXPixelFormat(67i32);
    pub const R8G8B8G8UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(68i32);
    pub const G8R8G8B8UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(69i32);
    pub const BC1Typeless: DirectXPixelFormat = DirectXPixelFormat(70i32);
    pub const BC1UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(71i32);
    pub const BC1UIntNormalizedSrgb: DirectXPixelFormat = DirectXPixelFormat(72i32);
    pub const BC2Typeless: DirectXPixelFormat = DirectXPixelFormat(73i32);
    pub const BC2UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(74i32);
    pub const BC2UIntNormalizedSrgb: DirectXPixelFormat = DirectXPixelFormat(75i32);
    pub const BC3Typeless: DirectXPixelFormat = DirectXPixelFormat(76i32);
    pub const BC3UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(77i32);
    pub const BC3UIntNormalizedSrgb: DirectXPixelFormat = DirectXPixelFormat(78i32);
    pub const BC4Typeless: DirectXPixelFormat = DirectXPixelFormat(79i32);
    pub const BC4UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(80i32);
    pub const BC4IntNormalized: DirectXPixelFormat = DirectXPixelFormat(81i32);
    pub const BC5Typeless: DirectXPixelFormat = DirectXPixelFormat(82i32);
    pub const BC5UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(83i32);
    pub const BC5IntNormalized: DirectXPixelFormat = DirectXPixelFormat(84i32);
    pub const B5G6R5UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(85i32);
    pub const B5G5R5A1UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(86i32);
    pub const B8G8R8A8UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(87i32);
    pub const B8G8R8X8UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(88i32);
    pub const R10G10B10XRBiasA2UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(89i32);
    pub const B8G8R8A8Typeless: DirectXPixelFormat = DirectXPixelFormat(90i32);
    pub const B8G8R8A8UIntNormalizedSrgb: DirectXPixelFormat = DirectXPixelFormat(91i32);
    pub const B8G8R8X8Typeless: DirectXPixelFormat = DirectXPixelFormat(92i32);
    pub const B8G8R8X8UIntNormalizedSrgb: DirectXPixelFormat = DirectXPixelFormat(93i32);
    pub const BC6HTypeless: DirectXPixelFormat = DirectXPixelFormat(94i32);
    pub const BC6H16UnsignedFloat: DirectXPixelFormat = DirectXPixelFormat(95i32);
    pub const BC6H16Float: DirectXPixelFormat = DirectXPixelFormat(96i32);
    pub const BC7Typeless: DirectXPixelFormat = DirectXPixelFormat(97i32);
    pub const BC7UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(98i32);
    pub const BC7UIntNormalizedSrgb: DirectXPixelFormat = DirectXPixelFormat(99i32);
    pub const Ayuv: DirectXPixelFormat = DirectXPixelFormat(100i32);
    pub const Y410: DirectXPixelFormat = DirectXPixelFormat(101i32);
    pub const Y416: DirectXPixelFormat = DirectXPixelFormat(102i32);
    pub const NV12: DirectXPixelFormat = DirectXPixelFormat(103i32);
    pub const P010: DirectXPixelFormat = DirectXPixelFormat(104i32);
    pub const P016: DirectXPixelFormat = DirectXPixelFormat(105i32);
    pub const Opaque420: DirectXPixelFormat = DirectXPixelFormat(106i32);
    pub const Yuy2: DirectXPixelFormat = DirectXPixelFormat(107i32);
    pub const Y210: DirectXPixelFormat = DirectXPixelFormat(108i32);
    pub const Y216: DirectXPixelFormat = DirectXPixelFormat(109i32);
    pub const NV11: DirectXPixelFormat = DirectXPixelFormat(110i32);
    pub const AI44: DirectXPixelFormat = DirectXPixelFormat(111i32);
    pub const IA44: DirectXPixelFormat = DirectXPixelFormat(112i32);
    pub const P8: DirectXPixelFormat = DirectXPixelFormat(113i32);
    pub const A8P8: DirectXPixelFormat = DirectXPixelFormat(114i32);
    pub const B4G4R4A4UIntNormalized: DirectXPixelFormat = DirectXPixelFormat(115i32);
    pub const P208: DirectXPixelFormat = DirectXPixelFormat(130i32);
    pub const V208: DirectXPixelFormat = DirectXPixelFormat(131i32);
    pub const V408: DirectXPixelFormat = DirectXPixelFormat(132i32);
    pub const SamplerFeedbackMinMipOpaque: DirectXPixelFormat = DirectXPixelFormat(189i32);
    pub const SamplerFeedbackMipRegionUsedOpaque: DirectXPixelFormat = DirectXPixelFormat(190i32);
}
impl ::core::convert::From<i32> for DirectXPixelFormat {
    fn from(value: i32) -> Self {
        Self(value)
    }
}
// `repr(transparent)` makes the type its own ABI representation.
unsafe impl ::windows::core::Abi for DirectXPixelFormat {
    type Abi = Self;
}
unsafe impl ::windows::core::RuntimeType for DirectXPixelFormat {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"enum(Windows.Graphics.DirectX.DirectXPixelFormat;i4)");
}
impl ::windows::core::DefaultType for DirectXPixelFormat {
    type DefaultType = Self;
}
// Generated WinRT binding: transparent i32 newtype enum with associated
// constants (matches Windows.Graphics.DirectX.DirectXPrimitiveTopology).
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: marker :: Copy, :: core :: clone :: Clone, :: core :: default :: Default, :: core :: fmt :: Debug)]
#[repr(transparent)]
pub struct DirectXPrimitiveTopology(pub i32);
impl DirectXPrimitiveTopology {
    pub const Undefined: DirectXPrimitiveTopology = DirectXPrimitiveTopology(0i32);
    pub const PointList: DirectXPrimitiveTopology = DirectXPrimitiveTopology(1i32);
    pub const LineList: DirectXPrimitiveTopology = DirectXPrimitiveTopology(2i32);
    pub const LineStrip: DirectXPrimitiveTopology = DirectXPrimitiveTopology(3i32);
    pub const TriangleList: DirectXPrimitiveTopology = DirectXPrimitiveTopology(4i32);
    pub const TriangleStrip: DirectXPrimitiveTopology = DirectXPrimitiveTopology(5i32);
}
impl ::core::convert::From<i32> for DirectXPrimitiveTopology {
    fn from(value: i32) -> Self {
        Self(value)
    }
}
// `repr(transparent)` makes the type its own ABI representation.
unsafe impl ::windows::core::Abi for DirectXPrimitiveTopology {
    type Abi = Self;
}
unsafe impl ::windows::core::RuntimeType for DirectXPrimitiveTopology {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"enum(Windows.Graphics.DirectX.DirectXPrimitiveTopology;i4)");
}
impl ::windows::core::DefaultType for DirectXPrimitiveTopology {
    type DefaultType = Self;
}
|
#[doc = r"Value read from the register"]
pub struct R {
bits: u8,
}
#[doc = r"Value to write to the register"]
pub struct W {
bits: u8,
}
impl super::RXCSRH3 {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: the closure sees the current value in `R` and
        // edits a `W` seeded with the same bits.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, `W` starts from the reset value, not the current
        // register contents.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u8 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
#[doc = r"Value of the field"]
pub struct USB_RXCSRH3_INCOMPRXR {
bits: bool,
}
impl USB_RXCSRH3_INCOMPRXR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _USB_RXCSRH3_INCOMPRXW<'a> {
w: &'a mut W,
}
impl<'a> _USB_RXCSRH3_INCOMPRXW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 0);
self.w.bits |= ((value as u8) & 1) << 0;
self.w
}
}
#[doc = r"Value of the field"]
pub struct USB_RXCSRH3_DTR {
bits: bool,
}
impl USB_RXCSRH3_DTR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _USB_RXCSRH3_DTW<'a> {
w: &'a mut W,
}
impl<'a> _USB_RXCSRH3_DTW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 1);
self.w.bits |= ((value as u8) & 1) << 1;
self.w
}
}
#[doc = r"Value of the field"]
pub struct USB_RXCSRH3_DTWER {
bits: bool,
}
impl USB_RXCSRH3_DTWER {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _USB_RXCSRH3_DTWEW<'a> {
w: &'a mut W,
}
impl<'a> _USB_RXCSRH3_DTWEW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 2);
self.w.bits |= ((value as u8) & 1) << 2;
self.w
}
}
#[doc = r"Value of the field"]
pub struct USB_RXCSRH3_DMAMODR {
bits: bool,
}
impl USB_RXCSRH3_DMAMODR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _USB_RXCSRH3_DMAMODW<'a> {
w: &'a mut W,
}
impl<'a> _USB_RXCSRH3_DMAMODW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 3);
self.w.bits |= ((value as u8) & 1) << 3;
self.w
}
}
#[doc = r"Value of the field"]
pub struct USB_RXCSRH3_PIDERRR {
bits: bool,
}
impl USB_RXCSRH3_PIDERRR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _USB_RXCSRH3_PIDERRW<'a> {
w: &'a mut W,
}
impl<'a> _USB_RXCSRH3_PIDERRW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 4);
self.w.bits |= ((value as u8) & 1) << 4;
self.w
}
}
#[doc = r"Value of the field"]
pub struct USB_RXCSRH3_DMAENR {
bits: bool,
}
impl USB_RXCSRH3_DMAENR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _USB_RXCSRH3_DMAENW<'a> {
w: &'a mut W,
}
impl<'a> _USB_RXCSRH3_DMAENW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 5);
self.w.bits |= ((value as u8) & 1) << 5;
self.w
}
}
#[doc = r"Value of the field"]
pub struct USB_RXCSRH3_AUTORQR {
    bits: bool,
}
impl USB_RXCSRH3_AUTORQR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
#[doc = r"Proxy"]
pub struct _USB_RXCSRH3_AUTORQW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH3_AUTORQW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, val: bool) -> &'a mut W {
        // Clear bit 6 and merge the new value in a single assignment.
        self.w.bits = (self.w.bits & !(1 << 6)) | (u8::from(val) << 6);
        self.w
    }
}
#[doc = r"Value of the field"]
pub struct USB_RXCSRH3_AUTOCLR {
    bits: bool,
}
impl USB_RXCSRH3_AUTOCLR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
#[doc = r"Proxy"]
pub struct _USB_RXCSRH3_AUTOCLW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH3_AUTOCLW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, val: bool) -> &'a mut W {
        // Clear bit 7 and merge the new value in a single assignment.
        self.w.bits = (self.w.bits & !(1 << 7)) | (u8::from(val) << 7);
        self.w
    }
}
#[doc = r"Value of the field"]
pub struct USB_RXCSRH3_DISNYETR {
    bits: bool,
}
impl USB_RXCSRH3_DISNYETR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
#[doc = r"Proxy"]
pub struct _USB_RXCSRH3_DISNYETW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH3_DISNYETW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, val: bool) -> &'a mut W {
        // Bit 4 is shared with PIDERR (host-mode view of the same bit).
        self.w.bits = (self.w.bits & !(1 << 4)) | (u8::from(val) << 4);
        self.w
    }
}
#[doc = r"Value of the field"]
pub struct USB_RXCSRH3_ISOR {
    bits: bool,
}
impl USB_RXCSRH3_ISOR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
#[doc = r"Proxy"]
pub struct _USB_RXCSRH3_ISOW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH3_ISOW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, val: bool) -> &'a mut W {
        // Bit 6 is shared with AUTORQ (mode-dependent view of the same bit).
        self.w.bits = (self.w.bits & !(1 << 6)) | (u8::from(val) << 6);
        self.w
    }
}
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
    #[doc = "Bit 0 - Incomplete RX Transmission Status"]
    #[inline(always)]
    pub fn usb_rxcsrh3_incomprx(&self) -> USB_RXCSRH3_INCOMPRXR {
        USB_RXCSRH3_INCOMPRXR {
            bits: self.bits & (1 << 0) != 0,
        }
    }
    #[doc = "Bit 1 - Data Toggle"]
    #[inline(always)]
    pub fn usb_rxcsrh3_dt(&self) -> USB_RXCSRH3_DTR {
        USB_RXCSRH3_DTR {
            bits: self.bits & (1 << 1) != 0,
        }
    }
    #[doc = "Bit 2 - Data Toggle Write Enable"]
    #[inline(always)]
    pub fn usb_rxcsrh3_dtwe(&self) -> USB_RXCSRH3_DTWER {
        USB_RXCSRH3_DTWER {
            bits: self.bits & (1 << 2) != 0,
        }
    }
    #[doc = "Bit 3 - DMA Request Mode"]
    #[inline(always)]
    pub fn usb_rxcsrh3_dmamod(&self) -> USB_RXCSRH3_DMAMODR {
        USB_RXCSRH3_DMAMODR {
            bits: self.bits & (1 << 3) != 0,
        }
    }
    #[doc = "Bit 4 - PID Error"]
    #[inline(always)]
    pub fn usb_rxcsrh3_piderr(&self) -> USB_RXCSRH3_PIDERRR {
        USB_RXCSRH3_PIDERRR {
            bits: self.bits & (1 << 4) != 0,
        }
    }
    #[doc = "Bit 5 - DMA Request Enable"]
    #[inline(always)]
    pub fn usb_rxcsrh3_dmaen(&self) -> USB_RXCSRH3_DMAENR {
        USB_RXCSRH3_DMAENR {
            bits: self.bits & (1 << 5) != 0,
        }
    }
    #[doc = "Bit 6 - Auto Request"]
    #[inline(always)]
    pub fn usb_rxcsrh3_autorq(&self) -> USB_RXCSRH3_AUTORQR {
        USB_RXCSRH3_AUTORQR {
            bits: self.bits & (1 << 6) != 0,
        }
    }
    #[doc = "Bit 7 - Auto Clear"]
    #[inline(always)]
    pub fn usb_rxcsrh3_autocl(&self) -> USB_RXCSRH3_AUTOCLR {
        USB_RXCSRH3_AUTOCLR {
            bits: self.bits & (1 << 7) != 0,
        }
    }
    #[doc = "Bit 4 - Disable NYET"]
    #[inline(always)]
    pub fn usb_rxcsrh3_disnyet(&self) -> USB_RXCSRH3_DISNYETR {
        USB_RXCSRH3_DISNYETR {
            bits: self.bits & (1 << 4) != 0,
        }
    }
    #[doc = "Bit 6 - Isochronous Transfers"]
    #[inline(always)]
    pub fn usb_rxcsrh3_iso(&self) -> USB_RXCSRH3_ISOR {
        USB_RXCSRH3_ISOR {
            bits: self.bits & (1 << 6) != 0,
        }
    }
}
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
        self.bits = bits;
        self
    }
    // NOTE(review): bits 4 and 6 each back two proxies below
    // (PIDERR/DISNYET and AUTORQ/ISO) — presumably host- vs device-mode
    // views of the same register bit; confirm against the device manual.
    #[doc = "Bit 0 - Incomplete RX Transmission Status"]
    #[inline(always)]
    pub fn usb_rxcsrh3_incomprx(&mut self) -> _USB_RXCSRH3_INCOMPRXW {
        _USB_RXCSRH3_INCOMPRXW { w: self }
    }
    #[doc = "Bit 1 - Data Toggle"]
    #[inline(always)]
    pub fn usb_rxcsrh3_dt(&mut self) -> _USB_RXCSRH3_DTW {
        _USB_RXCSRH3_DTW { w: self }
    }
    #[doc = "Bit 2 - Data Toggle Write Enable"]
    #[inline(always)]
    pub fn usb_rxcsrh3_dtwe(&mut self) -> _USB_RXCSRH3_DTWEW {
        _USB_RXCSRH3_DTWEW { w: self }
    }
    #[doc = "Bit 3 - DMA Request Mode"]
    #[inline(always)]
    pub fn usb_rxcsrh3_dmamod(&mut self) -> _USB_RXCSRH3_DMAMODW {
        _USB_RXCSRH3_DMAMODW { w: self }
    }
    #[doc = "Bit 4 - PID Error"]
    #[inline(always)]
    pub fn usb_rxcsrh3_piderr(&mut self) -> _USB_RXCSRH3_PIDERRW {
        _USB_RXCSRH3_PIDERRW { w: self }
    }
    #[doc = "Bit 5 - DMA Request Enable"]
    #[inline(always)]
    pub fn usb_rxcsrh3_dmaen(&mut self) -> _USB_RXCSRH3_DMAENW {
        _USB_RXCSRH3_DMAENW { w: self }
    }
    #[doc = "Bit 6 - Auto Request"]
    #[inline(always)]
    pub fn usb_rxcsrh3_autorq(&mut self) -> _USB_RXCSRH3_AUTORQW {
        _USB_RXCSRH3_AUTORQW { w: self }
    }
    #[doc = "Bit 7 - Auto Clear"]
    #[inline(always)]
    pub fn usb_rxcsrh3_autocl(&mut self) -> _USB_RXCSRH3_AUTOCLW {
        _USB_RXCSRH3_AUTOCLW { w: self }
    }
    #[doc = "Bit 4 - Disable NYET"]
    #[inline(always)]
    pub fn usb_rxcsrh3_disnyet(&mut self) -> _USB_RXCSRH3_DISNYETW {
        _USB_RXCSRH3_DISNYETW { w: self }
    }
    #[doc = "Bit 6 - Isochronous Transfers"]
    #[inline(always)]
    pub fn usb_rxcsrh3_iso(&mut self) -> _USB_RXCSRH3_ISOW {
        _USB_RXCSRH3_ISOW { w: self }
    }
}
|
use std::fmt;
use std::str;
use thiserror::Error;
use super::term::prelude::{Atom, Encoded, Term, TypedTerm};
// The largest code point representable in Latin-1 (ISO-8859-1).
// Note: 255 covers all of Latin-1, not just the ASCII subset.
const MAX_LATIN1_CHAR: u16 = 255;
/// Represents the original encoding of a binary
///
/// In the case of `Raw`, there is no specific encoding and
/// while it may be valid Latin-1 or UTF-8 bytes, it should be
/// treated as neither without validation.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Encoding {
    Raw,
    Latin1,
    Utf8,
}
impl Encoding {
    /// Chooses an encoding for `s`: pure-ASCII input is classified as
    /// Latin-1, anything else as UTF-8.
    pub fn from_str(s: &str) -> Self {
        if s.is_ascii() {
            Encoding::Latin1
        } else {
            Encoding::Utf8
        }
    }
}
impl fmt::Display for Encoding {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Resolve to the lowercase name first, then emit it once.
        let name = match self {
            Self::Raw => "raw",
            Self::Latin1 => "latin1",
            Self::Utf8 => "utf8",
        };
        f.write_str(name)
    }
}
// Support converting from atom terms to `Encoding` type
impl TryFrom<Term> for Encoding {
    type Error = InvalidEncodingNameError;
    fn try_from(term: Term) -> Result<Self, Self::Error> {
        // Only decoded atoms are acceptable; delegate name matching to
        // the `TryFrom<Atom>` impl.
        if let Ok(TypedTerm::Atom(a)) = term.decode() {
            a.try_into()
        } else {
            Err(InvalidEncodingNameError::InvalidType(term))
        }
    }
}
// Support converting from atom terms to `Encoding` type
impl TryFrom<Atom> for Encoding {
    type Error = InvalidEncodingNameError;
    fn try_from(term: Atom) -> Result<Self, Self::Error> {
        let name = term.name();
        // Both "unicode" and "utf8" are accepted spellings for UTF-8.
        if name == "unicode" || name == "utf8" {
            Ok(Self::Utf8)
        } else if name == "latin1" {
            Ok(Self::Latin1)
        } else {
            Err(InvalidEncodingNameError::InvalidEncoding(name))
        }
    }
}
/// Represents an error that occurs when converting an atom encoding name to an `Encoding`
#[derive(Error, Debug)]
pub enum InvalidEncodingNameError {
    /// The term was not an atom at all.
    #[error("invalid encoding name value: `{0}` is not an atom")]
    InvalidType(Term),
    /// The atom was not one of the recognized encoding names.
    #[error("invalid atom encoding name: '{0}' is not one of the supported values (latin1, unicode, or utf8)")]
    InvalidEncoding(&'static str),
}
/// Represents the direction encoding is performed
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Direction {
    // Encoding a string into raw bytes.
    ToBytes,
    // Decoding raw bytes into a string.
    ToString,
}
/// Represents an error which occurs when converting a string to bytes
/// in a given encoding; or vice versa, decoding bytes to a string.
#[derive(Debug)]
pub struct InvalidEncodingError {
    code: u16,            // offending character/byte value
    index: usize,         // byte index where the failure occurred
    encoding: Encoding,   // encoding that was being applied
    direction: Direction, // whether we were encoding or decoding
}
impl InvalidEncodingError {
    // Bundles up a failure site; used by the conversion helpers in this module.
    fn new(code: u16, index: usize, encoding: Encoding, direction: Direction) -> Self {
        Self {
            code,
            index,
            encoding,
            direction,
        }
    }
}
impl fmt::Display for InvalidEncodingError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Only two failure shapes exist: encoding a string to Latin-1, and
        // decoding bytes as UTF-8. Everything else is impossible by
        // construction (decoding to Latin-1 and encoding to UTF-8 are
        // infallible), hence the unreachable arm.
        match (self.encoding, self.direction) {
            (Encoding::Latin1, Direction::ToBytes) => write!(
                f,
                "cannot encode string as latin-1 bytes (character code = {}, index = {})",
                self.code, self.index
            ),
            (Encoding::Utf8, _) => write!(
                f,
                "cannot decode bytes to UTF-8 string (character code = {}, index = {})",
                self.code, self.index
            ),
            _ => unreachable!(),
        }
    }
}
/// Returns true if the given `str` is encodable as latin-1 bytes
///
/// Compares the full Unicode scalar value of each character. The previous
/// `c as u16` comparison silently truncated code points above U+FFFF, so
/// e.g. U+10041 wrapped to 0x41 and was wrongly reported Latin-1 encodable.
pub fn is_latin1(s: &str) -> bool {
    // 0xFF mirrors MAX_LATIN1_CHAR; widen to u32 so no char can truncate.
    s.chars().all(|c| u32::from(c) <= 0xFF)
}
/// Converts a Latin-1 encoded binary slice to a `String`
///
/// Every Latin-1 byte value maps directly to the Unicode code point of
/// the same value, so this conversion cannot fail.
pub fn to_latin1_string(bytes: &[u8]) -> String {
    let mut out = String::with_capacity(bytes.len());
    for &b in bytes {
        out.push(char::from(b));
    }
    out
}
/// Converts a `str` to valid Latin-1 bytes, if composed of Latin-1 encodable characters
///
/// Returns `InvalidEncodingError` if this `str` is not encodable as Latin-1
///
/// The range check is done on the full `u32` scalar value: the previous
/// `c as u16` cast truncated code points above U+FFFF, so e.g. U+10041
/// was silently mis-encoded as the byte 0x41 instead of erroring.
pub fn to_latin1_bytes(s: &str) -> Result<Vec<u8>, InvalidEncodingError> {
    let mut bytes = Vec::with_capacity(s.len());
    for (index, c) in s.char_indices() {
        let code = u32::from(c);
        if code > u32::from(MAX_LATIN1_CHAR) {
            return Err(InvalidEncodingError::new(
                // NOTE(review): the error's `code` field is u16, so code
                // points above U+FFFF are reported truncated in the message.
                code as u16,
                index,
                Encoding::Latin1,
                Direction::ToBytes,
            ));
        }
        bytes.push(code as u8);
    }
    Ok(bytes)
}
/// Converts a UTF-8 encoded binary slice to a `str`
///
/// Returns `Ok(str)` if successful, otherwise `Err(InvalidEncodingError)`
pub fn as_utf8_str(bytes: &[u8]) -> Result<&str, InvalidEncodingError> {
    match str::from_utf8(bytes) {
        Ok(s) => Ok(s),
        Err(err) => {
            // `valid_up_to` points at the first byte of the offending sequence.
            let index = err.valid_up_to();
            Err(InvalidEncodingError::new(
                bytes[index] as u16,
                index,
                Encoding::Utf8,
                Direction::ToString,
            ))
        }
    }
}
/// Converts a UTF-8 encoded binary slice to a `String`
///
/// Returns `Ok(String)` if successful, otherwise `Err(InvalidEncodingError)`
pub fn to_utf8_string(bytes: &[u8]) -> Result<String, InvalidEncodingError> {
String::from_utf8(bytes.to_vec()).map_err(|err| {
let index = err.utf8_error().valid_up_to();
let code = bytes[index] as u16;
InvalidEncodingError::new(code, index, Encoding::Utf8, Direction::ToString)
})
}
|
pub mod add;
pub mod sub;
/// Prints the crate name and version to stdout.
pub fn version() {
    println!("math v0.0.1")
}
|
export t;
export create;
export union;
export intersect;
export copy;
export clone;
export get;
export equal;
export clear;
export set_all;
export invert;
export difference;
export set;
export is_true;
export is_false;
export to_vec;
export to_str;
export eq_vec;
// FIXME: With recursive object types, we could implement binary methods like
// union, intersection, and difference. At that point, we could write
// an optimizing version of this module that produces a different obj
// for the case where nbits <= 32.
// A bit vector: a boxed record of `storage` (bits packed into `uint`
// words) and `nbits`, the logical length in bits.
type t = @{storage: uint[mutable ], nbits: uint};
// FIXME: this should be a constant once they work
// Bits per `uint` word. NOTE(review): `1u << 32u >> 27u` appears to
// contribute 32 on 64-bit targets and 0 on 32-bit targets, yielding 64 or
// 32 — relies on shift-width behavior of this dialect; confirm.
fn uint_bits() -> uint { ret 32u + (1u << 32u >> 27u); }
// Allocates a bit vector of `nbits` bits, every bit set to `init`.
// One extra storage word is always allocated (nbits / uint_bits() + 1u).
fn create(nbits: uint, init: bool) -> t {
    let elt = if init { !0u } else { 0u };
    let storage = ivec::init_elt_mut[uint](elt, nbits / uint_bits() + 1u);
    ret @{storage: storage, nbits: nbits};
}
// Applies the word-wise binary `op` to v0 and v1, storing results back
// into v0. Returns true if any word of v0 changed. Both vectors must
// have equal storage length and equal nbits (asserted).
fn process(op: &fn(uint, uint) -> uint , v0: &t, v1: &t) -> bool {
    let len = ivec::len(v1.storage);
    assert (ivec::len(v0.storage) == len);
    assert (v0.nbits == v1.nbits);
    let changed = false;
    for each i: uint in uint::range(0u, len) {
        let w0 = v0.storage.(i);
        let w1 = v1.storage.(i);
        let w = op(w0, w1);
        if w0 != w { changed = true; v0.storage.(i) = w; }
    }
    ret changed;
}
// Word-wise OR helper for `union`.
fn lor(w0: uint, w1: uint) -> uint { ret w0 | w1; }
// v0 |= v1; returns true if v0 changed.
fn union(v0: &t, v1: &t) -> bool { let sub = lor; ret process(sub, v0, v1); }
// Word-wise AND helper for `intersect`.
fn land(w0: uint, w1: uint) -> uint { ret w0 & w1; }
// v0 &= v1; returns true if v0 changed.
fn intersect(v0: &t, v1: &t) -> bool {
    let sub = land;
    ret process(sub, v0, v1);
}
// Selects the right operand; used by `copy`.
fn right(w0: uint, w1: uint) -> uint { ret w1; }
// Overwrites v0 with v1's bits; returns true if v0 changed.
fn copy(v0: &t, v1: t) -> bool { let sub = right; ret process(sub, v0, v1); }
// Returns a freshly allocated bit vector with the same contents as `v`.
fn clone(v: t) -> t {
    let storage = ivec::init_elt_mut[uint](0u, v.nbits / uint_bits() + 1u);
    let len = ivec::len(v.storage);
    for each i: uint in uint::range(0u, len) { storage.(i) = v.storage.(i); }
    ret @{storage: storage, nbits: v.nbits};
}
// Reads bit `i` of `v` (word index i / bits, bit offset i % bits).
// Asserts i < nbits.
fn get(v: &t, i: uint) -> bool {
    assert (i < v.nbits);
    let bits = uint_bits();
    let w = i / bits;
    let b = i % bits;
    let x = 1u & v.storage.(w) >> b;
    ret x == 1u;
}
// True when v0 and v1 hold identical storage words.
// NOTE(review): this compares whole words, including any bits past nbits
// in the final word — relies on those tail bits matching; confirm callers
// keep them consistent.
fn equal(v0: &t, v1: &t) -> bool {
    // FIXME: when we can break or return from inside an iterator loop,
    // we can eliminate this painful while-loop
    let len = ivec::len(v1.storage);
    let i = 0u;
    while i < len {
        if v0.storage.(i) != v1.storage.(i) { ret false; }
        i = i + 1u;
    }
    ret true;
}
// Zeroes every storage word of `v`.
fn clear(v: &t) {
    for each i: uint in uint::range(0u, ivec::len(v.storage)) {
        v.storage.(i) = 0u;
    }
}
// Sets every logical bit (0..nbits) of `v` to 1 via set().
fn set_all(v: &t) {
    for each i: uint in uint::range(0u, v.nbits) { set(v, i, true); }
}
// Flips every storage word of `v` (including bits past nbits).
fn invert(v: &t) {
    for each i: uint in uint::range(0u, ivec::len(v.storage)) {
        v.storage.(i) = !v.storage.(i);
    }
}
/* v0 = v0 - v1 */
// Computed as v0 &= !v1. v1 is inverted in place and then restored, so it
// is temporarily modified for the duration of the call. Returns true if
// v0 changed.
fn difference(v0: &t, v1: &t) -> bool {
    invert(v1);
    let b = intersect(v0, v1);
    invert(v1);
    ret b;
}
// Writes bit `i` of `v` to `x`. Asserts i < nbits.
fn set(v: &t, i: uint, x: bool) {
    assert (i < v.nbits);
    let bits = uint_bits();
    let w = i / bits;
    let b = i % bits;
    let flag = 1u << b;
    v.storage.(w) =
        if x { v.storage.(w) | flag } else { v.storage.(w) & !flag };
}
/* true if all bits are 1 */
fn is_true(v: &t) -> bool {
    for i: uint in to_ivec(v) { if i != 1u { ret false; } }
    ret true;
}
/* true if all bits are non-1 */
fn is_false(v: &t) -> bool {
    for i: uint in to_ivec(v) { if i == 1u { ret false; } }
    ret true;
}
// Per-index initializer used by to_vec/to_ivec: 1u for a set bit, else 0u.
fn init_to_vec(v: t, i: uint) -> uint { ret if get(v, i) { 1u } else { 0u }; }
// Expands `v` into a boxed vec of 0/1 values, one per logical bit.
fn to_vec(v: &t) -> vec[uint] {
    let sub = bind init_to_vec(v, _);
    ret vec::init_fn[uint](sub, v.nbits);
}
// Expands `v` into an interior vec of 0/1 values, one per logical bit.
fn to_ivec(v: &t) -> uint[] {
    let sub = bind init_to_vec(v, _);
    ret ivec::init_fn[uint](sub, v.nbits);
}
// Renders `v` as a string of '1'/'0' characters, bit 0 first.
fn to_str(v: &t) -> str {
    let rs = "";
    for i: uint in bitv::to_vec(v) {
        if i == 1u { rs += "1"; } else { rs += "0"; }
    }
    ret rs;
}
// FIXME: can we just use structural equality on to_vec?
// True when each bit of v0 matches the truthiness (non-zero) of the
// corresponding element of boxed vec v1. Asserts equal lengths.
fn eq_vec(v0: &t, v1: &vec[uint]) -> bool {
    assert (v0.nbits == vec::len[uint](v1));
    let len = v0.nbits;
    let i = 0u;
    while i < len {
        let w0 = get(v0, i);
        let w1 = v1.(i);
        if !w0 && w1 != 0u || w0 && w1 == 0u { ret false; }
        i = i + 1u;
    }
    ret true;
}
// Interior-vector variant of eq_vec.
fn eq_ivec(v0: &t, v1: &uint[]) -> bool {
    assert (v0.nbits == ivec::len[uint](v1));
    let len = v0.nbits;
    let i = 0u;
    while i < len {
        let w0 = get(v0, i);
        let w1 = v1.(i);
        if !w0 && w1 != 0u || w0 && w1 == 0u { ret false; }
        i = i + 1u;
    }
    ret true;
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// compile-command: "make -k -C $RBUILD 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
// End:
//
|
extern crate hound;
extern crate num;
extern crate rustfft;
use std::f32::consts::PI;
use hound::{SampleFormat, WavReader, WavSamples, WavSpec, WavWriter};
use num::complex::Complex;
use rustfft::FFTplanner;
/// Total signal energy: the sum of squared sample values.
trait Signal {
    fn energy(self) -> f64;
}
impl<'a, R> Signal for WavSamples<'a, R, i16>
where
    R: std::io::Read,
{
    fn energy(self) -> f64 {
        // Sum of x^2 over all samples.
        // NOTE(review): `unwrap` panics if any sample fails to decode —
        // acceptable for an example program, not for library code.
        self.map(|x| {
            let sample = x.unwrap() as f64;
            sample * sample
        }).sum()
    }
}
/// Writes a `duration`-second mono 16-bit 44.1 kHz sine wave of the given
/// `frequency` (Hz) to `filename`. Panics on any write failure.
fn generate_sine(filename: &str, frequency: f32, duration: u32) {
    let header = WavSpec {
        channels: 1,
        sample_rate: 44100,
        bits_per_sample: 16,
        sample_format: SampleFormat::Int,
    };
    // Typo fixed in the panic message: "created" -> "create".
    let mut writer = WavWriter::create(filename, header).expect("Failed to create WAV writer");
    let num_samples = duration * header.sample_rate;
    // Half of i16::MAX, leaving headroom below full scale.
    let signal_amplitude = 16384f32;
    for n in 0..num_samples {
        let t: f32 = n as f32 / header.sample_rate as f32;
        let x = signal_amplitude * (t * frequency * 2.0 * PI).sin();
        writer.write_sample(x as i16).unwrap();
    }
    // Dropping the writer finalizes the header but silently swallows any
    // error; finalize explicitly so failures surface.
    writer.finalize().expect("Failed to finalize WAV file");
}
/// Returns the dominant frequency (Hz) of the WAV file's spectrum, or
/// `None` when the spectrum has no peak (empty input).
fn find_spectral_peak(filename: &str) -> Option<f32> {
    let mut reader = WavReader::open(filename).expect("Failed to open WAV file");
    let num_samples = reader.len() as usize;
    // `false` selects the forward FFT direction.
    let mut planner = FFTplanner::new(false);
    let fft = planner.plan_fft(num_samples);
    // Widen real samples to complex numbers with zero imaginary part.
    let mut signal = reader
        .samples::<i16>()
        .map(|x| Complex::new(x.unwrap() as f32, 0f32))
        .collect::<Vec<_>>();
    // The clone is just a convenient way to get a same-length output buffer;
    // its contents are overwritten by `process`.
    let mut spectrum = signal.clone();
    fft.process(&mut signal[..], &mut spectrum[..]);
    // For real input only the first half of the spectrum is meaningful.
    let max_peak = spectrum
        .iter()
        .take(num_samples / 2)
        .enumerate()
        .max_by_key(|&(_, freq)| freq.norm() as u32);
    if let Some((i, _)) = max_peak {
        // Frequency-bin width in Hz. NOTE(review): hard-codes a 44.1 kHz
        // rate instead of reading it from the file's spec — confirm inputs.
        let bin = 44100f32 / num_samples as f32;
        Some(i as f32 * bin)
    } else {
        None
    }
}
/// Demo entry point: synthesize a 1 kHz test tone, report its energy,
/// then locate its spectral peak.
fn main() {
    println!("24 Days of Rust vol. 2 - hound");
    generate_sine("test.wav", 1000f32, 5);
    let mut reader = WavReader::open("test.wav").expect("Failed to open WAV file");
    let samples = reader.samples::<i16>();
    println!("Signal energy: {}", samples.energy());
    if let Some(peak) = find_spectral_peak("test.wav") {
        println!("Max frequency: {} Hz", peak);
    }
}
|
pub use std::{
fmt::{self, Display, Formatter},
process::Command,
};
pub use structopt::StructOpt;
pub use crate::{mode::Mode, opt::Opt};
|
use std::collections::HashMap;
use llvm::*;
use llvm::Attribute::*;
use parser::*;
use parser;
use llvm::Function;
/// Lowers one AST expression to an LLVM value.
///
/// `values` maps argument names to the LLVM arguments currently in scope.
/// Returns an error string for unknown variables, unknown operators,
/// unknown callees, or arity mismatches.
pub fn generate_expression<'a, 'b>(node: &'b Expr,
                          values: &'a HashMap<&String, &'a Arg>,
                          builder: &'a CSemiBox<'a, Builder>,
                          module: &'a CSemiBox<'a, Module>,
                          context: &'a CBox<Context>) -> Result<&'a Value, String> {
    match *node {
        Expr::Number(n) => Ok(n.compile(&context)),
        // `ok_or_else` defers building the error string to the failure path.
        Expr::Variable(ref v) => Ok(values.get(v).ok_or_else(||
            format!("There is no variable named {}", v))?
        ),
        Expr::Binary {op, ref lhs, ref rhs} => {
            let l = generate_expression(&*lhs, &values, &builder, &module, &context)?;
            let r = generate_expression(&*rhs, &values, &builder, &module, &context)?;
            match op {
                '+' => Ok(builder.build_add(&l, &r)),
                '-' => Ok(builder.build_sub(&l, &r)),
                '*' => Ok(builder.build_mul(&l, &r)),
                '<' => {
                    // The language is f64-only, so the comparison result is
                    // cast back to f64. NOTE(review): a bitcast from a
                    // comparison result to f64 looks size-mismatched —
                    // confirm the wrapper handles the conversion.
                    let comp = builder.build_cmp(&l, &r, Predicate::LessThan);
                    let res = builder.build_bit_cast(&comp, &Type::get::<f64>(&context));
                    Ok(res)
                }
                // The `return` keyword was redundant here: this match is the
                // tail expression of the arm.
                _ => Err(format!("{} is an invalid operator!", op))
            }
        },
        Expr::Call {ref name, ref args} => {
            let func = module.get_function(name).ok_or_else(|| format!("There is no function named {}!", name))?;
            let passed_args = args.len();
            let expected_args = func.get_signature().num_params();
            if expected_args != passed_args {
                return Err(format!("{} takes {} args, but you passed {}!", name, expected_args, passed_args))
            }
            let mut passed = Vec::new();
            for arg in args {
                passed.push(generate_expression(&arg, &values, &builder, &module, &context)?)
            }
            Ok(builder.build_call(&func, &passed))
        }
    }
}
/// Declares a function in `module` from its prototype.
///
/// Every parameter and the return type are f64 — Kaleidoscope is a
/// double-only language.
pub fn generate_prototype<'a>(prototype: &Prototype,
                          module: &'a CSemiBox<'a, Module>,
                          context: &'a CBox<Context>) -> Result<&'a Function, String> {
    let arg_types = vec![Type::get::<f64>(&context); prototype.args.len()];
    let sig = FunctionType::new(Type::get::<f64>(&context), &arg_types);
    let func = module.add_function(&prototype.name, sig);
    // Name each LLVM argument after its declared parameter. The original
    // `&func[arg_index].set_name(...)` took an unused reference to the
    // unit result; the call mutates the argument in place.
    for (arg_index, arg_name) in prototype.args.iter().enumerate() {
        func[arg_index].set_name(arg_name);
    }
    Ok(func)
}
/// Generates LLVM IR for a function definition (or a top-level expression
/// the parser wrapped as an anonymous function).
pub fn generate_function<'a>(function_ast: &parser::Function,
                          builder: &'a CSemiBox<'a, Builder>,
                          module: &'a CSemiBox<'a, Module>,
                          context: &'a CBox<Context>) -> Result<&'a Function, String> {
    // Reuse an existing declaration when present, otherwise emit one.
    // NOTE(review): if the function already has a body, this appends a
    // second "entry" block instead of replacing it — confirm how
    // redefinition is supposed to behave.
    let mut func = module.get_function(&function_ast.prototype.name);
    let func = if func.is_none() {
        generate_prototype(&function_ast.prototype, &module, &context)?
    } else {
        func.unwrap()
    };
    let block = func.append("entry");
    builder.position_at_end(block);
    // Make each LLVM argument addressable by its source-level name.
    let mut values = HashMap::new();
    for (i, name) in function_ast.prototype.args.iter().enumerate() {
        values.insert(name, &func[i]);
    }
    let ret = generate_expression(&function_ast.body, &values,
                                  &builder, &module, &context)?;
    builder.build_ret(ret);
    // NOTE(review): `unwrap` panics on invalid IR instead of returning Err —
    // consider mapping the verification failure into the Result.
    module.verify().unwrap();
    Ok(func)
}
#[cfg(test)]
mod tests {
    use super::*;
    use parser;
    /// End-to-end: parse a definition, generate IR, write bitcode.
    #[test]
    fn test_codegen() {
        let mut parser = parser::Parser::from_source("def foo(a) a + a");
        let ast = parser.parse_definition().unwrap();
        let ctx = Context::new();
        let builder = Builder::new(&ctx);
        let module = Module::new("test", &ctx);
        // Underscore binding: the function handle itself is unused — the
        // test only checks that codegen and bitcode emission succeed.
        let _func = generate_function(&ast, &builder, &module, &ctx).unwrap();
        module.write_bitcode("test.bitcode").unwrap();
    }
    /// Same pipeline for a bare top-level expression.
    #[test]
    fn test_toplevel_codegen() {
        let mut parser = parser::Parser::from_source("1 + 1");
        let ast = parser.parse_top_level_expr().unwrap();
        let ctx = Context::new();
        let builder = Builder::new(&ctx);
        let module = Module::new("test", &ctx);
        let _func = generate_function(&ast, &builder, &module, &ctx).unwrap();
        module.write_bitcode("test.bitcode").unwrap();
    }
}
|
//! This crate contains the API for the protocol of the analytics server. `Event` is the type which is sent to the
//! analytics server. Here are some examples of `Event` structures using different `Message` variants:
//!
//! All Channels: {"received_time":"2017-05-01T21:21:01.070462025Z","serviced_time":"2017-05-01T21:21:01.070464560Z","success":true,"message":{"AllChannels":{"num_channels":5}}}
//!
//! Create Channel: {"received_time":"2017-05-01T21:21:01.070521981Z","serviced_time":"2017-05-01T21:21:01.070522531Z","success":true,"message":{"CreateChannel":{"channel":"boo!"}}}
//!
//! Get Channel: {"received_time":"2017-05-01T21:21:01.070554580Z","serviced_time":"2017-05-01T21:21:01.070555092Z","success":true,"message":{"GetChannel":{"channel":"boo!","number_served":42}}}
//!
//! Delete Channel: {"received_time":"2017-05-01T21:21:01.070586852Z","serviced_time":"2017-05-01T21:21:01.070587314Z","success":true,"message":{"DelChannel":{"channel":"wah!"}}}
//!
//! Send Message: {"received_time":"2017-05-01T21:21:01.070615964Z","serviced_time":"2017-05-01T21:21:01.070616427Z","success":true,"message":{"SendMessage":{"channel":"boo!","message":"woah, you scared me!"}}}
//!
//! Get Message: {"received_time":"2017-05-01T21:21:01.070649678Z","serviced_time":"2017-05-01T21:21:01.070650200Z","success":true,"message":"GetMessage"}
//!
//! Update Message: {"received_time":"2017-05-01T21:21:01.070674641Z","serviced_time":"2017-05-01T21:21:01.070675175Z","success":true,"message":{"UpdateMessage":{"channel":"boo!","old_message":"woah, you scared me!","new_message":"woah, the channel name scared me!"}}}
//!
//! Delete Message: {"received_time":"2017-05-01T21:21:01.070721230Z","serviced_time":"2017-05-01T21:21:01.070721680Z","success":true,"message":{"DeleteMessage":{"channel":"boo!","message":"woah, the channel name scared me!"}}}
use chrono::{UTC, DateTime};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd, Hash)]
pub enum Message {
    /// All Channels: {"received_time":"2017-05-01T21:21:01.070462025Z","serviced_time":"2017-05-01T21:21:01.070464560Z","success":true,"message":{"AllChannels":{"num_channels":5}}}
    AllChannels {
        /// Number of channels reported by the server.
        num_channels: usize,
    },
    /// Create Channel: {"received_time":"2017-05-01T21:21:01.070521981Z","serviced_time":"2017-05-01T21:21:01.070522531Z","success":true,"message":{"CreateChannel":{"channel":"boo!"}}}
    CreateChannel {
        channel: String,
    },
    /// Get Channel: {"received_time":"2017-05-01T21:21:01.070554580Z","serviced_time":"2017-05-01T21:21:01.070555092Z","success":true,"message":{"GetChannel":{"channel":"boo!","number_served":42}}}
    GetChannel {
        channel: String,
        /// Count reported alongside the channel lookup.
        number_served: usize,
    },
    /// Delete Channel: {"received_time":"2017-05-01T21:21:01.070586852Z","serviced_time":"2017-05-01T21:21:01.070587314Z","success":true,"message":{"DelChannel":{"channel":"wah!"}}}
    DelChannel {
        channel: String,
    },
    /// Send Message: {"received_time":"2017-05-01T21:21:01.070615964Z","serviced_time":"2017-05-01T21:21:01.070616427Z","success":true,"message":{"SendMessage":{"channel":"boo!","message":"woah, you scared me!"}}}
    SendMessage {
        channel: String,
        message: String,
    },
    /// Get Message: {"received_time":"2017-05-01T21:21:01.070649678Z","serviced_time":"2017-05-01T21:21:01.070650200Z","success":true,"message":"GetMessage"}
    GetMessage,
    /// Update Message: {"received_time":"2017-05-01T21:21:01.070674641Z","serviced_time":"2017-05-01T21:21:01.070675175Z","success":true,"message":{"UpdateMessage":{"channel":"boo!","old_message":"woah, you scared me!","new_message":"woah, the channel name scared me!"}}}
    UpdateMessage {
        channel: String,
        old_message: String,
        new_message: String,
    },
    /// Delete Message: {"received_time":"2017-05-01T21:21:01.070721230Z","serviced_time":"2017-05-01T21:21:01.070721680Z","success":true,"message":{"DeleteMessage":{"channel":"boo!","message":"woah, the channel name scared me!"}}}
    DeleteMessage {
        channel: String,
        message: String,
    },
}
/// A single analytics event: when the request was received and serviced,
/// whether it succeeded, and which API operation it records.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd, Hash)]
pub struct Event {
    /// Time the analytics server received the request.
    pub received_time: DateTime<UTC>,
    /// Time the request finished being serviced.
    pub serviced_time: DateTime<UTC>,
    /// Whether the operation succeeded.
    pub success: bool,
    /// The API operation this event records.
    pub message: Message,
}
//! Max object notifications and infrastructure.
use crate::symbol::SymbolRef;
use core::ffi::c_void;
/// A type that encapsulates the Max notify method signature, can be cast to
/// `MaxMethod` and supplied as the `"notify"` class method for a class.
/// A type that encapsulates the Max notify method signature, can be cast to
/// `MaxMethod` and supplied as the `"notify"` class method for a class.
pub type NotifyMethod<T> = unsafe extern "C" fn(
    x: *mut T,                           // the receiving object instance
    sender_name: *mut max_sys::t_symbol, // name of the notifying object
    message: *mut max_sys::t_symbol,     // notification message selector
    sender: *mut c_void,                 // pointer to the sending object
    data: *mut c_void,                   // message payload; may be null
);
/// Encapsulated notification data.
pub struct Notification {
    sender_name: SymbolRef, // name of the notifying object
    message: SymbolRef,     // notification message selector
    sender: *mut c_void,    // raw pointer to the sender
    data: *mut c_void,      // payload pointer; may be null
}
/// A max object registration object.
///
/// Unregisters automatically on `Drop`.
pub struct Registration {
    inner: *mut core::ffi::c_void, // handle returned by object_register
}
/// A max object subscription object.
///
/// Unsubscribes automatically on `Drop`.
pub struct Subscription {
    namespace: SymbolRef,
    name: SymbolRef,
    client: *mut core::ffi::c_void, // the subscribed client object
    class_name: SymbolRef,          // class filter; default symbol when none given
}
/// A max object notification attachment.
///
/// Detaches automatically on `Drop`.
pub struct Attachment {
    namespace: SymbolRef,
    name: SymbolRef,
    client: *mut core::ffi::c_void, // the attached client object
}
/// Errors registering.
pub enum RegistrationError {
    /// Something is already registered under the namespace/name pair.
    NameCollision,
}
/// Errors attaching.
pub enum AttachmentError {
    /// No registered object was found to attach to.
    NotFound,
}
impl Notification {
    /// Builds a `Notification` from the raw pointers Max passes to a
    /// `notify` class method (see `NotifyMethod`).
    pub fn new(
        sender_name: *mut max_sys::t_symbol,
        message: *mut max_sys::t_symbol,
        sender: *mut c_void,
        data: *mut c_void,
    ) -> Self {
        Self {
            sender_name: sender_name.into(),
            message: message.into(),
            sender,
            data,
        }
    }
    /// Get the notification message.
    pub fn message(&self) -> &SymbolRef {
        &self.message
    }
    /// Get a pointer to the sender of the notification.
    pub fn sender(&self) -> *mut c_void {
        self.sender
    }
    /// Get the name of the sender of the notification.
    pub fn sender_name(&self) -> &SymbolRef {
        &self.sender_name
    }
    /// Get the data from the notification.
    ///
    /// # Remarks
    /// * Might be null.
    pub fn data(&self) -> *mut c_void {
        self.data
    }
}
impl Registration {
    /// Try to register the given object with the namespace and name.
    ///
    /// Returns `RegistrationError::NameCollision` when something is already
    /// registered under that namespace/name pair.
    ///
    /// # Safety
    /// `obj` must be a valid Max object pointer.
    pub unsafe fn try_register(
        obj: *mut max_sys::t_object,
        namespace: SymbolRef,
        name: SymbolRef,
    ) -> Result<Registration, RegistrationError> {
        // Probe for an existing registration first so collisions are reported.
        // NOTE(review): find + register is not atomic; a racing registration
        // between the two calls is possible — confirm Max's threading rules.
        if max_sys::object_findregistered(namespace.inner(), name.inner()).is_null() {
            let inner = max_sys::object_register(namespace.inner(), name.inner(), obj as _);
            assert!(!inner.is_null());
            Ok(Self { inner })
        } else {
            Err(RegistrationError::NameCollision)
        }
    }
}
impl Subscription {
    /// Subscribe the given client to be attached to the namespace, name and optional class_name.
    ///
    /// # Safety
    /// `client` must be a valid Max object pointer.
    pub unsafe fn new(
        client: *mut max_sys::t_object,
        namespace: SymbolRef,
        name: SymbolRef,
        class_name: Option<SymbolRef>,
    ) -> Self {
        let client: *mut core::ffi::c_void = client as _;
        // A missing class filter is passed as the default symbol.
        let class_name = class_name.unwrap_or_default();
        // NOTE(review): the return value of object_subscribe is deliberately
        // discarded; confirm failures never need to surface to the caller.
        let _ =
            max_sys::object_subscribe(namespace.inner(), name.inner(), class_name.inner(), client);
        Self {
            namespace,
            name,
            client,
            class_name,
        }
    }
}
impl Attachment {
    /// Try to attach the given client to the namespace and name.
    ///
    /// Returns `AttachmentError::NotFound` when `object_attach` yields null.
    ///
    /// # Safety
    /// `client` must be a valid Max object pointer.
    pub unsafe fn try_attach(
        client: *mut max_sys::t_object,
        namespace: SymbolRef,
        name: SymbolRef,
    ) -> Result<Self, AttachmentError> {
        let p = max_sys::object_attach(namespace.inner(), name.inner(), client as _);
        if p.is_null() {
            Err(AttachmentError::NotFound)
        } else {
            Ok(Attachment {
                namespace,
                name,
                client: client as _,
            })
        }
    }
}
impl Drop for Registration {
    fn drop(&mut self) {
        // Best-effort unregister; the result is intentionally ignored
        // because Drop has no way to report failure.
        unsafe {
            let _ = max_sys::object_unregister(self.inner);
        }
    }
}
impl Drop for Subscription {
    fn drop(&mut self) {
        // Undo the subscription made in `Subscription::new`.
        unsafe {
            let _ = max_sys::object_unsubscribe(
                self.namespace.inner(),
                self.name.inner(),
                self.class_name.inner(),
                self.client,
            );
        }
    }
}
impl Drop for Attachment {
    fn drop(&mut self) {
        // Undo the attachment made in `Attachment::try_attach`.
        unsafe {
            let _ =
                max_sys::object_detach(self.namespace.inner(), self.name.inner(), self.client as _);
        }
    }
}
// NOTE(review): these Send claims assume the wrapped Max API calls are safe
// to issue from threads other than the one that created the handles —
// confirm against Max's threading documentation.
unsafe impl Send for Registration {}
unsafe impl Send for Subscription {}
unsafe impl Send for Attachment {}
|
#![recursion_limit = "512"]
mod todo;
use vgtk::ext::*;
use vgtk::lib::gio::{ActionExt, ApplicationFlags, SimpleAction};
use vgtk::lib::gtk::*;
use vgtk::{gtk, gtk_if, run, Component, UpdateAction, VNode};
use crate::todo::about::AboutDialog;
use crate::todo::filter::Filter;
use crate::todo::menu::AppMenu;
use crate::todo::model::{Model, Task, TaskFilter};
use crate::todo::task_row::TaskRow;
/// Messages driving the todo application's update loop.
#[derive(Clone, Debug)]
pub enum Message {
    /// Quit the application.
    Exit,
    /// Show the about dialog.
    About,
    /// Toggle the done state of the task at `index`.
    Toggle { index: usize },
    /// Append a new task with the given text.
    Add { task: String },
    /// Remove the task at `index`.
    Delete { index: usize },
    /// Switch which tasks are visible.
    Filter { filter: TaskFilter },
    /// Drop every completed task.
    Cleanup,
}
impl Component for Model {
    type Message = Message;
    type Properties = ();
    /// Applies a `Message` to the model and tells vgtk whether to re-render.
    fn update(&mut self, msg: Self::Message) -> UpdateAction<Self> {
        match msg {
            Message::Exit => {
                vgtk::quit();
                UpdateAction::None
            }
            Message::Toggle { index } => {
                self.tasks[index].done = !self.tasks[index].done;
                UpdateAction::Render
            }
            Message::Add { task } => {
                self.tasks.push(Task::new(task, false));
                UpdateAction::Render
            }
            Message::Delete { index } => {
                self.tasks.remove(index);
                UpdateAction::Render
            }
            Message::Filter { filter } => {
                self.filter = filter;
                UpdateAction::Render
            }
            Message::Cleanup => {
                // Keep only unfinished tasks.
                self.tasks.retain(|task| !task.done);
                UpdateAction::Render
            }
            Message::About => {
                // The dialog runs its own loop; no model change, no re-render.
                AboutDialog::run();
                UpdateAction::None
            }
        }
    }
    /// Declarative UI: entry box on top, scrollable task list, then a footer
    /// with the items-left label, filter buttons, and a conditional
    /// "Clear completed" button. Built with the `gtk!` macro DSL.
    fn view(&self) -> VNode<Model> {
        gtk! {
            <Application::new_unwrap(Some("org.ville.vgtk-todomvc"), ApplicationFlags::empty())>
                <SimpleAction::new("quit", None)
                    Application::accels=["<Meta>q"].as_ref()
                    enabled=true
                    on activate=|_,_| Message::Exit />
                <SimpleAction::new("about", None)
                    enabled=true
                    on activate=|_,_| Message::About />
                <Window
                    default_width=800
                    default_height=600
                    border_width=20
                    on destroy=|_| Message::Exit title="Hello rust">
                    <@AppMenu />
                    <Box orientation=Orientation::Vertical spacing=18>
                        <Entry placeholder_text="What needs to be done?"
                            on activate=|entry| {
                                entry.select_region(0, -1);
                                Message::Add {
                                    task: entry.get_text().unwrap().to_string()
                                }
                            }/>
                        <ScrolledWindow Box::fill=true Box::expand=true>
                            <ListBox selection_mode=SelectionMode::None>
                                {
                                    self.tasks.iter()
                                        .filter(|task| self.filter_task(task))
                                        .enumerate()
                                        .map(|(index, task)| gtk! {
                                            <@TaskRow task=task
                                                on changed=|_| Message::Toggle { index }
                                                on deleted=|_| Message::Delete { index }
                                            />
                                        })
                                }
                            </ListBox>
                        </ScrolledWindow>
                        <Box>
                            <Label label=self.items_left() />
                            <@Filter
                                Box::center_widget=true
                                active=&self.filter
                                filters=[("All", TaskFilter::All), ("Active", TaskFilter::Undone), ("Completed", TaskFilter::Done)].as_ref()
                                on changed=|filter| Message::Filter { filter } />
                            {
                                gtk_if!(self.count_completed() > 0 => {
                                    <Button
                                        label="Clear completed"
                                        Box::pack_type=PackType::End
                                        on clicked=|_| Message::Cleanup />
                                })
                            }
                        </Box>
                    </Box>
                </Window>
            </Application>
        }
    }
}
/// Initializes logging, runs the GTK app, and forwards its exit status to
/// the operating system.
fn main() {
    pretty_env_logger::init();
    let status = run::<Model>();
    std::process::exit(status);
}
|
use crate::PyObjectRef;
/// Convenience alias for `Into<PyObjectRef>` with a named method, so call
/// sites can write `x.into_object()` instead of a type-annotated `x.into()`.
pub trait IntoObject
where
    Self: Into<PyObjectRef>,
{
    /// Converts `self` into a `PyObjectRef` by delegating to `Into`.
    fn into_object(self) -> PyObjectRef {
        self.into()
    }
}
// Blanket impl: every `Into<PyObjectRef>` type gets `into_object` for free.
impl<T> IntoObject for T where T: Into<PyObjectRef> {}
|
use std::{usize, vec::Vec};
use std::any::Any;
use statrs::distribution::Beta;
use rand::random;
use rand::distributions::Distribution;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng, thread_rng};
/// A slot-machine arm whose random reward sequence can be replayed from a
/// stored seed.
///
/// Fix: `&Any` is a bare trait object (deprecated; an error in the 2021
/// edition) — spelled `&dyn Any` now.
trait Replayable {
    /// Upcast so callers can `downcast_ref` back to the concrete slot type.
    fn as_any(&self) -> &dyn Any;
    /// Re-seed internal state so the same reward sequence replays.
    fn initialize(&mut self);
    /// Draw one reward.
    fn play(&mut self) -> f64;
    /// Static descriptor of the slot (the win probability for `BinarySlot`).
    fn profile(&self) -> f64;
}
/// A slot arm that pays 1.0 with probability `prob` and 0.0 otherwise.
#[derive(Debug, Clone)]
struct BinarySlot {
    prob: f64,   // win probability, drawn from [0, 1) in `new`
    rng: StdRng, // seeded RNG driving the draws
    seed: u64,   // kept so `initialize` can restore `rng` to its start state
}
impl Replayable for BinarySlot {
    // Fix: `&Any` is a bare trait object form; use `&dyn Any`.
    fn as_any(&self) -> &dyn Any {
        self
    }
    /// Re-seeds the RNG so the exact same draw sequence replays.
    fn initialize(&mut self) {
        self.rng = StdRng::seed_from_u64(self.seed)
    }
    /// Returns 1.0 with probability `prob`, else 0.0.
    fn play(&mut self) -> f64 {
        (self.rng.gen::<f64>() < self.prob) as u32 as f64
    }
    fn profile(&self) -> f64 {
        self.prob
    }
}
impl BinarySlot {
    /// Creates a slot with a random win probability and a freshly drawn seed.
    fn new() -> BinarySlot {
        // Draw the seed first, then the probability — same order as before,
        // so the thread-local RNG stream is consumed identically.
        let seed: u64 = random();
        let prob = random();
        BinarySlot {
            prob,
            rng: StdRng::seed_from_u64(seed),
            seed,
        }
    }
}
/// A source of new `Replayable` values.
trait Factory<T: Replayable> {
    /// Produces (and, for repositories, records) a new instance.
    fn gen(&mut self) -> T;
}
/// Indexed read access to previously generated `Replayable` values.
trait Storage<T: Replayable + Clone + 'static> {
    /// The full history of stored instances.
    fn hist(&mut self) -> &Vec<Box<dyn Replayable + Send>>;
    /// Returns a clone of the `n`-th stored instance, or `None` when `n` is
    /// out of range.
    ///
    /// Bug fix: the original built `Some(...)` but discarded it with a
    /// trailing semicolon, so this method unconditionally returned `None`.
    /// (Panics if the stored value is not actually a `T`, as before.)
    fn nth(&mut self, n: usize) -> Option<T> {
        self.hist()
            .get(n)
            .map(|val| val.as_any().downcast_ref::<T>().unwrap().clone())
    }
}
/// Owns every slot ever generated, so past slots can be retrieved by index.
struct SlotRepository {
    storage: Vec<Box<dyn Replayable + Send>>
}
impl SlotRepository {
    /// Creates an empty repository.
    fn new() -> SlotRepository {
        SlotRepository { storage: Vec::new() }
    }
}
impl Factory<BinarySlot> for SlotRepository {
    /// Creates a new slot, archives a boxed copy, and returns the slot.
    ///
    /// Simplified: the original pushed first and then recovered the value via
    /// `as_any().downcast_ref().unwrap().clone()`; cloning up front yields the
    /// same stored/returned pair without the fallible downcast.
    fn gen(&mut self) -> BinarySlot {
        let slot = BinarySlot::new();
        self.storage.push(Box::new(slot.clone()));
        slot
    }
}
impl Storage<BinarySlot> for SlotRepository {
    fn hist(&mut self) -> &Vec<Box<dyn Replayable + Send>> {
        // A shared borrow suffices for the `&Vec` return type.
        &self.storage
    }
    /// Returns a clone of the `n`-th stored slot, or `None` when out of range.
    ///
    /// Bug fix: the original discarded its `Some(...)` with a trailing
    /// semicolon and therefore always returned `None`.
    fn nth(&mut self, n: usize) -> Option<BinarySlot> {
        self.storage
            .get(n)
            .map(|val| val.as_any().downcast_ref::<BinarySlot>().unwrap().clone())
    }
}
/// One playable machine: the currently active slot, the rewards it has paid
/// out, and a repository of every slot it has ever generated.
struct SlotMachine {
    slot: Box<dyn Replayable + Send>,
    rewards: Vec<f64>,
    repository: SlotRepository,
}
impl SlotMachine {
    /// Builds a machine with its own repository and one freshly generated slot.
    fn new() -> SlotMachine {
        let mut repository = SlotRepository::new();
        let first_slot = repository.gen();
        SlotMachine {
            slot: Box::new(first_slot),
            rewards: Vec::new(),
            repository,
        }
    }
    /// Plays the active slot once, recording and returning the reward.
    fn play(&mut self) -> f64 {
        let reward = self.slot.play();
        self.rewards.push(reward);
        reward
    }
    /// Swaps in a different slot.
    fn set(&mut self, slot: Box<dyn Replayable + Send>) {
        self.slot = slot;
    }
    /// Re-seeds the active slot so its sequence replays from the start.
    fn reset(&mut self) {
        self.slot.initialize()
    }
    /// Replaces the active slot with the `index`-th slot from the repository.
    fn set_nth(&mut self, index: usize) -> Result<(), String> {
        match self.repository.nth(index) {
            Some(slot) => {
                self.set(Box::new(slot));
                Ok(())
            }
            None => Err("Index out of range".to_string()),
        }
    }
}
/// Game lifecycle: `End` between sessions, `Playing` during one.
/// `max: None` means an unlimited session.
enum State {
    End,
    Playing { cnt: usize, max: Option<usize>},
}
impl State {
    /// `Some(n >= 1)` allows `n` plays; `Some(0)` starts already ended;
    /// `None` is unlimited.
    fn new(n_games: Option<usize>) -> State {
        match n_games {
            Some(n @ 1..=usize::MAX) => State::Playing { cnt: 0, max: Some(n) },
            // Only `Some(0)` can reach this arm.
            Some(_) => State::End,
            None => State::Playing { cnt: 0, max: None }
        }
    }
    /// Returns the state after one play: increments the counter, switching to
    /// `End` once `max` plays have been counted.
    ///
    /// Panics when called on `State::End` (callers must check first).
    fn play(&self) -> State {
        match *self {
            State::Playing {cnt, max: Some(max)} => {
                if cnt < max {
                    State::Playing { cnt: cnt + 1, max: Some(max)}
                } else { State::End }
            }
            // Unlimited session: the counter is deliberately not advanced.
            State::Playing { cnt, max: None } => State::Playing { cnt, max: None },
            _ => panic!()
        }
    }
}
/// A multi-armed-bandit style game over several slot machines.
pub struct Game {
    slot_machines: Vec<SlotMachine>,
    state: State,      // session lifecycle / remaining-plays tracking
    scores: Vec<f64>   // reward of every play in order
}
impl Game {
    /// Creates `n_machines` machines; the game starts in the `End` state
    /// until `start` is called.
    pub fn new(n_machines: usize) -> Game {
        let mut slot_machines = Vec::with_capacity(n_machines);
        for _ in 0..n_machines {
            slot_machines.push(SlotMachine::new())
        }
        Game {
            slot_machines,
            state: State::End,
            scores: Vec::new(),
        }
    }
    /// Starts a session of at most `n_games` plays (`None` = unlimited) and
    /// re-seeds every machine so its reward sequence replays.
    pub fn start(&mut self, n_games: Option<usize>) {
        self.state = State::new(n_games);
        // Cleanup: was `.map(|x| x.reset()).collect::<Vec<()>>()`, a map used
        // purely for side effects — a plain loop states the intent.
        for machine in self.slot_machines.iter_mut() {
            machine.reset();
        }
    }
    /// Plays machine `index`, recording the reward, while the session allows.
    pub fn play(&mut self, index: usize) -> Result<f64, String> {
        if let State::End = self.state {
            return Err("A game is not started. Please start a game.".to_string())
        }
        match self.slot_machines.get_mut(index) {
            Some(slot) => {
                // Bug fix: the original computed `self.state.play()` into an
                // unused local and never stored it, so the `max` game limit
                // was never enforced. Advance the state and stop once it
                // transitions to `End`.
                self.state = self.state.play();
                if let State::End = self.state {
                    return Err("A game is not started. Please start a game.".to_string());
                }
                let reward = slot.play();
                self.scores.push(reward);
                Ok(reward)
            }
            None => Err("Index out of range.".to_string())
        }
    }
    /// Total reward across the recorded plays.
    pub fn score(&self) -> f64 {
        self.scores.iter().sum()
    }
    /// Number of plays recorded so far.
    pub fn play_count(&self) -> usize {
        self.scores.len()
    }
    /// Win-probability profile of every machine's active slot.
    pub fn profiles(&self) -> Vec<f64> {
        self.slot_machines.iter().map(|x| x.slot.profile())
            .collect::<Vec<f64>>()
    }
}
|
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use parser::lexer::Lexer;
use parser::parser::Parser;
use parser::resolver::Resolver;
use parser::types::Pass;
const PROGRAM: &str = "print \"HOLA\" and \"CHAU\";
print nil and \"HOLA\";
print \"HOLA\" and false;
print false and \"HOLA\";
print nil and nil;
fun thrice(fn) {
for (var i = 1; i <= 3; i = i + 1) {
fn(i);
}
}
thrice(fun (a) {
print a;
});
var array1 = [0; 3];
var array2 = [0, \"hola\", true];
print array1;
print array2;
print array1[0];
print array2[2];
array1[0] = 2;
print array1[0];
print \"BEFORE\";
{
print \"ON THE BLOCK\";
{
print \"ASDASD\";
}
var a = nil;
print a;
print a=3;
print a+2;
}
print \"AFTER\";
var a = \"global a\";
var b = \"global b\";
var c = \"global c\";
{
var a = \"outer a\";
var b = \"outer b\";
{
var a = \"inner a\";
print a;
print b;
print c;
}
print a;
print b;
print c;
}
print a;
print b;
print c;
var i = 0;
print \"CACA\";
while (i < 5) {
print \"HOLA\";
break;
i = i + 1;
}
print \"CHAU\";
for (var i = 0; i < 5; i = i + 1) {
print \"HOLA0\";
break;
}
print \"CHAU0\";
for (var i = 0; i < 5; i = i + 1) {
print \"HOLA1\";
if (i == 1) break;
print \"HOLA1.1\";
}
print \"CHAU1\";
for (var i = 0; i < 5; i = i + 1) {
print \"HOLA2\";
for (var i = 0; i < 5; i = i + 1) {
print \"INNER HOLA2\";
break;
}
print \"AFTER INNER HOLA2\";
break;
print \"DO NOT SEE2\";
}
print \"CHAU2\";
for (var i = 0; i < 5; i = i + 1) {
print \"HOLA3\";
for (var i = 0; i < 5; i = i + 1) {
print \"INNER HOLA3\";
if (i == 1) break;
print \"SEE ONCE3\";
}
print \"AFTER INNER HOLA3\";
break;
print \"DO NOT SEE3\";
}
print \"CHAU3\";
class DevonshireCream {
serveOn() {
return \"Scones\";
}
}
print DevonshireCream;
print DevonshireCream();
fun makeCounter() {
var i = 0;
fun count() {
i = i + 1;
print i;
}
return count;
}
var counter = makeCounter();
counter();
counter();
class Test {
init(a) {
print a;
}
}
Test(\"hola\");
print a;
print a=2;
print a+4;
print \"HOLA\";
var a1;
a1 = 1;
for (var i = 0; i < 5; i = i + 1)
print \"HOLA MUNDO\";
fun test () {
print \"hola\";
}
fun test1(num, num1) {
print num + num1;
}
test1(1, 2);
test();
if (true)
print \"HOLA\";
else
print \"CHAU\";
if (false)
print \"HOLA\";
else
print \"CHAU\";
if (true)
if (false)
print \"HOLA\";
else
print \"CHAU\";
import module;
module::test();
trait MyTrait {
mytraitmethod();
}
class Impl < module::TestClass {
}
trait MyTrait for module::TestClass {
mytraitmethod() {
print \"my trait method\";
}
}
var m = module::TestClass();
m.testmethod();
m.mytraitmethod();
trait module::TestTrait for Impl {
echo(a) {
print a;
}
}
var im = Impl();
im.echo(\"pepe\");
im.testmethod();
print module::variable;
fun testfunction() {
}
trait Trait1 {
}
trait Trait2 {
}
class Class1 {
}
class Class2 {
}
class Class3 < Class2 {
}
trait Trait1 for Class1 {
}
trait Trait2 for Class2 {
}
print nil istype Nil;
print nil istype Integer;
print true istype Boolean;
print true istype Integer;
print 1 istype Integer;
print 1 istype Float;
print 1.0 istype Float;
print 1.0 istype Integer;
print module istype Module;
print module istype Integer;
print \"\" istype String;
print \"\" istype Integer;
print [] istype Array;
print [] istype String;
print testfunction istype Function;
print testfunction istype Module;
print Trait1 istype Trait;
print Trait1 istype Class;
print Class1 istype Class;
print Class1 istype Trait;
print Class1() istype Class1;
print Class1() istype Class2;
print Class1() istype Trait1;
print Class2() istype Trait1;
print Class3() istype Class2;
print Class3() istype Class1;
print Class3() istype Trait2;
class Class11 {
}
trait Trait11 {
}
trait Trait11 for Class11 {
}
fun withmatch(v) {
match v {
1 => {
print \"uno\";
},
2 => {
print \"dos\";
},
nil => {
print \"nulo\";
},
true => {
print \"verdadero\";
},
* => {
print v;
},
}
}
fun withtypematch(t) {
match t {
Integer => {
print \"integer\";
},
Boolean => {
print \"boolean\";
},
String => {
print \"String\";
},
Trait1 => {
print \"Trait1\";
},
* => {
print v;
},
}
}
withmatch(1);
withmatch(2);
withmatch(nil);
withmatch(true);
withmatch(\"hola\");
withtypematch(1);
withtypematch(true);
withtypematch(\"\");
withtypematch(Class1());
print 1 + 1;
print 1.2 + 1.2;
print \"HOLA\" or \"CHAU\";
print nil or \"HOLA\";
print \"HOLA\" or false;
print false or \"HOLA\";
print nil or nil;
class DevonshireCream1 {
serveOn() {
return \"Scones\";
}
echo(a) {
return a;
}
}
var cc = DevonshireCream1();
cc.test = 1;
print cc.test;
cc.testFunction = fun () {
print \"Scones!\";
};
cc.testFunction();
print cc.serveOn();
print cc.echo(\"hola\");
var a4 = 1;
var a5 = \"global\";
{
fun showA() {
print a5;
}
showA();
var a5 = \"block\";
showA();
print a5;
}
fun testf(a, b) {
return a + b;
}
print testf(1,2);
class Test2 {
setter prop(a) {
this.secret = a;
}
getter prop() {
return this.secret;
}
}
var t = Test2();
t.prop = \"HOLA\";
print t.prop;
print \"HOLA\";
class Math {
class square(n) {
return n * n;
}
}
print Math.square(3);
class Doughnut1 {
cook() {
print \"Fry until golden brown.\";
}
}
class BostonCream < Doughnut1 {
}
BostonCream().cook();
class Doughnut2 {
cook() {
print \"Fry until golden brown.\";
}
}
class BostonCream2 < Doughnut2 {
cook() {
this.super.cook();
print \"Pipe full of custard and coat with chocolate.\";
}
}
BostonCream2().cook();
class Cake {
taste() {
var adjective = \"delicious\";
print \"The \" + this.flavor + \" cake is \" + adjective + \"!\";
}
getCallback() {
fun localFunction() {
print this.flavor;
}
return localFunction;
}
}
var cake = Cake();
cake.flavor = \"German chocolate\";
cake.taste();
var cb = cake.getCallback();
cb();
trait Test11 {
test(a);
class staticmethod();
getter prop();
setter prop(a);
}
class TestImpl {
pepe() {
}
}
trait Test11 for TestImpl {
test(a) {
print a;
}
class staticmethod() {
print \"static\";
}
getter prop() {
print this.a;
}
setter prop(a) {
this.a = a;
}
}
var obj = TestImpl();
obj.test(\"method\");
TestImpl.staticmethod();
obj.prop = \"apa\";
print obj.prop;
var counter1 = 0;
while (counter1 < 5) {
print \"HOLA\";
counter1 = counter1 + 1;
}
print \"CHAU\";";
/// Lexes, parses, and resolves `PROGRAM` prefixed with a tiny statement built
/// from `extra` — presumably so the benchmark input varies with the argument
/// and cannot be constant-folded; confirm intent with the benchmark author.
fn lex_and_parse(extra: usize) {
    let program = format!("{} + {};", extra, extra) + PROGRAM;
    let mut lexer = Lexer::new(program.as_str(), "stdin");
    let ss = lexer
        .parse()
        .and_then(|ts| {
            // Feed the token stream straight into the parser.
            let parser = Parser::new(ts.into_iter().peekable());
            parser.parse()
        }).unwrap();
    // The resolver pass is run for its validation side effects only.
    let mut resolver = Resolver::new();
    let _ = resolver.run(&ss).unwrap();
}
/// Registers the "simple parsing" benchmark: a full lex + parse + resolve of
/// the fixture program per iteration.
fn criterion_benchmark(c: &mut Criterion) {
    c.bench_function("simple parsing", |b| {
        // black_box keeps the optimizer from const-folding the input.
        b.iter(|| lex_and_parse(black_box(4)))
    });
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
|
use quicksilver::{
graphics::{Font, Image},
Error, Future,
};
/// One texture per tetromino kind, plus the empty-cell texture.
pub struct Images {
    pub empty_mino: Image,
    pub i_mino: Image,
    pub o_mino: Image,
    pub j_mino: Image,
    pub l_mino: Image,
    pub s_mino: Image,
    pub z_mino: Image,
    pub t_mino: Image,
}
/// All assets the game needs at runtime: the UI font and the mino textures.
pub struct Resources {
    pub font: Font,
    pub images: Images,
}
/// Trait-object alias for the future produced by `load_resources`.
pub type ResourceFuture = dyn Future<Item = Resources, Error = Error>;
/// Loads the font and all mino textures concurrently and assembles them into
/// a `Resources` bundle once every load completes.
pub fn load_resources() -> impl Future<Item = Resources, Error = Error> {
    Font::load("Roboto-Medium.ttf")
        .join(Image::load("empty_mino.png"))
        .join(Image::load("i_mino.png"))
        .join(Image::load("o_mino.png"))
        .join(Image::load("j_mino.png"))
        .join(Image::load("l_mino.png"))
        .join(Image::load("s_mino.png"))
        .join(Image::load("z_mino.png"))
        .join(Image::load("t_mino.png"))
        .and_then(|loaded| {
            // Each `join` nests another pair on the left, hence the pattern.
            let ((((((((font, empty_mino), i_mino), o_mino), j_mino), l_mino), s_mino), z_mino), t_mino) = loaded;
            Ok(Resources {
                font,
                images: Images {
                    empty_mino,
                    i_mino,
                    o_mino,
                    j_mino,
                    l_mino,
                    s_mino,
                    z_mino,
                    t_mino,
                },
            })
        })
}
|
#[doc = "Reader of register CONFCHR1"]
pub type R = crate::R<u32, super::CONFCHR1>;
#[doc = "Writer for register CONFCHR1"]
pub type W = crate::W<u32, super::CONFCHR1>;
#[doc = "Register CONFCHR1 `reset()`'s with value 0"]
impl crate::ResetValue for super::CONFCHR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Every channel-configuration field resets to 0.
        0
    }
}
#[doc = "Reader of field `CONFCH7`"]
pub type CONFCH7_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CONFCH7`"]
pub struct CONFCH7_W<'a> {
    w: &'a mut W,
}
impl<'a> CONFCH7_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the 2-bit field at offset 28, then OR in the masked value.
        self.w.bits &= !(0x03 << 28);
        self.w.bits |= ((value as u32) & 0x03) << 28;
        self.w
    }
}
#[doc = "Reader of field `CONFCH6`"]
pub type CONFCH6_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CONFCH6`"]
pub struct CONFCH6_W<'a> {
    w: &'a mut W,
}
impl<'a> CONFCH6_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the 2-bit field at offset 24, then OR in the masked value.
        self.w.bits &= !(0x03 << 24);
        self.w.bits |= ((value as u32) & 0x03) << 24;
        self.w
    }
}
#[doc = "Reader of field `CONFCH5`"]
pub type CONFCH5_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CONFCH5`"]
pub struct CONFCH5_W<'a> {
    w: &'a mut W,
}
impl<'a> CONFCH5_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the 2-bit field at offset 20, then OR in the masked value.
        self.w.bits &= !(0x03 << 20);
        self.w.bits |= ((value as u32) & 0x03) << 20;
        self.w
    }
}
#[doc = "Reader of field `CONFCH4`"]
pub type CONFCH4_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CONFCH4`"]
pub struct CONFCH4_W<'a> {
    w: &'a mut W,
}
impl<'a> CONFCH4_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the 2-bit field at offset 16, then OR in the masked value.
        self.w.bits &= !(0x03 << 16);
        self.w.bits |= ((value as u32) & 0x03) << 16;
        self.w
    }
}
#[doc = "Reader of field `CONFCH3`"]
pub type CONFCH3_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CONFCH3`"]
pub struct CONFCH3_W<'a> {
    w: &'a mut W,
}
impl<'a> CONFCH3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the 2-bit field at offset 12, then OR in the masked value.
        self.w.bits &= !(0x03 << 12);
        self.w.bits |= ((value as u32) & 0x03) << 12;
        self.w
    }
}
#[doc = "Reader of field `CONFCH2`"]
pub type CONFCH2_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CONFCH2`"]
pub struct CONFCH2_W<'a> {
    w: &'a mut W,
}
impl<'a> CONFCH2_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the 2-bit field at offset 8, then OR in the masked value.
        self.w.bits &= !(0x03 << 8);
        self.w.bits |= ((value as u32) & 0x03) << 8;
        self.w
    }
}
#[doc = "Reader of field `CONFCH1`"]
pub type CONFCH1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CONFCH1`"]
pub struct CONFCH1_W<'a> {
    w: &'a mut W,
}
impl<'a> CONFCH1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the 2-bit field at offset 4, then OR in the masked value.
        self.w.bits &= !(0x03 << 4);
        self.w.bits |= ((value as u32) & 0x03) << 4;
        self.w
    }
}
#[doc = "Reader of field `CONFCH0`"]
pub type CONFCH0_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CONFCH0`"]
pub struct CONFCH0_W<'a> {
    w: &'a mut W,
}
impl<'a> CONFCH0_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the 2-bit field at offset 0, then OR in the masked value.
        self.w.bits &= !0x03;
        self.w.bits |= (value as u32) & 0x03;
        self.w
    }
}
impl R {
    // Read accessors: each extracts one 2-bit channel-config field.
    #[doc = "Bits 28:29 - CONFCH7"]
    #[inline(always)]
    pub fn confch7(&self) -> CONFCH7_R {
        CONFCH7_R::new(((self.bits >> 28) & 0x03) as u8)
    }
    #[doc = "Bits 24:25 - CONFCH6"]
    #[inline(always)]
    pub fn confch6(&self) -> CONFCH6_R {
        CONFCH6_R::new(((self.bits >> 24) & 0x03) as u8)
    }
    #[doc = "Bits 20:21 - CONFCH5"]
    #[inline(always)]
    pub fn confch5(&self) -> CONFCH5_R {
        CONFCH5_R::new(((self.bits >> 20) & 0x03) as u8)
    }
    #[doc = "Bits 16:17 - CONFCH4"]
    #[inline(always)]
    pub fn confch4(&self) -> CONFCH4_R {
        CONFCH4_R::new(((self.bits >> 16) & 0x03) as u8)
    }
    #[doc = "Bits 12:13 - CONFCH3"]
    #[inline(always)]
    pub fn confch3(&self) -> CONFCH3_R {
        CONFCH3_R::new(((self.bits >> 12) & 0x03) as u8)
    }
    #[doc = "Bits 8:9 - CONFCH2"]
    #[inline(always)]
    pub fn confch2(&self) -> CONFCH2_R {
        CONFCH2_R::new(((self.bits >> 8) & 0x03) as u8)
    }
    #[doc = "Bits 4:5 - CONFCH1"]
    #[inline(always)]
    pub fn confch1(&self) -> CONFCH1_R {
        CONFCH1_R::new(((self.bits >> 4) & 0x03) as u8)
    }
    #[doc = "Bits 0:1 - CONFCH0"]
    #[inline(always)]
    pub fn confch0(&self) -> CONFCH0_R {
        CONFCH0_R::new((self.bits & 0x03) as u8)
    }
}
impl W {
    // Write accessors: each returns a proxy that masks its field into `bits`.
    #[doc = "Bits 28:29 - CONFCH7"]
    #[inline(always)]
    pub fn confch7(&mut self) -> CONFCH7_W {
        CONFCH7_W { w: self }
    }
    #[doc = "Bits 24:25 - CONFCH6"]
    #[inline(always)]
    pub fn confch6(&mut self) -> CONFCH6_W {
        CONFCH6_W { w: self }
    }
    #[doc = "Bits 20:21 - CONFCH5"]
    #[inline(always)]
    pub fn confch5(&mut self) -> CONFCH5_W {
        CONFCH5_W { w: self }
    }
    #[doc = "Bits 16:17 - CONFCH4"]
    #[inline(always)]
    pub fn confch4(&mut self) -> CONFCH4_W {
        CONFCH4_W { w: self }
    }
    #[doc = "Bits 12:13 - CONFCH3"]
    #[inline(always)]
    pub fn confch3(&mut self) -> CONFCH3_W {
        CONFCH3_W { w: self }
    }
    #[doc = "Bits 8:9 - CONFCH2"]
    #[inline(always)]
    pub fn confch2(&mut self) -> CONFCH2_W {
        CONFCH2_W { w: self }
    }
    #[doc = "Bits 4:5 - CONFCH1"]
    #[inline(always)]
    pub fn confch1(&mut self) -> CONFCH1_W {
        CONFCH1_W { w: self }
    }
    #[doc = "Bits 0:1 - CONFCH0"]
    #[inline(always)]
    pub fn confch0(&mut self) -> CONFCH0_W {
        CONFCH0_W { w: self }
    }
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
// WinRT runtime classes are projected as opaque COM interface pointers.
pub type DevicePortalConnection = *mut ::core::ffi::c_void;
pub type DevicePortalConnectionClosedEventArgs = *mut ::core::ffi::c_void;
// WinRT enum, modeled as an i32 newtype with associated constants so any
// value the ABI delivers remains representable.
#[repr(transparent)]
pub struct DevicePortalConnectionClosedReason(pub i32);
impl DevicePortalConnectionClosedReason {
    pub const Unknown: Self = Self(0i32);
    pub const ResourceLimitsExceeded: Self = Self(1i32);
    pub const ProtocolError: Self = Self(2i32);
    pub const NotAuthorized: Self = Self(3i32);
    pub const UserNotPresent: Self = Self(4i32);
    pub const ServiceTerminated: Self = Self(5i32);
}
impl ::core::marker::Copy for DevicePortalConnectionClosedReason {}
impl ::core::clone::Clone for DevicePortalConnectionClosedReason {
    fn clone(&self) -> Self {
        // Copy type: clone is a bitwise copy.
        *self
    }
}
pub type DevicePortalConnectionRequestReceivedEventArgs = *mut ::core::ffi::c_void;
|
use flo_script::*;
use flo_script::gluon_host::*;
use futures::stream;
use futures::executor;
#[test]
pub fn read_input_stream_as_output() {
    // A reader that subscribes before any input is attached must still
    // receive every item once the input arrives, then see end-of-stream.
    let host = GluonScriptHost::new();
    let input_x = FloScriptSymbol::with_name("x");
    host.editor().set_input_type::<i32>(input_x);
    // Start reading the stream before attaching some output
    let mut output_x_stream = executor::spawn(host.notebook().receive_output::<i32>(input_x).expect("output stream"));
    // Send some data to the input
    let input_data = stream::iter_ok::<_, ()>(vec![1, 2, 3]);
    host.notebook().attach_input(input_x, input_data).expect("attaching input");
    // Should be able to read the items from the input stream
    assert!(output_x_stream.wait_stream() == Some(Ok(1)));
    assert!(output_x_stream.wait_stream() == Some(Ok(2)));
    assert!(output_x_stream.wait_stream() == Some(Ok(3)));
    assert!(output_x_stream.wait_stream() == None);
}
#[test]
fn cannot_read_input_stream_as_wrong_type() {
    // Reading a symbol declared as i32 through a u32 stream must be rejected.
    let host = GluonScriptHost::new();
    let input_x = FloScriptSymbol::with_name("x");
    host.editor().set_input_type::<i32>(input_x);
    assert!(host.notebook().receive_output::<u32>(input_x).err().unwrap() == FloScriptError::IncorrectType);
}
#[test]
fn cannot_read_input_after_undefining() {
    // A symbol stops being readable once it is undefined.
    let host = GluonScriptHost::new();
    let input_x = FloScriptSymbol::with_name("x");
    host.editor().set_input_type::<i32>(input_x);
    assert!(host.notebook().receive_output::<i32>(input_x).is_ok());
    host.editor().undefine_symbol(input_x);
    assert!(host.notebook().receive_output::<u32>(input_x).err().unwrap() == FloScriptError::UndefinedSymbol(input_x));
}
#[test]
fn cannot_read_missing_input_stream() {
    // Reading a symbol that was never defined reports UndefinedSymbol.
    let host = GluonScriptHost::new();
    let input_x = FloScriptSymbol::with_name("x");
    assert!(host.notebook().receive_output::<i32>(input_x).err().unwrap() == FloScriptError::UndefinedSymbol(input_x));
}
|
use std::sync::Arc;
use std::sync::RwLock;
use std::sync::mpsc::Receiver;
use event::InputEvent;
use buffer::Buffer;
use client::Client;
pub struct GenericClient {
reciever: Receiver,
buffers: Arc<RwLock<Vec<Buffer>>>
}
// Bug fixes: `pub impl` is not valid Rust (visibility comes from the trait);
// `new` had no return type yet its body evaluated to a GenericClient; and
// `reciever` tried to move the non-Copy Receiver out of `&self`.
// NOTE(review): the `Client` trait is defined elsewhere — confirm these
// signatures match its declarations.
impl Client for GenericClient {
    fn new(buffers: Arc<RwLock<Vec<Buffer>>>, reciever: Receiver<InputEvent>) -> GenericClient {
        GenericClient { reciever, buffers }
    }
    fn reciever(&self) -> &Receiver<InputEvent> {
        &self.reciever
    }
    fn handle_input(&self, _event: InputEvent) {
        // TODO: input handling not implemented yet.
    }
}
|
#[doc = r"Value read from the register"]
pub struct R {
    bits: u32, // snapshot of the register contents at read time
}
#[doc = r"Value to write to the register"]
pub struct W {
    bits: u32, // value accumulated by the field write proxies
}
impl super::CC {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: the current bits seed both the reader and the
        // writer handed to the closure.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the writer starts from the reset value.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
#[doc = r"Value of the field"]
pub struct USB_CC_CLKDIVR {
    bits: u8, // 4-bit divisor value extracted from bits 0:3
}
impl USB_CC_CLKDIVR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r"Proxy"]
pub struct _USB_CC_CLKDIVW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_CC_CLKDIVW<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0:3 — clear then set in one expression
        // (the `<< 0` shifts of the original are dropped).
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
#[doc = r"Value of the field"]
pub struct USB_CC_CSDR {
    bits: bool, // single-bit field value (bit 8)
}
impl USB_CC_CSDR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _USB_CC_CSDW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_CC_CSDW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 8: clear then set in a single masked expression.
        self.w.bits = (self.w.bits & !(1 << 8)) | (((value as u32) & 1) << 8);
        self.w
    }
}
#[doc = r"Value of the field"]
pub struct USB_CC_CLKENR {
    bits: bool, // single-bit field value (bit 9)
}
impl USB_CC_CLKENR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _USB_CC_CLKENW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_CC_CLKENW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 9: clear then set in a single masked expression.
        self.w.bits = (self.w.bits & !(1 << 9)) | (((value as u32) & 1) << 9);
        self.w
    }
}
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:3 - PLL Clock Divisor"]
    #[inline(always)]
    pub fn usb_cc_clkdiv(&self) -> USB_CC_CLKDIVR {
        // No-op `>> 0` dropped; 15 == 0x0f.
        USB_CC_CLKDIVR { bits: (self.bits & 0x0f) as u8 }
    }
    #[doc = "Bit 8 - Clock Source/Direction"]
    #[inline(always)]
    pub fn usb_cc_csd(&self) -> USB_CC_CSDR {
        USB_CC_CSDR { bits: ((self.bits >> 8) & 1) != 0 }
    }
    #[doc = "Bit 9 - USB Clock Enable"]
    #[inline(always)]
    pub fn usb_cc_clken(&self) -> USB_CC_CLKENR {
        USB_CC_CLKENR { bits: ((self.bits >> 9) & 1) != 0 }
    }
}
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    // Field write proxies; each masks its value into `self.bits`.
    #[doc = "Bits 0:3 - PLL Clock Divisor"]
    #[inline(always)]
    pub fn usb_cc_clkdiv(&mut self) -> _USB_CC_CLKDIVW {
        _USB_CC_CLKDIVW { w: self }
    }
    #[doc = "Bit 8 - Clock Source/Direction"]
    #[inline(always)]
    pub fn usb_cc_csd(&mut self) -> _USB_CC_CSDW {
        _USB_CC_CSDW { w: self }
    }
    #[doc = "Bit 9 - USB Clock Enable"]
    #[inline(always)]
    pub fn usb_cc_clken(&mut self) -> _USB_CC_CLKENW {
        _USB_CC_CLKENW { w: self }
    }
}
|
extern crate jlib;
use jlib::address::traits::seed::SeedI;
use jlib::address::types::seed::SeedBuilder;
use jlib::wallet::wallet::{
WalletType
};
/// Derives a master seed from a passphrase and prints its human-readable form.
fn main() {
    let passphrase = Some("Masterphrase");
    let builder = SeedBuilder::new(WalletType::SM2P256V1);
    let seed_hex = builder.get_seed(passphrase);
    let readable = builder.human_seed(&seed_hex);
    println!("readable seed : {}", readable);
}
|
use super::*;
use smart_pointer::ref_cell_demo::LimitTracker;
use smart_pointer::ref_cell_demo::Messenger;
use std::cell::RefCell;
#[test]
fn it_works() {
    // Sanity check that the test harness itself runs.
    let sum = 2 + 2;
    assert_eq!(sum, 4);
}
#[test]
fn case_sensitive() {
    // "duct" matches only the exact-case line, not "Duct"-style variants.
    let query = "duct";
    let contents = "\
Rust:
safe, fast, productive.
Pick three.";
    assert_eq!(vec!["safe, fast, productive."], search(query, contents));
}
#[test]
fn case_insensitive() {
    // Mixed-case query must match both "Rust:" and "Trust me." lines.
    let query = "rUsT";
    let contents = "\
Rust:
safe, fast, productive.
Pick three.
Trust me.";
    assert_eq!(
        vec!["Rust:", "Trust me."],
        search_case_insensitive(query, contents)
    )
}
#[test]
fn iterator_demonstration() {
    // `iter()` borrows: each `next()` yields `Some(&item)` until exhausted.
    let values = vec![1, 2, 3];
    let mut it = values.iter();
    for expected in 1..=3 {
        assert_eq!(it.next(), Some(&expected));
    }
    assert_eq!(it.next(), None);
}
/// Test fixture: a shoe with a numeric size and a style label.
#[derive(PartialEq, Debug)]
struct Shoe {
    size: u32,
    style: String,
}
/// Returns only the shoes matching `shoe_size`, preserving their order.
fn shoes_in_my_size(shoes: Vec<Shoe>, shoe_size: u32) -> Vec<Shoe> {
    let mut shoes = shoes;
    shoes.retain(|shoe| shoe.size == shoe_size);
    shoes
}
#[test]
fn filters_by_size() {
    // Only the two size-10 shoes survive, in their original order.
    let shoes = vec![
        Shoe {
            size: 10,
            style: String::from("sneaker"),
        },
        Shoe {
            size: 13,
            style: String::from("sandal"),
        },
        Shoe {
            size: 10,
            style: String::from("boot"),
        },
    ];
    let in_my_size = shoes_in_my_size(shoes, 10);
    assert_eq!(
        in_my_size,
        vec![
            Shoe {
                size: 10,
                style: String::from("sneaker")
            },
            Shoe {
                size: 10,
                style: String::from("boot")
            },
        ]
    )
}
/// Test fixture: an iterator that counts 1 through 5 (see its `Iterator` impl).
struct Counter {
    count: u32,
}
impl Counter {
    /// Starts at zero; the first `next()` yields 1.
    fn new() -> Counter {
        Counter { count: 0 }
    }
}
impl Iterator for Counter {
    type Item = u32;
    /// Yields 1 through 5, then `None` on every later call.
    fn next(&mut self) -> Option<Self::Item> {
        self.count += 1;
        if self.count >= 6 {
            None
        } else {
            Some(self.count)
        }
    }
}
#[test]
fn calling_next_directly() {
    // Manual iteration: 1..=5 then exhaustion.
    let mut counter = Counter::new();
    for expected in 1..=5 {
        assert_eq!(counter.next(), Some(expected));
    }
    assert_eq!(counter.next(), None);
}
#[test]
fn using_other_iterator_trait_methods() {
    // Adaptor chain over two Counters: zip stops when the shorter (skipped)
    // side ends, so the pairs are (1,2)..(4,5); products divisible by 3 are
    // 6 and 12, summing to 18.
    let sum: u32 = Counter::new()
        .zip(Counter::new().skip(1))
        .map(|(a, b)| {
            println!("a:{}, b:{}", a, b);
            a * b
        }).filter(|x| x % 3 == 0)
        .sum();
    assert_eq!(18, sum);
}
/// Test double that records sent messages; `RefCell` allows mutation through
/// the `&self` the `Messenger` trait provides.
struct MockMessenger {
    sent_messages: RefCell<Vec<String>>,
}
impl MockMessenger {
    /// Starts with an empty message log.
    fn new() -> MockMessenger {
        MockMessenger { sent_messages: RefCell::new(Vec::new()) }
    }
}
impl Messenger for MockMessenger {
    /// Records the message instead of delivering it anywhere.
    fn send(&self, message: &str) {
        self.sent_messages.borrow_mut().push(message.to_string());
    }
}
#[test]
fn it_send_an_over_75_percent_warning_message() {
    // Setting 80/100 crosses the 75% threshold, so exactly one warning is sent.
    let mock_messenger = MockMessenger::new();
    let mut limit_tracker = LimitTracker::new(&mock_messenger, 100);
    limit_tracker.set_value(80);
    assert_eq!(mock_messenger.sent_messages.borrow().len(), 1);
}
|
// treap aka randomized binary search tree
// treap is a data structure that stores pairs (X, Y) in a binary tree in such a way:
// it is a binary search tree by X and a binary heap by Y
// references:
// https://cp-algorithms.com/data_structures/treap.html
use rand::Rng;
use rand::distributions::{Distribution, Standard};
use std::cmp::Ordering::{Less, Greater, Equal};
use std::mem::swap;
/// Treap node: BST-ordered by `key`, max-heap-ordered by `priority`.
struct Node<T> {
    key: T,
    priority: u64,  // random heap priority
    size: usize,    // number of nodes in this subtree (including self)
    left: Vertex<T>,
    right: Vertex<T>,
}
/// An optional owned subtree.
type Vertex<T> = Option<Box<Node<T>>>;
fn size<T>(v: &Vertex<T>) -> usize {
match v {
Some(n) => n.size,
None => 0,
}
}
// Recomputes `size` from the children and hands the vertex back.
fn update_size<T>(v: Vertex<T>) -> Vertex<T> {
    v.map(|mut n| {
        n.size = size(&n.left) + 1 + size(&n.right);
        n
    })
}
// split v into left part and right part, so that:
// all elements in the left part have keys less than OR EQUAL to `target`
// (the Equal arm recurses right, keeping the matching node on the left),
// all elements in the right part have keys strictly greater than `target`.
// (Corrected: the previous comment put equal keys on the right.)
fn split<T>(v: Vertex<T>, target: &T) -> (Vertex<T>, Vertex<T>) where T: Ord {
    if let Some(mut node) = v {
        match target.cmp(&node.key) {
            Less => {
                // target < node.key: node and its right subtree go right.
                let (l, r) = split(node.left, target);
                node.left = r;
                (l, update_size(Some(node)))
            },
            Greater | Equal => {
                // target >= node.key: node and its left subtree go left.
                let (l, r) = split(node.right, target);
                node.right = l;
                (update_size(Some(node)), r)
            },
        }
    } else {
        (None, None)
    }
}
// prerequisite: all elements in v1 is less than v2
// Joins two ordered treaps; the higher-priority root wins each step so the
// heap property is preserved.
fn merge<T>(v1: Vertex<T>, v2: Vertex<T>) -> Vertex<T> {
    match (v1, v2) {
        (Some(mut n1), Some(mut n2)) => {
            match n1.priority.cmp(&n2.priority) {
                Less => {
                    // n2 stays root; n1 (all-smaller keys) merges into its left.
                    n2.left = merge(Some(n1), n2.left);
                    update_size(Some(n2))
                },
                Greater => {
                    n1.right = merge(n1.right, Some(n2));
                    update_size(Some(n1))
                },
                // Priorities are 64-bit random draws; a collision indicates a bug.
                Equal => panic!("nodes cannot have equal priority"),
            }
        },
        (v @ Some(_), None) | (None, v @ Some(_)) => v,
        (None, None) => None,
    }
}
// Inserts `target` into the treap rooted at `v`, preserving both orderings.
fn insert<T>(v: Vertex<T>, mut target: Node<T>) -> Vertex<T> where T: Ord {
    if let Some(mut n) = v {
        match target.priority.cmp(&n.priority) {
            Less => {
                // Lower priority: descend by key like a plain BST insert.
                // (Equal keys descend right here.)
                match target.key.cmp(&n.key) {
                    Less => {
                        n.left = insert(n.left, target);
                        update_size(Some(n))
                    },
                    Greater | Equal => {
                        n.right = insert(n.right, target);
                        update_size(Some(n))
                    },
                }
            },
            Greater => {
                // Higher priority: target becomes the new root; split the old
                // subtree around its key to form the children.
                let (l, r) = split(Some(n), &target.key);
                target.left = l; target.right = r;
                update_size(Some(Box::new(target)))
            },
            Equal => panic!("nodes cannot have equal priority"),
        }
    } else {
        Some(Box::new(target))
    }
}
// Removes one node whose key equals `target` (if any) by merging its two
// subtrees in its place; sizes on the search path are refreshed on the way up.
fn delete<T>(v: Vertex<T>, target: T) -> Vertex<T> where T: Ord {
    match v {
        None => None,
        Some(mut node) => match target.cmp(&node.key) {
            Less => {
                node.left = delete(node.left, target);
                update_size(Some(node))
            },
            Greater => {
                node.right = delete(node.right, target);
                update_size(Some(node))
            },
            // Found it: splice the children together, dropping `node`.
            Equal => merge(node.left, node.right),
        },
    }
}
// Unites two treaps whose key ranges may overlap: the higher-priority root
// becomes the new root, the losing treap is split around that root's key,
// and the halves are united with the winner's children recursively.
// Panics if two roots share a priority.
fn union<T>(v1: Vertex<T>, v2: Vertex<T>) -> Vertex<T> where T: Ord {
    match (v1, v2) {
        (Some(mut n1), Some(mut n2)) => {
            match n1.priority.cmp(&n2.priority) {
                Less => {
                    // n2 wins the root: partition n1 around n2's key.
                    let (n1_left, n1_right) = split(Some(n1), &n2.key);
                    n2.left = union(n2.left, n1_left);
                    n2.right = union(n2.right, n1_right);
                    update_size(Some(n2))
                },
                Greater => {
                    // n1 wins the root: partition n2 around n1's key.
                    let (n2_left, n2_right) = split(Some(n2), &n1.key);
                    n1.left = union(n1.left, n2_left);
                    n1.right = union(n1.right, n2_right);
                    update_size(Some(n1))
                },
                Equal => panic!("nodes cannot have equal priority"),
            }
        },
        // One side empty: the other is the union.
        (v @ Some(_), None) | (None, v @ Some(_)) => v,
        (None, None) => None,
    }
}
fn get_priority<T>(v: &Vertex<T>) -> u64 {
if let Some(ref n) = v {
n.priority
} else {
0
}
}
// Restores the max-heap property at `v` by sifting the priority down:
// if a child holds a larger priority, swap priorities only (keys stay put,
// so BST order is untouched) and recurse into that child.
// Used by `build` after random priorities are assigned.
fn heapify<T>(v: &mut Vertex<T>) {
    if let Some(ref mut n) = v {
        // Pick the child with the larger priority (empty child counts as 0).
        let mut max = &mut n.left;
        if get_priority(max) < get_priority(&n.right) {
            max = &mut n.right;
        }
        if n.priority < get_priority(max) {
            // `max` must be Some here: its priority exceeds ours, and the
            // empty-subtree sentinel is 0.
            swap(&mut n.priority, &mut max.as_mut().unwrap().priority);
            heapify(max);
        }
    }
}
// Builds a treap from a sorted slice in O(n): the midpoint becomes the root
// (yielding a balanced BST), sizes are computed bottom-up, and `heapify`
// repairs the heap order for the randomly drawn priorities.
// NOTE(review): assumes `source` is sorted — unsorted input silently breaks
// the BST invariant.
fn build<T>(source: &[T]) -> Vertex<T> where T: Ord + Copy {
    if !source.is_empty() {
        let mid = source.len()/2;
        let mut n = Node::new(source[mid]);
        n.left = build(&source[0..mid]);
        n.right = build(&source[mid+1..source.len()]);
        n.size = size(&n.left) + 1 + size(&n.right);
        let mut v = Some(Box::new(n));
        // Random priorities may violate heap order; sift down to repair.
        heapify(&mut v);
        v
    } else {
        None
    }
}
// Draws a random value (u64 in practice) to use as a heap priority.
// NOTE(review): 0 collides with `get_priority`'s empty-subtree sentinel and
// equal priorities panic in merge/insert/union — not enforced here.
fn generate_random_priority<T>() -> T where Standard: Distribution<T> {
    rand::thread_rng().gen()
}
impl<T> Node<T> where T: Ord {
    /// Creates a leaf node for `key` with a freshly drawn random priority
    /// and a subtree size of 1.
    fn new(key: T) -> Self {
        Node {
            key, // field-init shorthand instead of redundant `key: key`
            priority: generate_random_priority(),
            size: 1,
            left: None,
            right: None,
        }
    }
}
/// An ordered multiset backed by a randomized treap:
/// a binary search tree by key and a max-heap by random priority.
pub struct Treap<T> {
    root: Vertex<T>,
}
impl<T> Treap<T> where T: Ord {
    /// Creates an empty treap.
    pub fn new() -> Self {
        Treap {
            root: None,
        }
    }
    /// Number of elements stored.
    pub fn size(&self) -> usize {
        size(&self.root)
    }
    /// Inserts `target` (duplicates allowed). Consumes and returns the treap
    /// so calls can be chained: `t.insert(1).insert(2)`.
    pub fn insert(mut self, target: T) -> Self {
        self.root = insert(self.root, Node::new(target));
        self
    }
    /// Removes one occurrence of `target` if present; chainable like `insert`.
    pub fn delete(mut self, target: T) -> Self {
        self.root = delete(self.root, target);
        self
    }
}
impl<T> Treap<T> where T: Ord + Copy {
    // construct a treap from sorted slice of T
    /// Builds a treap from an already-sorted slice in O(n).
    /// NOTE(review): sortedness is assumed, not checked.
    pub fn from(source: &[T]) -> Self {
        Treap {
            root: build(source),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Recursively checks both treap invariants below `v`:
    /// BST order (left key < node key <= right key) and strict max-heap
    /// order on priorities. Returns true when every node satisfies both.
    fn assert_helper<T>(v: &Vertex<T>) -> bool where T: Ord {
        if let Some(ref n) = v {
            match (&n.left, &n.right) {
                (lv @ Some(_), rv @ Some(_)) => {
                    if n.key > lv.as_ref().unwrap().key && n.priority > lv.as_ref().unwrap().priority &&
                    n.key <= rv.as_ref().unwrap().key && n.priority > rv.as_ref().unwrap().priority {
                        assert_helper(lv) && assert_helper(rv)
                    } else {
                        false
                    }
                },
                (lv @ Some(_), None) => {
                    if n.key > lv.as_ref().unwrap().key && n.priority > lv.as_ref().unwrap().priority {
                        assert_helper(lv)
                    } else {
                        false
                    }
                },
                (None, rv @ Some(_)) => {
                    if n.key <= rv.as_ref().unwrap().key && n.priority > rv.as_ref().unwrap().priority {
                        assert_helper(rv)
                    } else {
                        false
                    }
                },
                (None, None) => true,
            }
        } else {
            true
        }
    }
    /// Entry point for invariant checking on a whole treap.
    fn treap_assert<T>(t: &Treap<T>) -> bool where T: Ord{
        if t.root.is_some() {
            assert_helper(&t.root)
        } else {
            true
        }
    }
    // `from` must produce a valid treap with one node per input element.
    #[test]
    fn test_from() {
        let t = Treap::from(&vec![1,2,3,4,5,6,7,8,9]);
        assert!(treap_assert(&t));
        assert_eq!(t.size(), 9);
    }
    // Sequential inserts (worst case for a plain BST) must stay valid.
    #[test]
    fn test_treap_insertion() {
        let mut t = Treap::new();
        for i in 0..1000 {
            t = t.insert(i);
        }
        assert!(treap_assert(&t));
        assert_eq!(t.size(), 1000);
    }
    // Deleting an absent key (3) must be a no-op; deleting present keys
    // shrinks the treap while keeping it valid.
    #[test]
    fn test_treap_insert_and_delete() {
        let mut t = Treap::new();
        t = t.insert(10).insert(5).insert(1);
        assert!(treap_assert(&t));
        assert_eq!(t.size(), 3);
        t = t.delete(5);
        assert!(treap_assert(&t));
        assert_eq!(t.size(), 2);
        t = t.delete(3);
        assert!(treap_assert(&t));
        assert_eq!(t.size(), 2);
        t = t.delete(10).delete(1);
        assert!(treap_assert(&t));
        assert_eq!(t.size(), 0);
    }
}
/*
* Copyright (c) 2019, Piotr Pszczółkowski
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
extern crate rand;
pub mod blowfish;
pub mod twofish;
pub mod gost;
pub mod way3;
/// Converts a block of 8 bytes to two u32 words, little-endian:
/// `data[0..4]` -> `xl`, `data[4..8]` -> `xr`.
///
/// Panics if `data` is shorter than 8 bytes (same as the original shifts).
#[inline]
fn bytes2block(data: &[u8]) -> (u32, u32) {
    // `u32::from_le_bytes` replaces the hand-rolled shift-and-or assembly;
    // same byte order, clearer intent.
    let xl = u32::from_le_bytes([data[0], data[1], data[2], data[3]]);
    let xr = u32::from_le_bytes([data[4], data[5], data[6], data[7]]);
    (xl, xr)
}
/// Writes the block `x` into `data` as 8 little-endian bytes.
fn block2bytes(x: (u32, u32), data: &mut [u8]) {
    let (xl, xr) = x;
    words2bytes(xl, xr, data);
}
/// Writes two u32 words into `data` little-endian:
/// `xl` -> `data[0..4]`, `xr` -> `data[4..8]`.
///
/// Panics if `data` is shorter than 8 bytes (same as the original stores).
#[inline]
fn words2bytes(xl: u32, xr: u32, data: &mut [u8]) {
    // `to_le_bytes` + `copy_from_slice` replaces eight manual shift/stores;
    // identical byte layout.
    data[..4].copy_from_slice(&xl.to_le_bytes());
    data[4..8].copy_from_slice(&xr.to_le_bytes());
}
/// Returns an `nbytes`-long padding block: a 0x80 marker byte followed by
/// zeros (the bit-padding scheme recognized by `padding_index`).
///
/// `nbytes == 0` yields an empty vector; the original indexed `s[0]`
/// unconditionally and panicked on that input.
pub fn padding(nbytes: usize) -> Vec<u8> {
    let mut s = vec![0u8; nbytes];
    if let Some(first) = s.first_mut() {
        *first = 128;
    }
    s
}
/// Finds the index of the padding marker: the last non-zero byte of `data`
/// must equal 0x80 for the data to be considered padded.
///
/// Returns `None` for empty input, all-zero input, or when the last non-zero
/// byte is not the marker. The original decremented `i` past zero on all-zero
/// input and panicked on usize underflow; `rposition` avoids that entirely.
pub fn padding_index(data: &[u8]) -> Option<usize> {
    match data.iter().rposition(|&b| b != 0) {
        Some(i) if data[i] == 128 => Some(i),
        _ => None,
    }
}
|
/// Sums the slice, returning `None` once the running total would exceed the
/// 2^31 cap; `Some(total)` otherwise (`Some(0)` for an empty slice).
///
/// NOTE(review): the cap is 2_147_483_648 (2^31), not `u32::MAX` — sums in
/// (2^31, u32::MAX] are rejected even though they fit in a u32. Preserved
/// from the original; confirm whether `checked_add` semantics were intended.
fn sum(arr: &[u32]) -> Option<u32> {
    const LIMIT: u32 = 2_147_483_648;
    // try_fold short-circuits with None exactly where the original set its
    // overflow flag and broke out of the loop.
    arr.iter().try_fold(0u32, |acc, &x| {
        if LIMIT - acc < x { None } else { Some(acc + x) }
    })
}
/// Demo driver: one input that trips the overflow guard, one that does not.
fn main() {
    let overflowing = [10, 20, 30, 40, 50, 2147483648];
    let small = [10, 20, 30, 40, 50, 60, 70];
    println!("This sumA is {:?}", sum(&overflowing));
    println!("This sumB is {:?}", sum(&small));
}
|
//! mount provides a Datastore that has other Datastores
//! mounted at various key prefixes and is threadsafe
mod async_results;
mod sync_results;
use std::cmp::Ordering;
use crate::datastore::{Datastore as DatastoreT, Read, Write};
use crate::error::DSError;
use crate::key::Key;
use crate::query::{self, QResult, Query};
/// A child datastore mounted at a key prefix.
pub struct Mount<D: DatastoreT> {
    pub prefix: Key,
    pub datastore: D,
}
/// A composite datastore that routes each key to the mounted child whose
/// prefix contains it. Mount order is maintained by `Datastore::new`.
pub struct Datastore<D: DatastoreT> {
    pub mounts: Vec<Mount<D>>,
}
impl<D: DatastoreT> Datastore<D> {
    /// Builds the mount table sorted by prefix in reverse (descending)
    /// order, so that longer/deeper prefixes are tried before their
    /// ancestors in `lookup`.
    pub fn new(mounts: Vec<Mount<D>>) -> Datastore<D> {
        let mut mounts = mounts;
        mounts.sort_by(|a, b| a.prefix.cmp(&b.prefix).reverse());
        Datastore::<D> { mounts }
    }
    /// Finds the mount whose prefix equals or is an ancestor of `key`.
    ///
    /// Still incomplete (the original TODO): returning the matched datastore
    /// by value needs a `Clone` bound on `D`, so this currently always
    /// returns `None`. The prefix-trim bug is fixed regardless: the original
    /// sliced `[..prefix_len]`, which kept the *prefix* and dropped the
    /// remainder — it must be `[prefix_len..]`.
    pub fn lookup(&self, key: &Key) -> Option<(D, Key, Key)> {
        for m in self.mounts.iter() {
            if &m.prefix == key || m.prefix.is_ancestor_of(key) {
                // Strip the mount prefix, keeping the remainder of the key.
                let rest = &key.as_bytes()[m.prefix.as_bytes().len()..];
                // SAFETY: slicing at the prefix length stays on a UTF-8
                // boundary as long as keys are ASCII.
                // NOTE(review): confirm keys cannot contain multi-byte chars.
                let rest = unsafe { std::str::from_utf8_unchecked(rest) };
                let _k = Key::new(rest);
                // TODO
                // return Some((m.datastore.clone(), m.prefix.clone(), _k))
            }
        }
        None
    }
}
// TODO
/*
struct QueryResults {
mount: Key,
results: Box<dyn AsyncResults>,
next: QResult,
}
fn advance(mount: &Key, results: &mut Box<dyn AsyncResults>) -> Option<QResult> {
let mut r = results.next_sync();
match r {
None => {
// TODO set results?
None
}
Some(mut query_result) => {
if let Ok(ref mut entry) = query_result {
// add mount prefix to entry.key
let s: String = mount.child(Key::from_raw(entry.key.clone())).into();
entry.key = s;
}
Some(query_result)
}
}
}
impl QueryResults {
fn new_with_advance(mount: Key, results: impl AsyncResults + 'static) -> Option<Self> {
let mut results: Box<dyn AsyncResults> = Box::new(results);
advance(&mount, &mut results).map(|next| QueryResults {
mount,
results,
next,
})
}
}
struct QuerySet {
query: Query,
heads: Vec<QueryResults>,
}
impl QuerySet {
fn len(&self) -> usize {
self.heads.len()
}
fn less(&self, i: usize, j: usize) -> bool {
let i = self.heads[i].next.as_ref().expect("");
let j = self.heads[j].next.as_ref().expect("");
query::order::less(&self.query.orders, i, j) == Ordering::Less
}
fn swap(&mut self, i: usize, j: usize) {
if i >= self.heads.len() || j >= self.heads.len() {
return;
}
self.heads.swap(i, j);
}
fn push(&mut self, x: QueryResults) {
self.heads.push(x);
}
fn pop(&mut self) -> Option<QueryResults> {
self.heads.pop()
}
fn add_results(&mut self, mount: Key, results: impl AsyncResults + 'static) {
if let Some(r) = QueryResults::new_with_advance(mount, results) {
self.push(r);
}
}
fn fix(&mut self, i: usize) {
if !self.down(i, self.len()) {
self.up(i);
}
}
fn remove(&mut self, i: usize) -> Option<QueryResults> {
if self.len() == 0 {
return None;
}
let n = self.len() - 1;
if n != i {
self.swap(i, n);
if !self.down(i, n) {
self.up(i)
}
}
self.pop()
}
fn down(&mut self, i0: usize, n: usize) -> bool {
let mut i = i0;
loop {
let j1 = 2 * i + 1;
if j1 >= n || j1 < 0 {
// j1 < 0 after int overflow
break;
}
let mut j = j1; // left child
let j2 = j1 + 1;
if j2 < n && self.less(j2, j1) {
j = j2; // = 2*i + 2 // right child
}
if !self.less(j, i) {
break;
}
self.swap(i, j);
i = j;
}
i > i0
}
fn up(&mut self, j: usize) {
let mut j = j;
loop {
let i = (j - 1) / 2; // parent
if i == j || !self.less(j, i) {
break;
}
self.swap(i, j);
j = i
}
}
}
impl Iterator for QuerySet {
type Item = QResult;
fn next(&mut self) -> Option<Self::Item> {
if self.heads.is_empty() {
return None;
}
let head = &mut self.heads[0];
let mut maybe = advance(&head.mount, &mut head.results);
if let Some(mut r) = maybe {
// use new advance next to replace old next, and return old, store new in `self.heads[0]`
std::mem::swap(&mut r, &mut head.next);
self.fix(0);
Some(r)
} else {
self.remove(0).map(|r| r.next)
}
}
}
*/
/*
impl Read for Datastore {
fn get(&self, key: &Key) -> Result< Vec<u8>, Error> {
unimplemented!()
}
fn has(&self, key: &Key) -> Result<bool, Error> {
unimplemented!()
}
fn get_size(&self, key: &Key) -> Result<usize, Error> {
unimplemented!()
}
}
impl Write for Datastore {
fn put(&self, key: Key, value: Vec<u8>) -> Result<(), Error> {
unimplemented!()
}
fn delete(&self, key: &Key) -> Result<(), Error> {
unimplemented!()
}
}
*/
|
use crate::{
structs::{CustomerData,DriverData}
};
use solana_program::{
account_info::{next_account_info,AccountInfo},
entrypoint::ProgramResult,
msg,
program_error::ProgramError,
pubkey::Pubkey,
};
use borsh::{BorshSerialize,BorshDeserialize};
use std::io::ErrorKind::InvalidData;
use crate::helper;
//init customer
pub fn step0_1(account: &AccountInfo,program_id: &Pubkey) -> ProgramResult{
if account.owner!=program_id{
msg!("This account {} is not owned by program {}",account.key,program_id);
}
//get customer account
let mut customer_account = match CustomerData::try_from_slice(&account.data.borrow_mut()) {
Ok(data) => data,
Err(err) => {
if err.kind() == InvalidData {
msg!("InvalidData so initializing account data");
crate::helper::get_init_acccount()
} else {
panic!("Unknown error decoding account data {:?}", err)
}
}
};
//update hash
//customer_account.profile_hash=instruction.profile_hash;
customer_account=crate::helper::get_init_acccount();
//save account
customer_account.serialize(&mut &mut account.data.borrow_mut()[..]).map_err(|e| e.into())
// msg!("customer_account data after saving {:?}",customer_account);
}
//init driver
pub fn step0_2(account: &AccountInfo,program_id: &Pubkey) -> ProgramResult{
if account.owner!=program_id{
msg!("This account {} is not owned by program {}",account.key,program_id);
}
//get customer account
let mut customer_account = match DriverData::try_from_slice(&account.data.borrow_mut()) {
Ok(data) => data,
Err(err) => {
if err.kind() == InvalidData {
msg!("InvalidData so initializing account data");
crate::helper::get_init_driver()
} else {
panic!("Unknown error decoding account data {:?}", err)
}
}
};
//update hash
//customer_account.profile_hash=instruction.profile_hash;
customer_account=crate::helper::get_init_driver();
//save account
customer_account.serialize(&mut &mut account.data.borrow_mut()[..]).map_err(|e| e.into())
// msg!("customer_account data after saving {:?}",customer_account);
}
pub fn step1(account: &AccountInfo,program_id: &Pubkey, full_instruction: String) -> ProgramResult{
if account.owner!=program_id{
msg!("This account {} is not owned by program {}",account.key,program_id);
}
let instr_profile_hash= &full_instruction[2..45];
//msg!("Instruction to update profile hash {}",instruction.profile_hash);
msg!("Instruction to update profile hash {}",instr_profile_hash);
//get customer account
let mut customer_account = match CustomerData::try_from_slice(&account.data.borrow_mut()) {
Ok(data) => data,
Err(err) => {
if err.kind() == InvalidData {
msg!("InvalidData so initializing account data");
crate::helper::get_init_acccount()
} else {
panic!("Unknown error decoding account data {:?}", err)
}
}
};
//update hash
//customer_account.profile_hash=instruction.profile_hash;
customer_account.profile_hash=String::from(instr_profile_hash);
//save account
customer_account.serialize(&mut &mut account.data.borrow_mut()[..]).map_err(|e| e.into())
// msg!("customer_account data after saving {:?}",customer_account);
}
pub fn step2(account: &AccountInfo,program_id: &Pubkey, full_instruction: String) -> ProgramResult{
if account.owner!=program_id{
msg!("This account {} is not owned by program {}",account.key,program_id);
}
//msg!("Instruction to update ride details {}",instruction.profile_hash);
//get customer account
let mut customer_account = match CustomerData::try_from_slice(&account.data.borrow_mut()) {
Ok(data) => data,
Err(err) => {
if err.kind() == InvalidData {
msg!("InvalidData so initializing account data");
crate::helper::get_init_acccount()
} else {
panic!("Unknown error decoding account data {:?}", err)
}
}
};
msg!("customer_account data decoded");
//update ride
let from_lat= &full_instruction[45..53];
let to_lat= &full_instruction[53..61];
let from_long= &full_instruction[61..69];
let to_long= &full_instruction[69..77];
let dist= &full_instruction[77..81];
customer_account.from_lat=String::from(from_lat);
customer_account.to_lat=String::from(to_lat);
customer_account.from_long=String::from(from_long);
customer_account.to_long=String::from(to_long);
customer_account.distance=String::from(dist);
// customer_account.from_lat=instruction.from_lat;
// customer_account.to_lat=instruction.to_lat;
// customer_account.from_long=instruction.from_long;
// customer_account.to_long=instruction.to_long;
// customer_account.distance = instruction.dist;
//save account
customer_account.serialize(&mut &mut account.data.borrow_mut()[..]).map_err(|e| e.into())
//msg!("customer_account data after saving {:?}",customer_account)
}
pub fn step3(account: &AccountInfo,program_id: &Pubkey, full_instruction: String) -> ProgramResult{
if account.owner!=program_id{
msg!("This account {} is not owned by program {}",account.key,program_id);
}
//msg!("Instruction to update driver account hash {}",instruction.profile_hash);
//get customer account
let mut driver_account = match DriverData::try_from_slice(&account.data.borrow_mut()) {
Ok(data) => data,
Err(err) => {
if err.kind() == InvalidData {
msg!("InvalidData so initializing account data");
crate::helper::get_init_driver()
} else {
panic!("Unknown error decoding account data {:?}", err)
}
}
};
let instr_profile_hash= &full_instruction[2..45];
//update hash
driver_account.profile_hash=String::from(instr_profile_hash);
msg!("driver_Account data before saving {:?}",driver_account);
//save driver_account
driver_account.serialize(&mut &mut account.data.borrow_mut()[..]).map_err(|e| e.into())
//msg!("customer_account data after saving {:?}",customer_account);
}
/// Matches a driver to a customer and records the customer's share of the
/// ride cost in the first free driver slot on the customer account.
///
/// Instruction layout (fixed ASCII offsets — TODO confirm against the client
/// encoder): [77..81) driver distance, [98..142) driver pubkey,
/// [142..186) customer pubkey, [186..190) total rides distance,
/// [190..194) max driver distance, [194..198) max rides distance.
///
/// NOTE(review): the owner checks below only log and do not abort, the
/// fixed-offset slices panic on short input, and the pubkey slices are not
/// trimmed for the 43-char '0'-padded case handled in step5 — verify.
pub fn step4(driver_account: &AccountInfo,customer_account_1: &AccountInfo,program_id: &Pubkey, full_instruction: String) -> ProgramResult{
    let dist_str=&full_instruction[77..81];
    //driver covered distance
    let dist_f = String::from(dist_str).parse::<f32>().unwrap();
    let rides_dist_str=&full_instruction[186..190];
    //total rides distance
    let rides_dist_f = String::from(rides_dist_str).parse::<f32>().unwrap();
    //max for both
    let dist_str_max=&full_instruction[190..194];
    let rides_max_dist_str=&full_instruction[194..198];
    let dist_f_max = String::from(dist_str_max).parse::<f32>().unwrap();
    let rides_dist_max_f = String::from(rides_max_dist_str).parse::<f32>().unwrap();
    //let dist_f=0.0; Driver cost
    // Total fare for the driver's route, and its upper bound.
    let total_cost=dist_f*helper::KM_RATE;
    let total_cost_max=dist_f_max*helper::KM_RATE;
    let driver_base58=String::from(&full_instruction[98..142]);
    let customer_base58=String::from(&full_instruction[142..186]);
    if driver_account.owner!=program_id{
        msg!("This account {} is not owned by program {}",driver_account.key,program_id);
    }
    //let driver_account_key:String =String::from(driver_account.key);
    msg!("driver_account.key {}",driver_base58);
    let mut driver_account_data = DriverData::try_from_slice(&driver_account.data.borrow_mut()).map_err(|err|{
        msg!("Attempt to deserialize data failed {:?}",err);
        ProgramError::InvalidInstructionData
    })?;
    //customer 1
    if customer_account_1.owner!=program_id{
        msg!("This account {} is not owned by program {}",customer_account_1.key,program_id);
    }
    let mut customer_account_1_data = CustomerData::try_from_slice(&customer_account_1.data.borrow_mut()).map_err(|err|{
        msg!("Attempt to deserialize data failed {:?}",err);
        ProgramError::InvalidInstructionData
    })?;
    // //let customer_account_1_key=String::from(customer_account_1.key);
    // //let customer_distance: String = customer_account_1_data.distance;
    let cust_dist_f=customer_account_1_data.distance.parse::<f32>().unwrap();
    //total ride cost for driver dist_f*kmrate = total_cost
    //customers kms covered rides_dist_f
    //customer cost = customer * total cost/ total rides distance
    // Pro-rata share: customer's distance over the combined ride distance.
    let c1_cost=cust_dist_f*total_cost/rides_dist_f;
    let c_cost_max=cust_dist_f*total_cost_max/rides_dist_max_f;
    let padded_cost=helper::pad_cost(c1_cost,helper::DUMMY_COST);
    let mut customer_mapped: bool = false;
    // Place this driver in the first free (DUMMY_ADDRESS) slot of the
    // customer's four driver slots, together with the quoted costs.
    if customer_account_1_data.driver_address_1==String::from(helper::DUMMY_ADDRESS)
    {
        customer_account_1_data.driver_address_1=driver_base58;
        customer_account_1_data.cost_1=padded_cost;
        customer_account_1_data.cost_1_max=helper::pad_cost(c_cost_max,helper::DUMMY_COST);
        customer_mapped=true;
    }else if customer_account_1_data.driver_address_2==String::from(helper::DUMMY_ADDRESS)
    {
        customer_account_1_data.driver_address_2=driver_base58;
        customer_account_1_data.cost_2=padded_cost;
        customer_account_1_data.cost_2_max=helper::pad_cost(c_cost_max,helper::DUMMY_COST);
        customer_mapped=true;
    }else if customer_account_1_data.driver_address_3==String::from(helper::DUMMY_ADDRESS){
        customer_account_1_data.driver_address_3=driver_base58;
        customer_account_1_data.cost_3=padded_cost;
        customer_account_1_data.cost_3_max=helper::pad_cost(c_cost_max,helper::DUMMY_COST);
        customer_mapped=true;
    }else if customer_account_1_data.driver_address_4==String::from(helper::DUMMY_ADDRESS) {
        customer_account_1_data.driver_address_4=driver_base58;
        customer_account_1_data.cost_4=padded_cost;
        customer_account_1_data.cost_4_max=helper::pad_cost(c_cost_max,helper::DUMMY_COST);
        customer_mapped=true;
    }
    // Mirror the mapping on the driver side: first free customer slot.
    if customer_mapped==true{
        if driver_account_data.customer_1== String::from(helper::DUMMY_ADDRESS){
            driver_account_data.customer_1=customer_base58;
        }else if driver_account_data.customer_2== String::from(helper::DUMMY_ADDRESS){
            driver_account_data.customer_2=customer_base58;
        }else if driver_account_data.customer_3== String::from(helper::DUMMY_ADDRESS){
            driver_account_data.customer_3=customer_base58;
        }
        driver_account_data.ride_state=String::from("1"); //pending
    }
    msg!("driver_Account data before saving {:?}",driver_account);
    //save driver_account
    driver_account_data.serialize(&mut &mut driver_account.data.borrow_mut()[..])?;
    customer_account_1_data.serialize(&mut &mut customer_account_1.data.borrow_mut()[..]).map_err(|e2| e2.into())
}
//step5 customer selects ride
/// Records the customer's choice of driver and mirrors the confirmation on
/// the driver account.
///
/// Instruction layout: [98..142) driver pubkey, [142..186) customer pubkey;
/// a trailing '0' marks a 43-char (unpadded) base58 key — TODO confirm
/// against the client encoder.
/// Fixes: owner checks now fail the transaction; slices are length-guarded;
/// camelCase locals renamed to snake_case.
pub fn step5(customer_account: &AccountInfo, driver_account: &AccountInfo, program_id: &Pubkey, full_instruction: String) -> ProgramResult {
    // Guard the fixed-offset slices below against short input.
    if full_instruction.len() < 186 {
        return Err(ProgramError::InvalidInstructionData);
    }
    // A trailing '0' means the key is only 43 chars long.
    let start_size1: usize = 98;
    let mut end_size1: usize = 142;
    if &full_instruction[141..142] == "0" {
        end_size1 = 141;
    }
    let driver_base58 = String::from(&full_instruction[start_size1..end_size1]);
    let start_size2: usize = 142;
    let mut end_size2: usize = 186;
    if &full_instruction[185..186] == "0" {
        end_size2 = 185;
    }
    let customer_base58 = String::from(&full_instruction[start_size2..end_size2]);
    //customer 1
    if customer_account.owner != program_id {
        msg!("This account {} is not owned by program {}", customer_account.key, program_id);
        return Err(ProgramError::IncorrectProgramId);
    }
    let mut customer_account_data = CustomerData::try_from_slice(&customer_account.data.borrow_mut()).map_err(|err| {
        msg!("Attempt to deserialize data failed {:?}", err);
        ProgramError::InvalidInstructionData
    })?;
    if driver_account.owner != program_id {
        msg!("This account {} is not owned by program {}", driver_account.key, program_id);
        return Err(ProgramError::IncorrectProgramId);
    }
    msg!("driver_account.key {}", driver_base58);
    let mut driver_account_data = DriverData::try_from_slice(&driver_account.data.borrow_mut()).map_err(|err| {
        msg!("Attempt to deserialize data failed {:?}", err);
        ProgramError::InvalidInstructionData
    })?;
    // Record which of the four offered drivers the customer selected.
    if customer_account_data.driver_address_1 == driver_base58 {
        customer_account_data.driver_select = String::from("1")
    } else if customer_account_data.driver_address_2 == driver_base58 {
        customer_account_data.driver_select = String::from("2")
    } else if customer_account_data.driver_address_3 == driver_base58 {
        customer_account_data.driver_select = String::from("3")
    } else if customer_account_data.driver_address_4 == driver_base58 {
        customer_account_data.driver_select = String::from("4")
    }
    // Mirror the confirmation on whichever driver slot holds this customer.
    if driver_account_data.customer_1 == customer_base58 {
        driver_account_data.customer_1_confirm = String::from("1");
    } else if driver_account_data.customer_2 == customer_base58 {
        driver_account_data.customer_2_confirm = String::from("1");
    } else if driver_account_data.customer_3 == customer_base58 {
        driver_account_data.customer_3_confirm = String::from("1");
    }
    msg!("driver_Account data before saving {:?}", driver_account);
    //save driver_account
    driver_account_data.serialize(&mut &mut driver_account.data.borrow_mut()[..])?;
    customer_account_data.serialize(&mut &mut customer_account.data.borrow_mut()[..]).map_err(|e2| e2.into())
}
//step6 on confirmation from 2 starts ride, clears one from the one not confirmed
// pub fn step6(driver_account: &AccountInfo,customer_account_1: &AccountInfo, customer_account_2: &AccountInfo,
// customer_account_3: &AccountInfo,
// program_id: &Pubkey, full_instruction: String) -> ProgramResult{
// let driver_confirm=String::from(&full_instruction[190..194]);
// let dist_str=&full_instruction[77..81];
// //driver covered distance
// let dist_f = String::from(dist_str).parse::<f32>().unwrap();
// let rides_dist_str=&full_instruction[186..190];
// //total rides distance
// let rides_dist_f = String::from(rides_dist_str).parse::<f32>().unwrap();
// //let dist_f=0.0; Driver cost
// let total_cost=dist_f*helper::KM_RATE;
// //let mut driver_base58=String::from(&full_instruction[198..242]);
// //if String::from(&full_instruction[141..142])==String::from("0"){
// // driver_base58=String::from(&full_instruction[198..241]);
// //}
// let startSize1:usize=198;
// let mut endSize1:usize=242;
// if String::from(&full_instruction[241..242])==String::from("0"){
// endSize1=241;
// }
// //let driver_base58=String::from(&full_instruction[startSize1..endSize1]);
// //driver
// if driver_account.owner!=program_id{
// msg!("This account {} is not owned by program {}",driver_account.key,program_id);
// }
// //let driver_account_key:String =String::from(driver_account.key);
// msg!("driver_confirm {}",driver_confirm);
// let mut driver_account_data = DriverData::try_from_slice(&driver_account.data.borrow_mut()).map_err(|err|{
// msg!("Attempt to deserialize data failed {:?}",err);
// ProgramError::InvalidInstructionData
// })?;
// //customer 1
// if customer_account_1.owner!=program_id{
// msg!("This account {} is not owned by program {}",customer_account_1.key,program_id);
// }
// let mut customer_account_data_1 = CustomerData::try_from_slice(&customer_account_1.data.borrow_mut()).map_err(|err|{
// msg!("Attempt to deserialize data failed {:?}",err);
// ProgramError::InvalidInstructionData
// })?;
// //customer 2
// if customer_account_2.owner!=program_id{
// msg!("This account {} is not owned by program {}",customer_account_2.key,program_id);
// }
// let mut customer_account_data_2 = CustomerData::try_from_slice(&customer_account_2.data.borrow_mut()).map_err(|err|{
// msg!("Attempt to deserialize data failed {:?}",err);
// ProgramError::InvalidInstructionData
// })?;
// //customer 3
// if customer_account_3.owner!=program_id{
// msg!("This account {} is not owned by program {}",customer_account_3.key,program_id);
// }
// let mut customer_account_data_3 = CustomerData::try_from_slice(&customer_account_3.data.borrow_mut()).map_err(|err|{
// msg!("Attempt to deserialize data failed {:?}",err);
// ProgramError::InvalidInstructionData
// })?;
// if &driver_confirm[0..1]==String::from("1")
// {
// customer_account_data_1.driver_confirm=String::from("1");
// customer_account_data_1.ride_state=String::from("2"); //start
// customer_account_data_1.driver_pub_key=String::from(&full_instruction[startSize1..endSize1]);
// customer_account_data_1.final_cost=customer_account_data_1.cost_1;
// }else
// {
// customer_account_data_1.driver_select=String::from("0");
// customer_account_data_1.driver_confirm=String::from("0");
// customer_account_data_1.ride_state=String::from("0");
// driver_account_data.customer_1=String::from(helper::DUMMY_ADDRESS);
// driver_account_data.customer_1_confirm=String::from("0");
// }
// if &driver_confirm[1..2]==String::from("1")
// {
// customer_account_data_2.driver_confirm=String::from("1");
// customer_account_data_2.ride_state=String::from("2") ; //start
// customer_account_data_2.driver_pub_key=String::from(&full_instruction[startSize1..endSize1]);
// }else
// {
// customer_account_data_2.driver_select=String::from("0");
// customer_account_data_2.driver_confirm=String::from("0");
// customer_account_data_2.ride_state=String::from("0");
// driver_account_data.customer_2=String::from(helper::DUMMY_ADDRESS);
// driver_account_data.customer_2_confirm=String::from("0");
// }
// if &driver_confirm[2..3]==String::from("1")
// {
// customer_account_data_3.driver_confirm=String::from("1");
// customer_account_data_3.ride_state=String::from("2"); //start
// customer_account_data_3.driver_pub_key=String::from(&full_instruction[startSize1..endSize1]);
// }else
// {
// customer_account_data_3.driver_select=String::from("0");
// customer_account_data_3.driver_confirm=String::from("0");
// customer_account_data_3.ride_state=String::from("0");
// driver_account_data.customer_3=String::from(helper::DUMMY_ADDRESS);
// driver_account_data.customer_3_confirm=String::from("0");
// }
// driver_account_data.customer_rides_to_finish=driver_confirm;
// driver_account_data.ride_state=String::from("2"); //start
// msg!("driver_Account data before saving {:?}",driver_account);
// //save driver_account
// driver_account_data.serialize(&mut &mut driver_account.data.borrow_mut()[..])?;
// //save customer
// customer_account_data_1.serialize(&mut &mut customer_account_1.data.borrow_mut()[..])?;
// customer_account_data_2.serialize(&mut &mut customer_account_2.data.borrow_mut()[..])?;
// customer_account_data_3.serialize(&mut &mut customer_account_3.data.borrow_mut()[..]).map_err(|e2| e2.into())
// }
/// Step 6.2: a driver either confirms (flag "1") or rejects a customer's
/// ride request, settling the per-customer cost from the covered distance.
///
/// Fixed offsets inside `full_instruction` (panics if it is shorter than
/// 242 chars — TODO confirm callers always pad it):
///   [77..81]    driver covered distance (f32 text)
///   [98..142]   driver base58 address ([141..142]=="0" marks the 43-char form)
///   [186..190]  total distance over all rides (f32 text)
///   [190..191]  confirm flag: "1" = confirm, anything else = reject
///   [194..195]  this customer's slot ("1".."3") on the driver account
///   [198..242]  driver pubkey base58 ([241..242]=="0" marks the 43-char form)
pub fn step6_2(driver_account: &AccountInfo,customer_account: &AccountInfo,
program_id: &Pubkey, full_instruction: String) -> ProgramResult{
    let driver_confirm=String::from(&full_instruction[190..191]);
    let driver_customer_id=String::from(&full_instruction[194..195]);
    let dist_str=&full_instruction[77..81];
    // Base58 keys are 43 or 44 chars; a trailing "0" marks the short form.
    let start_size1:usize=98;
    let mut end_size1:usize=142;
    if String::from(&full_instruction[141..142])==String::from("0"){
        end_size1=141;
    }
    let start_size2:usize=198;
    let mut end_size2:usize=242;
    if String::from(&full_instruction[241..242])==String::from("0"){
        end_size2=241;
    }
    //driver covered distance
    let dist_f = String::from(dist_str).parse::<f32>().unwrap();
    let total_cost_f=dist_f*helper::KM_RATE;
    //total rides distance, used to split the cost proportionally per customer
    let rides_dist_str=&full_instruction[186..190];
    let rides_dist_f = String::from(rides_dist_str).parse::<f32>().unwrap();
    // Ownership checks: abort instead of only logging — previously execution
    // fell through and mutated accounts this program does not own.
    if driver_account.owner!=program_id{
        msg!("This account {} is not owned by program {}",driver_account.key,program_id);
        return Err(ProgramError::IncorrectProgramId);
    }
    if customer_account.owner!=program_id{
        msg!("This account {} is not owned by program {}",customer_account.key,program_id);
        return Err(ProgramError::IncorrectProgramId);
    }
    msg!("driver_confirm {}",driver_confirm);
    let mut driver_account_data = DriverData::try_from_slice(&driver_account.data.borrow_mut()).map_err(|err|{
        msg!("Attempt to deserialize data failed {:?}",err);
        ProgramError::InvalidInstructionData
    })?;
    let mut customer_account_data = CustomerData::try_from_slice(&customer_account.data.borrow_mut()).map_err(|err|{
        msg!("Attempt to deserialize data failed {:?}",err);
        ProgramError::InvalidInstructionData
    })?;
    if driver_confirm==String::from("1")
    {
        // Confirmed: this customer's share is proportional to their own ride
        // distance relative to the total distance of all rides.
        let cust_dist_f=customer_account_data.distance.parse::<f32>().unwrap();
        let c1_cost=cust_dist_f*total_cost_f/rides_dist_f;
        let padded_cost=helper::pad_cost(c1_cost,helper::DUMMY_COST);
        customer_account_data.final_cost=padded_cost.clone();
        customer_account_data.to_pay=padded_cost;
        customer_account_data.driver_confirm=String::from("1");
        customer_account_data.ride_state=String::from("2"); //start
        customer_account_data.driver_pub_key=String::from(&full_instruction[start_size2..end_size2]);
    }else{
        // Rejected: reset the customer's selection and clear this driver from
        // whichever of the customer's four driver slots currently holds it.
        customer_account_data.driver_select=String::from("0");
        customer_account_data.driver_confirm=String::from("0");
        customer_account_data.ride_state=String::from("0");
        let driver_address=String::from(&full_instruction[start_size1..end_size1]);
        if customer_account_data.driver_address_1==driver_address
        {
            customer_account_data.driver_address_1=String::from(helper::DUMMY_ADDRESS);
            customer_account_data.cost_1=String::from(helper::DUMMY_COST);
        }else if customer_account_data.driver_address_2==driver_address
        {
            customer_account_data.driver_address_2=String::from(helper::DUMMY_ADDRESS);
            customer_account_data.cost_2=String::from(helper::DUMMY_COST);
        }else if customer_account_data.driver_address_3==driver_address
        {
            customer_account_data.driver_address_3=String::from(helper::DUMMY_ADDRESS);
            customer_account_data.cost_3=String::from(helper::DUMMY_COST);
        }else if customer_account_data.driver_address_4==driver_address
        {
            customer_account_data.driver_address_4=String::from(helper::DUMMY_ADDRESS);
            customer_account_data.cost_4=String::from(helper::DUMMY_COST);
        }
        // Free the driver-side slot this customer occupied.
        if driver_customer_id==String::from("1")
        {
            driver_account_data.customer_1=String::from(helper::DUMMY_ADDRESS);
            driver_account_data.customer_1_confirm=String::from("0");
        }
        if driver_customer_id==String::from("2")
        {
            driver_account_data.customer_2=String::from(helper::DUMMY_ADDRESS);
            driver_account_data.customer_2_confirm=String::from("0");
        }
        if driver_customer_id==String::from("3")
        {
            driver_account_data.customer_3=String::from(helper::DUMMY_ADDRESS);
            driver_account_data.customer_3_confirm=String::from("0");
        }
    }
    //update cost and distance on the driver account (was assigned twice before)
    driver_account_data.dist=String::from(dist_str);
    driver_account_data.cost=helper::pad_cost(total_cost_f,helper::DUMMY_COST);
    msg!("driver_Account data before saving {:?}",driver_account_data);
    //save driver_account
    driver_account_data.serialize(&mut &mut driver_account.data.borrow_mut()[..])?;
    //save customer
    customer_account_data.serialize(&mut &mut customer_account.data.borrow_mut()[..]).map_err(|e2| e2.into())
}
/// Step 7: store which customer rides the driver still has to finish
/// (4-char flag string at instruction offset [190..194]) and mark the
/// driver's ride as started ("2").
pub fn step7(driver_account: &AccountInfo,
program_id: &Pubkey, full_instruction: String) -> ProgramResult{
    let driver_confirm=String::from(&full_instruction[190..194]);
    // Ownership check: abort instead of only logging — previously execution
    // fell through and mutated an account this program does not own.
    if driver_account.owner!=program_id{
        msg!("This account {} is not owned by program {}",driver_account.key,program_id);
        return Err(ProgramError::IncorrectProgramId);
    }
    msg!("driver_confirm {}",driver_confirm);
    let mut driver_account_data = DriverData::try_from_slice(&driver_account.data.borrow_mut()).map_err(|err|{
        msg!("Attempt to deserialize data failed {:?}",err);
        ProgramError::InvalidInstructionData
    })?;
    driver_account_data.customer_rides_to_finish=driver_confirm;
    driver_account_data.ride_state=String::from("2"); //start
    msg!("driver_Account data before saving {:?}",driver_account_data);
    //save driver_account
    driver_account_data.serialize(&mut &mut driver_account.data.borrow_mut()[..]).map_err(|e2| e2.into())
}
/// Step 8: the driver finishes one customer's ride.  The customer's ride
/// state becomes "3" (finished) and the driver's 4-char "rides still to
/// finish" flags (instruction offset [190..194]) are overwritten.
pub fn step8(driver_account: &AccountInfo,customer_account: &AccountInfo,program_id: &Pubkey, full_instruction: String) -> ProgramResult{
    let driver_finished=String::from(&full_instruction[190..194]);
    // Ownership checks: abort instead of only logging — previously execution
    // fell through and mutated accounts this program does not own.
    if customer_account.owner!=program_id{
        msg!("This account {} is not owned by program {}",customer_account.key,program_id);
        return Err(ProgramError::IncorrectProgramId);
    }
    if driver_account.owner!=program_id{
        msg!("This account {} is not owned by program {}",driver_account.key,program_id);
        return Err(ProgramError::IncorrectProgramId);
    }
    let mut customer_account_data = CustomerData::try_from_slice(&customer_account.data.borrow_mut()).map_err(|err|{
        msg!("Attempt to deserialize data failed {:?}",err);
        ProgramError::InvalidInstructionData
    })?;
    let mut driver_account_data = DriverData::try_from_slice(&driver_account.data.borrow_mut()).map_err(|err|{
        msg!("Attempt to deserialize data failed {:?}",err);
        ProgramError::InvalidInstructionData
    })?;
    //customer ride finished
    customer_account_data.ride_state=String::from("3"); //finished
    driver_account_data.customer_rides_to_finish =driver_finished; //update driver finished rides
    msg!("driver_Account data before saving {:?}",driver_account_data);
    //save driver_account
    driver_account_data.serialize(&mut &mut driver_account.data.borrow_mut()[..])?;
    customer_account_data.serialize(&mut &mut customer_account.data.borrow_mut()[..]).map_err(|e2| e2.into())
}
/// Step 9: a customer pays the driver.  Flips the matching slot in the
/// driver's `customer_rides_paid` flags to "1", marks the customer's ride
/// "4" (paid/finished), and closes the driver's ride ("3") once all rides
/// are finished.
///
/// Instruction layout: [142..186] holds the paying customer's base58 key
/// ([185..186]=="0" marks the 43-char short form).
pub fn step9(customer_account: &AccountInfo,driver_account: &AccountInfo,program_id: &Pubkey, full_instruction: String) -> ProgramResult{
    let start_size:usize=142;
    let mut end_size:usize=186;
    if String::from(&full_instruction[185..186])==String::from("0"){
        end_size=185;
    }
    let customer_base58=String::from(&full_instruction[start_size..end_size]);
    // Ownership checks: abort instead of only logging — previously execution
    // fell through and mutated accounts this program does not own.
    if customer_account.owner!=program_id{
        msg!("This account {} is not owned by program {}",customer_account.key,program_id);
        return Err(ProgramError::IncorrectProgramId);
    }
    if driver_account.owner!=program_id{
        msg!("This account {} is not owned by program {}",driver_account.key,program_id);
        return Err(ProgramError::IncorrectProgramId);
    }
    let mut customer_account_data = CustomerData::try_from_slice(&customer_account.data.borrow_mut()).map_err(|err|{
        msg!("Attempt to deserialize data failed {:?} customer_account_data",err);
        ProgramError::InvalidInstructionData
    })?;
    let mut driver_account_data = DriverData::try_from_slice(&driver_account.data.borrow_mut()).map_err(|err|{
        msg!("Attempt to deserialize data failed {:?} driver_account_data",err);
        ProgramError::InvalidInstructionData
    })?;
    // One "paid" flag per customer slot; only the slot whose stored key
    // matches the paying customer flips to "1".
    let mut customers_paid1=String::from(&driver_account_data.customer_rides_paid[0..1]);
    let mut customers_paid2=String::from(&driver_account_data.customer_rides_paid[1..2]);
    let mut customers_paid3=String::from(&driver_account_data.customer_rides_paid[2..3]);
    // NOTE(review): slot 4 is never matched below (no customer_4 comparison),
    // so its flag is only carried over unchanged — confirm this is intended.
    let customers_paid4=String::from(&driver_account_data.customer_rides_paid[3..4]);
    if driver_account_data.customer_1== customer_base58 {
        customers_paid1=String::from("1");
    }
    else if driver_account_data.customer_2== customer_base58 {
        customers_paid2=String::from("1");
    }
    else if driver_account_data.customer_3== customer_base58 {
        customers_paid3=String::from("1");
    }
    if driver_account_data.customer_rides_to_finish==String::from("0000"){
        driver_account_data.ride_state=String::from("3"); //end
    }
    //customer ride finished
    customer_account_data.ride_state=String::from("4"); //finished
    driver_account_data.customer_rides_paid =[customers_paid1,customers_paid2,customers_paid3,customers_paid4].join(""); //update driver finished rides
    // Log the deserialized data (previously logged the raw AccountInfo).
    msg!("driver_Account data before saving {:?}",driver_account_data);
    //save driver_account
    driver_account_data.serialize(&mut &mut driver_account.data.borrow_mut()[..])?;
    customer_account_data.serialize(&mut &mut customer_account.data.borrow_mut()[..]).map_err(|e2| e2.into())
}
//step8 customer pays, all customerp paid ride finishes
|
/// Returns the sum of the two operands.
fn add(x: i32, y: i32) -> i32 {
    x + y
}
/// Returns an addition function as a plain function pointer.
fn add2() -> fn(i32, i32) -> i32 {
    // A non-capturing item fn coerces to `fn(i32, i32) -> i32`,
    // exactly like the original non-capturing closure did.
    fn sum(x: i32, y: i32) -> i32 {
        x + y
    }
    sum
}
/// Prints `s` to stdout prefixed with "log: ".
fn log(s: String) {
    println!("log: {}", s);
}
/// Demo entry point: exercises `log`, `add`, and the fn-pointer from `add2`.
fn main() {
    log(String::from("sample"));
    println!("5 + 6 = {}", add(5, 6));
    let plus = add2();
    log(format!("7 + 10 = {}", plus(7, 10)));
}
|
/// A 12-bit memory address decoded from a two-byte opcode.
#[derive(Debug)]
pub struct Address {
    addr: u16
}
impl From<[u8; 2]> for Address {
    /// Takes the low nibble of the first byte as the high bits and the
    /// whole second byte as the low bits (the 0xNNN of an 0x?NNN opcode).
    fn from(opcode: [u8; 2]) -> Address {
        let high = u16::from(opcode[0] & 0x0F);
        let low = u16::from(opcode[1]);
        Address { addr: (high << 8) ^ low }
    }
}
/// A register index plus an immediate byte decoded from a two-byte opcode.
#[derive(Debug)]
pub struct RegisterAndValue {
    register: usize,
    value: u8,
}
impl From<[u8; 2]> for RegisterAndValue {
    /// Register is the low nibble of the first byte; the value is the
    /// entire second byte.
    fn from(opcode: [u8; 2]) -> RegisterAndValue {
        let [first, second] = opcode;
        RegisterAndValue {
            register: usize::from(first & 0x0F),
            value: second,
        }
    }
}
/// A single register index decoded from a two-byte opcode.
#[derive(Debug)]
pub struct OneRegister {
    register: usize
}
impl From<[u8; 2]> for OneRegister {
    /// Register is the low nibble of the first byte.
    fn from(opcode: [u8; 2]) -> OneRegister {
        let low_nibble = opcode[0] & 0x0F;
        OneRegister { register: usize::from(low_nibble) }
    }
}
/// Two register indices decoded from a two-byte opcode.
#[derive(Debug)]
pub struct TwoRegisters {
    register1: usize,
    register2: usize
}
impl From<[u8; 2]> for TwoRegisters {
    /// First register is the low nibble of byte 0; second register is the
    /// high nibble of byte 1 (the X and Y of an 0x?XY? opcode).
    fn from(opcode: [u8; 2]) -> TwoRegisters {
        let [first, second] = opcode;
        TwoRegisters {
            register1: usize::from(first & 0x0F),
            register2: usize::from(second >> 4),
        }
    }
}
use std::fmt;
use std::iter::FromIterator;
use std::collections::BTreeSet;
// Board side length and total cell count for a standard 9x9 sudoku.
const DIMS: usize = 9;
const AREA: usize = DIMS * DIMS;
// Backtracking-solver state.  `board` is the 81-cell grid in row-major
// order with 0 meaning "empty".  The lookup tables store flat board
// indices (as u32): for each row/column/3x3 block, the nine indices that
// belong to it; `coords` maps a flat index back to (row, col).
struct Sudoku {
    board: [u32; AREA],
    // Flat indices of the cells that were empty in the input, in ascending
    // order; `solve` backtracks over exactly these.
    unsolved_cells: Vec<usize>,
    coords: [(usize, usize); AREA],
    cols: [[u32; DIMS]; DIMS],
    rows: [[u32; DIMS]; DIMS],
    blocks: [[u32; DIMS]; DIMS],
}
/// Pretty-prints the board: a space before every third column, a newline
/// after every row, and an extra blank line after every third row.
impl fmt::Display for Sudoku {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for i in 0..AREA {
            let space = if i % 3 == 0 { " " } else { "" };
            // Propagate write errors with `?` — the originals were silently
            // discarded, which both loses the error and triggers the
            // unused-Result warning.
            write!(f, "{}{}", space, self.board[i])?;
            if (i + 1) % 9 == 0 { write!(f, "\n")?; }
            if (i + 1) % 27 == 0 { write!(f, "\n")?; }
        }
        write!(f, "\n")
    }
}
impl Sudoku {
    // Value at (row, col); 0 means the cell is empty.
    fn get_cell(&self, row: usize, col: usize) -> u32 {
        let idx = row * 9 + col;
        self.board[idx]
    }
    // Overwrite the cell at (row, col) with `value` (0 clears it).
    fn set_cell(&mut self, row: usize, col: usize, value: u32) {
        let idx = row * 9 + col;
        self.board[idx] = value;
    }
    // True when placing `value` at (row, col) would not clash with the same
    // value elsewhere in its row, column, or 3x3 block.  The cell's own
    // index is skipped in each scan.
    fn is_valid_cell(&self, row: usize, col: usize, value: u32) -> bool {
        let idx = (row * 9 + col) as u32;
        // check row
        for r in self.rows[row].iter() {
            if *r == idx { continue; }
            if value == self.board[*r as usize] { return false; }
        }
        // check column
        for c in self.cols[col].iter() {
            if *c == idx { continue; }
            if value == self.board[*c as usize] { return false; }
        }
        // check local box
        let block = (row / 3) * 3 + (col / 3);
        for b in self.blocks[block].iter() {
            if *b == idx { continue; }
            if value == self.board[*b as usize] { return false; }
        }
        true
    }
    // Smallest valid candidate strictly greater than the cell's current
    // value, or None when the cell has exhausted all candidates (forcing a
    // backtrack).  Starting above the current value is what makes repeated
    // visits try the next digit instead of looping.
    fn find_solution(&self, row: usize, col: usize) -> Option<u32> {
        let cell = self.get_cell(row, col) + 1;
        for i in cell..=9 { 
            if self.is_valid_cell(row, col, i) { return Some(i); }
        }
        None
    }
    // Classic backtracking over `unsolved_cells`: advance on success, clear
    // the cell and step back on failure.  Terminates when every unsolved
    // cell holds a valid digit.
    // NOTE(review): if the very first unsolved cell has no candidate (an
    // unsolvable puzzle), `backtrack_idx -= 1` underflows and panics in
    // debug builds — confirm inputs are always solvable.
    fn solve(&mut self) {
        let mut backtrack_idx = 0;
        while backtrack_idx < self.unsolved_cells.len() {
            let (row, col) = self.coords[self.unsolved_cells[backtrack_idx]];
            match self.find_solution(row, col) {
                Some(s) => {
                    self.set_cell(row, col, s);
                    backtrack_idx += 1;
                },
                None => {
                    self.set_cell(row, col, 0);
                    backtrack_idx -= 1;
                }
            }
        }
    }
}
/// Builds a solver from `(flat index, digit)` clue pairs, precomputing the
/// row/column/block index tables used by validity checks.
impl FromIterator<(usize, u32)> for Sudoku {
    fn from_iter<T>(iter: T) -> Self
    where T: IntoIterator<Item=(usize, u32)>
    {
        // Place the clues on an empty board, remembering which slots are fixed.
        let mut board = [0; AREA];
        let mut filled_slots = BTreeSet::new();
        for (idx, digit) in iter {
            board[idx] = digit;
            filled_slots.insert(idx);
        }
        // Every index without a clue still needs to be solved (ascending order).
        let unsolved_cells: Vec<_> = (0..AREA)
            .filter(|i| !filled_slots.contains(i))
            .collect();
        // Map each flat index to its (row, col) pair.
        let mut board_coords: [(usize, usize); AREA] = [(0, 0); AREA];
        for (i, slot) in board_coords.iter_mut().enumerate() {
            *slot = (i / 9, i % 9);
        }
        // For each row, column, and 3x3 block: the nine flat indices it holds.
        let mut board_rows = [[0u32; DIMS]; DIMS];
        let mut board_cols = [[0u32; DIMS]; DIMS];
        let mut board_blocks = [[0u32; DIMS]; DIMS];
        for r in 0..DIMS {
            for c in 0..DIMS {
                let idx = (r * DIMS + c) as u32;
                board_rows[r][c] = idx;
                board_cols[c][r] = idx;
                let block = (r / 3) * 3 + (c / 3);
                let idx_in_block = (r % 3) * 3 + (c % 3);
                board_blocks[block][idx_in_block] = idx;
            }
        }
        Sudoku {
            board,
            unsolved_cells,
            cols: board_cols,
            rows: board_rows,
            coords: board_coords,
            blocks: board_blocks,
        }
    }
}
fn main() {
let mut sudoku1 = Sudoku::from_iter(vec![
(2, 7),(6, 3),(7, 1),(9, 6),(13, 9),(15, 7),(19, 1),(23, 8),(27, 2),(29, 6),(30, 8),
(32, 9),(37, 4),(39, 6),(41, 1),(43, 9),(48, 3),(50, 7),(51, 8),(53, 6),(57, 7),
(61, 3),(65, 1),(67, 8),(71, 2),(73, 2),(74, 5),(78, 6)
]);
println!("{}", sudoku1);
sudoku1.solve();
println!("{}", sudoku1);
let mut sudoku2 = Sudoku::from_iter(vec![
(2,4),(9,9),(10,5),(12,4),(17,8),(22,1),(24,5),(26,6),(28,3),(30,6),
(35,5),(37,1),(39,3),(41,8),(43,6),(45,4),(50,5),(52,7),(54,8),(56,9),
(58,4),(63,3),(68,2),(70,5),(71,4),(78,2)
]);
println!("{}", sudoku2);
sudoku2.solve();
println!("{}", sudoku2);
} |
// Chapter 1 exercise implementations and the shared line-reading helper.
mod ch1 {
    pub mod ex1;
    pub mod read_lines;
}
use ch1::ex1::filter_duplicates;
use ch1::ex1::filter_duplicates_sort_by_length;
use ch1::ex1::only_duplicates;
use ch1::ex1::queue_n_till_blank_line;
use ch1::ex1::reverse_lines;
use ch1::ex1::reverse_n_lines;
/// Runs the current exercise against the sample file; earlier exercises
/// remain imported so they can be swapped back in easily.
fn main() {
    filter_duplicates_sort_by_length("./poem.txt");
}
|
use rand;
use rand::Rng;
// Bot command: "vends" a random novelty item to either the invoking author
// (no args) or the first argument's target.  Implemented via the
// framework's `command!` macro.
command!(vend(_ctx, msg, args) {
    // Slash-separated catalogue of possible items.
    let mut goods = "some cheesy crackers/some nacho cheese Doritos/a Hershey bar/a Milky Way/a day old donut/a water bottle/a pack of condoms/a can of bepis/a Rice Krispy treat/a Reeses peanut butter cup/the latest One Punch Man volume/nude leaks of Raini/a stick of RAM, sharpened for use in combat/a vending machine/a CalArts degree/das Abitur/a p a r a b o l a g n u l i n u x - l i b r e live usb/a solenoid/...wait, what is this?/a pony/sunshine in a bag/lily\'s webcomic, finally/an acoustic guitar/a cassette with nightcore on it/Lapis Lazuli/the entire CalArts campus/Pumpkin/heelies, to escape from their feelies/several anti-child abuse PSAs";
    // Two iterators over the same string: `list` is consumed by `count()`,
    // `list2` supplies the randomly chosen item.
    let mut list = goods.split("/");
    let mut list2 = goods.split("/");
    //for item in list {
    //    println!("{}", item)
    //}
    // Pick a uniformly random index into the catalogue.
    let mut num = rand::thread_rng().gen_range(0, list.count());
    let item = list2.nth(num).unwrap();
    if args.len() == 0 {
        // No target given: vend to the message author.
        let _ = msg.channel_id.say(&format!("*vends {} {}*", msg.author, item));
    } else {
        println!("{}", args[0]);
        let _ = msg.channel_id.say(&format!("*vends {} {}*", args[0], item));
    };
});
use std::fs;
/// Reads the day-1 input file and totals the simple (part 1) fuel
/// requirement for every module mass listed, one per line.
pub fn sum_fuel_01() -> i64 {
    let contents = fs::read_to_string("./src/aoc01/input.txt")
        .expect("Something went wrong reading the file");
    sum_fuel_requirements_01(contents.lines().map(|num| num.parse::<i64>().unwrap()))
}
/// Reads the day-1 input file and totals the fuel-for-fuel-inclusive
/// (part 2) requirement for every module mass listed, one per line.
pub fn sum_fuel_02() -> i64 {
    let contents = fs::read_to_string("./src/aoc01/input.txt")
        .expect("Something went wrong reading the file");
    sum_fuel_requirements_02(contents.lines().map(|num| num.parse::<i64>().unwrap()))
}
fn sum_fuel_requirements_01(inputs: impl Iterator<Item = i64>) -> i64 {
let sum = inputs.fold(0, |sum, mass| sum + fuel_requirement_of_mass(mass));
sum
}
fn sum_fuel_requirements_02(inputs: impl Iterator<Item = i64>) -> i64 {
let sum = inputs.fold(0, |sum, mass| {
sum + fuel_requirement_of_mass_with_fuel(mass)
});
sum
}
fn fuel_requirement_of_mass_with_fuel(mass: i64) -> i64 {
let fuel_for_mass = fuel_requirement_of_mass(mass);
if fuel_for_mass > 0 {
fuel_for_mass + fuel_requirement_of_mass_with_fuel(fuel_for_mass)
} else {
0
}
}
/// Basic fuel requirement: floor of a third of the mass, minus two.
/// May go negative for masses below 9; the caller decides how to treat that.
fn fuel_requirement_of_mass(mass: i64) -> i64 {
    let third = mass / 3;
    third - 2
}
#[cfg(test)]
mod test {
    use super::*;
    // The first two tests read ./src/aoc01/input.txt and pin the totals for
    // that specific puzzle input; the rest use the worked examples from the
    // problem statement.
    #[test]
    fn sums_fuel_requirements() {
        let expected_result = 3363033;
        assert_eq!(expected_result, sum_fuel_01())
    }
    #[test]
    fn sums_fuel_requirements_harder() {
        let expected_result = 5041680;
        assert_eq!(expected_result, sum_fuel_02())
    }
    #[test]
    fn complete_fuel_for_12_is_2() {
        let mass = 12;
        assert_eq!(2, fuel_requirement_of_mass_with_fuel(mass));
    }
    #[test]
    fn complete_fuel_for_14_is_2() {
        let mass = 14;
        assert_eq!(2, fuel_requirement_of_mass_with_fuel(mass));
    }
    #[test]
    fn complete_fuel_for_1969_is_966() {
        let mass = 1969;
        assert_eq!(966, fuel_requirement_of_mass_with_fuel(mass));
    }
    #[test]
    fn complete_fuel_for_100756_is_50346() {
        let mass = 100756;
        assert_eq!(50346, fuel_requirement_of_mass_with_fuel(mass));
    }
    #[test]
    fn fuel_for_12_is_2() {
        let mass = 12;
        assert_eq!(2, fuel_requirement_of_mass(mass));
    }
    #[test]
    fn fuel_for_14_is_2() {
        let mass = 14;
        assert_eq!(2, fuel_requirement_of_mass(mass));
    }
    #[test]
    fn fuel_for_1969_is_654() {
        let mass = 1969;
        assert_eq!(654, fuel_requirement_of_mass(mass));
    }
    #[test]
    fn fuel_for_100756_is_33583() {
        let mass = 100756;
        assert_eq!(33583, fuel_requirement_of_mass(mass));
    }
}
|
/// Returns an RAII guard that records the current instant and, when
/// dropped, prints "<label>: <elapsed>" to stderr.  Bind it to a variable
/// for the lifetime of the region you want to time.
#[must_use]
pub fn timeit(label: impl Into<String>) -> impl Drop {
    use std::time::Instant;
    // Local guard type: the timing report happens in Drop.
    struct Timer {
        name: String,
        begin: Instant,
    }
    impl Drop for Timer {
        fn drop(&mut self) {
            eprintln!("{}: {:.2?}", self.name, self.begin.elapsed())
        }
    }
    Timer {
        name: label.into(),
        begin: Instant::now(),
    }
}
|
use crate::prelude::*;
use std::os::raw::c_void;
use std::ptr;
// FFI mirror of Vulkan's VkPipelineColorBlendStateCreateInfo.  #[repr(C)]
// and the field order must match the Vulkan C header exactly — do not
// reorder or rename fields.
#[repr(C)]
#[derive(Debug)]
pub struct VkPipelineColorBlendStateCreateInfo {
    pub sType: VkStructureType,
    pub pNext: *const c_void,
    pub flags: VkPipelineColorBlendStateCreateFlagBits,
    pub logicOpEnable: VkBool32,
    pub logicOp: VkLogicOp,
    pub attachmentCount: u32,
    // Borrowed raw pointer: the attachment slice must stay alive for as
    // long as this struct is passed to Vulkan.
    pub pAttachments: *const VkPipelineColorBlendAttachmentState,
    pub blendConstants: [f32; 4],
}
impl VkPipelineColorBlendStateCreateInfo {
    /// Builds a color-blend-state create-info with the standard sType and a
    /// null pNext chain.
    /// NOTE(review): stores a raw pointer into `attachments`; the slice must
    /// outlive the returned struct — confirm at call sites.
    pub fn new<T>(
        flags: T,
        logic_op_enable: bool,
        logic_op: VkLogicOp,
        attachments: &[VkPipelineColorBlendAttachmentState],
        blend_constants: [f32; 4],
    ) -> VkPipelineColorBlendStateCreateInfo
    where
        T: Into<VkPipelineColorBlendStateCreateFlagBits>,
    {
        let mut info = VkPipelineColorBlendStateCreateInfo {
            sType: VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
            pNext: ptr::null(),
            flags: flags.into(),
            logicOpEnable: logic_op_enable.into(),
            logicOp: logic_op,
            attachmentCount: 0,
            pAttachments: ptr::null(),
            blendConstants: blend_constants,
        };
        // Delegate the pointer/count pair to the single setter so the two
        // fields can never disagree.
        info.set_attachments(attachments);
        info
    }
    /// Re-points this state at a new attachment slice, keeping the count in
    /// sync with the pointer.
    pub fn set_attachments(&mut self, attachments: &[VkPipelineColorBlendAttachmentState]) {
        self.pAttachments = attachments.as_ptr();
        self.attachmentCount = attachments.len() as u32;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.