text stringlengths 8 4.13M |
|---|
use super::color::Color;
use super::object::Object;
use super::ray::Ray;
use std::cmp::Ordering;
/// A renderable scene: a background color plus the collection of objects in it.
pub struct Scene {
    /// Color returned when a ray hits nothing.
    pub background: Color,
    /// All objects that rays can intersect.
    pub objects: Vec<Object>
}
/// A ray/object hit: which object was struck and at what ray distance.
/// Borrows the object from the `Scene` that produced it.
pub struct Intersection<'a> {
    /// The object that was hit.
    pub object: &'a Object,
    /// Parametric distance along the ray at which the hit occurs.
    pub distance: f64
}
impl<'a> Intersection<'a> {
    /// Creates an intersection record tying `object` to the ray `distance`
    /// at which it was hit.
    pub fn new(object: &'a Object, distance: f64) -> Intersection<'a> {
        // Field-init shorthand replaces the redundant `object: object` form.
        Intersection { object, distance }
    }
}
/// An `f64` wrapper guaranteed not to contain NaN, so a total order exists.
#[derive(PartialEq,PartialOrd)]
struct NonNan(f64);
impl NonNan {
    /// Wraps `val`, rejecting NaN with `None`.
    fn new(val: f64) -> Option<NonNan> {
        // Guard clause instead of the original if/else expression.
        if val.is_nan() {
            return None;
        }
        Some(NonNan(val))
    }
}
// NonNan is constructed only through `new`, which rejects NaN, so the derived
// `PartialEq` is reflexive and these manual impls are sound.
impl Eq for NonNan {}
impl Ord for NonNan {
    fn cmp(&self, other: &NonNan) -> Ordering {
        // `partial_cmp` on f64 only yields `None` when a NaN is involved,
        // which `NonNan::new` has already ruled out.
        self.partial_cmp(other).unwrap()
    }
}
impl Scene {
    /// Returns the intersection closest to the ray origin, or `None` when `r`
    /// hits nothing.
    ///
    /// NOTE(review): the method name keeps its historical misspelling of
    /// "intersection" because external callers (and the tests below) depend
    /// on it; renaming would be a breaking change.
    pub fn nearest_interection(&self, r: &Ray) -> Option<Intersection> {
        self.objects
            .iter()
            // `Option::map` replaces the hand-written match over `intersect`,
            // and the needless `&r` double borrow is dropped.
            .filter_map(|o| o.intersect(r).map(|t| Intersection::new(o, t)))
            .min_by_key(|i| NonNan::new(i.distance))
    }
}
#[cfg(test)]
mod tests {
    use super::Scene;
    use super::super::color::Color;
    use super::super::point::Point;
    use super::super::ray::Ray;
    use super::super::vector::Vector;
    use super::super::object::Object;
    use super::super::shapes::sphere::Sphere;
    // An empty scene can never yield an intersection.
    #[test]
    fn test_nearest_intersection_no_intersection() {
        let s = Scene {
            background: Color::new(0, 0, 0),
            objects: vec![]
        };
        let result = s.nearest_interection(&Ray::new(Point::new(0.0, 0.0, 0.0), Vector::new(0.0, 0.0, 1.0)));
        assert!(result.is_none());
    }
    // Two spheres sit on the -z axis; the ray must hit the nearer one:
    // center z=-50, radius 10 => first surface hit at distance 40.
    #[test]
    fn test_nearest_intersection_with_intersection() {
        let nearest_object = Object {
            shape: Box::new(Sphere::new(
                Point::new(0.0, 0.0, -50.0),
                10.0
            )),
            color: Color::new(0, 255, 0)
        };
        let scene = Scene {
            background: Color::new(255, 0, 0),
            objects: vec![
                nearest_object,
                // Farther sphere at z=-100 must NOT be selected.
                Object {
                    shape: Box::new(Sphere::new(
                        Point::new(0.0, 0.0, -100.0),
                        10.0
                    )),
                    color: Color::new(0, 255, 0)
                }
            ]
        };
        let result = scene.nearest_interection(&Ray::new(Point::new(0.0, 0.0, 0.0), Vector::new(0.0, 0.0, -1.0)));
        assert_eq!(result.unwrap().distance, 40.0);
    }
}
|
/*
cell.rs
Implementation of cell for Scrabble board
*/
use crate::tile::Tile;
/// Score bonus attached to a board cell (standard Scrabble premium squares).
pub enum Bonus {
    /// No premium.
    None,
    DoubleLetter,
    DoubleWord,
    TripleLetter,
    TripleWord,
}
/// One square of the Scrabble board: an optional placed tile plus its bonus.
pub struct Cell {
    // `None` while the square is empty.
    pub _tile: Option<Tile>,
    // Premium multiplier for this square.
    pub _bonus: Bonus,
}
impl Cell {
pub fn normal_cell() -> Cell {
Cell {
_tile: None,
_bonus: Bonus::None,
}
}
pub fn double_letter() -> Cell {
Cell {
_tile: None,
_bonus: Bonus::DoubleLetter,
}
}
pub fn double_word() -> Cell {
Cell {
_tile: None,
_bonus: Bonus::DoubleWord,
}
}
pub fn triple_letter() -> Cell {
Cell {
_tile: None,
_bonus: Bonus::TripleLetter,
}
}
pub fn triple_word() -> Cell {
Cell {
_tile: None,
_bonus: Bonus::TripleWord,
}
}
} |
// Compile-time exercise of the `auto_impl` attribute macro; the traits are
// declared inside a function body so they stay local to this example.
fn foo() {
    use auto_impl::auto_impl;
    // Generates an impl of `Foo` for matching `Fn` closure types.
    #[auto_impl(Fn)]
    trait Foo<'a, T> {
        fn execute<'b>(
            &'a self,
            arg1: &'b T,
            arg3: &'static str,
        ) -> Result<T, String>;
    }
    // Generates impls of `Bar` for common reference/pointer wrappers.
    #[auto_impl(&, &mut, Box, Rc, Arc)]
    trait Bar<'a, T> {
        fn execute<'b>(
            &'a self,
            arg1: &'b T,
            arg3: &'static str,
        ) -> Result<T, String>;
    }
    println!("yooo");
}
fn main() {}
|
use super::blockid::BlockId;
use super::logmanager::LogMgr;
use super::logrecord::SETSTRING;
use super::page::Page;
use std::cell::RefCell;
use std::fmt;
use std::mem;
use std::sync::Arc;
use anyhow::Result;
/// A SETSTRING log record: undo/redo information for writing `val` at
/// `offset` within block `blk`, performed by transaction `txnum`.
pub struct SetStringRecord {
    txnum: i32,
    offset: i32,
    val: String,
    blk: BlockId,
}
impl fmt::Display for SetStringRecord {
    /// Renders the record as `<SETSTRING txnum blk offset val>`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "<SETSTRING {tx} {blk} {off} {val}>",
            tx = self.txnum,
            blk = self.blk,
            off = self.offset,
            val = self.val
        )
    }
}
/**
 * tpos fpos bpos opos vpos
 * | SetString | txnum | filename | blknum | offset | val |
 * int int int + string int int int + string
 **/
impl SetStringRecord {
    /// Parses a SETSTRING record from a page laid out as in the comment
    /// above: op code, txnum, filename, block number, offset, value.
    /// Each position is derived from the size of the field before it.
    pub fn new(p: &Page) -> Result<SetStringRecord> {
        // txnum sits right after the 4-byte op code.
        let tpos = mem::size_of::<i32>();
        let txnum = p.get_int(tpos)?;
        let fpos = tpos + mem::size_of::<i32>();
        let filename = p.get_string(fpos)?;
        // Strings occupy Page::max_length(len) bytes on the page.
        let bpos = fpos + Page::max_length(filename.len());
        let blknum = p.get_int(bpos)?;
        let blk = BlockId::new(&filename, blknum as u64);
        let opos = bpos + mem::size_of::<i32>();
        let offset = p.get_int(opos)?;
        let vpos = opos + mem::size_of::<i32>();
        let val = p.get_string(vpos)?;
        Ok(SetStringRecord {
            txnum,
            offset,
            val,
            blk,
        })
    }
    /// Operation discriminant for this record type.
    pub fn op(&self) -> i32 {
        SETSTRING
    }
    /// Transaction that produced this record.
    pub fn tx_number(&self) -> i32 {
        self.txnum
    }
    /// Serializes a SETSTRING record into a fresh page and appends it to the
    /// log, returning the LSN from `LogMgr::append`. Positions mirror the
    /// layout parsed by `new` above.
    pub fn write_to_log(
        lm: Arc<RefCell<LogMgr>>,
        txnum: i32,
        blk: BlockId,
        offset: i32,
        val: String,
    ) -> Result<u64> {
        let tpos = mem::size_of::<i32>();
        let fpos = tpos + mem::size_of::<i32>();
        let bpos = fpos + Page::max_length(blk.filename().len());
        let opos = bpos + mem::size_of::<i32>();
        let vpos = opos + mem::size_of::<i32>();
        // Total record length includes the trailing string payload.
        let reclen = vpos + Page::max_length(val.len());
        let mut p = Page::new_from_size(reclen as usize);
        p.set_int(0, SETSTRING)?;
        p.set_int(tpos, txnum)?;
        p.set_string(fpos, blk.filename())?;
        p.set_int(bpos, blk.number() as i32)?;
        p.set_int(opos, offset)?;
        p.set_string(vpos, val)?;
        lm.borrow_mut().append(p.contents())
    }
}
|
use std::str;
use std::slice;
use vm::api::{Getable, Pushable};
use vm::types::VMIndex;
use vm::vm::{RootedThread, Thread, Value, VMInt};
use super::Compiler;
// TODO What should the c api return as errors
// TODO How should error messages be returned
/// C-compatible status code returned by every function in this FFI surface.
#[repr(C)]
pub enum Error {
    Ok,
    /// Catch-all failure; see the TODOs above about richer error reporting.
    Unknown,
}
pub extern "C" fn new_vm() -> *const Thread {
let vm = RootedThread::new();
vm.into_raw()
}
pub unsafe extern "C" fn free_vm(vm: &Thread) {
RootedThread::from_raw(vm);
}
pub unsafe extern "C" fn run_expr(vm: &Thread,
module: &u8,
module_len: usize,
expr: &u8,
expr_len: usize)
-> Error {
let module = match str::from_utf8(slice::from_raw_parts(module, module_len)) {
Ok(s) => s,
Err(_) => return Error::Unknown,
};
let expr = match str::from_utf8(slice::from_raw_parts(expr, expr_len)) {
Ok(s) => s,
Err(_) => return Error::Unknown,
};
let result: Result<Value, _> = Compiler::new().run_expr(&vm, module, expr);
match result {
Ok(_) => Error::Ok,
Err(_) => Error::Unknown,
}
}
pub unsafe extern "C" fn load_script(vm: &Thread,
module: &u8,
module_len: usize,
expr: &u8,
expr_len: usize)
-> Error {
let module = match str::from_utf8(slice::from_raw_parts(module, module_len)) {
Ok(s) => s,
Err(_) => return Error::Unknown,
};
let expr = match str::from_utf8(slice::from_raw_parts(expr, expr_len)) {
Ok(s) => s,
Err(_) => return Error::Unknown,
};
let result = Compiler::new().load_script(vm, module, expr);
match result {
Ok(_) => Error::Ok,
Err(_) => Error::Unknown,
}
}
pub extern "C" fn push_int(vm: &Thread, int: VMInt) {
Thread::push(vm, Value::Int(int));
}
pub extern "C" fn push_float(vm: &Thread, float: f64) {
Thread::push(vm, Value::Float(float));
}
/// Push a string to the stack. The string must be valid utf-8 or an error will be returned
pub unsafe extern "C" fn push_string(vm: &Thread, s: &u8, len: usize) -> Error {
    // Validate the raw buffer before handing it to the VM.
    let s = match str::from_utf8(slice::from_raw_parts(s, len)) {
        Ok(s) => s,
        Err(_) => return Error::Unknown,
    };
    // Pushes via the `Pushable` impl for `&str` onto the current frame.
    s.push(vm, &mut vm.current_frame());
    Error::Ok
}
/// Push a string to the stack. If the string is not utf-8 this function will trigger undefined
/// behaviour.
pub unsafe extern "C" fn push_string_unchecked(vm: &Thread, s: &u8, len: usize) {
    // Skips UTF-8 validation entirely — caller promises validity.
    let s = str::from_utf8_unchecked(slice::from_raw_parts(s, len));
    s.push(vm, &mut vm.current_frame());
}
pub unsafe extern "C" fn get_int(vm: &Thread, index: VMIndex, out: &mut VMInt) -> Error {
get_value(vm, index, out)
}
pub unsafe extern "C" fn get_float(vm: &Thread, index: VMIndex, out: &mut f64) -> Error {
get_value(vm, index, out)
}
/// The returned string is garbage collected and may not be valid after the string is removed from
/// its slot in the stack
pub unsafe extern "C" fn get_string(vm: &Thread,
                                    index: VMIndex,
                                    out: &mut &u8,
                                    out_len: &mut usize)
                                    -> Error {
    let stack = vm.current_frame();
    match stack.get_variants(index).and_then(|value| <&str>::from_value(vm, value)) {
        Some(value) => {
            // Hand back a pointer/length pair into the VM-owned string data.
            *out = &*value.as_ptr();
            *out_len = value.len();
            Error::Ok
        }
        None => Error::Unknown,
    }
}
/// Shared implementation for `get_int`/`get_float`: reads stack slot `index`
/// and converts it via `Getable`, writing the result through `out`.
unsafe fn get_value<T>(vm: &Thread, index: VMIndex, out: &mut T) -> Error
    where T: for<'vm> Getable<'vm>
{
    let stack = vm.current_frame();
    match stack.get_variants(index).and_then(|value| T::from_value(vm, value)) {
        Some(value) => {
            *out = value;
            Error::Ok
        }
        // Slot missing or of the wrong type.
        None => Error::Unknown,
    }
}
|
extern crate inkwell;
use self::inkwell::context::Context;
use std::env::temp_dir;
use std::fs::{File, remove_file};
use std::io::Read;
// Round-trip check: writing a module's bitcode to a temp path produces a
// non-empty file. The file is removed afterwards.
#[test]
fn test_write_bitcode_to_path() {
    let mut path = temp_dir();
    path.push("temp.bc");
    let context = Context::create();
    let module = context.create_module("my_module");
    let void_type = context.void_type();
    let fn_type = void_type.fn_type(&[], false);
    module.add_function("my_fn", &fn_type, None);
    module.write_bitcode_to_path(&path);
    // Read the file back to confirm something was actually written.
    let mut contents = Vec::new();
    let mut file = File::open(&path).expect("Could not open temp file");
    file.read_to_end(&mut contents).expect("Unable to verify written file");
    assert!(!contents.is_empty());
    remove_file(&path).unwrap();
}
// REVIEW: This test infrequently fails. Seems to happen more often on travis.
// Possibly a LLVM bug? Wrapper is really straightforward. See issue #6 on GH
// #[test]
// fn test_write_bitcode_to_file() {
// use context::Context;
// use std::env::temp_dir;
// use std::fs::{File, remove_file};
// use std::io::{Read, Seek, SeekFrom};
// let mut path = temp_dir();
// path.push("temp2.bc");
// let mut file = File::create(&path).unwrap();
// let context = Context::create();
// let module = context.create_module("my_module");
// let void_type = context.void_type();
// let fn_type = void_type.fn_type(&[], false);
// module.add_function("my_fn", &fn_type, None);
// module.write_bitcode_to_file(&file, true, false);
// let mut contents = Vec::new();
// let mut file2 = File::open(&path).expect("Could not open temp file");
// file.read_to_end(&mut contents).expect("Unable to verify written file");
// assert!(contents.len() > 0);
// remove_file(&path).unwrap();
// }
// Function lookup on a module: empty module yields no functions; after
// adding one, first/last/named lookups all return that same function.
#[test]
fn test_get_function() {
    let context = Context::create();
    let module = context.create_module("my_module");
    assert_eq!(*module.get_context(), context);
    assert!(module.get_first_function().is_none());
    assert!(module.get_last_function().is_none());
    assert!(module.get_function("some_fn").is_none());
    let void_type = context.void_type();
    let some_fn_type = void_type.fn_type(&[], false);
    let some_fn = module.add_function("some_fn", &some_fn_type, None);
    let first_fn = module.get_first_function().unwrap();
    let last_fn = module.get_last_function().unwrap();
    let named_fn = module.get_function("some_fn").unwrap();
    assert_eq!(first_fn, some_fn);
    assert_eq!(last_fn, some_fn);
    assert_eq!(named_fn, some_fn);
}
// Regression check: dropping a module (and its context) must not crash
// when the module's data layout is owned.
#[test]
fn test_owned_data_layout_disposed_safely() {
    let context = Context::create();
    context.create_module("test");
}
|
// Copyright 2018 Vlad Yermakov
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::Real;
use std::cmp::Ordering;
use std::fmt::{self, Display, Formatter};
use std::ops::{Add, Div, Mul, Neg, Sub};
/// A complex number with `Real` components.
#[derive(PartialEq, Debug, Copy, Clone)]
pub struct Complex {
    /// Real part.
    pub(crate) real: Real,
    /// Imaginary part.
    pub(crate) imag: Real,
}
impl Complex {
    /// Builds a complex number from anything convertible into `Real`.
    pub fn new<U, V>(real: U, imag: V) -> Complex
    where
        U: Into<Real>,
        V: Into<Real>,
    {
        Complex {
            real: real.into(),
            imag: imag.into(),
        }
    }
    /// NOTE(review): despite the name, this returns the *squared* magnitude
    /// (re² + im²), not the modulus — there is no square root. `Div` and
    /// `inv` below depend on exactly this value, so changing it would alter
    /// their results; a rename would break existing callers.
    pub fn abs(&self) -> Real {
        self.real * self.real + self.imag * self.imag
    }
    /// Multiplicative inverse, computed as 1 / self via `Div`.
    pub fn inv(&self) -> Complex {
        Complex::new(Real::new(1.), Real::new(0.)) / *self
    }
    /// Complex conjugate: negates the imaginary part.
    pub fn conj(&self) -> Complex {
        Complex::new(self.real, -self.imag)
    }
    /// True when the imaginary part is exactly zero.
    pub fn is_real(&self) -> bool {
        self.imag == Real::zero()
    }
}
impl Display for Complex {
    /// Formats as `a + bi` / `a - bi`, delegating each component to its own
    /// `fmt` so the caller's formatter flags apply to both parts.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        self.real.fmt(f)?;
        // Choose the sign and the (non-negative) magnitude to print after it.
        let (sign, magnitude) = if self.imag >= Real::zero() {
            (" + ", self.imag)
        } else {
            (" - ", -self.imag)
        };
        sign.fmt(f)?;
        magnitude.fmt(f)?;
        "i".fmt(f)
    }
}
impl PartialOrd for Complex {
    /// Partial order on complex numbers: two values are comparable only when
    /// they share a real or an imaginary component, in which case the other
    /// component decides; all remaining pairs are unordered (`None`).
    fn partial_cmp(&self, other: &Complex) -> Option<Ordering> {
        // Expression-form if/else chain replaces the explicit `return`s
        // (clippy::needless_return); logic is unchanged.
        if self.real == other.real {
            self.imag.partial_cmp(&other.imag)
        } else if self.imag == other.imag {
            self.real.partial_cmp(&other.real)
        } else {
            None
        }
    }
}
impl Neg for Complex {
type Output = Complex;
fn neg(self) -> Complex {
Complex::new(-self.real, -self.imag)
}
}
impl Add for Complex {
type Output = Complex;
fn add(self, other: Complex) -> Complex {
Complex {
real: self.real + other.real,
imag: self.imag + other.imag,
}
}
}
impl Sub for Complex {
type Output = Complex;
fn sub(self, other: Complex) -> Complex {
Complex {
real: self.real - other.real,
imag: self.imag - other.imag,
}
}
}
impl Mul for Complex {
type Output = Complex;
fn mul(self, other: Complex) -> Complex {
Complex {
real: self.real * other.real - self.imag * other.imag,
imag: self.imag * other.real + other.imag * self.real,
}
}
}
impl Div for Complex {
    type Output = Complex;
    /// Division via the conjugate: z / w = z·conj(w) / |w|².
    /// The original computed both `self * other.conj()` and `other.abs()`
    /// twice; they are hoisted into locals here (same results, half the work).
    fn div(self, other: Complex) -> Complex {
        let numerator = self * other.conj();
        // `abs` returns the squared magnitude — exactly the divisor needed.
        let denom = other.abs();
        Complex {
            real: numerator.real / denom,
            imag: numerator.imag / denom,
        }
    }
}
/// Literal-style constructor macro: `complex!(3 + 4.i)`, `complex!(-2)`,
/// `complex!(5.i)` etc. Each arm matches one sign/part combination and
/// forwards to `Complex::new` with both components cast to `f64`.
#[macro_export]
macro_rules! complex {
    ($a:tt + $b:tt.i) => {
        $crate::numbers::Complex::new($a as f64, $b as f64)
    };
    (- $a:tt + $b:tt.i) => {
        $crate::numbers::Complex::new(-$a as f64, $b as f64)
    };
    ($a:tt - $b:tt.i) => {
        $crate::numbers::Complex::new($a as f64, -$b as f64)
    };
    (- $a:tt - $b:tt.i) => {
        $crate::numbers::Complex::new(-$a as f64, -$b as f64)
    };
    // Purely real literals.
    ($a:tt) => {
        $crate::numbers::Complex::new($a as f64, 0 as f64)
    };
    (- $a:tt) => {
        $crate::numbers::Complex::new(-$a as f64, 0 as f64)
    };
    // Purely imaginary literals.
    ($b:tt.i) => {
        $crate::numbers::Complex::new(0 as f64, $b as f64)
    };
    (- $b:tt.i) => {
        $crate::numbers::Complex::new(0 as f64, -$b as f64)
    };
}
// `Default` for Complex is zero.
impl_default! { Complex, complex!(0) }
|
pub use super::instr::{self, Instruction, Label, Register};
/// Runs all peephole optimization passes over `program` in order:
/// zero-load elimination first (it produces Nops), then nop-like jump removal.
pub fn optimize(program: &mut Vec<(Label, Instruction)>) {
    eliminate_zero_loads(program);
    eliminate_noplike_jumps(program);
}
fn eliminate_zero_loads(program: &mut [(Label, Instruction)]) {
    // Loads of a constant zero can be transformed into a 'clear' instruction
    // and the constant can be eliminated.
    for &mut (ref mut label, ref mut instruction) in program.iter_mut() {
        // Load from '#0'?
        // Two-phase: record the target register first, then mutate, so the
        // immutable borrow of `instruction` ends before the rewrite.
        let mut target_reg: Option<Register> = None;
        if let &mut Instruction::Load(ref label, ref reg) = instruction {
            if let &Label::Name(ref name) = label {
                if name == "#0" {
                    target_reg = Some(*reg);
                }
            }
        }
        // If yes, replace with clear
        if let Some(reg) = target_reg {
            *instruction = Instruction::Clear(reg);
        }
        // Definition of #0?
        let remove_def = match label {
            &mut Label::Name(ref n) if n == "#0" => true,
            _ => false,
        };
        if remove_def {
            // If yes, eliminate it
            // The '#0' slot must hold the literal zero it was created for.
            assert_eq!(*instruction, Instruction::Value(0));
            *instruction = Instruction::Nop;
            *label = Label::None;
        }
    }
}
fn eliminate_noplike_jumps(program: &mut [(Label, Instruction)]) {
    // Nop-like jumps (which go to the following instruction) are useless,
    // and not permitted by the assembler.
    //
    // This implementation is kind of inefficient.
    // Fixed-point iteration: keep making passes until no jump was stripped,
    // since removing one jump can make another one nop-like.
    loop {
        let mut modified = false;
        for i in 0..program.len() {
            // Find a branch and record its target
            let target_label = {
                let (_, ref mut instruction) = program[i];
                match instruction {
                    &mut Instruction::BranchNotEqual(ref l)
                    | &mut Instruction::BranchGreater(ref l)
                    | &mut Instruction::Jump(ref l) => {
                        // Must move so we no longer borrow from `program`
                        l.clone()
                    }
                    // Any other instruction is ignored
                    _ => continue,
                }
            };
            // Find the target label
            let mut label_idx = None;
            for (idx, &(ref label, _)) in program.iter().enumerate() {
                if label == &target_label {
                    label_idx = Some(idx);
                    break;
                }
            }
            // A dangling jump target is a malformed program: hard failure.
            let label_idx = label_idx.expect("Jump targeted label which was not found");
            debug!(
                "Jump with target {}, span={}..{} ",
                target_label, i, label_idx
            );
            if label_idx <= i {
                // Does not jump forward, cannot be elided
                continue;
            }
            // Can elide jump if there are only nops between it and the destination
            let all_nops = program[i + 1..label_idx]
                .iter()
                .all(|&(_, ref instr)| match instr {
                    &Instruction::Nop => true,
                    _ => false,
                });
            if all_nops {
                debug!("Range is NOP; stripping jump at index {}", i);
                program[i].1 = Instruction::Nop;
                modified = true;
            }
        }
        // Terminate when we make a pass without making any replacements.
        if !modified {
            break;
        }
    }
}
|
use fal::{read_u32, read_u64};
use crate::{ObjPhys, ObjectIdentifier, TransactionIdentifier};
/// On-disk reaper object (APFS `nx_reaper_phys`-style layout); fields mirror
/// the fixed byte offsets used by `parse` below.
#[derive(Debug)]
pub struct ReaperPhys {
    pub header: ObjPhys,
    pub next_reaper_id: u64,
    pub completed_id: u64,
    pub head: ObjectIdentifier,
    pub tail: ObjectIdentifier,
    pub flags: u32,
    pub rlcount: u32,
    pub ty: u32,
    pub size: u32,
    pub fs_oid: ObjectIdentifier,
    pub oid: ObjectIdentifier,
    pub xid: TransactionIdentifier,
    pub nrle_flags: u32,
    // Length in bytes of `state_buffer`, as stored on disk.
    pub state_buffer_size: u32,
    // Variable-length payload starting at byte 112.
    pub state_buffer: Vec<u8>,
}
impl ReaperPhys {
    /// Deserializes a reaper object from raw block bytes using the fixed
    /// field offsets of the on-disk layout.
    ///
    /// NOTE(review): slicing `bytes[112..112 + state_buffer_size]` panics if
    /// the buffer is shorter than the declared size — assumes `bytes` is a
    /// full, validated block; confirm at the call site.
    pub fn parse(bytes: &[u8]) -> Self {
        // Read the length first; it determines the trailing slice below.
        let state_buffer_size = read_u32(bytes, 108);
        Self {
            header: ObjPhys::parse(&bytes),
            next_reaper_id: read_u64(bytes, 32),
            completed_id: read_u64(bytes, 40),
            head: read_u64(bytes, 48).into(),
            tail: read_u64(bytes, 56).into(),
            flags: read_u32(bytes, 64),
            rlcount: read_u32(bytes, 68),
            ty: read_u32(bytes, 72),
            size: read_u32(bytes, 76),
            fs_oid: read_u64(bytes, 80).into(),
            oid: read_u64(bytes, 88).into(),
            xid: read_u64(bytes, 96),
            nrle_flags: read_u32(bytes, 104),
            state_buffer_size,
            state_buffer: bytes[112..112 + state_buffer_size as usize].to_owned(),
        }
    }
}
|
#[doc = "Reader of register FDCAN_TEST"]
pub type R = crate::R<u32, super::FDCAN_TEST>;
#[doc = "Writer for register FDCAN_TEST"]
pub type W = crate::W<u32, super::FDCAN_TEST>;
#[doc = "Register FDCAN_TEST `reset()`'s with value 0"]
impl crate::ResetValue for super::FDCAN_TEST {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "LBCK\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LBCK_A {
#[doc = "0: Reset value, Loop Back mode is\r\n disabled"]
B_0X0 = 0,
#[doc = "1: Loop Back mode is enabled (see Test\r\n modes)"]
B_0X1 = 1,
}
impl From<LBCK_A> for bool {
#[inline(always)]
fn from(variant: LBCK_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `LBCK`"]
pub type LBCK_R = crate::R<bool, LBCK_A>;
impl LBCK_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LBCK_A {
match self.bits {
false => LBCK_A::B_0X0,
true => LBCK_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == LBCK_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == LBCK_A::B_0X1
}
}
#[doc = "Write proxy for field `LBCK`"]
pub struct LBCK_W<'a> {
w: &'a mut W,
}
impl<'a> LBCK_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LBCK_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Reset value, Loop Back mode is disabled"]
#[inline(always)]
pub fn b_0x0(self) -> &'a mut W {
self.variant(LBCK_A::B_0X0)
}
#[doc = "Loop Back mode is enabled (see Test modes)"]
#[inline(always)]
pub fn b_0x1(self) -> &'a mut W {
self.variant(LBCK_A::B_0X1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "TX\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum TX_A {
#[doc = "0: Reset value , FDCANx_TX TX is\r\n controlled by the CAN core, updated at the end of\r\n the CAN bit time"]
B_0X0 = 0,
#[doc = "1: Sample point can be monitored at pin\r\n FDCANx_TX"]
B_0X1 = 1,
#[doc = "2: Dominant ( 0 ) level at pin\r\n FDCANx_TX"]
B_0X2 = 2,
#[doc = "3: Recessive ( 1 ) at pin\r\n FDCANx_TX"]
B_0X3 = 3,
}
impl From<TX_A> for u8 {
#[inline(always)]
fn from(variant: TX_A) -> Self {
variant as _
}
}
#[doc = "Reader of field `TX`"]
pub type TX_R = crate::R<u8, TX_A>;
impl TX_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TX_A {
match self.bits {
0 => TX_A::B_0X0,
1 => TX_A::B_0X1,
2 => TX_A::B_0X2,
3 => TX_A::B_0X3,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == TX_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == TX_A::B_0X1
}
#[doc = "Checks if the value of the field is `B_0X2`"]
#[inline(always)]
pub fn is_b_0x2(&self) -> bool {
*self == TX_A::B_0X2
}
#[doc = "Checks if the value of the field is `B_0X3`"]
#[inline(always)]
pub fn is_b_0x3(&self) -> bool {
*self == TX_A::B_0X3
}
}
#[doc = "Write proxy for field `TX`"]
pub struct TX_W<'a> {
w: &'a mut W,
}
impl<'a> TX_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: TX_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Reset value , FDCANx_TX TX is controlled by the CAN core, updated at the end of the CAN bit time"]
#[inline(always)]
pub fn b_0x0(self) -> &'a mut W {
self.variant(TX_A::B_0X0)
}
#[doc = "Sample point can be monitored at pin FDCANx_TX"]
#[inline(always)]
pub fn b_0x1(self) -> &'a mut W {
self.variant(TX_A::B_0X1)
}
#[doc = "Dominant ( 0 ) level at pin FDCANx_TX"]
#[inline(always)]
pub fn b_0x2(self) -> &'a mut W {
self.variant(TX_A::B_0X2)
}
#[doc = "Recessive ( 1 ) at pin FDCANx_TX"]
#[inline(always)]
pub fn b_0x3(self) -> &'a mut W {
self.variant(TX_A::B_0X3)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 5)) | (((value as u32) & 0x03) << 5);
self.w
}
}
#[doc = "RX\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RX_A {
#[doc = "0: The CAN bus is dominant (FDCANx_RX =\r\n 0 )"]
B_0X0 = 0,
#[doc = "1: The CAN bus is recessive (FDCANx_RX\r\n = 1 )"]
B_0X1 = 1,
}
impl From<RX_A> for bool {
#[inline(always)]
fn from(variant: RX_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `RX`"]
pub type RX_R = crate::R<bool, RX_A>;
impl RX_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RX_A {
match self.bits {
false => RX_A::B_0X0,
true => RX_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == RX_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == RX_A::B_0X1
}
}
#[doc = "Write proxy for field `RX`"]
pub struct RX_W<'a> {
w: &'a mut W,
}
impl<'a> RX_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RX_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "The CAN bus is dominant (FDCANx_RX = 0 )"]
#[inline(always)]
pub fn b_0x0(self) -> &'a mut W {
self.variant(RX_A::B_0X0)
}
#[doc = "The CAN bus is recessive (FDCANx_RX = 1 )"]
#[inline(always)]
pub fn b_0x1(self) -> &'a mut W {
self.variant(RX_A::B_0X1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
// Read accessors extracting each field from the 32-bit register value.
impl R {
    #[doc = "Bit 4 - LBCK"]
    #[inline(always)]
    pub fn lbck(&self) -> LBCK_R {
        LBCK_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bits 5:6 - TX"]
    #[inline(always)]
    pub fn tx(&self) -> TX_R {
        TX_R::new(((self.bits >> 5) & 0x03) as u8)
    }
    #[doc = "Bit 7 - RX"]
    #[inline(always)]
    pub fn rx(&self) -> RX_R {
        RX_R::new(((self.bits >> 7) & 0x01) != 0)
    }
}
// Write accessors returning a per-field proxy over this writer.
impl W {
    #[doc = "Bit 4 - LBCK"]
    #[inline(always)]
    pub fn lbck(&mut self) -> LBCK_W {
        LBCK_W { w: self }
    }
    #[doc = "Bits 5:6 - TX"]
    #[inline(always)]
    pub fn tx(&mut self) -> TX_W {
        TX_W { w: self }
    }
    #[doc = "Bit 7 - RX"]
    #[inline(always)]
    pub fn rx(&mut self) -> RX_W {
        RX_W { w: self }
    }
}
|
use anyhow::{anyhow, Result};
use jsonrpc_core::Params;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::datastore::RECENT_FILES_IN_MEMORY;
use crate::stdio_server::types::GlobalEnv;
use crate::stdio_server::GLOBAL_ENV;
/// A JSON-RPC notification sent from Vim: a method name, its parameters,
/// and the session it belongs to.
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
pub struct Notification {
    pub method: String,
    pub params: Params,
    pub session_id: u64,
}
impl Notification {
/// Process the notification message from Vim.
pub async fn process(self) -> Result<()> {
match self.method.as_str() {
"initialize_global_env" => self.initialize_global_env(), // should be called only once.
"note_recent_files" => self.note_recent_file().await,
_ => Err(anyhow!("Unknown notification: {:?}", self)),
}
}
pub fn parse<T: DeserializeOwned>(self) -> Result<T> {
self.params.parse().map_err(Into::into)
}
pub fn parse_unsafe<T: DeserializeOwned>(self) -> T {
self.parse()
.unwrap_or_else(|e| panic!("Couldn't deserialize params: {:?}", e))
}
fn initialize_global_env(self) -> Result<()> {
#[derive(Deserialize)]
struct InnerParams {
is_nvim: Option<bool>,
enable_icon: Option<bool>,
clap_preview_size: serde_json::Value,
}
let InnerParams {
is_nvim,
enable_icon,
clap_preview_size,
} = self.params.parse()?;
let is_nvim = is_nvim.unwrap_or(false);
let enable_icon = enable_icon.unwrap_or(false);
let global_env = GlobalEnv::new(is_nvim, enable_icon, clap_preview_size.into());
if let Err(e) = GLOBAL_ENV.set(global_env) {
tracing::debug!(error = ?e, "Failed to initialized GLOBAL_ENV");
} else {
tracing::debug!("GLOBAL_ENV initialized successfully");
}
Ok(())
}
async fn note_recent_file(self) -> Result<()> {
#[derive(Deserialize)]
struct InnerParams {
file: String,
}
let InnerParams { file } = self.params.parse()?;
tracing::debug!(?file, "Receive a recent file");
if file.is_empty() || !std::path::Path::new(&file).exists() {
return Ok(());
}
let mut recent_files = RECENT_FILES_IN_MEMORY.lock();
recent_files.upsert(file);
Ok(())
}
}
|
// Copyright 2017 pdb Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//! The `pdb` crate parses Microsoft PDB (Program Database) files. PDB files contain debugging
//! information produced by most compilers that target Windows, including information about symbols,
//! types, modules, and so on.
//!
//! # Usage
//!
//! PDB files are accessed via the [`pdb::PDB`] object.
//!
//! # Example
//!
//! ```
//! # use pdb::FallibleIterator;
//! #
//! # fn test() -> pdb::Result<usize> {
//! let file = std::fs::File::open("fixtures/self/foo.pdb")?;
//! let mut pdb = pdb::PDB::open(file)?;
//!
//! let symbol_table = pdb.global_symbols()?;
//! let address_map = pdb.address_map()?;
//!
//! # let mut count: usize = 0;
//! let mut symbols = symbol_table.iter();
//! while let Some(symbol) = symbols.next()? {
//! match symbol.parse() {
//! Ok(pdb::SymbolData::Public(data)) if data.function => {
//! // we found the location of a function!
//! let rva = data.offset.to_rva(&address_map).unwrap_or_default();
//! println!("{} is {}", rva, data.name);
//! # count += 1;
//! }
//! _ => {}
//! }
//! }
//!
//! # Ok(count)
//! # }
//! # assert!(test().expect("test") > 2000);
//! ```
#![warn(missing_docs)]
// modules
mod common;
mod dbi;
mod framedata;
mod modi;
mod msf;
mod omap;
mod pdb;
mod pdbi;
mod pe;
mod source;
mod strings;
mod symbol;
mod tpi;
// exports
pub use crate::common::*;
pub use crate::dbi::*;
pub use crate::framedata::*;
pub use crate::modi::*;
pub use crate::omap::*;
pub use crate::pdb::*;
pub use crate::pdbi::*;
pub use crate::pe::*;
pub use crate::source::*;
pub use crate::strings::*;
pub use crate::symbol::*;
pub use crate::tpi::*;
// re-export FallibleIterator for convenience
#[doc(no_inline)]
pub use fallible_iterator::FallibleIterator;
|
use config::Config;
use api::TellerClient;
use api::inform::{Outgoings, GetOutgoings};
use cli::arg_types::{AccountType, OutputFormat, Interval, Timeframe};
use command::representations::represent_list_amounts;
use command::timeframe_to_date_range;
/// Prints the outgoings list using the shared amount-list representation.
fn represent_list_outgoings(hac: &Outgoings, output: &OutputFormat) {
    // `hac` and `output` are already references; the original's extra `&`
    // created needless `&&T` borrows that only worked via deref coercion.
    represent_list_amounts("outgoing", hac, output)
}
/// Fetches and prints outgoings for `account` over `timeframe`, bucketed by
/// `interval`. Returns a process exit code: 0 on success, 1 on failure.
pub fn list_outgoings_command(teller: &TellerClient,
                              config: &Config,
                              account: &AccountType,
                              interval: &Interval,
                              timeframe: &Timeframe,
                              output: &OutputFormat)
                              -> i32 {
    info!("Calling the list outgoings command");
    // Pass the existing references directly instead of re-borrowing (`&account`
    // etc. were needless `&&T` borrows in the original).
    let account_id = config.get_account_id(account);
    let (from, to) = timeframe_to_date_range(timeframe);
    teller.get_outgoings(&account_id, interval, &from, &to)
          .map(|outgoings| {
              represent_list_outgoings(&outgoings, output);
              0
          })
          .unwrap_or_else(|err| {
              error!("Unable to list outgoings: {}", err);
              1
          })
}
|
#![feature(proc_macro_diagnostic)]
extern crate proc_macro;
use darling::FromMeta;
use proc_macro::TokenStream;
use proc_macro2::Span;
use quote::{format_ident, quote};
use std::iter;
use syn::spanned::Spanned;
use syn::{parse, Type, Visibility};
use syn::{ItemFn, ReturnType};
mod path;
use path::ModulePrefix;
/// Arguments accepted by the `#[task(...)]` attribute.
#[derive(Debug, FromMeta)]
struct TaskArgs {
    // Number of simultaneously spawnable instances; defaults to 1 below.
    #[darling(default)]
    pool_size: Option<usize>,
    // When set, the generated future must be `Send`.
    #[darling(default)]
    send: bool,
    // Crate path prefix used to reference the embassy runtime.
    #[darling(default)]
    embassy_prefix: ModulePrefix,
}
/// Attribute macro turning an `async fn` into a spawnable embassy task
/// backed by a static pool of `pool_size` task slots.
#[proc_macro_attribute]
pub fn task(args: TokenStream, item: TokenStream) -> TokenStream {
    let macro_args = syn::parse_macro_input!(args as syn::AttributeArgs);
    let mut task_fn = syn::parse_macro_input!(item as syn::ItemFn);
    // Fix: the original source was corrupted here (`¯o_args`, an HTML-entity
    // mangling of `&macro_args`), which does not compile.
    let macro_args = match TaskArgs::from_list(&macro_args) {
        Ok(v) => v,
        Err(e) => {
            return TokenStream::from(e.write_errors());
        }
    };
    let embassy_prefix = macro_args.embassy_prefix.append("embassy");
    let embassy_path = embassy_prefix.path();
    let pool_size: usize = macro_args.pool_size.unwrap_or(1);
    // Collect all diagnostics before bailing so the user sees every error.
    let mut fail = false;
    if task_fn.sig.asyncness.is_none() {
        task_fn
            .sig
            .span()
            .unwrap()
            .error("task functions must be async")
            .emit();
        fail = true;
    }
    if !task_fn.sig.generics.params.is_empty() {
        task_fn
            .sig
            .span()
            .unwrap()
            .error("task functions must not be generic")
            .emit();
        fail = true;
    }
    if pool_size < 1 {
        return parse::Error::new(Span::call_site(), "pool_size must be 1 or greater")
            .to_compile_error()
            .into();
    }
    // Gather argument identifiers so the wrapper can forward them.
    let mut arg_names: syn::punctuated::Punctuated<syn::Ident, syn::Token![,]> =
        syn::punctuated::Punctuated::new();
    let mut args = task_fn.sig.inputs.clone();
    for arg in args.iter_mut() {
        match arg {
            syn::FnArg::Receiver(_) => {
                arg.span()
                    .unwrap()
                    .error("task functions must not have receiver arguments")
                    .emit();
                fail = true;
            }
            syn::FnArg::Typed(t) => match t.pat.as_mut() {
                syn::Pat::Ident(i) => {
                    arg_names.push(i.ident.clone());
                    // Strip `mut` from the wrapper signature; it only matters
                    // inside the inner task body.
                    i.mutability = None;
                }
                _ => {
                    // Fix: diagnostic typo "supporteds" -> "supported".
                    arg.span()
                        .unwrap()
                        .error("pattern matching in task arguments is not yet supported")
                        .emit();
                    fail = true;
                }
            },
        }
    }
    if fail {
        return TokenStream::new();
    }
    // The public wrapper keeps the user's name; the async fn is renamed to
    // `task` and nested inside it.
    let name = task_fn.sig.ident.clone();
    let visibility = &task_fn.vis;
    task_fn.sig.ident = format_ident!("task");
    let impl_ty = if macro_args.send {
        quote!(impl ::core::future::Future + Send + 'static)
    } else {
        quote!(impl ::core::future::Future + 'static)
    };
    let result = quote! {
        #visibility fn #name(#args) -> #embassy_path::executor::SpawnToken<#impl_ty> {
            use #embassy_path::executor::raw::Task;
            #task_fn
            type F = #impl_ty;
            const NEW_TASK: Task<F> = Task::new();
            static POOL: [Task<F>; #pool_size] = [NEW_TASK; #pool_size];
            unsafe { Task::spawn_pool(&POOL, move || task(#arg_names)) }
        }
    };
    result.into()
}
/// Attribute macro registering a function as an interrupt handler: validates
/// the signature, injects a compile-time existence check for the interrupt
/// name, and exports the function under that name.
#[proc_macro_attribute]
pub fn interrupt(args: TokenStream, input: TokenStream) -> TokenStream {
    let mut f: ItemFn = syn::parse(input).expect("`#[interrupt]` must be applied to a function");
    if !args.is_empty() {
        return parse::Error::new(Span::call_site(), "This attribute accepts no arguments")
            .to_compile_error()
            .into();
    }
    let fspan = f.span();
    let ident = f.sig.ident.clone();
    let ident_s = ident.to_string();
    // XXX should we blacklist other attributes?
    // Handler must look like `[unsafe] fn() [-> !]`: no constness, private
    // visibility, no ABI, no args, no generics, unit or never return type.
    let valid_signature = f.sig.constness.is_none()
        && f.vis == Visibility::Inherited
        && f.sig.abi.is_none()
        && f.sig.inputs.is_empty()
        && f.sig.generics.params.is_empty()
        && f.sig.generics.where_clause.is_none()
        && f.sig.variadic.is_none()
        && match f.sig.output {
            ReturnType::Default => true,
            ReturnType::Type(_, ref ty) => match **ty {
                Type::Tuple(ref tuple) => tuple.elems.is_empty(),
                Type::Never(..) => true,
                _ => false,
            },
        };
    if !valid_signature {
        return parse::Error::new(
            fspan,
            "`#[interrupt]` handlers must have signature `[unsafe] fn() [-> !]`",
        )
        .to_compile_error()
        .into();
    }
    // Prepend a statement that fails to compile unless `interrupt::#ident`
    // exists, catching typos in the interrupt name.
    f.block.stmts = iter::once(
        syn::parse2(quote! {{
            // Check that this interrupt actually exists
            let __irq_exists_check: interrupt::#ident;
        }})
        .unwrap(),
    )
    .chain(f.block.stmts)
    .collect();
    quote!(
        #[doc(hidden)]
        #[export_name = #ident_s]
        #[allow(non_snake_case)]
        #f
    )
    .into()
}
/// Declares an interrupt ownership type for the named interrupt.
///
/// Generates a unit struct implementing `::embassy::interrupt::Interrupt`
/// (its number read from `InterruptEnum::#name`, which must be in scope at
/// the expansion site) plus an `Unborrow` impl, and exports a per-interrupt
/// handler slot under the `__EMBASSY_<NAME>_HANDLER` link name so that
/// `interrupt_take!` can find it.
#[proc_macro]
pub fn interrupt_declare(item: TokenStream) -> TokenStream {
    let name = syn::parse_macro_input!(item as syn::Ident);
    // Re-interning the ident is redundant but harmless; both bindings
    // refer to the same interrupt name.
    let name = format_ident!("{}", name);
    let name_interrupt = format_ident!("{}", name);
    // Link-time symbol shared with `interrupt_take!`.
    let name_handler = format!("__EMBASSY_{}_HANDLER", name);
    let result = quote! {
        #[allow(non_camel_case_types)]
        pub struct #name_interrupt(());
        unsafe impl ::embassy::interrupt::Interrupt for #name_interrupt {
            type Priority = crate::interrupt::Priority;
            fn number(&self) -> u16 {
                use cortex_m::interrupt::InterruptNumber;
                let irq = InterruptEnum::#name;
                irq.number() as u16
            }
            unsafe fn steal() -> Self {
                Self(())
            }
            unsafe fn __handler(&self) -> &'static ::embassy::interrupt::Handler {
                #[export_name = #name_handler]
                static HANDLER: ::embassy::interrupt::Handler = ::embassy::interrupt::Handler::new();
                &HANDLER
            }
        }
        unsafe impl ::embassy::util::Unborrow for #name_interrupt {
            type Target = #name_interrupt;
            unsafe fn unborrow(self) -> #name_interrupt {
                self
            }
        }
    };
    result.into()
}
/// Takes ownership of an interrupt, yielding its `interrupt::#name` token.
///
/// Installs a trampoline (via `#[export_name]`) as the interrupt vector;
/// the trampoline forwards to the handler slot exported by
/// `interrupt_declare!`. Panics at runtime if the same interrupt is taken
/// more than once.
#[proc_macro]
pub fn interrupt_take(item: TokenStream) -> TokenStream {
    let name = syn::parse_macro_input!(item as syn::Ident);
    let name = format!("{}", name);
    let name_interrupt = format_ident!("{}", name);
    let name_handler = format!("__EMBASSY_{}_HANDLER", name);
    let result = quote! {
        {
            #[allow(non_snake_case)]
            #[export_name = #name]
            pub unsafe extern "C" fn trampoline() {
                extern "C" {
                    #[link_name = #name_handler]
                    static HANDLER: ::embassy::interrupt::Handler;
                }
                // Load the registered callback and context; the function
                // pointer is stored type-erased and transmuted back here.
                let func = HANDLER.func.load(::embassy::export::atomic::Ordering::Relaxed);
                let ctx = HANDLER.ctx.load(::embassy::export::atomic::Ordering::Relaxed);
                let func: fn(*mut ()) = ::core::mem::transmute(func);
                func(ctx)
            }
            // One-shot guard: each interrupt may only be taken once.
            static TAKEN: ::embassy::export::atomic::AtomicBool = ::embassy::export::atomic::AtomicBool::new(false);
            if TAKEN.compare_exchange(false, true, ::embassy::export::atomic::Ordering::AcqRel, ::embassy::export::atomic::Ordering::Acquire).is_err() {
                panic!("IRQ Already taken");
            }
            // The interrupt type is a zero-sized newtype, so transmuting
            // from `()` just mints the ownership token.
            let irq: interrupt::#name_interrupt = unsafe { ::core::mem::transmute(()) };
            irq
        }
    };
    result.into()
}
#[cfg(feature = "stm32")]
#[path = "chip/stm32.rs"]
mod chip;
#[cfg(feature = "nrf")]
#[path = "chip/nrf.rs"]
mod chip;
#[cfg(feature = "rp")]
#[path = "chip/rp.rs"]
mod chip;
/// Arguments accepted by the `#[main]` attribute macros below.
#[derive(Debug, FromMeta)]
struct MainArgs {
    /// Optional crate-path prefix used when naming `embassy` items.
    #[darling(default)]
    embassy_prefix: ModulePrefix,
    /// Optional chip config, given as a string literal that is later parsed
    /// as an expression; defaults to `Default::default()` when absent.
    #[darling(default)]
    config: Option<syn::LitStr>,
}
/// `#[main]` for chip targets: wraps the annotated async fn in a spawned
/// task and generates a `cortex_m_rt` entry point that performs chip setup
/// and runs the executor forever.
///
/// The annotated function must be async, non-generic, and take exactly two
/// arguments (spawner and peripherals).
#[cfg(any(feature = "nrf", feature = "rp", feature = "stm32"))]
#[proc_macro_attribute]
pub fn main(args: TokenStream, item: TokenStream) -> TokenStream {
    let macro_args = syn::parse_macro_input!(args as syn::AttributeArgs);
    let task_fn = syn::parse_macro_input!(item as syn::ItemFn);
    let macro_args = match MainArgs::from_list(&macro_args) {
        Ok(v) => v,
        Err(e) => {
            return TokenStream::from(e.write_errors());
        }
    };
    // Collect every signature problem before bailing so the user sees all
    // diagnostics at once.
    let mut fail = false;
    if task_fn.sig.asyncness.is_none() {
        task_fn
            .sig
            .span()
            .unwrap()
            // Fixed: previously said "task functions must be async", but
            // this attribute validates the main function.
            .error("main function must be async")
            .emit();
        fail = true;
    }
    if !task_fn.sig.generics.params.is_empty() {
        task_fn
            .sig
            .span()
            .unwrap()
            .error("main function must not be generic")
            .emit();
        fail = true;
    }
    let args = task_fn.sig.inputs.clone();
    if args.len() != 2 {
        task_fn
            .sig
            .span()
            .unwrap()
            .error("main function must have 2 arguments")
            .emit();
        fail = true;
    }
    if fail {
        return TokenStream::new();
    }
    let embassy_prefix = macro_args.embassy_prefix;
    let embassy_prefix_lit = embassy_prefix.literal();
    let embassy_path = embassy_prefix.append("embassy").path();
    let task_fn_body = task_fn.block;
    // User-provided config expression, defaulting to `Default::default()`.
    let config = macro_args
        .config
        .map(|s| s.parse::<syn::Expr>().unwrap())
        .unwrap_or_else(|| {
            syn::Expr::Verbatim(quote! {
                Default::default()
            })
        });
    let chip_setup = chip::generate(&embassy_prefix, config);
    let result = quote! {
        #[#embassy_path::task(embassy_prefix = #embassy_prefix_lit)]
        async fn __embassy_main(#args) {
            #task_fn_body
        }
        #[cortex_m_rt::entry]
        fn main() -> ! {
            unsafe fn make_static<T>(t: &mut T) -> &'static mut T {
                ::core::mem::transmute(t)
            }
            let mut executor = #embassy_path::executor::Executor::new();
            // The executor never returns, so promoting its lifetime to
            // 'static here is sound in practice.
            let executor = unsafe { make_static(&mut executor) };
            #chip_setup
            executor.run(|spawner| {
                // `p` is expected to be bound by the chip setup code above
                // (the peripherals value) — see `chip::generate`.
                spawner.spawn(__embassy_main(spawner, p)).unwrap();
            })
        }
    };
    result.into()
}
/// `#[main]` for std targets: wraps the annotated async fn in a spawned
/// task and generates a plain `fn main()` that runs the embassy-std
/// executor forever.
///
/// The annotated function must be async, non-generic, and take exactly one
/// argument (the spawner).
#[cfg(feature = "std")]
#[proc_macro_attribute]
pub fn main(args: TokenStream, item: TokenStream) -> TokenStream {
    let macro_args = syn::parse_macro_input!(args as syn::AttributeArgs);
    let task_fn = syn::parse_macro_input!(item as syn::ItemFn);
    let macro_args = match MainArgs::from_list(&macro_args) {
        Ok(v) => v,
        Err(e) => {
            return TokenStream::from(e.write_errors());
        }
    };
    let embassy_path = macro_args.embassy_prefix.append("embassy");
    let embassy_std_path = macro_args.embassy_prefix.append("embassy_std");
    // Collect every signature problem before bailing so the user sees all
    // diagnostics at once.
    let mut fail = false;
    if task_fn.sig.asyncness.is_none() {
        task_fn
            .sig
            .span()
            .unwrap()
            // Fixed: previously said "task functions must be async", but
            // this attribute validates the main function.
            .error("main function must be async")
            .emit();
        fail = true;
    }
    if !task_fn.sig.generics.params.is_empty() {
        task_fn
            .sig
            .span()
            .unwrap()
            .error("main function must not be generic")
            .emit();
        fail = true;
    }
    let args = task_fn.sig.inputs.clone();
    if args.len() != 1 {
        task_fn
            .sig
            .span()
            .unwrap()
            .error("main function must have one argument")
            .emit();
        fail = true;
    }
    if fail {
        return TokenStream::new();
    }
    let task_fn_body = task_fn.block.clone();
    let embassy_path = embassy_path.path();
    let embassy_std_path = embassy_std_path.path();
    let embassy_prefix_lit = macro_args.embassy_prefix.literal();
    let result = quote! {
        #[#embassy_path::task(embassy_prefix = #embassy_prefix_lit)]
        async fn __embassy_main(#args) {
            #task_fn_body
        }
        fn main() -> ! {
            unsafe fn make_static<T>(t: &mut T) -> &'static mut T {
                ::core::mem::transmute(t)
            }
            let mut executor = #embassy_std_path::Executor::new();
            // The executor never returns, so promoting its lifetime to
            // 'static here is sound in practice.
            let executor = unsafe { make_static(&mut executor) };
            executor.run(|spawner| {
                spawner.spawn(__embassy_main(spawner)).unwrap();
            })
        }
    };
    result.into()
}
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
#![warn(missing_debug_implementations, missing_docs)]
//! Vertex
use std::array::TryFromSliceError;
use vector::{FullPrecisionDistance, Metric};
/// Vertex with data type T and dimension N.
///
/// Holds a borrowed fixed-size view into the underlying vector data; the
/// `FullPrecisionDistance` bound guarantees distances can be computed for
/// this element type and dimension.
#[derive(Debug)]
pub struct Vertex<'a, T, const N: usize>
where
    [T; N]: FullPrecisionDistance<T, N>,
{
    /// Vertex value (borrowed, never owned by the vertex itself)
    val: &'a [T; N],
    /// Vertex Id
    id: u32,
}
impl<'a, T, const N: usize> Vertex<'a, T, N>
where
[T; N]: FullPrecisionDistance<T, N>,
{
/// Create the vertex with data
pub fn new(val: &'a [T; N], id: u32) -> Self {
Self {
val,
id,
}
}
/// Compare the vertex with another.
#[inline(always)]
pub fn compare(&self, other: &Vertex<'a, T, N>, metric: Metric) -> f32 {
<[T; N]>::distance_compare(self.val, other.val, metric)
}
/// Get the vector associated with the vertex.
#[inline]
pub fn vector(&self) -> &[T; N] {
self.val
}
/// Get the vertex id.
#[inline]
pub fn vertex_id(&self) -> u32 {
self.id
}
}
impl<'a, T, const N: usize> TryFrom<(&'a [T], u32)> for Vertex<'a, T, N>
where
    [T; N]: FullPrecisionDistance<T, N>,
{
    type Error = TryFromSliceError;

    /// Builds a vertex from a raw slice plus id; fails when the slice
    /// length is not exactly N.
    fn try_from((mem_slice, id): (&'a [T], u32)) -> Result<Self, Self::Error> {
        mem_slice
            .try_into()
            .map(|array: &'a [T; N]| Vertex::new(array, id))
    }
}
|
/// The kind of an `Entity`: a player or a trap.
pub enum EntityBlock {
    // NOTE(review): the u8 presumably identifies which player — confirm at call sites.
    Player(u8),
    Trap,
}
/// An entity with a position and a kind.
pub struct Entity {
    // NOTE(review): assumed to be (x, y) — axis order not established here, confirm.
    pub pos: (u16, u16),
    pub block: EntityBlock,
}
impl Entity {
pub fn new(block: EntityBlock, pos: (u16, u16)) -> Self {
Self { block, pos }
}
}
|
#![feature(alloc_system)]
#![feature(test)]
extern crate alloc_system;
extern crate test;
#[macro_use]
extern crate itertools;
extern crate problems;
extern crate utils;
use std::env;
use std::process;
use std::fmt::Display;
// use problems;
/// Evaluates one problem and prints its answer alongside its name.
fn run_problem<P, A: Display>(problem: P, name: &str)
where
    P: Fn() -> A,
{
    println!("The answer to {} is {}", name, problem());
}
/// CLI entry point: runs each problem named on the command line, or exits
/// with a usage message when no problem name is given.
fn main() {
    let mut argv = env::args().collect::<Vec<_>>();
    if argv.len() < 2 {
        println!("Usage: {} <problems>", argv[0]);
        process::exit(1);
    }
    // Everything after the program name is treated as a problem name.
    let problems = argv.split_off(1);
    for arg in problems.iter() {
        // Each arm maps a name to the matching function in the `problems`
        // crate; unknown names fall through to a friendly message.
        match &arg[..] {
            "problem_001" => run_problem(problems::problem_001::problem_001, arg),
            "problem_002" => run_problem(problems::problem_002::problem_002, arg),
            "problem_003" => run_problem(problems::problem_003::problem_003, arg),
            "problem_004" => run_problem(problems::problem_004::problem_004, arg),
            "problem_005" => run_problem(problems::problem_005::problem_005, arg),
            "problem_006" => run_problem(problems::problem_006::problem_006, arg),
            "problem_007" => run_problem(problems::problem_007::problem_007, arg),
            "problem_008" => run_problem(problems::problem_008::problem_008, arg),
            "problem_009" => run_problem(problems::problem_009::problem_009, arg),
            "problem_010" => run_problem(problems::problem_010::problem_010, arg),
            "problem_011" => run_problem(problems::problem_011::problem_011, arg),
            "problem_012" => run_problem(problems::problem_012::problem_012, arg),
            "problem_013" => run_problem(problems::problem_013::problem_013, arg),
            "problem_014" => run_problem(problems::problem_014::problem_014, arg),
            "problem_015" => run_problem(problems::problem_015::problem_015, arg),
            "problem_016" => run_problem(problems::problem_016::problem_016, arg),
            "problem_017" => run_problem(problems::problem_017::problem_017, arg),
            "problem_018" => run_problem(problems::problem_018::problem_018, arg),
            "problem_019" => run_problem(problems::problem_019::problem_019, arg),
            "problem_020" => run_problem(problems::problem_020::problem_020, arg),
            "problem_021" => run_problem(problems::problem_021::problem_021, arg),
            "problem_022" => run_problem(problems::problem_022::problem_022, arg),
            "problem_023" => run_problem(problems::problem_023::problem_023, arg),
            "problem_024" => run_problem(problems::problem_024::problem_024, arg),
            "problem_025" => run_problem(problems::problem_025::problem_025, arg),
            "problem_026" => run_problem(problems::problem_026::problem_026, arg),
            "problem_027" => run_problem(problems::problem_027::problem_027, arg),
            "problem_028" => run_problem(problems::problem_028::problem_028, arg),
            "problem_029" => run_problem(problems::problem_029::problem_029, arg),
            "problem_030" => run_problem(problems::problem_030::problem_030, arg),
            "problem_031" => run_problem(problems::problem_031::problem_031, arg),
            "problem_032" => run_problem(problems::problem_032::problem_032, arg),
            "problem_033" => run_problem(problems::problem_033::problem_033, arg),
            "problem_034" => run_problem(problems::problem_034::problem_034, arg),
            "problem_035" => run_problem(problems::problem_035::problem_035, arg),
            "problem_036" => run_problem(problems::problem_036::problem_036, arg),
            "problem_037" => run_problem(problems::problem_037::problem_037, arg),
            "problem_038" => run_problem(problems::problem_038::problem_038, arg),
            "problem_039" => run_problem(problems::problem_039::problem_039, arg),
            "problem_040" => run_problem(problems::problem_040::problem_040, arg),
            "problem_041" => run_problem(problems::problem_041::problem_041, arg),
            "problem_042" => run_problem(problems::problem_042::problem_042, arg),
            "problem_043" => run_problem(problems::problem_043::problem_043, arg),
            "problem_044" => run_problem(problems::problem_044::problem_044, arg),
            "problem_045" => run_problem(problems::problem_045::problem_045, arg),
            "problem_046" => run_problem(problems::problem_046::problem_046, arg),
            "problem_047" => run_problem(problems::problem_047::problem_047, arg),
            // NOTE(review): numbering jumps from 047 to 148 — presumably
            // intentional (148 was solved out of order); confirm.
            "problem_148" => run_problem(problems::problem_148::problem_148, arg),
            _ => println!("Haven't done {} yet", arg)
        }
    }
}
use std::io;
/// Interactive converter between Fahrenheit and Celsius.
///
/// Reads a unit selector (`f` or `c`) and a temperature value from stdin,
/// then prints the converted value. Unknown units now abort with a message
/// instead of silently prompting for a value and printing nothing.
fn main() {
    println!(
        // Fixed grammar in the greeting ("can helps" -> "can help").
        "This program can help you to convert temperatures between Fahrenheit and Celsius.\n"
    );
    println!("What units do you want to convert to others?\n");
    println!("Type one of following unit below:");
    println!(" f - convert Fahrenheit to Celsius;");
    println!(" c - convert Celsius to Fahrenheit;");
    let mut unit = String::new();
    io::stdin()
        .read_line(&mut unit)
        .expect("Failed to read line");
    let unit_in_lowercase = unit.trim().to_lowercase();
    // Reject unknown units up front rather than falling through silently.
    match unit_in_lowercase.as_str() {
        "f" => println!("You pick Fahrenheit: {}", unit),
        "c" => println!("You pick Celsius: {}", unit),
        _ => {
            println!("Unknown unit; please run again and type `f` or `c`.");
            return;
        }
    }
    println!("Now please input value to convert:");
    let mut value = String::new();
    io::stdin()
        .read_line(&mut value)
        .expect("Failed to read line");
    let value: f32 = value.trim().parse().expect("incorrect value");
    println!("You enter a value: {}", value);
    // The +40 / -40 trick works both ways because -40°F == -40°C.
    // Float literals replace the old `40 as f32` casts.
    if unit_in_lowercase == "f" {
        let result = ((value + 40.0) / 1.8) - 40.0;
        println!("{} degrees Fahrenheit = {} degrees Celsius", value, result);
    } else if unit_in_lowercase == "c" {
        let result = ((value + 40.0) * 1.8) - 40.0;
        println!("{} degrees Celsius = {} degrees Fahrenheit", value, result);
    }
}
|
// This file is part of lock-free-multi-producer-single-consumer-ring-buffer. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/lock-free-multi-producer-single-consumer-ring-buffer/master/COPYRIGHT. No part of lock-free-multi-producer-single-consumer-ring-buffer, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2017 - 2019 The developers of lock-free-multi-producer-single-consumer-ring-buffer. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/lock-free-multi-producer-single-consumer-ring-buffer/master/COPYRIGHT.
/// Counter driving an exponential back-off for spinning code paths.
#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
struct SpinLockBackOff(u8);
impl Default for SpinLockBackOff
{
#[inline(always)]
fn default() -> Self
{
Self::Initial
}
}
impl SpinLockBackOff {
    const Initial: Self = SpinLockBackOff(4);

    /// Exponential back-off for the spinning paths.
    ///
    /// Spins `self.0` times, then doubles the stored count, stopping the
    /// growth once the count has reached 128.
    #[inline(always)]
    fn back_off(&mut self) {
        const Maximum: u8 = 128;
        let count = self.0;
        for _ in 0..count {
            spin_loop_hint();
        }
        if count < Maximum {
            self.0 += count;
        }
    }
}
|
use crate::block::{self, BlockId};
use crate::model::modeless::ModelessId;
use crate::renderer::TableBlock;
use crate::Color;
/// Tool currently selected in the table UI, together with its per-tool
/// options. Variant names are part of the public API (note the existing
/// "Eracer" spelling), so they are documented rather than renamed.
#[derive(Clone)]
pub enum Tool {
    /// Plain selection; carries no options.
    Selector,
    /// Freehand pen with stroke width and color.
    Pen {
        line_width: f64,
        color: Color,
        // Whether this tool's option menu is currently shown.
        show_option_menu: bool,
    },
    /// Eraser ("Eracer" spelling kept for compatibility).
    Eracer {
        line_width: f64,
        show_option_menu: bool,
    },
    /// Measurement tool; `block_id` is the in-progress measure block, if any.
    Measure {
        color: Color,
        block_id: Option<BlockId>,
        show_option_menu: bool,
    },
    /// Area drawing with an area type and two colors.
    Area {
        type_: block::table_object::area::Type,
        color_1: Color,
        color_2: Color,
        block_id: Option<BlockId>,
        show_option_menu: bool,
    },
    /// Route drawing.
    Route {
        block_id: Option<BlockId>,
        show_option_menu: bool,
    },
    /// Character placement; carries no options.
    Character,
    /// Table mask with size, color and shape flags.
    Tablemask {
        size: [f32; 2],
        color: Color,
        is_rounded: bool,
        // NOTE(review): "is_inved" presumably means "is inverted" — confirm.
        is_inved: bool,
        show_option_menu: bool,
    },
    /// Box block with a 3D size and color.
    Boxblock {
        size: [f32; 3],
        color: Color,
        show_option_menu: bool,
    },
}
/// The table block that currently has focus, if any, tagged by its kind.
#[derive(Clone)]
pub enum Focused {
    None,
    Character(TableBlock),
    Tablemask(TableBlock),
    Area(TableBlock),
    Boxblock(TableBlock),
}
/// UI interaction state for the table view.
pub struct State {
    // Currently active tool.
    selecting_tool: Tool,
    // Key/value pairs shown in the info display.
    info: Vec<(String, String)>,
    // Last observed cursor position, and the positions of the most recent
    // mouse-down / mouse-up events.
    last_mouse_position: [f32; 2],
    last_mouse_down_position: [f32; 2],
    last_mouse_up_position: [f32; 2],
    // Whether the view is in 2D mode (3D otherwise).
    is_2d_mode: bool,
    // Block that currently has focus, if any.
    focused: Focused,
    // Modeless-window tab being dragged: (window id, tab index).
    moving_tab: Option<(ModelessId, usize)>,
    // Block currently being dragged/floated, if any.
    floating_object: Option<BlockId>,
}
impl Tool {
    // Each predicate below is the idiomatic `matches!` form of the old
    // match-to-bool boilerplate; behavior is unchanged.

    /// True when `Tool::Selector` is selected.
    pub fn is_selector(&self) -> bool {
        matches!(self, Self::Selector)
    }

    /// True when `Tool::Pen` is selected.
    pub fn is_pen(&self) -> bool {
        matches!(self, Self::Pen { .. })
    }

    /// True when `Tool::Eracer` (eraser) is selected.
    pub fn is_eracer(&self) -> bool {
        matches!(self, Self::Eracer { .. })
    }

    /// True when `Tool::Measure` is selected.
    pub fn is_measure(&self) -> bool {
        matches!(self, Self::Measure { .. })
    }

    /// True when `Tool::Area` is selected.
    pub fn is_area(&self) -> bool {
        matches!(self, Self::Area { .. })
    }

    /// True when `Tool::Route` is selected.
    pub fn is_route(&self) -> bool {
        matches!(self, Self::Route { .. })
    }

    /// True when `Tool::Character` is selected.
    pub fn is_character(&self) -> bool {
        matches!(self, Self::Character)
    }

    /// True when `Tool::Tablemask` is selected.
    pub fn is_tablemask(&self) -> bool {
        matches!(self, Self::Tablemask { .. })
    }

    /// True when `Tool::Boxblock` is selected.
    pub fn is_boxblock(&self) -> bool {
        matches!(self, Self::Boxblock { .. })
    }
}
impl State {
    /// Initial state: selector tool, nothing focused, all mouse positions
    /// at the origin, 3D mode.
    pub fn new() -> Self {
        Self {
            selecting_tool: Tool::Selector,
            info: vec![],
            last_mouse_position: [0.0, 0.0],
            last_mouse_down_position: [0.0, 0.0],
            last_mouse_up_position: [0.0, 0.0],
            is_2d_mode: false,
            focused: Focused::None,
            moving_tab: None,
            floating_object: None,
        }
    }

    /// Currently selected tool.
    pub fn selecting_tool(&self) -> &Tool {
        &self.selecting_tool
    }

    /// Mutable access to the selected tool (e.g. to tweak its options).
    pub fn selecting_tool_mut(&mut self) -> &mut Tool {
        &mut self.selecting_tool
    }

    pub fn set_selecting_tool(&mut self, next: Tool) {
        self.selecting_tool = next;
    }

    /// Key/value pairs shown in the info display.
    pub fn info(&self) -> &Vec<(String, String)> {
        &self.info
    }

    pub fn set_info(&mut self, entries: Vec<(String, String)>) {
        self.info = entries;
    }

    pub fn clear_info(&mut self) {
        self.info.clear();
    }

    /// Appends one key/value pair to the info display.
    pub fn add_info(&mut self, key: impl Into<String>, value: impl Into<String>) {
        self.info.push((key.into(), value.into()));
    }

    pub fn last_mouse_position(&self) -> &[f32; 2] {
        &self.last_mouse_position
    }

    pub fn set_last_mouse_position(&mut self, position: [f32; 2]) {
        self.last_mouse_position = position;
    }

    pub fn last_mouse_down_position(&self) -> &[f32; 2] {
        &self.last_mouse_down_position
    }

    pub fn set_last_mouse_down_position(&mut self, position: [f32; 2]) {
        self.last_mouse_down_position = position;
    }

    pub fn set_last_mouse_up_position(&mut self, position: [f32; 2]) {
        self.last_mouse_up_position = position;
    }

    pub fn is_2d_mode(&self) -> bool {
        self.is_2d_mode
    }

    pub fn set_is_2d_mode(&mut self, enabled: bool) {
        self.is_2d_mode = enabled;
    }

    pub fn focused(&self) -> &Focused {
        &self.focused
    }

    pub fn set_focused(&mut self, target: Focused) {
        self.focused = target;
    }

    /// Modeless tab currently being dragged, if any.
    pub fn moving_tab(&self) -> Option<&(ModelessId, usize)> {
        self.moving_tab.as_ref()
    }

    pub fn set_moving_tab(&mut self, tab: Option<(ModelessId, usize)>) {
        self.moving_tab = tab;
    }

    /// Block currently floating under the cursor, if any.
    pub fn floating_object(&self) -> Option<&BlockId> {
        self.floating_object.as_ref()
    }

    pub fn set_floating_object(&mut self, object: Option<BlockId>) {
        self.floating_object = object;
    }
}
|
#![feature(core)]
#![feature(path_ext)]
#![feature(convert)]
#![allow(deprecated)]
extern crate term;
extern crate itertools;
mod version;
mod paths;
mod downloader;
mod builder;
mod reporter;
use std::rc::Rc;
use paths::Paths;
use downloader::Downloader;
use builder::Builder;
/// Entry point: dump the environment, then download and build nginx,
/// sharing the path configuration via `Rc`.
fn main() {
    dump_env();
    let paths_rc = Rc::new(Paths::new());
    let nginx_downloader = Downloader::new(paths_rc.clone());
    nginx_downloader.download();
    let nginx_builder = Builder::new(paths_rc.clone());
    nginx_builder.build();
}
/// Prints every process environment variable, names right-aligned.
fn dump_env() {
    use reporter::report;
    use std::env;
    report("Dumping", "Environment");
    env::vars().for_each(|(key, value)| println!("{:>30}: {}", key, value));
}
|
use bincode::{deserialize, serialize};
use faster_hex::hex_string;
use hash::blake2b_256;
use numext_fixed_hash::H256;
use numext_fixed_uint::U256;
use serde_derive::{Deserialize, Serialize};
use std::{fmt, mem};
pub use crate::{BlockNumber, Version};
/// Current block-header format version.
pub const HEADER_VERSION: Version = 0;
/// Seal attached to a block header: a nonce plus opaque proof bytes.
#[derive(Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
pub struct Seal {
    /// Nonce value.
    nonce: u64,
    /// Proof bytes, serialized compactly via `serde_bytes`.
    #[serde(with = "serde_bytes")]
    proof: Vec<u8>,
}
// Manual Debug so the proof renders as a 0x-prefixed hex string instead
// of a raw Vec<u8> byte list.
impl fmt::Debug for Seal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Seal")
            .field("nonce", &self.nonce)
            .field(
                "proof",
                &format_args!("0x{}", &hex_string(&self.proof).expect("hex proof")),
            )
            .finish()
    }
}
impl Seal {
pub fn new(nonce: u64, proof: Vec<u8>) -> Self {
Seal { nonce, proof }
}
pub fn destruct(self) -> (u64, Vec<u8>) {
let Seal { nonce, proof } = self;
(nonce, proof)
}
}
/// Header fields that are covered by the proof-of-work hash (everything
/// except the seal).
#[derive(Clone, Serialize, Deserialize, PartialEq, Eq, Debug, Default)]
pub struct RawHeader {
    /// Header format version.
    version: Version,
    /// Parent hash.
    parent_hash: H256,
    /// Block timestamp(ms).
    timestamp: u64,
    /// Genesis number is 0, Child block number is parent block number + 1.
    number: BlockNumber,
    /// Transactions merkle root.
    transactions_root: H256,
    /// Transactions proposal merkle root.
    proposals_root: H256,
    /// Witness hash commitment.
    witnesses_root: H256,
    /// Block difficulty.
    difficulty: U256,
    /// Hash of the uncles
    uncles_hash: H256,
    /// Number of the uncles
    uncles_count: u32,
}
impl RawHeader {
    /// Proof-of-work hash: blake2b-256 of the serialized raw header.
    pub fn pow_hash(&self) -> H256 {
        blake2b_256(serialize(self).expect("RawHeader serialize should not fail")).into()
    }

    /// Attaches a seal, completing the header.
    pub fn with_seal(self, seal: Seal) -> Header {
        // Construct directly: the previous `HeaderBuilder { inner }.build()`
        // detour returned `inner` unchanged and added nothing.
        Header { raw: self, seal }
    }

    /// Block number (0 for genesis).
    pub fn number(&self) -> BlockNumber {
        self.number
    }

    /// Block difficulty.
    pub fn difficulty(&self) -> &U256 {
        &self.difficulty
    }

    /// Number of uncles referenced by this header.
    pub fn uncles_count(&self) -> u32 {
        self.uncles_count
    }

    /// Mutable access to the uncle count.
    pub fn mut_uncles_count(&mut self) -> &mut u32 {
        &mut self.uncles_count
    }

    // temp
    /// Fixed serialized byte size of a `RawHeader`: version + 5 hashes +
    /// difficulty + timestamp and number (two u64) + uncles_count.
    pub const fn serialized_size() -> usize {
        mem::size_of::<Version>()
            + H256::size_of() * 5
            + U256::size_of()
            + mem::size_of::<u64>() * 2
            + mem::size_of::<u32>()
    }
}
/// Complete block header: the raw (PoW-covered) fields plus the seal.
/// `Eq` is derived, but `PartialEq` is implemented manually by hash below.
#[derive(Clone, Serialize, Deserialize, Default, Eq)]
pub struct Header {
    raw: RawHeader,
    /// proof seal
    seal: Seal,
}
// Manual Debug: leads with the (recomputed) block hash and renders every
// hash-like field in `{:#x}` hex, matching Seal's Debug style.
impl fmt::Debug for Header {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Header")
            .field("hash", &format_args!("{:#x}", &self.hash()))
            .field("version", &self.raw.version)
            .field("parent_hash", &format_args!("{:#x}", self.raw.parent_hash))
            .field("timestamp", &self.raw.timestamp)
            .field("number", &self.raw.number)
            .field(
                "transactions_root",
                &format_args!("{:#x}", self.raw.transactions_root),
            )
            .field(
                "proposals_root",
                &format_args!("{:#x}", self.raw.proposals_root),
            )
            .field(
                "witnesses_root",
                &format_args!("{:#x}", self.raw.witnesses_root),
            )
            .field("difficulty", &format_args!("{:#x}", self.raw.difficulty))
            .field("uncles_hash", &format_args!("{:#x}", self.raw.uncles_hash))
            .field("uncles_count", &self.raw.uncles_count)
            .field("seal", &self.seal)
            .finish()
    }
}
impl Header {
    /// Total serialized size: raw header + proof bytes + the u64 nonce.
    pub fn serialized_size(proof_size: usize) -> usize {
        RawHeader::serialized_size() + proof_size + mem::size_of::<u64>()
    }

    /// Header format version.
    pub fn version(&self) -> u32 {
        self.raw.version
    }

    /// The seal attached to this header.
    pub fn seal(&self) -> &Seal {
        &self.seal
    }

    /// Block number, delegated to the raw header.
    pub fn number(&self) -> BlockNumber {
        self.raw.number()
    }

    /// Block difficulty, delegated to the raw header.
    pub fn difficulty(&self) -> &U256 {
        self.raw.difficulty()
    }

    /// Block timestamp in milliseconds.
    pub fn timestamp(&self) -> u64 {
        self.raw.timestamp
    }

    /// Seal proof bytes.
    pub fn proof(&self) -> &[u8] {
        &self.seal.proof
    }

    /// Seal nonce.
    pub fn nonce(&self) -> u64 {
        self.seal.nonce
    }

    /// Block hash: blake2b-256 over the full serialized header (raw + seal).
    pub fn hash(&self) -> H256 {
        blake2b_256(serialize(&self).expect("Header serialize should not fail")).into()
    }

    /// Proof-of-work hash (computed from the raw part only).
    pub fn pow_hash(&self) -> H256 {
        self.raw.pow_hash()
    }

    /// True for the genesis block (number 0).
    pub fn is_genesis(&self) -> bool {
        self.number() == 0
    }

    pub fn parent_hash(&self) -> &H256 {
        &self.raw.parent_hash
    }

    pub fn transactions_root(&self) -> &H256 {
        &self.raw.transactions_root
    }

    pub fn proposals_root(&self) -> &H256 {
        &self.raw.proposals_root
    }

    pub fn witnesses_root(&self) -> &H256 {
        &self.raw.witnesses_root
    }

    pub fn uncles_hash(&self) -> &H256 {
        &self.raw.uncles_hash
    }

    /// Borrows the raw (unsealed) part.
    pub fn raw(&self) -> &RawHeader {
        &self.raw
    }

    /// Discards the seal, keeping only the raw header.
    pub fn into_raw(self) -> RawHeader {
        self.raw
    }

    /// Uncle count, delegated to the raw header.
    pub fn uncles_count(&self) -> u32 {
        self.raw.uncles_count()
    }
}
// Equality is defined as hash equality (both hashes recomputed on each
// comparison) rather than field-wise comparison.
impl PartialEq for Header {
    fn eq(&self, other: &Header) -> bool {
        self.hash() == other.hash()
    }
}
/// Chainable builder that mutates an owned `Header` field by field.
#[derive(Default)]
pub struct HeaderBuilder {
    inner: Header,
}
impl HeaderBuilder {
    /// Starts from a serialized header; panics if `bytes` fail to deserialize.
    pub fn new(bytes: &[u8]) -> Self {
        HeaderBuilder {
            inner: deserialize(bytes).expect("header deserializing should be ok"),
        }
    }
    /// Replaces the entire header being built.
    pub fn header(mut self, header: Header) -> Self {
        self.inner = header;
        self
    }
    // Each setter below overwrites one field and returns the builder for
    // chaining; `build` yields the accumulated header.
    pub fn seal(mut self, seal: Seal) -> Self {
        self.inner.seal = seal;
        self
    }
    pub fn version(mut self, version: u32) -> Self {
        self.inner.raw.version = version;
        self
    }
    pub fn number(mut self, number: BlockNumber) -> Self {
        self.inner.raw.number = number;
        self
    }
    pub fn difficulty(mut self, difficulty: U256) -> Self {
        self.inner.raw.difficulty = difficulty;
        self
    }
    pub fn timestamp(mut self, timestamp: u64) -> Self {
        self.inner.raw.timestamp = timestamp;
        self
    }
    pub fn proof(mut self, proof: Vec<u8>) -> Self {
        self.inner.seal.proof = proof;
        self
    }
    pub fn nonce(mut self, nonce: u64) -> Self {
        self.inner.seal.nonce = nonce;
        self
    }
    pub fn parent_hash(mut self, hash: H256) -> Self {
        self.inner.raw.parent_hash = hash;
        self
    }
    pub fn transactions_root(mut self, hash: H256) -> Self {
        self.inner.raw.transactions_root = hash;
        self
    }
    pub fn proposals_root(mut self, hash: H256) -> Self {
        self.inner.raw.proposals_root = hash;
        self
    }
    pub fn witnesses_root(mut self, hash: H256) -> Self {
        self.inner.raw.witnesses_root = hash;
        self
    }
    pub fn uncles_hash(mut self, hash: H256) -> Self {
        self.inner.raw.uncles_hash = hash;
        self
    }
    pub fn uncles_count(mut self, uncles_count: u32) -> Self {
        self.inner.raw.uncles_count = uncles_count;
        self
    }
    /// Finishes the build, yielding the accumulated header.
    pub fn build(self) -> Header {
        self.inner
    }
}
|
// thread 'rustc' panicked at 'not implemented: ty=Closure(DefId(0:4 ~ place_utils_81[317d]::main::{closure#0}), [i16, extern "rust-call" fn((i32,)), (&mut std::vec::Vec<i32>,)])'
// analysis/src/abstract_domains/place_utils.rs:81:17
// Minimal reproduction for the analyzer panic above: a closure that
// captures `v` mutably. Intentionally left exactly as-is — the unused,
// never-consumed `map` is the trigger; do not "fix" it.
fn main() {
    let mut v: Vec<i32> = Vec::new();
    let _ = (0..1).map(|_| {
        v = Vec::new();
    });
}
use ash::version::DeviceV1_0;
use ash::{vk, Device};
use std::ffi::CString;
use anyhow::Result;
use super::create_shader_module;
use crate::vulkan::{texture::Texture, GfaestusVk};
use crate::{geometry::Point, vulkan::render_pass::Framebuffers};
/// Full-screen post-processing pass: one graphics pipeline plus the single
/// descriptor set (and its pool/layout) that feeds the fragment shader.
pub struct PostProcessPipeline {
    descriptor_pool: vk::DescriptorPool,
    descriptor_set_layout: vk::DescriptorSetLayout,
    descriptor_set: vk::DescriptorSet,
    pipeline_layout: vk::PipelineLayout,
    pipeline: vk::Pipeline,
}
impl PostProcessPipeline {
pub fn new(
app: &GfaestusVk,
image_count: u32,
render_pass: vk::RenderPass,
frag_src: &[u8],
) -> Result<Self> {
let vk_context = app.vk_context();
let device = vk_context.device();
let layout = Self::create_descriptor_set_layout(device)?;
let descriptor_pool = {
let pool_size = vk::DescriptorPoolSize {
ty: vk::DescriptorType::COMBINED_IMAGE_SAMPLER,
descriptor_count: image_count,
};
let pool_sizes = [pool_size];
let pool_info = vk::DescriptorPoolCreateInfo::builder()
.pool_sizes(&pool_sizes)
.max_sets(image_count)
.build();
unsafe { device.create_descriptor_pool(&pool_info, None) }
}?;
let descriptor_sets = {
let layouts = vec![layout];
let alloc_info = vk::DescriptorSetAllocateInfo::builder()
.descriptor_pool(descriptor_pool)
.set_layouts(&layouts)
.build();
unsafe { device.allocate_descriptor_sets(&alloc_info) }
}?;
let (pipeline, pipeline_layout) =
Self::create_pipeline(device, render_pass, layout, frag_src);
Ok(Self {
descriptor_pool,
descriptor_set_layout: layout,
descriptor_set: descriptor_sets[0],
pipeline_layout,
pipeline,
})
}
pub fn new_buffer_read(
app: &GfaestusVk,
image_count: u32,
render_pass: vk::RenderPass,
frag_src: &[u8],
) -> Result<Self> {
let vk_context = app.vk_context();
let device = vk_context.device();
let layout = Self::create_buffer_descriptor_set_layout(device)?;
let descriptor_pool = {
let pool_size = vk::DescriptorPoolSize {
ty: vk::DescriptorType::STORAGE_BUFFER,
descriptor_count: image_count,
};
let pool_sizes = [pool_size];
let pool_info = vk::DescriptorPoolCreateInfo::builder()
.pool_sizes(&pool_sizes)
.max_sets(image_count)
.build();
unsafe { device.create_descriptor_pool(&pool_info, None) }
}?;
let descriptor_sets = {
let layouts = vec![layout];
let alloc_info = vk::DescriptorSetAllocateInfo::builder()
.descriptor_pool(descriptor_pool)
.set_layouts(&layouts)
.build();
unsafe { device.allocate_descriptor_sets(&alloc_info) }
}?;
let (pipeline, pipeline_layout) =
Self::create_pipeline(device, render_pass, layout, frag_src);
Ok(Self {
descriptor_pool,
descriptor_set_layout: layout,
descriptor_set: descriptor_sets[0],
pipeline_layout,
pipeline,
})
}
pub fn write_buffer_descriptor_set(
&mut self,
device: &Device,
buffer: vk::Buffer,
) {
let pixels_buf_info = vk::DescriptorBufferInfo::builder()
.buffer(buffer)
.offset(0)
.range(vk::WHOLE_SIZE)
.build();
let pixels_buf_infos = [pixels_buf_info];
let pixels = vk::WriteDescriptorSet::builder()
.dst_set(self.descriptor_set)
.dst_binding(0)
.dst_array_element(0)
.descriptor_type(vk::DescriptorType::STORAGE_BUFFER)
.buffer_info(&pixels_buf_infos)
.build();
let descriptor_writes = [pixels];
unsafe { device.update_descriptor_sets(&descriptor_writes, &[]) }
}
pub fn write_descriptor_set(
&mut self,
device: &Device,
new_image: Texture,
sampler: Option<vk::Sampler>,
) {
let sampler = sampler.unwrap_or_else(|| new_image.sampler.unwrap());
let image_info = vk::DescriptorImageInfo::builder()
.image_layout(vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL)
.image_view(new_image.view)
.sampler(sampler)
.build();
let image_infos = [image_info];
let sampler_descriptor_write = vk::WriteDescriptorSet::builder()
.dst_set(self.descriptor_set)
.dst_binding(0)
.dst_array_element(0)
.descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)
.image_info(&image_infos)
.build();
let descriptor_writes = [sampler_descriptor_write];
unsafe { device.update_descriptor_sets(&descriptor_writes, &[]) }
}
pub fn draw(
&self,
device: &Device,
cmd_buf: vk::CommandBuffer,
render_pass: vk::RenderPass,
framebuffers: &Framebuffers,
screen_size: Point,
sample_size: Point,
) -> Result<()> {
let clear_values = {
[vk::ClearValue {
color: vk::ClearColorValue {
float32: [0.0, 0.0, 0.0, 0.0],
},
}]
};
let extent = vk::Extent2D {
width: screen_size.x as u32,
height: screen_size.y as u32,
};
let render_pass_begin_info = vk::RenderPassBeginInfo::builder()
.render_pass(render_pass)
.framebuffer(framebuffers.selection_blur)
.render_area(vk::Rect2D {
offset: vk::Offset2D { x: 0, y: 0 },
extent,
})
.clear_values(&clear_values)
.build();
unsafe {
device.cmd_begin_render_pass(
cmd_buf,
&render_pass_begin_info,
vk::SubpassContents::INLINE,
)
};
unsafe {
device.cmd_bind_pipeline(
cmd_buf,
vk::PipelineBindPoint::GRAPHICS,
self.pipeline,
)
};
let desc_sets = [self.descriptor_set];
unsafe {
let null = [];
device.cmd_bind_descriptor_sets(
cmd_buf,
vk::PipelineBindPoint::GRAPHICS,
self.pipeline_layout,
0,
&desc_sets[0..=0],
&null,
);
};
let push_constants = PushConstants::new(sample_size, screen_size, true);
let pc_bytes = push_constants.bytes();
unsafe {
use vk::ShaderStageFlags as Flags;
device.cmd_push_constants(
cmd_buf,
self.pipeline_layout,
Flags::VERTEX | Flags::FRAGMENT,
0,
&pc_bytes,
)
};
unsafe { device.cmd_draw(cmd_buf, 3u32, 1, 0, 0) };
// End render pass
unsafe { device.cmd_end_render_pass(cmd_buf) };
Ok(())
}
pub fn destroy(&self, device: &Device) {
unsafe {
device.destroy_descriptor_set_layout(
self.descriptor_set_layout,
None,
);
device.destroy_descriptor_pool(self.descriptor_pool, None);
device.destroy_pipeline(self.pipeline, None);
device.destroy_pipeline_layout(self.pipeline_layout, None);
}
}
fn layout_binding() -> vk::DescriptorSetLayoutBinding {
use vk::ShaderStageFlags as Stages;
vk::DescriptorSetLayoutBinding::builder()
.binding(0)
.descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)
.descriptor_count(1)
.stage_flags(Stages::FRAGMENT)
.build()
}
/// Creates the descriptor-set layout for the sampled-image binding
/// (see `layout_binding`).
fn create_descriptor_set_layout(
    device: &Device,
) -> Result<vk::DescriptorSetLayout> {
    let bindings = [Self::layout_binding()];
    let info = vk::DescriptorSetLayoutCreateInfo::builder()
        .bindings(&bindings)
        .build();
    let layout = unsafe { device.create_descriptor_set_layout(&info, None) }?;
    Ok(layout)
}
/// Creates a descriptor-set layout with a single fragment-stage storage
/// buffer at binding 0.
fn create_buffer_descriptor_set_layout(
    device: &Device,
) -> Result<vk::DescriptorSetLayout> {
    let buffer_binding = vk::DescriptorSetLayoutBinding::builder()
        .binding(0)
        .descriptor_type(vk::DescriptorType::STORAGE_BUFFER)
        .descriptor_count(1)
        .stage_flags(vk::ShaderStageFlags::FRAGMENT)
        .build();
    let bindings = [buffer_binding];
    let info = vk::DescriptorSetLayoutCreateInfo::builder()
        .bindings(&bindings)
        .build();
    let layout = unsafe { device.create_descriptor_set_layout(&info, None) }?;
    Ok(layout)
}
/// Builds this pass's pipeline, pairing the shared fullscreen vertex
/// shader (`post/post.vert.spv`) with the caller-supplied fragment shader.
fn create_pipeline(
    device: &Device,
    render_pass: vk::RenderPass,
    descriptor_set_layout: vk::DescriptorSetLayout,
    frag_src: &[u8],
) -> (vk::Pipeline, vk::PipelineLayout) {
    // Delegates to the module-level `create_pipeline` below (inside an
    // impl, the unqualified call resolves to the free function).
    create_pipeline(
        device,
        render_pass,
        descriptor_set_layout,
        crate::include_shader!("post/post.vert.spv"),
        frag_src,
    )
}
}
/// Builds a fullscreen post-processing graphics pipeline and its layout.
///
/// Fixed characteristics: no vertex inputs (geometry comes from the vertex
/// shader), triangle-list topology, dynamic viewport/scissor, no culling,
/// 1x multisampling, straight alpha blending, a single descriptor-set
/// layout, and one push-constant range (`PushConstants::PC_RANGE` bytes)
/// shared by the vertex and fragment stages.
///
/// Panics (via `unwrap`) on malformed SPIR-V or device-level failure.
pub(crate) fn create_pipeline(
    device: &Device,
    render_pass: vk::RenderPass,
    descriptor_set_layout: vk::DescriptorSetLayout,
    vert_shader: &[u8],
    frag_shader: &[u8],
) -> (vk::Pipeline, vk::PipelineLayout) {
    // Re-interpret the raw shader bytes as aligned SPIR-V words.
    let vert_src = {
        let mut cursor = std::io::Cursor::new(vert_shader);
        ash::util::read_spv(&mut cursor).unwrap()
    };
    let frag_src = {
        let mut cursor = std::io::Cursor::new(frag_shader);
        ash::util::read_spv(&mut cursor).unwrap()
    };
    let vert_module = create_shader_module(device, &vert_src);
    let frag_module = create_shader_module(device, &frag_src);
    // Both stages enter at "main".
    let entry_point = CString::new("main").unwrap();
    let vert_state_info = vk::PipelineShaderStageCreateInfo::builder()
        .stage(vk::ShaderStageFlags::VERTEX)
        .module(vert_module)
        .name(&entry_point)
        .build();
    let frag_state_info = vk::PipelineShaderStageCreateInfo::builder()
        .stage(vk::ShaderStageFlags::FRAGMENT)
        .module(frag_module)
        .name(&entry_point)
        .build();
    let shader_state_infos = [vert_state_info, frag_state_info];
    // Empty vertex input state: the fullscreen triangle has no buffers.
    let vert_input_info =
        vk::PipelineVertexInputStateCreateInfo::builder().build();
    let input_assembly_info =
        vk::PipelineInputAssemblyStateCreateInfo::builder()
            .topology(vk::PrimitiveTopology::TRIANGLE_LIST)
            .primitive_restart_enable(false)
            .build();
    // Counts only — the actual viewport/scissor are dynamic state, set per
    // command buffer.
    let viewport_info = vk::PipelineViewportStateCreateInfo::builder()
        .viewport_count(1)
        .scissor_count(1)
        .build();
    let dynamic_states = {
        use vk::DynamicState as DS;
        [DS::VIEWPORT, DS::SCISSOR]
    };
    let dynamic_state_info = vk::PipelineDynamicStateCreateInfo::builder()
        .dynamic_states(&dynamic_states)
        .build();
    let rasterizer_info = vk::PipelineRasterizationStateCreateInfo::builder()
        .depth_clamp_enable(false)
        .rasterizer_discard_enable(false)
        .polygon_mode(vk::PolygonMode::FILL)
        .line_width(1.0)
        .cull_mode(vk::CullModeFlags::NONE)
        .front_face(vk::FrontFace::COUNTER_CLOCKWISE)
        .depth_bias_enable(false)
        .depth_bias_constant_factor(0.0)
        .depth_bias_clamp(0.0)
        .depth_bias_slope_factor(0.0)
        .build();
    let multisampling_info = vk::PipelineMultisampleStateCreateInfo::builder()
        .sample_shading_enable(false)
        .rasterization_samples(vk::SampleCountFlags::TYPE_1)
        .min_sample_shading(1.0)
        .alpha_to_coverage_enable(false)
        .alpha_to_one_enable(false)
        .build();
    // Classic "source over" alpha blending on color and alpha channels.
    let color_blend_attachment =
        vk::PipelineColorBlendAttachmentState::builder()
            .color_write_mask(vk::ColorComponentFlags::all())
            .blend_enable(true)
            .src_color_blend_factor(vk::BlendFactor::SRC_ALPHA)
            .dst_color_blend_factor(vk::BlendFactor::ONE_MINUS_SRC_ALPHA)
            .color_blend_op(vk::BlendOp::ADD)
            .src_alpha_blend_factor(vk::BlendFactor::SRC_ALPHA)
            .dst_alpha_blend_factor(vk::BlendFactor::ONE_MINUS_SRC_ALPHA)
            .alpha_blend_op(vk::BlendOp::ADD)
            .build();
    let color_blend_attachments = [color_blend_attachment];
    let color_blending_info = vk::PipelineColorBlendStateCreateInfo::builder()
        .logic_op_enable(false)
        .logic_op(vk::LogicOp::COPY)
        .attachments(&color_blend_attachments)
        .blend_constants([0.0, 0.0, 0.0, 0.0])
        .build();
    let layout = {
        use vk::ShaderStageFlags as Flags;
        let layouts = [descriptor_set_layout];
        // Single push-constant range visible to both stages.
        let pc_range = vk::PushConstantRange::builder()
            .stage_flags(Flags::VERTEX | Flags::FRAGMENT)
            .offset(0)
            .size(PushConstants::PC_RANGE)
            .build();
        let pc_ranges = [pc_range];
        let layout_info = vk::PipelineLayoutCreateInfo::builder()
            .set_layouts(&layouts)
            .push_constant_ranges(&pc_ranges)
            .build();
        unsafe { device.create_pipeline_layout(&layout_info, None).unwrap() }
    };
    let pipeline_info = vk::GraphicsPipelineCreateInfo::builder()
        .stages(&shader_state_infos)
        .vertex_input_state(&vert_input_info)
        .input_assembly_state(&input_assembly_info)
        .viewport_state(&viewport_info)
        .dynamic_state(&dynamic_state_info)
        .rasterization_state(&rasterizer_info)
        .multisample_state(&multisampling_info)
        .color_blend_state(&color_blending_info)
        .layout(layout)
        .render_pass(render_pass)
        .subpass(0)
        .build();
    let pipeline_infos = [pipeline_info];
    let pipeline = unsafe {
        device
            .create_graphics_pipelines(
                vk::PipelineCache::null(),
                &pipeline_infos,
                None,
            )
            .unwrap()[0]
    };
    // Shader modules may be destroyed as soon as the pipeline exists.
    unsafe {
        device.destroy_shader_module(vert_module, None);
        device.destroy_shader_module(frag_module, None);
    }
    (pipeline, layout)
}
/// CPU-side mirror of the post-pass push-constant block; see `bytes` for
/// the serialized layout.
pub struct PushConstants {
    source_size: Point, // presumably the sampled input's size — confirm against shader
    target_size: Point, // presumably the render target's size — confirm against shader
    enabled: bool,      // on/off flag forwarded to the shader
}
impl PushConstants {
    /// Size of the push-constant block in bytes: four f32s plus one u32 flag.
    pub const PC_RANGE: u32 =
        (std::mem::size_of::<u32>() + std::mem::size_of::<f32>() * 4) as u32;
    /// Bundles the two sizes and the enable flag.
    #[inline]
    pub fn new(source_size: Point, target_size: Point, enabled: bool) -> Self {
        Self {
            source_size,
            target_size,
            enabled,
        }
    }
    /// Serializes the block for `cmd_push_constants`.
    ///
    /// Bytes 0..16 hold source.x, source.y, target.x, target.y as native-
    /// endian f32; byte 19 carries the enabled flag.
    #[inline]
    pub fn bytes(&self) -> [u8; 20] {
        let mut bytes = [0u8; Self::PC_RANGE as usize];
        {
            let mut offset = 0;
            // Appends one f32 at the running offset (native endianness).
            let mut add_float = |f: f32| {
                let f_bytes = f.to_ne_bytes();
                for i in 0..4 {
                    bytes[offset] = f_bytes[i];
                    offset += 1;
                }
            };
            add_float(self.source_size.x);
            add_float(self.source_size.y);
            add_float(self.target_size.x);
            add_float(self.target_size.y);
        }
        // NOTE(review): the flag lands in byte 19 only. On a little-endian
        // host that is the MOST significant byte of the u32 at offset 16
        // (the shader sees 0x0100_0000, not 1). Confirm the shader tests
        // `!= 0` rather than `== 1`.
        if self.enabled {
            bytes[19] = 1;
        } else {
            bytes[19] = 0;
        }
        bytes
    }
}
|
use std::borrow::Cow;
/// Lazy shell-style argument splitter over a borrowed input string.
struct Args<'a> {
    data: &'a str,  // the full input line
    offset: usize,  // byte offset where the next argument starts
}
impl<'a> Iterator for Args<'a> {
    type Item = Result<Cow<'a, str>, Error>;
    /// Scans the next argument starting at `self.offset`.
    ///
    /// Returns `None` at end of input, `Some(Ok(arg))` for the next
    /// argument (borrowed from the input when possible, owned when an
    /// escape forced a rewrite), or `Some(Err(..))` on malformed input.
    fn next(&mut self) -> Option<Self::Item> {
        // Accumulator for the argument under construction:
        // - Borrowed(start, len): still a verbatim slice of the input
        // - Owned(buf): an escape was decoded, so a copy was required
        // - Empty: nothing consumed yet
        #[derive(Eq, PartialEq)]
        enum State {
            Borrowed(usize, usize),
            Owned(String),
            Empty,
        }
        let Self { ref data, offset } = self;
        let mut quote = false;  // inside a double-quoted section
        let mut escape = false; // previous char was a pending '\'
        let mut state = State::Empty;
        for c in data[*offset..].chars() {
            let start = *offset;
            let bytes = c.len_utf8();
            *offset += bytes;
            if c.is_ascii_whitespace() {
                // Whitespace terminates the argument unless quoted.
                if escape {
                    return Some(Err(Error::UnknownEscape(c)));
                }
                match &mut state {
                    State::Borrowed(_, _) | State::Owned(_) if !quote => break,
                    State::Borrowed(_, length) => *length += bytes,
                    State::Owned(data) => data.push(c),
                    State::Empty => {}
                }
                continue;
            }
            if escape {
                // Decode the escape. Any accumulated borrow must become an
                // owned buffer because the output now differs from the input.
                escape = false;
                let c = match c {
                    'r' => '\r',
                    'n' => '\n',
                    't' => '\t',
                    '"' => '"',
                    '\\' => '\\',
                    _ => return Some(Err(Error::UnknownEscape(c))),
                };
                match &mut state {
                    State::Borrowed(offset, length) => {
                        let mut data = data[*offset..*offset + *length].to_owned();
                        data.push(c);
                        state = State::Owned(data);
                    }
                    State::Owned(data) => data.push(c),
                    State::Empty => state = State::Owned(c.to_string()),
                }
                continue;
            }
            if c == '"' {
                // A closing quote ends the argument; an opening quote is
                // only legal at the very start of one.
                if quote {
                    quote = false;
                    break;
                }
                if state != State::Empty {
                    return Some(Err(Error::UnexpectedQuote));
                }
                quote = true;
                state = State::Borrowed(*offset, 0);
                continue;
            }
            if c == '\\' {
                escape = true;
                continue;
            }
            // Ordinary character: extend the current accumulator.
            match &mut state {
                State::Borrowed(_, length) => *length += bytes,
                State::Owned(data) => data.push(c),
                State::Empty => state = State::Borrowed(start, bytes),
            }
        }
        // A quote or escape left dangling at end of input is an error.
        if quote {
            return Some(Err(Error::UnfinishedQuote));
        }
        if escape {
            return Some(Err(Error::UnfinishedEscape));
        }
        match state {
            State::Borrowed(offset, length) => {
                Some(Cow::Borrowed(&self.data[offset..offset + length]))
            }
            State::Owned(data) => Some(Cow::Owned(data)),
            State::Empty => None,
        }
        .map(Ok)
    }
}
/// Parse errors produced while splitting shell-style arguments.
#[derive(Debug)]
pub enum Error {
    /// A `"` appeared mid-argument instead of at its start.
    UnexpectedQuote,
    /// Input ended immediately after a `\`.
    UnfinishedEscape,
    /// Input ended inside a quoted section.
    UnfinishedQuote,
    /// `\x` where `x` is not one of `r`, `n`, `t`, `"`, `\`.
    UnknownEscape(char),
}
/// Splits `data` into shell-style arguments: whitespace-separated, with
/// `"` quoting and backslash escapes. Arguments borrow from `data` unless
/// an escape forces an owned copy.
pub fn args(data: &str) -> impl Iterator<Item = Result<Cow<'_, str>, Error>> {
    Args { data, offset: 0 }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Splits `input` and asserts the resulting argument list; panics on
    /// any parse error (used by the `#[should_panic]` tests).
    fn check(input: &str, output: &[&str]) {
        let input = args(input).map(|item| item.unwrap()).collect::<Vec<_>>();
        assert_eq!(&input, output);
    }
    #[test]
    fn words() {
        check("  hello,  world", &["hello,", "world"]);
        check("", &[]);
    }
    #[test]
    fn quotes() {
        check(r#"hello "world""#, &["hello", "world"]);
        check(r#"hello "nice world""#, &["hello", "nice world"]);
        check(r#""""#, &[""]);
    }
    #[test]
    fn escapes() {
        check(r#"\r \n \""#, &["\r", "\n", "\""]);
        check(r#""quoted \\\r\n\t\"""#, &["quoted \\\r\n\t\""]);
    }
    #[test]
    #[should_panic]
    fn unexpected_quote() {
        check(r#"abcd"hello""#, &[]);
    }
    // Added coverage for the remaining error variants.
    #[test]
    #[should_panic]
    fn unfinished_quote() {
        check(r#""never closed"#, &[]);
    }
    #[test]
    #[should_panic]
    fn unfinished_escape() {
        check(r"trailing \", &[]);
    }
    #[test]
    #[should_panic]
    fn unknown_escape() {
        check(r"\q", &[]);
    }
}
|
use proconio::input;
fn main() {
    // All four integers arrive on the same whitespace-separated stdin
    // stream, so a single input! block reads exactly the same tokens as
    // the original two consecutive blocks.
    input! {
        _a: i64,
        b: i64,
        c: i64,
        _d: i64,
    };
    println!("{}", b - c);
}
|
fn main() {
    // Build script: generate gRPC-web bindings for the chat protocol.
    let codegen = rust_grpc_web::configure();
    codegen
        .compile(&["../proto/chat.proto"], &["../proto/"])
        .unwrap();
}
|
use std::io;
use std::str::Split;
use std::collections::{HashMap, BTreeMap};
use std::io::{BufReader, BufRead};
use std::path::Path;
use std::fs::File;
use crate::draw::{ObjDef, Vertex, load_data_to_gpu, MtlInfo, Light};
use glium::{Display, texture::Texture2d};
use derive_more::{Error, From};
use crate::quadoctree::{QuadOctreeNode, QuadOctreeError, add_obj_to_quadoctree};
use crate::textures::{load_texture, TextureLoadError};
use crate::assets::find_asset;
// Object-name prefixes in the .obj file that select special handling in
// `process_obj` (collision geometry, point lights, terrain, misc markers).
const COLLISION_PREFIX: &str = "collision_";
const LIGHT_PREFIX: &str = "light_";
const TERRAIN_PREFIX: &str = "terrain_";
const MISC_PREFIX: &str = "misc_";
/// Errors raised while loading a wavefront .obj/.mtl file.
#[derive(Debug, derive_more::Display, Error, From)]
pub enum WavefrontLoadError {
    // Structural problem in the file itself (not auto-converted via From).
    #[from(ignore)]
    FormatError { msg: &'static str },
    // A face referenced a vertex/normal/texcoord index that does not exist.
    #[from(ignore)]
    BadIndexError { msg: &'static str },
    IOError(io::Error),
    FloatParseError(std::num::ParseFloatError),
    IntParseError(std::num::ParseIntError),
    TextureLoadError(TextureLoadError),
    QuadOctreeCreateError(QuadOctreeError)
}
/// How an object is handled, chosen by its name prefix (see *_PREFIX).
#[derive(PartialEq)]
enum MeshType {
    Normal,    // uploaded to the GPU and rendered
    Terrain,   // rendered AND added to the collision quadoctree
    Collision, // collision geometry only, never rendered
    Light,     // point-light marker; position taken from its first vertex
    Misc       // generic marker; position taken from its first vertex
}
/// Parses three float components from `split` and scales them
/// component-wise (pass `[1., 1., 1.]` for normals).
fn parse_vertex_or_normal(split: &mut Split<char>, scale: &[f32; 3]) -> Result<[f32; 3], WavefrontLoadError> {
    let mut components = [0.0f32; 3];
    for (component, factor) in components.iter_mut().zip(scale.iter()) {
        let token = split.next().ok_or(WavefrontLoadError::FormatError { msg: "Vertex or normal has too few components" })?;
        *component = token.trim().parse::<f32>()? * factor;
    }
    Ok(components)
}
/// Parses a two-component texture coordinate from `split`.
fn parse_texcoords(split: &mut Split<char>) -> Result<[f32; 2], WavefrontLoadError> {
    let mut uv = [0.0f32; 2];
    for slot in uv.iter_mut() {
        let token = split.next().ok_or(WavefrontLoadError::FormatError { msg: "Tex coords have too few components" })?;
        *slot = token.trim().parse()?;
    }
    Ok(uv)
}
/// Parses one `f` directive (triangle or quad) into `vertices`/`indices`.
///
/// Each face vertex must be the full `v/t/n` (or `v//n`) form with 1-based
/// indices. Quads are emitted as two triangles. Indices are pushed in
/// reversed order (2,1,0) — apparently to flip the winding; confirm
/// against the renderer's front-face setting.
fn parse_face(split: &mut Split<char>, vertex_info: &Vec<[f32; 3]>, normal_info: &Vec<[f32; 3]>,
    texcoord_info: &Vec<[f32; 2]>, vertices: &mut Vec<Vertex>, indices: &mut Vec<u32>) -> Result<(), WavefrontLoadError> {
    let mut face_indices: Vec<u32> = Vec::new();
    for i in 0..4 {
        let face_index_txt = match split.next() {
            None => {
                if i < 3 {
                    return Err(WavefrontLoadError::FormatError { msg: "Face has less than 3 vertices" });
                }
                break;
            },
            Some(val) => val
        };
        let mut face_index_split: Split<&str> = face_index_txt.split("/");
        let mut face_index_ref = [0, 0, 0u32];
        for j in 0..3 {
            let face_index_comp = face_index_split.next()
                .ok_or(WavefrontLoadError::FormatError { msg: "Face index ref must contain 3 indices" })?;
            // `v//n` form: the texcoord slot may be empty.
            if j == 1 && face_index_comp.len() == 0 {
                continue;
            }
            face_index_ref[j] = face_index_comp.trim().parse()?;
        }
        // OBJ indices are 1-based, so a literal 0 is invalid. checked_sub
        // turns what used to be a debug-build underflow panic into a
        // proper BadIndexError.
        let vertex_idx = face_index_ref[0].checked_sub(1)
            .ok_or(WavefrontLoadError::BadIndexError { msg: "Vertex index does not exist" })?;
        let normal_idx = face_index_ref[2].checked_sub(1)
            .ok_or(WavefrontLoadError::BadIndexError { msg: "Normal index does not exist" })?;
        let mut new_vert = Vertex {
            position: vertex_info.get(vertex_idx as usize)
                .ok_or(WavefrontLoadError::BadIndexError { msg: "Vertex index does not exist" })?.clone(),
            normal: normal_info.get(normal_idx as usize)
                .ok_or(WavefrontLoadError::BadIndexError { msg: "Normal index does not exist" })?.clone(),
            texcoords: [0., 0.]
        };
        // Texcoord is optional; the `> 0` guard keeps the subtraction safe.
        if face_index_ref[1] > 0 {
            new_vert.texcoords = texcoord_info.get((face_index_ref[1] - 1) as usize)
                .ok_or(WavefrontLoadError::BadIndexError { msg: "Texcoord index does not exist" })?.clone();
        }
        vertices.push(new_vert);
        face_indices.push((vertices.len() - 1) as u32);
    }
    if split.next().is_some() {
        return Err(WavefrontLoadError::FormatError { msg: "Face has more than 4 vertices" });
    }
    indices.extend_from_slice(&[face_indices[2], face_indices[1], face_indices[0]]);
    // A quad contributes a second triangle sharing the 0th and 2nd corners.
    if face_indices.len() == 4 {
        indices.extend_from_slice(&[face_indices[3], face_indices[2], face_indices[0]]);
    }
    Ok(())
}
/// Loads the .mtl material library named by an `mtllib` directive.
///
/// Adds one `MtlInfo` per `newmtl` to `mtl_map`, reading `map_Kd` (diffuse
/// texture, loaded once into `textures` keyed by filename) and `Kd`
/// (diffuse color). All other keys are ignored.
///
/// NOTE(review): material names and texture keys are stored untrimmed and
/// may carry a trailing newline; the `usemtl` lookup in `load_obj` is
/// equally untrimmed, so the keys stay consistent — confirm before
/// "fixing" either side alone.
fn load_mtl(display: &Display, obj_split: &mut Split<char>, obj_parent_dir: &Path,
    textures: &mut HashMap<String, Texture2d>, mtl_map: &mut HashMap<String, MtlInfo>) -> Result<(), WavefrontLoadError> {
    let filename = obj_split.next()
        .ok_or(WavefrontLoadError::FormatError { msg: "mtllib does not have filename" })?;
    // Material library paths are relative to the .obj's directory.
    let f = File::open(obj_parent_dir.join(filename.trim()).as_path())?;
    let mut f = BufReader::new(f);
    let mut line = String::new();
    // Name of the material currently being populated, if any.
    let mut current_name: Option<String> = None;
    while f.read_line(&mut line)? != 0 {
        let mut split = line.split(' ');
        let key = split.next().unwrap().trim();
        if key == "newmtl" {
            let name = split.next()
                .ok_or(WavefrontLoadError::FormatError { msg: "newmtl does not have a name" })?.to_string();
            mtl_map.insert(name.clone(), Default::default());
            current_name = Some(name);
        }
        if let Some(name) = current_name.as_ref() {
            let mtl = mtl_map.get_mut(name).unwrap();
            match key {
                "map_Kd" => {
                    let img_filename = split.next()
                        .ok_or(WavefrontLoadError::FormatError { msg: "map_Kd does not have a filename" })?.to_string();
                    // Load each texture file only once, even if several
                    // materials reference it.
                    if !textures.contains_key(&img_filename) {
                        let img_path = obj_parent_dir.join(img_filename.trim());
                        let txt = load_texture(display, img_path.as_path(), true)?;
                        textures.insert(img_filename.clone(), txt);
                    }
                    mtl.diffuse_texture = Some(img_filename.clone());
                },
                "Kd" => {
                    for i in 0..3 {
                        mtl.color[i] = split.next()
                            .ok_or(WavefrontLoadError::FormatError { msg: "Kd is missing a component" })?
                            .trim().parse()?;
                    }
                },
                &_ => ()
            }
        }
        line.clear();
    }
    Ok(())
}
/// Finalizes the object accumulated in `vertices`/`indices` under `o_name`.
///
/// Dispatches on the name prefix: normal/terrain meshes are uploaded to
/// the GPU (when a display is present), terrain/collision triangles go
/// into the quadoctree, `light_*` objects become lights positioned at
/// their first vertex, and `misc_*` objects record their first vertex
/// position. The buffers and `o_name` are cleared so the caller can start
/// accumulating the next object.
///
/// NOTE(review): `current_mtl.as_ref().unwrap()` panics if a renderable
/// object appears before any `usemtl` — presumably the assets guarantee
/// one; confirm.
fn process_obj(display: Option<&&Display>, vertices: &mut Vec<Vertex>, indices: &mut Vec<u32>,
    current_mtl: &Option<MtlInfo>, quadoctree: Option<&mut &mut QuadOctreeNode>, lights: Option<&mut &mut HashMap<String, Light>>,
    misc_objs: Option<&mut &mut HashMap<String, [f32; 3]>>,
    o_name: &mut Option<String>, result: &mut BTreeMap<String, ObjDef>) -> Result<(), WavefrontLoadError> {
    // Classify by prefix; unprefixed names are ordinary render meshes.
    let mesh_type = if o_name.as_ref().unwrap().starts_with(COLLISION_PREFIX) {
        MeshType::Collision
    } else if o_name.as_ref().unwrap().starts_with(LIGHT_PREFIX) {
        MeshType::Light
    } else if o_name.as_ref().unwrap().starts_with(TERRAIN_PREFIX) {
        MeshType::Terrain
    } else if o_name.as_ref().unwrap().starts_with(MISC_PREFIX) {
        MeshType::Misc
    } else { MeshType::Normal };
    if let Some(display) = display {
        if MeshType::Normal == mesh_type || MeshType::Terrain == mesh_type {
            let mut def = load_data_to_gpu(*display, &vertices, &indices);
            def.material = Some(current_mtl.as_ref().unwrap().clone());
            result.insert(o_name.as_ref().unwrap().clone(), def);
        }
    }
    if MeshType::Misc == mesh_type {
        if let Some(misc_objs) = misc_objs {
            misc_objs.insert(o_name.as_ref().unwrap().to_string(), vertices[0].position);
        }
    }
    // Terrain is both rendered (above) and collidable (here).
    if MeshType::Terrain == mesh_type || MeshType::Collision == mesh_type {
        if let Some(quadoctree) = quadoctree {
            add_obj_to_quadoctree(&mut (**quadoctree), &vertices, &indices, MeshType::Collision == mesh_type)?;
        }
    }
    if MeshType::Light == mesh_type {
        if let Some(lights) = lights {
            lights.insert(o_name.as_ref().unwrap().to_string().trim().to_string(),
                Light { position: vertices[0].position, ..Default::default() });
        }
    }
    // Reset accumulation state for the next `o` directive.
    vertices.clear();
    indices.clear();
    *o_name = None;
    Ok(())
}
/// Parses a wavefront .obj asset into renderable objects keyed by `o` name.
///
/// All sinks are optional so the loader can run headless: `display` +
/// `textures` enable material loading and GPU upload, `quadoctree`
/// collects collision/terrain triangles, and `lights`/`misc_objs` collect
/// marker objects. Vertex positions are scaled component-wise by `scale`;
/// normals are not.
pub fn load_obj(filename: &str, app_id: &str, display: Option<&Display>, mut textures: Option<&mut HashMap<String, Texture2d>>,
    scale: &[f32; 3], mut quadoctree: Option<&mut QuadOctreeNode>, mut lights: Option<&mut HashMap<String, Light>>,
    mut misc_objs: Option<&mut HashMap<String, [f32; 3]>>) -> Result<BTreeMap<String, ObjDef>, WavefrontLoadError> {
    let path = find_asset(filename, app_id);
    let f = File::open(path.as_path())?;
    let mut f = BufReader::new(f);
    // One line buffer reused for the whole file.
    let mut line = String::new();
    // Raw v/vn/vt pools referenced by face indices.
    let mut vertex_info: Vec<[f32; 3]> = Vec::new();
    let mut normal_info: Vec<[f32; 3]> = Vec::new();
    let mut texcoord_info: Vec<[f32; 2]> = Vec::new();
    // Accumulators for the object currently being read.
    let mut vertices: Vec<Vertex> = Vec::new();
    let mut indices: Vec<u32> = Vec::new();
    let mut mtl_map: HashMap<String, MtlInfo> = HashMap::new();
    let mut current_mtl: Option<MtlInfo> = None;
    let mut current_o_name: Option<String> = None;
    let mut result: BTreeMap<String, ObjDef> = BTreeMap::new();
    while f.read_line(&mut line)? != 0 {
        let mut split = line.split(' ');
        match split.next().unwrap() {
            "mtllib" => {
                // Materials only matter when rendering.
                if let Some(display) = display.as_ref() {
                    let parent_dir = path.parent().unwrap();
                    load_mtl(*display, &mut split, &parent_dir, *textures.as_mut().unwrap(), &mut mtl_map)?;
                }
            },
            "usemtl" => {
                if display.is_some() {
                    let mtl_name = split.next()
                        .ok_or(WavefrontLoadError::FormatError { msg: "usemtl does not have a name" })?;
                    current_mtl = Some(mtl_map.get(mtl_name)
                        .ok_or(WavefrontLoadError::FormatError { msg: "Material does not exist" })?.clone());
                }
            },
            "v" => vertex_info.push(parse_vertex_or_normal(&mut split, scale)?),
            "vn" => normal_info.push(parse_vertex_or_normal(&mut split, &[1., 1., 1.])?),
            "vt" => texcoord_info.push(parse_texcoords(&mut split)?),
            "f" => parse_face(&mut split, &vertex_info, &normal_info, &texcoord_info, &mut vertices,
                &mut indices)?,
            "o" => {
                // A new object starts: flush the previous one first.
                if current_o_name.is_some() {
                    process_obj(display.as_ref(), &mut vertices, &mut indices, &current_mtl,
                        quadoctree.as_mut(), lights.as_mut(), misc_objs.as_mut(), &mut current_o_name, &mut result)?;
                }
                current_o_name = Some(split.next()
                    .ok_or(WavefrontLoadError::FormatError { msg: "o does not have a name" })?.to_string());
            }
            &_ => ()
        }
        line.clear();
    }
    // Flush the final object, which has no following `o` to trigger it.
    if current_o_name.is_some() {
        process_obj(display.as_ref(), &mut vertices, &mut indices, &current_mtl,
            quadoctree.as_mut(), lights.as_mut(), misc_objs.as_mut(), &mut current_o_name, &mut result)?;
    }
    Ok(result)
}
|
use anyhow::{Context, Result};
use cyclovander::{cond, tr_h};
use indicatif::ProgressBar;
use rayon::prelude::*;
use rayon::ThreadPoolBuilder;
use std::fs::File;
use std::io::{self, BufRead, BufReader, Write};
use std::path::PathBuf;
use structopt::StructOpt;
// Top-level CLI options. The /// doc comments double as --help text under
// structopt, so they are left untouched; plain // comments are invisible
// to the macro.
#[derive(StructOpt, Debug)]
struct Opt {
    /// Compute trace instead of condition number
    #[structopt(short, long)]
    trace: bool,
    // Which mode to run: batch table from a file, or a single value.
    #[structopt(subcommand)]
    cmd: Command,
}
// Subcommands. As above, /// lines are user-visible help text and are
// preserved verbatim.
#[derive(StructOpt, Debug)]
enum Command {
    /// Generate table from file
    Table {
        /// Input file
        #[structopt(parse(from_os_str))]
        input: PathBuf,
        /// Disable spinner
        #[structopt(short = "q", long = "quiet")]
        quiet: bool,
        /// Number of threads
        #[structopt(short, long, default_value = "1")]
        threads: usize,
    },
    /// Output for integer n
    Get {
        /// Unsigned 64 bits integer
        n: u64,
    },
}
/// Computes the requested quantity for `n` and renders it as a string.
fn get_result(n: u64, trace: bool) -> String {
    match trace {
        true => tr_h(n).to_string(),
        false => cond(n).to_string(),
    }
}
/// Streams one result line per parseable integer in `reader` to stdout,
/// computing rows in parallel via rayon.
fn print_table(reader: BufReader<File>, trace: bool) -> Result<()> {
    let header = match trace {
        true => "n\tTr(H_n)",
        false => "n\tCond(V_n)",
    };
    writeln!(io::stdout(), "{}", header)?;
    // Per-row write errors (e.g. broken pipe) are deliberately discarded;
    // unreadable or non-numeric lines are skipped.
    let _ = reader
        .lines()
        .par_bridge()
        .filter_map(|line| line.ok()?.parse::<u64>().ok())
        .try_for_each(|n| writeln!(io::stdout(), "{}\t{}", n, get_result(n, trace)));
    Ok(())
}
/// Same as `print_table`, but shows a progress spinner (on stderr) naming
/// the value currently being computed.
fn print_table_with_spinner(reader: BufReader<File>, trace: bool) -> Result<()> {
    let pb = ProgressBar::new_spinner();
    // Keep the spinner animating every 120 ms even between updates.
    pb.enable_steady_tick(120);
    let header = match trace {
        true => "n\tTr(H_n)",
        false => "n\tCond(V_n)",
    };
    writeln!(io::stdout(), "{}", header)?;
    // Per-row write errors are deliberately discarded; bad lines skipped.
    let _ = reader
        .lines()
        .par_bridge()
        .filter_map(|line| line.ok()?.parse::<u64>().ok())
        .try_for_each(|n| {
            pb.set_message(format!("computing {}...", n));
            writeln!(io::stdout(), "{}\t{}", n, get_result(n, trace))
        });
    Ok(())
}
/// Reads integers (one per line) from `input` and prints the result table,
/// using a rayon pool of `threads` workers.
fn parse_table(input: PathBuf, trace: bool, mut quiet: bool, threads: usize) -> Result<()> {
    // `.context` instead of `.with_context(|| format!(...))`: the message
    // is a constant, so there is nothing to format lazily.
    let in_stream = File::open(input).context("could not read file")?;
    let reader = BufReader::new(in_stream);
    // NOTE(review): this forces quiet mode when stdout IS a tty, which
    // looks inverted given the spinner writes to stderr — confirm intent.
    if atty::is(atty::Stream::Stdout) {
        quiet = true
    }
    ThreadPoolBuilder::new()
        .num_threads(threads)
        .build_global()?;
    if quiet {
        print_table(reader, trace)
    } else {
        print_table_with_spinner(reader, trace)
    }
}
/// Prints the result for a single `n` to stdout.
fn parse_get(n: u64, trace: bool) -> Result<()> {
    let value = get_result(n, trace);
    println!("{}", value);
    Ok(())
}
/// Entry point: parse the CLI and dispatch on the chosen subcommand.
fn main() -> Result<()> {
    let opt = Opt::from_args();
    let trace = opt.trace;
    match opt.cmd {
        Command::Get { n } => parse_get(n, trace),
        Command::Table {
            input,
            quiet,
            threads,
        } => parse_table(input, trace, quiet, threads),
    }
}
|
// svd2rust-generated accessor aliases for the RTSR2 register.
#[doc = "Register `RTSR2` reader"]
pub type R = crate::R<RTSR2_SPEC>;
#[doc = "Register `RTSR2` writer"]
pub type W = crate::W<RTSR2_SPEC>;
// Reader for the single RT2 bit, surfaced as the RT2_A enum.
#[doc = "Field `RT2` reader - Rising trigger event configuration bit of configurable line 34"]
pub type RT2_R = crate::BitReader<RT2_A>;
// Enumerated values for the RT2 bit (discriminants are the raw bit values).
#[doc = "Rising trigger event configuration bit of configurable line 34\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RT2_A {
    #[doc = "0: Rising edge trigger is disabled"]
    Disabled = 0,
    #[doc = "1: Rising edge trigger is enabled"]
    Enabled = 1,
}
impl From<RT2_A> for bool {
#[inline(always)]
fn from(variant: RT2_A) -> Self {
variant as u8 != 0
}
}
impl RT2_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> RT2_A {
        if self.bits {
            RT2_A::Enabled
        } else {
            RT2_A::Disabled
        }
    }
    #[doc = "Rising edge trigger is disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        self.variant() == RT2_A::Disabled
    }
    #[doc = "Rising edge trigger is enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        self.variant() == RT2_A::Enabled
    }
}
// Writer proxy for the RT2 bit; generic over the owning register so
// svd2rust can share the BitWriter machinery.
#[doc = "Field `RT2` writer - Rising trigger event configuration bit of configurable line 34"]
pub type RT2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RT2_A>;
impl<'a, REG, const O: u8> RT2_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Rising edge trigger is disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(RT2_A::Disabled)
    }
    #[doc = "Rising edge trigger is enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(RT2_A::Enabled)
    }
}
impl R {
    #[doc = "Bit 2 - Rising trigger event configuration bit of configurable line 34"]
    #[inline(always)]
    pub fn rt2(&self) -> RT2_R {
        // Extract bit 2 with an explicit mask.
        let bit = (self.bits & (1 << 2)) != 0;
        RT2_R::new(bit)
    }
}
impl W {
    #[doc = "Bit 2 - Rising trigger event configuration bit of configurable line 34"]
    #[inline(always)]
    #[must_use]
    pub fn rt2(&mut self) -> RT2_W<RTSR2_SPEC, 2> {
        RT2_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe per svd2rust convention: the caller takes responsibility for
    // the raw bit pattern instead of going through typed field writers.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "EXTI rising trigger selection register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rtsr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rtsr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RTSR2_SPEC;
// 32-bit register.
impl crate::RegisterSpec for RTSR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`rtsr2::R`](R) reader structure"]
impl crate::Readable for RTSR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rtsr2::W`](W) writer structure"]
impl crate::Writable for RTSR2_SPEC {
    // No write-1/write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RTSR2 to value 0"]
impl crate::Resettable for RTSR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::collections::HashMap;

/// Counts occurrences of each word in `input`, case-insensitively.
///
/// Words are maximal runs of alphanumeric characters; everything else is
/// a separator. Keys are stored lower-cased.
///
/// (Modernized from pre-1.0 Rust: `uint`, `std::collections::hashmap`,
/// `as_slice()` and the old `Occupied`/`Vacant` entry API no longer exist;
/// `usize` is the direct successor of `uint`.)
pub fn word_count(input: &str) -> HashMap<String, usize> {
    let mut map: HashMap<String, usize> = HashMap::new();
    // `str::to_lowercase` handles multi-char lowercasings correctly.
    let norm = input.to_lowercase();
    for word in norm.split(|c: char| !c.is_alphanumeric()).filter(|s| !s.is_empty()) {
        // Entry API: a single lookup per word.
        *map.entry(word.to_string()).or_insert(0) += 1;
    }
    map
}
|
pub mod builtin;
pub mod env;
pub mod objects;
use std::cell::RefCell;
use std::convert::TryFrom;
use std::rc::Rc;
use anyhow::Result;
use crate::parser::{ast, tools};
use crate::evaluator::builtin::{Function, FALSE, NULL, TRUE};
use crate::evaluator::env::Environment;
/// Evaluates a top-level AST node against `env`.
///
/// Each arm is the final use of `env`, so it is moved directly instead of
/// bumping the Rc refcount with a redundant `Rc::clone`.
pub fn eval_node(node: &ast::Node, env: Rc<RefCell<Environment>>) -> Result<objects::Object> {
    match node {
        ast::Node::Program(n) => eval_program(n, env),
        ast::Node::Stmt(stmt) => eval_stmt(stmt, env),
        ast::Node::Expr(expr) => eval_expr(expr, env),
    }
}
/// Evaluates a single statement.
///
/// `let` binds its evaluated value into the environment and yields NULL;
/// `return` wraps its value so enclosing blocks can unwind. `Rc::clone` is
/// only kept in the `Let` arm, where `env` is genuinely used twice.
fn eval_stmt(stmt: &ast::Stmt, env: Rc<RefCell<Environment>>) -> Result<objects::Object> {
    match stmt {
        ast::Stmt::ExprStmt(stmt) => eval_expr(&stmt.expr, env),
        ast::Stmt::Let(stmt) => {
            let val = eval_expr(&stmt.value, Rc::clone(&env))?;
            env.borrow_mut().insert(&stmt.name.value, val);
            Ok(NULL.into())
        }
        ast::Stmt::Block(stmt) => eval_block(stmt, env),
        ast::Stmt::Return(stmt) => {
            let val = eval_expr(&stmt.return_value, env)?;
            Ok(objects::Return {
                value: Box::new(val),
            }
            .into())
        }
    }
}
/// Evaluates an expression to an object.
///
/// The `quote` special form is intercepted before normal call evaluation
/// so its argument stays unevaluated. The catch-all arm is unreachable as
/// long as every `ast::Expr` variant is handled above it.
fn eval_expr(expr: &ast::Expr, env: Rc<RefCell<Environment>>) -> Result<objects::Object> {
    match expr {
        ast::Expr::Array(expr) => {
            let elements = eval_expressions(&expr.elements, Rc::clone(&env))?;
            Ok(objects::Array { elements }.into())
        }
        ast::Expr::Boolean(expr) => Ok(native_bool_to_boolean_object(expr.value)),
        ast::Expr::Call(expr) => {
            // `quote(...)` receives its first argument as raw AST.
            if expr.func.to_string() == "quote" {
                return quote(ast::Node::Expr(expr.args[0].clone()), Rc::clone(&env));
            }
            // Evaluate callee first, then arguments left-to-right.
            let func = eval_expr(&expr.func, Rc::clone(&env))?;
            let args = eval_expressions(&expr.args, Rc::clone(&env))?;
            Ok(apply_function(&func, &args)?)
        }
        // Function literals capture the defining environment (closures).
        ast::Expr::Function(expr) => Ok(objects::Function {
            params: expr.params.clone(),
            body: expr.body.clone(),
            env: Rc::clone(&env),
        }
        .into()),
        ast::Expr::Hash(expr) => Ok(eval_hash_literal(expr, Rc::clone(&env))?),
        ast::Expr::Identifier(expr) => Ok(eval_identifier(expr, Rc::clone(&env))?),
        ast::Expr::If(expr) => Ok(eval_if_expr(expr, Rc::clone(&env))?),
        ast::Expr::Index(expr) => {
            let left = eval_expr(&expr.left, Rc::clone(&env))?;
            let index = eval_expr(&expr.index, Rc::clone(&env))?;
            // TOOD: clone to reference
            Ok(eval_index_expr(&left, &index)?.clone())
        }
        ast::Expr::InfixExpr(expr) => {
            let left = eval_expr(&expr.left, Rc::clone(&env))?;
            let right = eval_expr(&expr.right, Rc::clone(&env))?;
            Ok(eval_infix_expr(&expr.ope, &left, &right)?)
        }
        ast::Expr::Integer(expr) => Ok(objects::Integer { value: expr.value }.into()),
        ast::Expr::PrefixExpr(expr) => {
            let right = eval_expr(&expr.right, Rc::clone(&env))?;
            Ok(eval_prefix_expr(&expr.ope, &right)?)
        }
        ast::Expr::StringLit(expr) => Ok(objects::StringLit {
            value: expr.value.clone(),
        }
        .into()),
        _ => unreachable!(),
    }
}
pub(crate) fn new_error<T>(message: &str) -> Result<T> {
Err(objects::Error::Standard(message.into()).into())
}
/// Evaluates every statement in a program, yielding the last value.
/// A `return` unwraps its inner value and stops execution immediately.
fn eval_program(program: &ast::Program, env: Rc<RefCell<Environment>>) -> Result<objects::Object> {
    let mut last: objects::Object = NULL.into();
    for stmt in program.statements.iter() {
        let evaluated = eval_stmt(stmt, Rc::clone(&env))?;
        if let objects::Object::Return(r) = evaluated {
            return Ok(r.value.as_ref().clone());
        }
        last = evaluated;
    }
    Ok(last)
}
/// Evaluates a block's statements, yielding the last value.
/// Unlike `eval_program`, a `return` is propagated still wrapped so outer
/// blocks keep unwinding.
fn eval_block(block: &ast::Block, env: Rc<RefCell<Environment>>) -> Result<objects::Object> {
    let mut last: objects::Object = NULL.into();
    for stmt in block.statements.iter() {
        let evaluated = eval_stmt(stmt, Rc::clone(&env))?;
        if let objects::Object::Return(r) = evaluated {
            return Ok(r.into());
        }
        last = evaluated;
    }
    Ok(last)
}
/// Evaluates a prefix operator (`!` or `-`) applied to `right`.
fn eval_prefix_expr(ope: &ast::Operator, right: &objects::Object) -> Result<objects::Object> {
    match ope {
        ast::Operator::Bang => Ok(eval_bang_ope_expr(right)),
        ast::Operator::Minus => eval_minus_prefix_ope_expr(right),
        unknown => new_error(&format!("unknown operator: {} {}", unknown, right.o_type())),
    }
}
/// Maps a host `bool` onto the interpreter's singleton TRUE/FALSE objects.
fn native_bool_to_boolean_object(input: bool) -> objects::Object {
    let boolean = if input { TRUE } else { FALSE };
    objects::Object::Boolean(boolean)
}
/// Logical negation: booleans flip, null is truthy-negated to TRUE, and
/// every other object is considered truthy (so `!x` is FALSE).
fn eval_bang_ope_expr(right: &objects::Object) -> objects::Object {
    let negated = match right {
        objects::Object::Boolean(b) if b == &TRUE => FALSE,
        objects::Object::Boolean(b) if b == &FALSE => TRUE,
        objects::Object::Boolean(_) => unreachable!(),
        objects::Object::Null(_) => TRUE,
        _ => FALSE,
    };
    objects::Object::Boolean(negated)
}
/// Arithmetic negation; only integers are negatable, and i64::MIN is
/// reported as an overflow instead of wrapping.
fn eval_minus_prefix_ope_expr(right: &objects::Object) -> Result<objects::Object> {
    match right {
        objects::Object::Integer(i) => match i.value.checked_neg() {
            Some(value) => Ok(objects::Integer { value }.into()),
            None => new_error(&format!("overflow: {}", i)),
        },
        unknown => new_error(&format!("unknown operator: -{}", unknown.o_type())),
    }
}
/// Dispatches an infix expression by operand types.
///
/// Arm order matters: typed integer/string arms must precede the generic
/// `Equal`/`NotEqual` arms so `1 == 1` compares values, not object
/// identity. The final arm distinguishes "unknown operator" (same types)
/// from "type mismatch" (different types).
fn eval_infix_expr(
    ope: &ast::Operator,
    left: &objects::Object,
    right: &objects::Object,
) -> Result<objects::Object> {
    match (ope, left, right) {
        (o, objects::Object::Integer(l), objects::Object::Integer(r)) => {
            eval_integer_infix_expr(o, l, r)
        }
        (o, objects::Object::StringLit(l), objects::Object::StringLit(r)) => {
            eval_string_infix_expr(o, l, r)
        }
        (ast::Operator::Equal, l, r) => Ok(native_bool_to_boolean_object(l == r)),
        (ast::Operator::NotEqual, l, r) => Ok(native_bool_to_boolean_object(l != r)),
        (o, l, r) => {
            if l.o_type() == r.o_type() {
                new_error(&format!(
                    "unknown operator: {} {} {}",
                    l.o_type(),
                    o,
                    r.o_type()
                ))?
            } else {
                new_error(&format!(
                    "type mismatch: {} {} {}",
                    l.o_type(),
                    o,
                    r.o_type()
                ))?
            }
        }
    }
}
/// Evaluates an integer infix expression with checked arithmetic.
///
/// Fix: the `*` and `/` overflow messages previously printed `"{} - {}"`,
/// copy-pasted from the subtraction arm; they now show the actual
/// operator. (`checked_div` also returns `None` on division by zero,
/// which is reported through the same message.)
fn eval_integer_infix_expr(
    ope: &ast::Operator,
    left: &objects::Integer,
    right: &objects::Integer,
) -> Result<objects::Object> {
    use ast::Operator::*;
    let res = match ope {
        Plus => {
            let value = match left.value.checked_add(right.value) {
                Some(v) => v,
                None => new_error(&format!("overflow: {} + {}", left.value, right.value))?,
            };
            objects::Integer { value }.into()
        }
        Minus => {
            let value = match left.value.checked_sub(right.value) {
                Some(v) => v,
                None => new_error(&format!("overflow: {} - {}", left.value, right.value))?,
            };
            objects::Integer { value }.into()
        }
        Asterisk => {
            let value = match left.value.checked_mul(right.value) {
                Some(v) => v,
                None => new_error(&format!("overflow: {} * {}", left.value, right.value))?,
            };
            objects::Integer { value }.into()
        }
        Slash => {
            let value = match left.value.checked_div(right.value) {
                Some(v) => v,
                None => new_error(&format!("overflow: {} / {}", left.value, right.value))?,
            };
            objects::Integer { value }.into()
        }
        Lt => native_bool_to_boolean_object(left.value < right.value),
        Gt => native_bool_to_boolean_object(left.value > right.value),
        Equal => native_bool_to_boolean_object(left.value == right.value),
        NotEqual => native_bool_to_boolean_object(left.value != right.value),
        unknown => new_error(&format!("unknown operator: {}", unknown))?,
    };
    Ok(res)
}
/// Evaluates a string infix expression; only `+` (concatenation) exists.
fn eval_string_infix_expr(
    ope: &ast::Operator,
    left: &objects::StringLit,
    right: &objects::StringLit,
) -> Result<objects::Object> {
    if let ast::Operator::Plus = ope {
        let value = format!("{}{}", left.value, right.value);
        Ok(objects::StringLit { value }.into())
    } else {
        new_error(&format!("unknown operator: String {} String", ope))
    }
}
/// Evaluates an if/else: the consequence block when the condition is
/// truthy, the alternative when present, otherwise NULL.
fn eval_if_expr(if_expr: &ast::If, env: Rc<RefCell<Environment>>) -> Result<objects::Object> {
    let condition = eval_expr(if_expr.cond.as_ref(), Rc::clone(&env))?;
    if is_truthy(condition) {
        let consequence = ast::Stmt::Block(if_expr.consequence.clone());
        return eval_stmt(&consequence, env);
    }
    match &if_expr.alternative {
        Some(alt) => eval_stmt(&ast::Stmt::Block(alt.clone()), env),
        None => Ok(NULL.into()),
    }
}
/// Monkey truthiness: null and FALSE are falsy; everything else is truthy.
fn is_truthy(obj: objects::Object) -> bool {
    match obj {
        objects::Object::Null(_) => false,
        objects::Object::Boolean(b) if b == TRUE => true,
        objects::Object::Boolean(b) if b == FALSE => false,
        objects::Object::Boolean(_) => unreachable!(),
        _ => true,
    }
}
/// Resolves an identifier: the environment first, builtins second.
///
/// Fix: the lookup only reads the environment, so a shared `borrow()` is
/// used instead of `borrow_mut()` — an exclusive borrow here would panic
/// if evaluation ever re-entered with an outstanding borrow.
fn eval_identifier(
    node: &ast::Identifier,
    env: Rc<RefCell<Environment>>,
) -> Result<objects::Object> {
    let cur_env = env.borrow();
    let ident = cur_env.get(&node.value);
    if let Some(id) = ident {
        return Ok(id);
    }
    let builtin = Function::try_from(node.value.as_str());
    if let Ok(builtin) = builtin {
        return Ok(builtin.into());
    }
    new_error(&format!("identifier not found: {}", node.value))
}
/// Evaluates a slice of expressions left to right, returning the resulting
/// objects in order, or the first evaluation error encountered.
fn eval_expressions(
    expr_list: &[ast::Expr],
    env: Rc<RefCell<Environment>>,
) -> Result<Vec<objects::Object>> {
    // Collecting into Result short-circuits on the first Err, exactly like
    // the explicit push loop it replaces.
    expr_list
        .iter()
        .map(|expr| eval_expr(expr, Rc::clone(&env)))
        .collect()
}
/// Invokes a callable object with already-evaluated arguments.
///
/// User-defined functions run their body in an environment extended with
/// the bound parameters; builtins are dispatched directly. Any other
/// object kind is a "not a function" error.
fn apply_function(func: &objects::Object, args: &[objects::Object]) -> Result<objects::Object> {
    match func {
        objects::Object::Function(f) => {
            let call_env = extend_function_env(f, args)?;
            let body_result = eval_stmt(&f.body.clone().into(), call_env)?;
            // Strip a Return wrapper so it does not bubble past the call.
            Ok(unwrap_return_value(&body_result))
        }
        objects::Object::Builtin(builtin) => {
            // Builtins may legitimately produce no value; map that to NULL.
            let res = builtin.call(args)?;
            Ok(res.unwrap_or_else(|| NULL.into()))
        }
        invalid => new_error(&format!("not a function: {}", invalid.o_type())),
    }
}
/// Builds the evaluation environment for a function call: a child of the
/// function's closure environment with each parameter bound to the
/// corresponding argument. Missing arguments are an error.
fn extend_function_env(
    func: &objects::Function,
    args: &[objects::Object],
) -> Result<Rc<RefCell<Environment>>> {
    let mut call_env = Environment::new_enclose(Rc::clone(&func.env));
    for (i, param) in func.params.iter().enumerate() {
        // Every parameter must have a matching argument; surplus args are
        // silently ignored, as before.
        let arg = if let Some(a) = args.get(i) {
            a.clone()
        } else {
            new_error("not found arg.")?
        };
        call_env.insert(&param.value, arg);
    }
    Ok(Rc::new(RefCell::new(call_env)))
}
/// Unwraps a Return object into its inner value; every other object is
/// passed through unchanged (cloned).
fn unwrap_return_value(obj: &objects::Object) -> objects::Object {
    match obj.clone() {
        objects::Object::Return(ret) => *ret.value,
        other => other,
    }
}
/// Dispatches an index expression (`left[index]`) to the matching handler.
///
/// Arrays require an integer index; hashes accept any key (hashability is
/// checked inside the hash handler). Indexing any other object type is an
/// error.
fn eval_index_expr<'a>(
    left: &'a objects::Object,
    index: &objects::Object,
) -> Result<&'a objects::Object> {
    match (left, index) {
        (objects::Object::Array(arr), objects::Object::Integer(i)) => {
            eval_array_index_expr(arr, i)
        }
        (objects::Object::Hash(h), key) => eval_hash_index_expr(h, key),
        (other, _) => new_error(&format!("index operator not supported: {}", other.o_type())),
    }
}
/// Evaluates `array[index]`.
///
/// Out-of-range indices — negative or past the end — evaluate to NULL
/// rather than erroring. The `&objects::Object::Null(NULL)` expressions
/// appear to rely on static (rvalue) promotion to satisfy the `'a`
/// lifetime; left byte-identical for that reason.
fn eval_array_index_expr<'a>(
    array: &'a objects::Array,
    index: &objects::Integer,
) -> Result<&'a objects::Object> {
    // Negative indices are not supported; they evaluate to NULL.
    if index.value < 0 {
        return Ok(&objects::Object::Null(NULL));
    }
    // i64 -> usize conversion; failure is surfaced as an evaluation error.
    let idx = usize::try_from(index.value).or_else(|e| new_error(&e.to_string()))?;
    match array.elements.get(idx) {
        Some(obj) => Ok(obj),
        // Past-the-end access also evaluates to NULL, never a panic.
        None => Ok(&objects::Object::Null(NULL)),
    }
}
/// Evaluates a hash literal: each key/value expression is evaluated in
/// source order and inserted; a key that is not hashable (e.g. a function)
/// aborts with an "unusable as hash key" error.
fn eval_hash_literal(node: &ast::Hash, env: Rc<RefCell<Environment>>) -> Result<objects::Object> {
    let mut pairs = objects::HashPairs::new();
    for entry in node.pairs.iter() {
        let key_obj = eval_expr(&entry.key, Rc::clone(&env))?;
        let value_obj = eval_expr(&entry.value, Rc::clone(&env))?;
        // Capture the type name before try_from consumes the key.
        let key_type = key_obj.o_type();
        let hashable = objects::HashableObject::try_from(key_obj)
            .or_else(|_| new_error(&format!("unusable as hash key: {}", key_type)))?;
        pairs.insert(hashable, value_obj);
    }
    Ok(objects::Hash { pairs }.into())
}
/// Evaluates `hash[key]`.
///
/// A missing key evaluates to NULL rather than an error; an unhashable
/// key (e.g. a function) is reported as an error. The key is cloned
/// because `HashableObject::try_from` consumes its input. The
/// `&objects::Object::Null(NULL)` expression appears to rely on static
/// promotion for the `'a` lifetime; left byte-identical for that reason.
fn eval_hash_index_expr<'a>(
    hash: &'a objects::Hash,
    key: &objects::Object,
) -> Result<&'a objects::Object> {
    // Capture the type name up front for the error message below.
    let key_type = key.o_type();
    match objects::HashableObject::try_from(key.clone()) {
        Ok(o) => Ok(match hash.pairs.get(&o) {
            Some(value) => value,
            // Absent keys evaluate to NULL, never a panic.
            None => &objects::Object::Null(NULL),
        }),
        Err(_) => new_error(&format!("unusable as hash key: {}", key_type))?,
    }
}
/// Implements `quote(...)`: expands any nested `unquote(...)` calls in the
/// node, then wraps the (possibly rewritten) AST in a Quote object.
fn quote(node: ast::Node, env: Rc<RefCell<Environment>>) -> Result<objects::Object> {
    let expanded = eval_unquote_calls(node, env)?;
    Ok(objects::Quote { node: expanded }.into())
}
/// Walks a quoted AST and replaces every `unquote(expr)` call with the AST
/// form of `expr`'s evaluated value; all other nodes pass through intact.
fn eval_unquote_calls(quoted: ast::Node, env: Rc<RefCell<Environment>>) -> Result<ast::Node> {
    tools::modify(quoted, |node: ast::Node| -> Result<ast::Node> {
        // Only `unquote(...)` call expressions are rewritten.
        let call = match &node {
            ast::Node::Expr(ast::Expr::Call(c)) if is_unquote_call(&node) => c,
            _ => return Ok(node),
        };
        // unquote takes exactly one argument; anything else is unsupported.
        if call.args.len() != 1 {
            unimplemented!()
        }
        let arg = call.args[0].clone();
        let evaluated = eval_node(&arg.into(), Rc::clone(&env))?;
        Ok(convert_object_to_ast_node(evaluated))
    })
}
/// Converts an evaluated object back into an AST node so it can be spliced
/// into quoted code. Only integers, booleans and quotes are representable;
/// other object kinds are unsupported.
fn convert_object_to_ast_node(obj: objects::Object) -> ast::Node {
    match obj {
        objects::Object::Integer(objects::Integer { value }) => {
            ast::Expr::from(ast::Integer { value }).into()
        }
        objects::Object::Boolean(objects::Boolean { value }) => {
            ast::Expr::from(ast::Boolean { value }).into()
        }
        objects::Object::Quote(quote) => quote.node,
        _ => unimplemented!(),
    }
}
/// True when the node is a call expression whose callee renders as the
/// literal name "unquote".
fn is_unquote_call(node: &ast::Node) -> bool {
    matches!(
        node,
        ast::Node::Expr(ast::Expr::Call(call)) if call.func.to_string() == "unquote"
    )
}
/// Registers every top-level macro definition from the program into `env`,
/// then strips those definitions out of the program's statement list so
/// the evaluator never sees them.
pub fn define_macros(program: &mut ast::Program, env: Rc<RefCell<Environment>>) -> Result<()> {
    // Pass 1: bind each `let <name> = macro(...){...}` in the environment.
    program
        .statements
        .iter()
        .filter(|stmt| is_macro_definition(stmt))
        .try_for_each(|stmt| add_macro(stmt, Rc::clone(&env)))?;
    // Pass 2: drop the definitions in place. retain avoids the full
    // clone-then-filter of the statement list the previous version did.
    program
        .statements
        .retain(|stmt| !is_macro_definition(stmt));
    Ok(())
}
/// True for statements of the form `let <name> = macro(...) { ... };`.
fn is_macro_definition(stmt: &ast::Stmt) -> bool {
    match stmt {
        ast::Stmt::Let(let_stmt) => matches!(let_stmt.value, ast::Expr::MacroLit(_)),
        _ => false,
    }
}
/// Converts a `let <name> = macro(...){...}` statement into a Macro object
/// closing over `env`, and binds it in `env` under the let's name.
fn add_macro(stmt: &ast::Stmt, env: Rc<RefCell<Environment>>) -> Result<()> {
    let (name, value) = match stmt {
        ast::Stmt::Let(ast::Let { name, value }) => (name, value),
        other => return Err(anyhow::format_err!("expect Let. received {}", other)),
    };
    let (params, body) = match value {
        ast::Expr::MacroLit(ast::MacroLit { params, body, .. }) => (params, body),
        other => return Err(anyhow::format_err!("expect Macro. received {}", other)),
    };
    // The macro captures the environment it was defined in.
    let macro_obj = objects::Macro {
        params: params.clone(),
        body: body.clone(),
        env: Rc::clone(&env),
    };
    env.borrow_mut().insert(&name.to_string(), macro_obj.into());
    Ok(())
}
/// Rewrites the AST by expanding every call to a user-defined macro.
///
/// Each macro call's arguments are passed unevaluated (as Quote objects);
/// the macro body must itself evaluate to a Quote, whose inner node
/// replaces the call site. A non-Quote result is an error.
pub fn expand_macros(program: ast::Node, env: Rc<RefCell<Environment>>) -> Result<ast::Node> {
    tools::modify(program, |node| {
        // Only call expressions whose callee resolves to a macro change.
        let call = match &node {
            ast::Node::Expr(ast::Expr::Call(c)) => c,
            _ => return Ok(node),
        };
        let macro_obj = match get_macro_in_env(call, Rc::clone(&env)) {
            Some(m) => m,
            None => return Ok(node),
        };
        let args = quote_args(call.clone());
        let macro_env = extend_macro_env(macro_obj.clone(), args);
        let evaluated = eval_stmt(&macro_obj.body.into(), Rc::new(RefCell::new(macro_env)))?;
        match evaluated {
            objects::Object::Quote(q) => Ok(q.node),
            other => Err(anyhow::format_err!(
                "we only support returning AST-nodes from macros. {}",
                other
            )),
        }
    })
}
/// Resolves a call's callee to a Macro bound in `env`, if any.
///
/// Returns None when the callee is not a plain identifier, when the name
/// is unbound, or when it is bound to something other than a macro.
fn get_macro_in_env(call: &ast::Call, env: Rc<RefCell<Environment>>) -> Option<objects::Macro> {
    let ident = match call.func.as_ref() {
        ast::Expr::Identifier(id) => id,
        _ => return None,
    };
    match env.borrow().get(&ident.value)? {
        objects::Object::Macro(m) => Some(m),
        _ => None,
    }
}
/// Wraps each call argument, unevaluated, in a Quote object.
fn quote_args(call: ast::Call) -> Vec<objects::Quote> {
    let mut quoted = Vec::with_capacity(call.args.len());
    for arg in call.args {
        quoted.push(objects::Quote { node: arg.into() });
    }
    quoted
}
/// Creates a child of the macro's definition environment in which every
/// parameter is bound to its corresponding (quoted, unevaluated) argument.
fn extend_macro_env(m_macro: objects::Macro, args: Vec<objects::Quote>) -> Environment {
    let mut scope = Environment::new(Some(Rc::clone(&m_macro.env)));
    for (param, quoted) in m_macro.params.into_iter().zip(args) {
        scope.insert(&param.value, quoted.into());
    }
    scope
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_eval_integer_expression() {
let tests = vec![
("5", 5_i64),
("5", 5_i64),
("10", 10_i64),
("-5", -5_i64),
("-10", -10_i64),
("5 + 5 + 5 + 5 - 10", 10_i64),
("2 * 2 * 2 * 2 * 2", 32_i64),
("-50 + 100 + -50", 0_i64),
("5 * 2 + 10", 20_i64),
("5 + 2 * 10", 25_i64),
("20 + 2 * -10", 0_i64),
("50 / 2 * 2 + 10", 60_i64),
("2 * (5 + 10)", 30_i64),
("3 * 3 * 3 + 10", 37_i64),
("3 * (3 * 3) + 10", 37_i64),
("(5 + 10 * 2 + 15 / 3) * 2 + -10", 50_i64),
];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_object(eval(input), expected));
}
#[test]
fn test_eval_boolean_expression() {
let tests = vec![
("true", true),
("false", false),
("1 < 2", true),
("1 > 2", false),
("1 < 1", false),
("1 > 1", false),
("1 == 1", true),
("1 != 1", false),
("1 == 2", false),
("1 != 2", true),
("true == true", true),
("false == false", true),
("true == false", false),
("true != false", true),
("false != true", true),
("(1 < 2) == true", true),
("(1 < 2) == false", false),
("(1 > 2) == true", false),
("(1 > 2) == false", true),
];
tests
.into_iter()
.for_each(|(input, expected)| assert_boolean_object(eval(input), expected));
}
#[test]
fn test_bang_operator() {
let tests = vec![
("!true", false),
("!false", true),
("!5", false),
("!!true", true),
("!!false", false),
("!!5", true),
];
tests
.into_iter()
.for_each(|(input, expected)| assert_boolean_object(eval(input), expected));
}
#[test]
fn test_if_else_expression() {
let tests = vec![
("if (true) { 10 }", 10_i64),
("if (1) { 10 }", 10_i64),
("if ( 1 < 2 ) { 10 }", 10_i64),
("if ( 1 > 2 ) { 10 } else { 20 }", 20_i64),
("if ( 1 < 2 ) { 10 } else { 20 }", 10_i64),
];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_object(eval(input), expected));
let tests = vec!["if (false) { 10 }", "if ( 1 > 2 ) { 10 }"];
tests
.into_iter()
.for_each(|input| assert_null_object(eval(input)));
}
#[test]
fn test_return_statements() {
    let tests = vec![
        ("return 10;", 10_i64),
        ("return 10; 9;", 10_i64),
        ("return 2 * 5; 9;", 10_i64),
        ("9; return 2 * 5; 9;", 10_i64),
        (
            // The trailing `return 1;` must never run: the nested
            // `return 10;` unwinds the whole program first. It was
            // previously misspelled `retrun`, which parsed as a bare
            // identifier and so never exercised the intended case.
            r#"
            if (10 > 1) {
                if (10 > 1) {
                    return 10;
                }
            }
            return 1;
            "#,
            10_i64,
        ),
    ];
    tests
        .into_iter()
        .for_each(|(input, expected)| assert_integer_object(eval(input), expected));
}
#[test]
fn test_error_handling() {
let tests = vec![
("5 + true;", "type mismatch: Integer + Boolean"),
("5 + true; 5;", "type mismatch: Integer + Boolean"),
("-true", "unknown operator: -Boolean"),
("true + false;", "unknown operator: Boolean + Boolean"),
("5; true + false; 5", "unknown operator: Boolean + Boolean"),
(
"if (10 > 1 ) { true + false; }",
"unknown operator: Boolean + Boolean",
),
(
r#"
if (10 > 1) {
if (10 > 1) {
return true + false;
}
return 1;
}
"#,
"unknown operator: Boolean + Boolean",
),
("foobar", "identifier not found: foobar"),
(r#""Hello" - "World""#, "unknown operator: String - String"),
(
r#"{"name": "Monkey"}[fn(x) { x }];"#,
"unusable as hash key: Function",
),
];
tests.into_iter().for_each(|(input, expected)| {
assert_error_object(eval_non_check(input).unwrap_err(), expected)
});
}
#[test]
fn test_let_statements() {
let tests = vec![
("let a = 5; a;", 5_i64),
("let a = 5 * 5; a;", 25_i64),
("let a = 5; let b = a; b;", 5_i64),
("let a = 5; let b = a; let c = a + b + 5; c;", 15_i64),
];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_object(eval(input), expected));
}
#[test]
fn test_function_object() {
let tests = vec![("fn(x) { x + 2; };", 1, "x", "{ (x + 2) }")];
tests.into_iter().for_each(
|(input, expected_params_size, expected_params, expected_body)| {
let obj = eval(input);
match obj {
objects::Object::Function(o) => {
assert_eq!(o.params.len(), expected_params_size);
assert_eq!(o.params[0].to_string(), expected_params);
assert_eq!(o.body.to_string(), expected_body);
}
o => panic!("expected Function. received {}", o),
}
},
);
}
#[test]
fn test_function_application() {
let tests = vec![
("let identity = fn(x) { x; }; identity(5);", 5_i64),
("let identity = fn(x) { return x; }; identity(5);", 5_i64),
("let double = fn(x) { x * 2; }; double(5);", 10_i64),
("let add = fn(x, y) { x + y; }; add(5, 5);", 10_i64),
(
"let add = fn(x, y) { x + y; }; add(5 + 5, add(5, 5));",
20_i64,
),
("fn(x) { x; }(5)", 5_i64),
(
r#"
let add = fn(a, b) { a + b };
let sub = fn(a, b) { a - b };
let apply_func = fn(a, b, func) { func(a, b) };
apply_func(2, 2, add);
"#,
4_i64,
),
(
r#"
let add = fn(a, b) { a + b };
let sub = fn(a, b) { a - b };
let apply_func = fn(a, b, func) { func(a, b) };
apply_func(10, 2, sub);
"#,
8_i64,
),
];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_object(eval(input), expected));
}
#[test]
fn test_closures() {
let tests = vec![(
r#"
let new_addr = fn(x) {
fn(y) { x + y};
}
let addTwo = new_addr(2);
addTwo(2);
"#,
4_i64,
)];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_object(eval(input), expected));
}
#[test]
fn test_string_literal() {
let tests = vec![(r#""Hello World!""#, "Hello World!")];
tests
.into_iter()
.for_each(|(input, expected)| assert_string_object(eval(input), expected));
}
#[test]
fn test_string_concatenation() {
let tests = vec![(r#""Hello" + " " + "World!""#, "Hello World!")];
tests
.into_iter()
.for_each(|(input, expected)| assert_string_object(eval(input), expected));
}
#[test]
fn test_builtin_function_len() {
let tests = vec![
(r#"len("")"#, 0_i64),
(r#"len("four")"#, 4_i64),
(r#"len("hello world")"#, 11_i64),
("len([])", 0_i64),
(r#"len([1, "hello", 33])"#, 3_i64),
];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_object(eval(input), expected));
let tests = vec![
("len(1)", "argument to 'len' not supported, got Integer"),
(
r#"len("one", "two")"#,
"wrong number of arguments. got=2, want=1",
),
];
tests.into_iter().for_each(|(input, expected)| {
assert_error_object(eval_non_check(input).unwrap_err(), expected)
});
}
#[test]
fn test_builtin_function_first() {
let tests = vec![("first([1, 2, 3])", 1_i64)];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_object(eval(input), expected));
let tests = vec![(r#"first(["one", "two"])"#, "one")];
tests
.into_iter()
.for_each(|(input, expected)| assert_string_object(eval(input), expected));
let tests = vec!["first([])"];
tests
.into_iter()
.for_each(|input| assert_null_object(eval(input)));
let tests = vec![("let a = [1, 2, 3]; first(a); first(a) == first(a)", true)];
tests
.into_iter()
.for_each(|(input, expected)| assert_boolean_object(eval(input), expected));
let tests = vec![
(
"first([1, 2, 3], [1, 2, 3])",
"wrong number of arguments. got=2, want=1",
),
("first(1)", "argument to 'first' must be Array, got Integer"),
];
tests.into_iter().for_each(|(input, expected)| {
assert_error_object(eval_non_check(input).unwrap_err(), expected)
});
}
#[test]
fn test_builtin_function_last() {
let tests = vec![("last([1, 2, 3])", 3_i64)];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_object(eval(input), expected));
let tests = vec![(r#"last(["one", "two"])"#, "two")];
tests
.into_iter()
.for_each(|(input, expected)| assert_string_object(eval(input), expected));
let tests = vec!["last([])"];
tests
.into_iter()
.for_each(|input| assert_null_object(eval(input)));
let tests = vec![("let a = [1, 2, 3]; last(a); last(a) == last(a)", true)];
tests
.into_iter()
.for_each(|(input, expected)| assert_boolean_object(eval(input), expected));
let tests = vec![
(
"last([1, 2, 3], [1, 2, 3])",
"wrong number of arguments. got=2, want=1",
),
("last(1)", "argument to 'last' must be Array, got Integer"),
];
tests.into_iter().for_each(|(input, expected)| {
assert_error_object(eval_non_check(input).unwrap_err(), expected)
});
}
#[test]
fn test_builtin_function_rest() {
let tests = vec![("rest([1, 2, 3])", [2_i64, 3_i64])];
tests.into_iter().for_each(|(input, expected)| {
assert_integer_array_object(eval(input), expected.to_vec())
});
let tests = vec![(r#"rest(["one", "two"])"#, ["two"])];
tests.into_iter().for_each(|(input, expected)| {
assert_string_array_object(eval(input), expected.to_vec())
});
let tests = vec!["rest([])"];
tests
.into_iter()
.for_each(|input| assert_null_object(eval(input)));
let tests = vec![("let a = [1, 2, 3, 4]; rest(rest(a));", [3_i64, 4_i64])];
tests.into_iter().for_each(|(input, expected)| {
assert_integer_array_object(eval(input), expected.to_vec())
});
let tests = vec!["let a = [1, 2, 3, 4]; rest(rest(a)); rest(rest(rest(rest(rest(a)))));"];
tests
.into_iter()
.for_each(|input| assert_null_object(eval(input)));
let tests = vec![
(
"rest([1, 2, 3], [1, 2, 3])",
"wrong number of arguments. got=2, want=1",
),
("rest(1)", "argument to 'rest' must be Array, got Integer"),
];
tests.into_iter().for_each(|(input, expected)| {
assert_error_object(eval_non_check(input).unwrap_err(), expected)
});
}
#[test]
fn test_builtin_function_push() {
let tests = vec![
("push([1, 2, 3], 4)", vec![1_i64, 2_i64, 3_i64, 4_i64]),
("push([1, 2, 3], 3)", vec![1_i64, 2_i64, 3_i64, 3_i64]),
("push([], 1)", vec![1_i64]),
(
"let a = [1, 2]; push(push(a, 3), 4);",
vec![1_i64, 2_i64, 3_i64, 4_i64],
),
];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_array_object(eval(input), expected));
let input = "push([1, 2, 3], [4, 5])";
let expected1 = 1_i64;
let expected2 = 2_i64;
let expected3 = 3_i64;
let expected4 = vec![4_i64, 5_i64];
let evaluated = eval(input);
match evaluated {
objects::Object::Array(o) => {
assert_eq!(o.elements.len(), 4);
assert_integer_object(o.elements[0].clone(), expected1);
assert_integer_object(o.elements[1].clone(), expected2);
assert_integer_object(o.elements[2].clone(), expected3);
assert_integer_array_object(o.elements[3].clone(), expected4)
}
o => panic!("expected Array, received {:?}.", o),
}
let tests = vec![
(
"push([1, 2, 3], 1, 2)",
"wrong number of arguments. got=3, want=2",
),
(
"push(1, 2)",
"argument to 'push' must be Array, got Integer",
),
];
tests.into_iter().for_each(|(input, expected)| {
assert_error_object(eval_non_check(input).unwrap_err(), expected)
});
}
#[test]
fn test_array_literal() {
let tests = vec![("[1, 2 * 2, 3 + 3]", vec![1_i64, 4_i64, 6_i64])];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_array_object(eval(input), expected));
}
#[test]
fn test_array_index_expression() {
let tests = vec![
("[1, 2, 3][0]", 1_i64),
("[1, 2, 3][1]", 2_i64),
("[1, 2, 3][2]", 3_i64),
("let i = 0; [1][i];", 1_i64),
("[1, 2, 3][1 + 1];", 3_i64),
("let myArray = [1, 2, 3]; myArray[2];", 3_i64),
(
"let myArray = [1, 2, 3]; myArray[0] + myArray[1] + myArray[2];",
6_i64,
),
(
"let myArray = [1, 2, 3]; let i = myArray[0]; myArray[i];",
2_i64,
),
];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_object(eval(input), expected));
let tests = vec!["[1, 2, 3][3]", "[1, 2, 3][-1]"];
tests
.into_iter()
.for_each(|input| assert_null_object(eval(input)));
}
#[test]
fn test_hash_literal() {
let input = r#"
let two = "two";
{
"one": 10 - 9,
two: 1 + 1,
"thr" + "ee": 6 / 2,
4: 4,
true: 5,
false: 6
}
"#;
let evaluated = eval(input);
match evaluated {
objects::Object::Hash(evaluated) => {
assert_eq!(evaluated.pairs.len(), 6);
let expected = vec![
(
objects::HashableObject::try_from(objects::Object::from(
objects::StringLit {
value: "one".into(),
},
))
.unwrap(),
1_i64,
),
(
objects::HashableObject::try_from(objects::Object::from(
objects::StringLit {
value: "two".into(),
},
))
.unwrap(),
2_i64,
),
(
objects::HashableObject::try_from(objects::Object::from(
objects::StringLit {
value: "three".into(),
},
))
.unwrap(),
3_i64,
),
(
objects::HashableObject::try_from(objects::Object::from(
objects::Integer { value: 4_i64 },
))
.unwrap(),
4_i64,
),
(
objects::HashableObject::try_from(objects::Object::from(
objects::Boolean { value: true },
))
.unwrap(),
5_i64,
),
(
objects::HashableObject::try_from(objects::Object::from(
objects::Boolean { value: false },
))
.unwrap(),
6_i64,
),
];
expected
.into_iter()
.for_each(|(expected_key, expected_value)| {
let value = evaluated.pairs.get(&expected_key).unwrap();
assert_integer_object(value.clone(), expected_value);
});
}
o => panic!("expected Hash. received {:?}", o),
}
}
#[test]
fn test_hash_index_expression() {
let tests = vec![
(r#"{"foo": 5}["foo"]"#, 5_i64),
(r#"let key = "foo"; {"foo": 5}[key]"#, 5_i64),
("{5: 5}[5]", 5_i64),
("{true: 5}[true]", 5_i64),
("{false: 5}[false]", 5_i64),
];
tests
.into_iter()
.for_each(|(input, expected)| assert_integer_object(eval(input), expected));
let tests = vec![(r#"{"foo": 5}["bar"]"#), (r#"{}["foo"]"#)];
tests
.into_iter()
.for_each(|input| assert_null_object(eval(input)));
}
#[test]
fn test_quote() {
let tests = vec![
("quote(5)", "5"),
("quote(5 + 8)", "(5 + 8)"),
("quote(foobar)", "foobar"),
("quote(foobar + barfoo)", "(foobar + barfoo)"),
];
tests.into_iter().for_each(|(input, expected)| {
let evaluated = eval(input);
match evaluated {
objects::Object::Quote(o) => assert_eq!(o.to_string(), expected),
o => panic!("expected Quote. received {:?}", o),
}
});
}
#[test]
fn test_quote_unquote() {
let tests = vec![
("quote(unquote(4))", "4"),
("quote(unquote(4 + 4))", "8"),
("quote(8 + unquote(4 + 4))", "(8 + 8)"),
("quote(unquote(4 + 4) + 8)", "(8 + 8)"),
(
"
let foobar = 8;
quote(foobar)
",
"foobar",
),
(
"
let foobar = 8;
quote(unquote(foobar))
",
"8",
),
("quote(unquote(true))", "true"),
("quote(unquote(true == false))", "false"),
("quote(unquote(quote(4 + 4)))", "(4 + 4)"),
(
"
let quotedInfixExpression = quote(4 + 4);
quote(unquote(4 + 4) + unquote(quotedInfixExpression))
",
"(8 + (4 + 4))",
),
];
tests.into_iter().for_each(|(input, expected)| {
let evaluated = eval(input);
match evaluated {
objects::Object::Quote(o) => assert_eq!(o.to_string(), expected),
o => panic!("expected Quote. received {:?}", o),
}
});
}
#[test]
fn test_define_macro() {
let input = "
let number = 1;
let function = fn(x, y) { x + y; };
let mymacro = macro(x, y) { x + y; };
";
let env = Rc::new(RefCell::new(Environment::new(None)));
let mut program = {
let l = crate::lexer::Lexer::new(input.into());
let mut p = crate::parser::Parser::new(l);
p.parse_program().unwrap()
};
define_macros(&mut program, Rc::clone(&env)).unwrap();
assert_eq!(program.statements.len(), 2);
assert!(env.borrow().get("number").is_none());
assert!(env.borrow().get("function").is_none());
let obj = env.borrow().get("mymacro");
assert!(obj.is_some());
let m_macro = match obj.unwrap() {
objects::Object::Macro(m) => m,
obj => panic!("expect Macro. received {}", obj),
};
assert_eq!(m_macro.params.len(), 2);
assert_eq!(m_macro.params[0].to_string(), "x");
assert_eq!(m_macro.params[1].to_string(), "y");
let expected_body = "{ (x + y) }";
assert_eq!(m_macro.body.to_string(), expected_body);
}
#[test]
fn test_expand_macros() {
let tests = vec![
(
"
let infix_expr = macro() { quote(1 + 2) };
infix_expr();
",
"(1 + 2)",
),
(
"
let reverse = macro(a, b) { quote(unquote(b) - unquote(a)); };
reverse(2 + 2, 10 - 5);
",
"(10 - 5) - (2 + 2)",
),
(
r#"
let unless = macro(condition, consequence, alternative) {
quote(
if (!(unquote(condition))) {
unquote(consequence);
} else {
unquote(alternative);
}
);
};
unless(10 > 5, puts("not greater"), puts("greater"));
"#,
r#"if (!(10 > 5)) { puts("not greater") } else { puts("greater") }"#,
),
];
tests.into_iter().for_each(|(input, expected)| {
let expected = {
let l = crate::lexer::Lexer::new(expected.into());
let mut p = crate::parser::Parser::new(l);
p.parse_program().unwrap()
};
let mut program = {
let l = crate::lexer::Lexer::new(input.into());
let mut p = crate::parser::Parser::new(l);
p.parse_program().unwrap()
};
let env = Rc::new(RefCell::new(Environment::new(None)));
define_macros(&mut program, Rc::clone(&env)).unwrap();
let expanded = expand_macros(program.into(), Rc::clone(&env)).unwrap();
assert_eq!(expanded.to_string(), expected.to_string());
assert_eq!(expanded, expected.into());
});
}
#[test]
fn test_fibonacci() {
let input = "
let fibonacci = fn(x) {
if (x == 0) {
return 0;
} else {
if (x == 1) {
return 1;
} else {
fibonacci(x - 1) + fibonacci(x - 2);
}
}
};
fibonacci(15);
";
let env = Rc::new(RefCell::new(Environment::new(None)));
let program = {
let l = crate::lexer::Lexer::new(input.into());
let mut p = crate::parser::Parser::new(l);
p.parse_program().unwrap()
};
let obj = eval_node(&program.into(), env).unwrap();
match obj {
objects::Object::Integer(i) => assert_eq!(i.value, 610),
other => panic!("expected Integer. received {}", other),
};
}
fn check_err_and_unrwap<T, E>(result: std::result::Result<T, E>, input: &str) -> T
where
E: std::fmt::Debug,
{
result
.map_err(move |e| format!("{} //=> {:?}", input, e))
.unwrap()
}
fn eval(input: &str) -> objects::Object {
let evaluated = eval_non_check(input);
check_err_and_unrwap(evaluated, input)
}
fn eval_non_check(input: &str) -> Result<objects::Object> {
let l = crate::lexer::Lexer::new(input.to_string());
let mut p = crate::parser::Parser::new(l);
let program = p.parse_program();
let program = check_err_and_unrwap(program, input);
let env = Rc::new(RefCell::new(Environment::new(None)));
eval_program(&program, env)
}
fn assert_integer_object(obj: objects::Object, expected: i64) {
match obj {
objects::Object::Integer(o) => assert_eq!(o.value, expected),
o => panic!("expected Integer. received {:?}", o),
}
}
fn assert_boolean_object(obj: objects::Object, expected: bool) {
match obj {
objects::Object::Boolean(o) => assert_eq!(o.value, expected),
o => panic!("expected Boolean. received {:?}", o),
}
}
fn assert_null_object(obj: objects::Object) {
match obj {
objects::Object::Null(_) => (),
o => panic!("expected Null. received {:?}", o),
}
}
fn assert_error_object(err: anyhow::Error, expected: &str) {
assert_eq!(err.to_string(), expected)
}
fn assert_string_object(obj: objects::Object, expected: &str) {
match obj {
objects::Object::StringLit(o) => assert_eq!(o.value, expected),
o => panic!("expected StringLit. received {:?}", o),
}
}
fn assert_integer_array_object(obj: objects::Object, expected_arr: Vec<i64>) {
match obj {
objects::Object::Array(o) => {
assert_eq!(o.elements.len(), expected_arr.len());
expected_arr
.into_iter()
.zip(o.elements)
.for_each(|(expected, ele)| match ele {
objects::Object::Integer(o) => assert_eq!(o.value, expected),
o => panic!("expected Array<Integer>. received {:?}", o),
})
}
o => panic!("expected Array<Integer>. received {:?}", o),
}
}
fn assert_string_array_object(obj: objects::Object, expected_arr: Vec<&str>) {
match obj {
objects::Object::Array(o) => {
assert_eq!(o.elements.len(), expected_arr.len());
expected_arr
.into_iter()
.zip(o.elements)
.for_each(|(expected, ele)| match ele {
objects::Object::StringLit(o) => assert_eq!(o.value, expected),
o => panic!("expected Array<Integer>. received {:?}", o),
})
}
o => panic!("expected Array<Integer>. received {:?}", o),
}
}
}
|
use crate::prelude::*;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::str::FromStr;
/// Binary comparison operators.
///
/// Round-trips to/from the conventional symbols ("==", "!=", "<", ">",
/// "<=", ">=") via `as_str` and `FromStr`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum Operator {
    Equal,
    NotEqual,
    LessThan,
    GreaterThan,
    LessThanOrEqual,
    GreaterThanOrEqual,
}
impl ToDebug for Operator {
    // Debug output is simply the operator's symbolic form; the original
    // source text is not needed to render it.
    fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}
impl Operator {
    /// Returns the operator's symbol as an owned String.
    pub fn print(&self) -> String {
        String::from(self.as_str())
    }

    /// Returns the operator's canonical symbol ("==", "<=", ...).
    pub fn as_str(&self) -> &str {
        match self {
            Operator::Equal => "==",
            Operator::NotEqual => "!=",
            Operator::LessThan => "<",
            Operator::GreaterThan => ">",
            Operator::LessThanOrEqual => "<=",
            Operator::GreaterThanOrEqual => ">=",
        }
    }
}
impl From<&str> for Operator {
    /// Parses an operator symbol, panicking on unknown input.
    ///
    /// Kept for ergonomic call sites with known-good literals; fallible
    /// parsing should go through `Operator::from_str`. The panic now names
    /// the offending input instead of a contextless `unwrap` message.
    fn from(input: &str) -> Operator {
        Operator::from_str(input).unwrap_or_else(|_| panic!("invalid operator: {:?}", input))
    }
}
impl FromStr for Operator {
type Err = ();
fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
match input {
"==" => Ok(Operator::Equal),
"!=" => Ok(Operator::NotEqual),
"<" => Ok(Operator::LessThan),
">" => Ok(Operator::GreaterThan),
"<=" => Ok(Operator::LessThanOrEqual),
">=" => Ok(Operator::GreaterThanOrEqual),
_ => Err(()),
}
}
}
|
use std::collections::HashSet;
use crate::get_result_i32;
// https://adventofcode.com/2020/day/1
// https://www.reddit.com/r/rust/comments/k4hoyk/advent_of_code_2020_day_1/
const SUM_OBJECTIVE: i32 = 2020;
const INPUT_FILENAME: &str = "inputs/input01";
/// Entry point for day 1: runs both parts against the shared input file
/// and reports the results through the crate's `get_result_i32` helper.
pub fn solve() {
    get_result_i32(1, part01, INPUT_FILENAME);
    get_result_i32(2, part02, INPUT_FILENAME);
}
fn part01(input: String) -> i32 {
let entries: HashSet<i32> = input.lines()
.map(|x| x.parse::<i32>().unwrap())
.collect();
for entry in &entries {
let other_entry = SUM_OBJECTIVE - entry;
if entries.contains(&other_entry) {
return entry * other_entry;
}
}
panic!("Input does not contain a solution !")
}
fn part02(input: String) -> i32 {
let mut entries: Vec<i32> = input.lines()
.map(|x| x.parse::<i32>().unwrap())
.collect();
entries.sort();
for i in 0..entries.len() {
for k in i..entries.len() {
for j in k..entries.len() {
let first = entries[i];
let second = entries[k];
let third = entries[j];
let sum = first + second + third;
if sum == SUM_OBJECTIVE {
return first * second * third;
}
if sum > SUM_OBJECTIVE {
break;
}
}
}
}
panic!("Input does not contain a solution !")
} |
use std::collections::HashMap;
/// Advent of Code 2020 day 4, part 1: count passports that contain all
/// eight fields, or exactly seven fields with only `cid` missing.
fn main() {
    let data = std::fs::read_to_string("../input.txt").unwrap();
    let count = data
        .split("\n\n") // passports are separated by blank lines
        .filter(|passport| {
            let mut fields: HashMap<&str, &str> = HashMap::with_capacity(8);
            for field in passport.split_whitespace() {
                // Each field is "key:value"; split only on the first ':'
                // so values may themselves contain colons.
                let mut parts = field.splitn(2, ':');
                let key = parts.next().expect("field has a key");
                let value = parts.next().expect("field is key:value");
                fields.insert(key, value);
            }
            // Valid: all 8 fields, or 7 fields where only `cid` is absent.
            // contains_key replaces the roundabout keys().find(..).is_none().
            fields.len() == 8 || (fields.len() == 7 && !fields.contains_key("cid"))
        })
        .count();
    println!("{}", count);
}
|
use enumset::EnumSetType;
use strum::{Display, EnumIter, EnumString, IntoStaticStr};
/// Media category of an item.
///
/// The strum derives provide iteration, string round-tripping and display;
/// `EnumSetType` allows compact bitset membership. Serialized names are
/// snake_case (e.g. `Anime` <-> "anime").
#[derive(Display, EnumIter, EnumSetType, EnumString, IntoStaticStr)]
#[strum(serialize_all = "snake_case")]
pub enum Category {
    Anime,
    Book,
    Music,
    Game,
    Real,
}
/// Tracking state of an item.
///
/// Same derive set as `Category`; serialized names are snake_case
/// (e.g. `OnHold` <-> "on_hold").
#[derive(Display, EnumIter, EnumSetType, EnumString, IntoStaticStr)]
#[strum(serialize_all = "snake_case")]
pub enum State {
    Wish,
    Collect,
    Do,
    OnHold,
    Dropped,
}
/// Numeric identifier for an item.
pub type Id = u32;
/// User rating value.
pub type Rating = u8;
/// Largest allowed rating (10-point scale).
// NOTE(review): presumably ratings are validated against this bound at the
// call sites — confirm; nothing here enforces it.
pub const MAX_RATING: Rating = 10;
/// A single catalogued item.
#[derive(Default)]
pub struct Item {
    /// Unique identifier.
    pub id: Id,
    /// Display title.
    pub title: String,
    /// Optional user rating; `None` when unrated.
    pub rating: Option<Rating>,
    /// Free-form tags attached to the item.
    pub tags: Vec<String>,
}
|
/// Reads one line from stdin and returns it with trailing whitespace
/// (including the newline) stripped.
fn read_line() -> String {
    let mut buffer = String::new();
    std::io::stdin().read_line(&mut buffer).unwrap();
    buffer.trim_end().to_string()
}
/// Reads a header line `n s d`, then `n` lines of `x y`; prints "Yes"
/// when any record satisfies x < s and y > d, otherwise "No".
fn main() {
    let header = read_line();
    let mut header_iter = header.split_whitespace();
    let n: i64 = header_iter.next().unwrap().parse().unwrap();
    let s: i64 = header_iter.next().unwrap().parse().unwrap();
    let d: i64 = header_iter.next().unwrap().parse().unwrap();
    let mut answer = "No";
    // All n lines are consumed even after a match, as in the original.
    for _ in 0..n {
        let row = read_line();
        let mut row_iter = row.split_whitespace();
        let x: i64 = row_iter.next().unwrap().parse().unwrap();
        let y: i64 = row_iter.next().unwrap().parse().unwrap();
        // The two early-return guards of the original collapse to one test.
        if x < s && y > d {
            answer = "Yes";
        }
    }
    println!("{}", answer);
}
|
use super::PubNub;
use crate::data::channel;
use crate::runtime::Runtime;
use crate::subscription::Subscription;
use crate::transport::Transport;
impl<TTransport, TRuntime> PubNub<TTransport, TRuntime>
where
    TTransport: Transport + 'static,
    TRuntime: Runtime + 'static,
{
    /// Subscribe to presence events for the specified channel.
    ///
    /// Presence events are delivered on a companion channel named after
    /// the data channel with a `-pnpres` suffix; this helper derives that
    /// name and delegates to [`PubNub::subscribe`].
    pub async fn subscribe_to_presence(
        &mut self,
        channel: channel::Name,
    ) -> Subscription<TRuntime> {
        // NOTE(review): from_string_unchecked skips validation — presumably
        // appending "-pnpres" to an already-valid name stays valid; confirm
        // against channel::Name's rules (e.g. length limits).
        let presence_channel =
            channel::Name::from_string_unchecked(format!("{}-pnpres", channel));
        self.subscribe(presence_channel).await
    }
}
|
#![cfg_attr(feature = "unstable", feature(plugin))]
#![cfg_attr(feature = "unstable", plugin(clippy))]
//! A small library meant to be used as a build dependency with Cargo for easily
//! integrating [ISPC](https://ispc.github.io/) code into Rust projects.
//!
//! # Using ispc-rs
//!
//! You'll want to add a build script to your crate (`build.rs`), tell Cargo about it and add this crate
//! as a build dependency and optionally as a runtime dependency if you plan to use the `ispc_module` macro
//! or ISPC tasks.
//!
//! ```toml
//! # Cargo.toml
//! [package]
//! # ...
//! build = "build.rs"
//!
//! [dependencies]
//! ispc = "0.3.7"
//!
//! [build-dependencies]
//! ispc = "0.3.7"
//! ```
//!
//! Now you can use `ispc` to compile your code into a static library:
//!
//! ```ignore
//! extern crate ispc;
//!
//! fn main() {
//! // Compile our ISPC library, this call will exit with EXIT_FAILURE if
//! // compilation fails.
//! ispc::compile_library("simple", &["src/simple.ispc"]);
//! }
//! ```
//!
//! Running `cargo build` should now build your ISPC files into a library and link your Rust
//! application with it. For extra convenience the `ispc_module` macro is provided to import
//! bindings to the library generated with [rust-bindgen](https://github.com/crabtw/rust-bindgen)
//! into a module of the same name. Note that all the functions imported will be unsafe as they're
//! the raw C bindings to your lib.
//!
//! ```ignore
//! #[macro_use]
//! extern crate ispc;
//!
//! // Functions exported from simple will be callable under simple::*
//! ispc_module!(simple);
//! ```
//!
//! Some more complete examples can be found in the
//! [examples/](https://github.com/Twinklebear/ispc-rs/tree/master/examples) folder.
//!
//! # Compile-time Requirements
//!
//! Both the [ISPC compiler](https://ispc.github.io/) and [libclang](http://clang.llvm.org/)
//! (for [rust-bindgen](https://github.com/crabtw/rust-bindgen)) must be available in your path.
//!
//! ## Windows Users
//!
//! You'll need Visual Studio and will have to use the MSVC ABI version of Rust since ISPC
//! and Clang link with MSVC on Windows. For bindgen to find libclang you'll need to copy
//! `libclang.lib` to `clang.lib` and place it in your path.
//!
#![allow(dead_code)]
extern crate bindgen;
extern crate gcc;
extern crate libc;
extern crate aligned_alloc;
extern crate num_cpus;
extern crate regex;
extern crate semver;
pub mod task;
pub mod exec;
pub mod opt;
pub mod instrument;
use std::path::{Path, PathBuf};
use std::fs::File;
use std::io::{Write, BufRead, BufReader};
use std::process::{Command, ExitStatus};
use std::env;
use std::mem;
use std::sync::{Once, ONCE_INIT, Arc};
use std::fmt::Display;
use std::collections::BTreeSet;
use std::ffi::CStr;
use regex::Regex;
use semver::Version;
use task::ISPCTaskFn;
use exec::{TaskSystem, Parallel};
use opt::{MathLib, Addressing, CPU, OptimizationOpt, TargetISA};
use instrument::{Instrument, SimpleInstrument};
/// Convenience macro for generating the module to hold the raw/unsafe ISPC bindings.
///
/// In addition to building the library with ISPC we use rust-bindgen to generate
/// a rust module containing bindings to the functions exported from ISPC. These
/// can be imported by passing the name of your library to the `ispc_module` macro.
///
/// The `$lib.rs` file included here is the bindgen output that
/// `Config::compile` writes into `OUT_DIR`, so the build script must have
/// compiled the library before this macro's expansion can compile.
///
/// # Example
///
/// ```ignore
/// #[macro_use]
/// extern crate ispc;
///
/// // Functions exported from foo will be callable under foo::*
/// ispc_module!(foo);
/// ```
#[macro_export]
macro_rules! ispc_module {
    ($lib:ident) => (
        include!(concat!(env!("OUT_DIR"), "/", stringify!($lib), ".rs"));
    )
}
/// Compile the list of ISPC files into a static library and generate bindings
/// using bindgen. The library name should not contain a lib prefix or a lib
/// extension like '.a' or '.lib', the appropriate prefix and suffix will be
/// added based on the compilation target.
///
/// This function will exit the process with `EXIT_FAILURE` if any stage of
/// compilation or linking fails.
///
/// # Example
/// ```no_run
/// ispc::compile_library("foo", &["src/foo.ispc", "src/bar.ispc"]);
/// ```
pub fn compile_library(lib: &str, files: &[&str]) {
let mut cfg = Config::new();
for f in &files[..] {
cfg.file(*f);
}
cfg.compile(lib)
}
/// Handy wrapper around calling exit that will log the message passed first
/// then exit with a failure exit code.
// NOTE: this macro never returns — it prints to stderr and then calls
// `std::process::exit(EXIT_FAILURE)`, so each expansion has type `!`.
macro_rules! exit_failure {
    ($fmt:expr) => {{
        eprintln!($fmt);
        std::process::exit(libc::EXIT_FAILURE);
    }};
    ($fmt:expr, $($arg:tt)*) => {{
        eprintln!($fmt, $($arg)*);
        std::process::exit(libc::EXIT_FAILURE);
    }}
}
/// Extra configuration to be passed to ISPC
pub struct Config {
    // Version reported by `ispc --version`, probed in `Config::new`
    ispc_version: Version,
    // ISPC source files queued for compilation via `file`
    ispc_files: Vec<PathBuf>,
    // Object files produced by ISPC, linked together by `assemble`
    objects: Vec<PathBuf>,
    // Headers emitted by ISPC, merged into `bindgen_header`
    headers: Vec<PathBuf>,
    include_paths: Vec<PathBuf>,
    // We need to generate a single header so we have one header to give bindgen
    bindgen_header: PathBuf,
    // These options are set from the environment if not set by the user
    out_dir: Option<PathBuf>,
    debug: Option<bool>,
    opt_level: Option<u32>,
    target: Option<String>,
    // When true, emit the `cargo:` metadata lines (see `print`)
    cargo_metadata: bool,
    // Additional ISPC compiler options that the user can set
    defines: Vec<(String, Option<String>)>,
    math_lib: MathLib,
    addressing: Option<Addressing>,
    optimization_opts: BTreeSet<OptimizationOpt>,
    cpu_target: Option<CPU>,
    force_alignment: Option<u32>,
    no_omit_frame_ptr: bool,
    no_stdlib: bool,
    no_cpp: bool,
    quiet: bool,
    werror: bool,
    woff: bool,
    wno_perf: bool,
    instrument: bool,
    target_isa: Option<Vec<TargetISA>>,
}
impl Config {
pub fn new() -> Config {
// Query the ISPC compiler version. This also acts as a check that we can
// find the ISPC compiler when we need it later.
let cmd_output = Command::new("ispc").arg("--version").output()
.expect("Failed to find ISPC compiler in PATH");
if !cmd_output.status.success() {
exit_failure!("Failed to get ISPC version, is it in your PATH?");
}
let ver_string = String::from_utf8_lossy(&cmd_output.stdout);
let re = Regex::new(r"Intel\(r\) SPMD Program Compiler \(ispc\), (\d+\.\d+\.\d+)").unwrap();
let ispc_ver = Version::parse(re.captures_iter(&ver_string).next()
.expect("Failed to parse ISPC version").get(1)
.expect("Failed to parse ISPC version").as_str())
.expect("Failed to parse ISPC version");
Config {
ispc_version: ispc_ver,
ispc_files: Vec::new(),
objects: Vec::new(),
headers: Vec::new(),
include_paths: Vec::new(),
bindgen_header: PathBuf::new(),
out_dir: None,
debug: None,
opt_level: None,
target: None,
cargo_metadata: true,
defines: Vec::new(),
math_lib: MathLib::ISPCDefault,
addressing: None,
optimization_opts: BTreeSet::new(),
cpu_target: None,
force_alignment: None,
no_omit_frame_ptr: false,
no_stdlib: false,
no_cpp: false,
quiet: false,
werror: false,
woff: false,
wno_perf: false,
instrument: false,
target_isa: None,
}
}
    // Builder-style setters: each returns `&mut self` so calls can be chained.
    /// Add an ISPC file to be compiled
    pub fn file<P: AsRef<Path>>(&mut self, file: P) -> &mut Config {
        self.ispc_files.push(file.as_ref().to_path_buf());
        self
    }
    /// Set the output directory to override the default of `env!("OUT_DIR")`
    pub fn out_dir<P: AsRef<Path>>(&mut self, dir: P) -> &mut Config {
        self.out_dir = Some(dir.as_ref().to_path_buf());
        self
    }
    /// Set whether debug symbols should be generated, symbols are generated by
    /// default if `env!("DEBUG") == "true"`
    pub fn debug(&mut self, debug: bool) -> &mut Config {
        self.debug = Some(debug);
        self
    }
    /// Set the optimization level to override the default of `env!("OPT_LEVEL")`
    pub fn opt_level(&mut self, opt_level: u32) -> &mut Config {
        self.opt_level = Some(opt_level);
        self
    }
    /// Set the target triple to compile for, overriding the default of `env!("TARGET")`
    pub fn target(&mut self, target: &str) -> &mut Config {
        self.target = Some(target.to_string());
        self
    }
    /// Set whether Cargo metadata should be emitted to link to the compiled library
    pub fn cargo_metadata(&mut self, metadata: bool) -> &mut Config {
        self.cargo_metadata = metadata;
        self
    }
    /// Add a define to be passed to the ISPC compiler, e.g. `-DFOO`
    /// or `-DBAR=FOO` if a value should also be set.
    pub fn add_define(&mut self, define: &str, value: Option<&str>) -> &mut Config {
        self.defines.push((define.to_string(), value.map(|s| s.to_string())));
        self
    }
    /// Select 32 or 64 bit addressing for address calculations in ISPC.
    pub fn addressing(&mut self, addressing: Addressing) -> &mut Config {
        self.addressing = Some(addressing);
        self
    }
    /// Set the math library used by ISPC code, defaults to the ISPC math library.
    pub fn math_lib(&mut self, math_lib: MathLib) -> &mut Config {
        self.math_lib = math_lib;
        self
    }
    /// Set an optimization option.
    pub fn optimization_opt(&mut self, opt: OptimizationOpt) -> &mut Config {
        self.optimization_opts.insert(opt);
        self
    }
    /// Set the cpu target. This overrides the default choice of ISPC which
    /// is to target the host CPU.
    pub fn cpu(&mut self, cpu: CPU) -> &mut Config {
        self.cpu_target = Some(cpu);
        self
    }
    /// Force ISPC memory allocations to be aligned to `alignment`.
    pub fn force_alignment(&mut self, alignment: u32) -> &mut Config {
        self.force_alignment = Some(alignment);
        self
    }
    /// Add an extra include path for the ispc compiler to search for files.
    pub fn include_path<P: AsRef<Path>>(&mut self, path: P) -> &mut Config {
        self.include_paths.push(path.as_ref().to_path_buf());
        self
    }
    /// Disable frame pointer omission. It may be useful for profiling to
    /// disable omission.
    pub fn no_omit_frame_pointer(&mut self) -> &mut Config {
        self.no_omit_frame_ptr = true;
        self
    }
    /// Don't make the ispc standard library available.
    pub fn no_stdlib(&mut self) -> &mut Config {
        self.no_stdlib = true;
        self
    }
    /// Don't run the C preprocessor
    pub fn no_cpp(&mut self) -> &mut Config {
        self.no_cpp = true;
        self
    }
    /// Enable suppression of all ispc compiler output.
    pub fn quiet(&mut self) -> &mut Config {
        self.quiet = true;
        self
    }
    /// Enable treating warnings as errors.
    pub fn werror(&mut self) -> &mut Config {
        self.werror = true;
        self
    }
    /// Disable all warnings.
    pub fn woff(&mut self) -> &mut Config {
        self.woff = true;
        self
    }
    /// Don't issue warnings related to performance issues
    pub fn wno_perf(&mut self) -> &mut Config {
        self.wno_perf = true;
        self
    }
    /// Emit instrumentation code for ISPC to gather performance data such
    /// as vector utilization.
    ///
    /// Exits the process when the detected compiler is older than 1.9.1.
    pub fn instrument(&mut self) -> &mut Config {
        // Older compilers emit an instrumentation header that is not C
        // compatible, which would break the bindgen step.
        let min_ver = Version { major: 1, minor: 9, patch: 1, pre: vec![], build: vec![] };
        if self.ispc_version < min_ver {
            exit_failure!("Error: instrumentation is not supported on ISPC versions \
                          older than 1.9.1 as it generates a non-C compatible header");
        }
        self.instrument = true;
        self
    }
    /// Select the target ISA and vector width. If none is specified ispc will
    /// choose the host CPU ISA and vector width.
    pub fn target_isa(&mut self, target: TargetISA) -> &mut Config {
        self.target_isa = Some(vec![target]);
        self
    }
    /// Select multiple target ISAs and vector widths. If none is specified ispc will
    /// choose the host CPU ISA and vector width.
    /// Note that certain options are not compatible with this use case,
    /// e.g. AVX1.1 will replace AVX1, Host should not be passed (just use the default)
    pub fn target_isas(&mut self, targets: Vec<TargetISA>) -> &mut Config {
        self.target_isa = Some(targets);
        self
    }
/// The library name should not have any prefix or suffix, e.g. instead of
/// `libexample.a` or `example.lib` simply pass `example`
pub fn compile(&mut self, lib: &str) {
let dst = self.get_out_dir();
let default_args = self.default_args();
for s in &self.ispc_files[..] {
let fname = s.file_stem().expect("ISPC source files must be files")
.to_str().expect("ISPC source file names must be valid UTF-8");
self.print(&format!("cargo:rerun-if-changed={}", s.display()));
let ispc_fname = String::from(fname) + "_ispc";
let object = dst.join(ispc_fname.clone()).with_extension("o");
let header = dst.join(ispc_fname.clone()).with_extension("h");
let deps = dst.join(ispc_fname.clone()).with_extension("idep");
let output = Command::new("ispc").args(&default_args[..])
.arg(s).arg("-o").arg(&object).arg("-h").arg(&header)
.arg("-MMM").arg(&deps).output().unwrap();
if !output.stderr.is_empty() {
let stderr = String::from_utf8_lossy(&output.stderr);
for l in stderr.lines() {
println!("cargo:warning={}", l);
}
}
if !output.status.success() {
exit_failure!("Failed to compile ISPC source file {}", s.display());
}
self.objects.push(object);
self.headers.push(header);
// Go this files dependencies and add them to Cargo's watch list
let deps_list = File::open(deps)
.expect(&format!("Failed to open dependencies list for {}", s.display())[..]);
let reader = BufReader::new(deps_list);
for d in reader.lines() {
self.print(&format!("cargo:rerun-if-changed={}", d.unwrap()));
}
// Push on the additional ISA-specific object files if any were generated
if let Some(ref t) = self.target_isa {
if t.len() > 1 {
for isa in t.iter() {
let isa_fname = ispc_fname.clone() + "_" + &isa.lib_suffix();
let isa_obj = dst.join(isa_fname).with_extension("o");
self.objects.push(isa_obj);
}
}
}
}
if !self.assemble(lib).success() {
exit_failure!("Failed to assemble ISPC objects into library {}", lib);
}
println!("cargo:rustc-link-lib=static={}", lib);
println!("cargo:rustc-link-search=native={}", dst.display());
// Now generate a header we can give to bindgen and generate bindings
self.generate_bindgen_header(lib);
let bindings = bindgen::Builder::default()
.header(self.bindgen_header.to_str().unwrap());
let bindgen_file = dst.join(lib).with_extension("rs");
let generated_bindings = match bindings.generate() {
Ok(b) => b.to_string(),
Err(_) => exit_failure!("Failed to generating Rust bindings to {}", lib),
};
let mut file = match File::create(bindgen_file) {
Ok(f) => f,
Err(e) => exit_failure!("Failed to open bindgen mod file for writing: {}", e),
};
file.write_all("#[allow(non_camel_case_types,dead_code,non_upper_case_globals,non_snake_case,improper_ctypes)]\n"
.as_bytes()).unwrap();
file.write_all(format!("pub mod {} {{\n", lib).as_bytes()).unwrap();
file.write_all(generated_bindings.as_bytes()).unwrap();
file.write_all(b"}").unwrap();
// Tell cargo where to find the library we just built if we're running
// in a build script
self.print(&format!("cargo:rustc-link-search=native={}", dst.display()));
}
    /// Get the ISPC compiler version.
    pub fn ispc_version(&self) -> &Version {
        &self.ispc_version
    }
    /// Link the ISPC code into a static library on Unix using `ar`
    #[cfg(unix)]
    fn assemble(&self, lib: &str) -> ExitStatus {
        // "crus": create the archive if needed (c), insert/replace members
        // (r), only update members that are newer (u), write a symbol
        // index (s).
        Command::new("ar").arg("crus")
            .arg(format!("lib{}.a", lib))
            .args(&self.objects[..])
            .current_dir(&self.get_out_dir())
            .status().unwrap()
    }
    /// Link the ISPC code into a static library on Windows using `lib.exe`
    #[cfg(windows)]
    fn assemble(&self, lib: &str) -> ExitStatus {
        // Locate lib.exe through the MSVC toolchain registry for the
        // current target triple.
        let target = self.get_target();
        let mut lib_cmd = gcc::windows_registry::find_tool(&target[..], "lib.exe")
            .expect("Failed to find lib.exe for MSVC toolchain, aborting")
            .to_command();
        lib_cmd.arg(format!("/OUT:{}.lib", lib))
            .args(&self.objects[..])
            .current_dir(&self.get_out_dir())
            .status().unwrap()
    }
/// Generate a single header that includes all of our ISPC headers which we can
/// pass to bindgen
fn generate_bindgen_header(&mut self, lib: &str) {
self.bindgen_header = self.get_out_dir().join(format!("_{}_ispc_bindgen_header.h", lib));
let mut include_file = File::create(&self.bindgen_header).unwrap();
write!(include_file, "#include <stdint.h>\n").unwrap();
write!(include_file, "#include <stdbool.h>\n").unwrap();
for h in &self.headers[..] {
write!(include_file, "#include \"{}\"\n", h.display()).unwrap();
}
}
/// Build up list of basic args for each target, debug, opt level, etc.
fn default_args(&self) -> Vec<String> {
let mut ispc_args = Vec::new();
if self.get_debug() {
ispc_args.push(String::from("-g"));
}
let opt_level = self.get_opt_level();
if let Some(ref c) = self.cpu_target {
ispc_args.push(c.to_string());
// The ispc compiler crashes if we give -O0 and --cpu=generic,
// see https://github.com/ispc/ispc/issues/1223
if *c != CPU::Generic || (*c == CPU::Generic && opt_level != 0) {
ispc_args.push(String::from("-O") + &opt_level.to_string());
} else {
println!("cargo:warning=ispc-rs: Omitting -O0 on CPU::Generic target, ispc bug 1223");
}
} else {
ispc_args.push(String::from("-O") + &opt_level.to_string());
}
// If we're on Unix we need position independent code
if cfg!(unix) {
ispc_args.push(String::from("--pic"));
}
let target = self.get_target();
if target.starts_with("i686") {
ispc_args.push(String::from("--arch=x86"));
} else if target.starts_with("x86_64") {
ispc_args.push(String::from("--arch=x86-64"));
}
for d in &self.defines {
match d.1 {
Some(ref v) => ispc_args.push(format!("-D{}={}", d.0, v)),
None => ispc_args.push(format!("-D{}", d.0)),
}
}
ispc_args.push(self.math_lib.to_string());
if let Some(ref s) = self.addressing {
ispc_args.push(s.to_string());
}
if let Some(ref f) = self.force_alignment {
ispc_args.push(String::from("--force-alignment=") + &f.to_string());
}
for o in &self.optimization_opts {
ispc_args.push(o.to_string());
}
for p in &self.include_paths {
ispc_args.push(format!("-I {}", p.display()));
}
if self.no_omit_frame_ptr {
ispc_args.push(String::from("--no-omit-frame-pointer"));
}
if self.no_stdlib {
ispc_args.push(String::from("--nostdlib"));
}
if self.no_cpp {
ispc_args.push(String::from("--nocpp"));
}
if self.quiet {
ispc_args.push(String::from("--quiet"));
}
if self.werror {
ispc_args.push(String::from("--werror"));
}
if self.woff {
ispc_args.push(String::from("--woff"));
}
if self.wno_perf {
ispc_args.push(String::from("--wno-perf"));
}
if self.instrument {
ispc_args.push(String::from("--instrument"));
}
if let Some(ref t) = self.target_isa {
let mut isa_str = String::from("--target=");
isa_str.push_str(&t[0].to_string());
for isa in t.iter().skip(1) {
isa_str.push_str(&format!(",{}", isa.to_string()));
}
ispc_args.push(isa_str);
}
ispc_args
}
    /// Returns the user-set output directory if they've set one, otherwise
    /// returns env("OUT_DIR")
    fn get_out_dir(&self) -> PathBuf {
        self.out_dir.clone().unwrap_or_else(|| {
            // OUT_DIR is provided by cargo when running a build script
            env::var_os("OUT_DIR").map(PathBuf::from).unwrap()
        })
    }
    /// Returns the user-set debug flag if they've set one, otherwise returns
    /// env("DEBUG")
    fn get_debug(&self) -> bool {
        self.debug.unwrap_or_else(|| {
            env::var("DEBUG").map(|x| x == "true").unwrap()
        })
    }
    /// Returns the user-set optimization level if they've set one, otherwise
    /// returns env("OPT_LEVEL")
    fn get_opt_level(&self) -> u32 {
        self.opt_level.unwrap_or_else(|| {
            let opt = env::var("OPT_LEVEL").unwrap();
            opt.parse::<u32>().unwrap()
        })
    }
    /// Returns the user-set target triple if they're set one, otherwise
    /// returns env("TARGET")
    fn get_target(&self) -> String {
        self.target.clone().unwrap_or_else(|| {
            env::var("TARGET").unwrap()
        })
    }
    /// Print out cargo metadata if enabled
    fn print<T: Display>(&self, s: &T) {
        if self.cargo_metadata {
            println!("{}", s);
        }
    }
}
impl Default for Config {
    // Equivalent to Config::new(), which probes the ISPC compiler version.
    fn default() -> Config { Config::new() }
}
// Global task-system/instrument hooks used by the extern "C" runtime
// callbacks below. Each is set exactly once (guarded by its Once flag) and
// intentionally leaked so the reference stays valid for the program's
// lifetime.
static mut TASK_SYSTEM: Option<&'static TaskSystem> = None;
static TASK_INIT: Once = ONCE_INIT;
static mut INSTRUMENT: Option<&'static Instrument> = None;
static INSTRUMENT_INIT: Once = ONCE_INIT;
/// If you have implemented your own task system you can provide it for use instead
/// of the default threaded one. This must be done prior to calling ISPC code which
/// spawns tasks otherwise the task system will have already been initialized to
/// `Parallel`, which you can also see as an example for implementing a task system.
///
/// Use the function to do any extra initialization for your task system. Note that
/// the task system will be leaked and not destroyed until the program exits and the
/// memory space is cleaned up.
pub fn set_task_system<F: FnOnce() -> Arc<TaskSystem>>(f: F) {
    TASK_INIT.call_once(|| {
        let task_sys = f();
        unsafe {
            // Leak the Arc's contents: grab a raw pointer to the trait
            // object, forget the Arc so it is never dropped, and store the
            // pointer as a 'static reference.
            let s: *const TaskSystem = mem::transmute(&*task_sys);
            mem::forget(task_sys);
            TASK_SYSTEM = Some(&*s);
        }
    });
}
/// Returns the active task system, installing the default `Parallel` one on
/// first use if `set_task_system` was never called.
fn get_task_system() -> &'static TaskSystem {
    // TODO: This is a bit nasty, but I'm not sure on a nicer solution. Maybe something that
    // would let the user register the desired (or default) task system? But if
    // mutable statics can't have destructors we still couldn't have an Arc or Box to something?
    TASK_INIT.call_once(|| {
        unsafe {
            let task_sys = Parallel::new() as Arc<TaskSystem>;
            let s: *const TaskSystem = mem::transmute(&*task_sys);
            mem::forget(task_sys);
            TASK_SYSTEM = Some(&*s);
        }
    });
    // call_once above guarantees TASK_SYSTEM is Some by this point
    unsafe { TASK_SYSTEM.unwrap() }
}
/// If you have implemented your own instrument for logging ISPC performance
/// data you can use this function to provide it for use instead of the
/// default one. This function **must** be called before calling into ISPC code,
/// otherwise the instrumenter will already be set to the default.
pub fn set_instrument<F: FnOnce() -> Arc<Instrument>>(f: F) {
    INSTRUMENT_INIT.call_once(|| {
        let instrument = f();
        unsafe {
            // Same leak-and-store pattern as set_task_system above
            let s: *const Instrument = mem::transmute(&*instrument);
            mem::forget(instrument);
            INSTRUMENT = Some(&*s);
        }
    });
}
/// Print out a summary of performance data gathered from instrumenting ISPC.
/// Must enable instrumenting to have this record and print data, see
/// `Config::instrument`.
pub fn print_instrumenting_summary() {
    get_instrument().print_summary();
}
/// Returns the active instrument, installing `SimpleInstrument` on first
/// use if `set_instrument` was never called.
fn get_instrument() -> &'static Instrument {
    // TODO: This is a bit nasty, like above
    INSTRUMENT_INIT.call_once(|| {
        unsafe {
            let instrument = Arc::new(SimpleInstrument) as Arc<Instrument>;
            let s: *const Instrument = mem::transmute(&*instrument);
            mem::forget(instrument);
            INSTRUMENT = Some(&*s);
        }
    });
    unsafe { INSTRUMENT.unwrap() }
}
// The four extern "C" functions below are the runtime entry points that
// ISPC-generated code calls for task memory allocation, task launching,
// synchronization and instrumentation. Each forwards to the installed task
// system / instrument.
#[allow(non_snake_case)]
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn ISPCAlloc(handle_ptr: *mut *mut libc::c_void, size: libc::int64_t,
                                   align: libc::int32_t) -> *mut libc::c_void {
    get_task_system().alloc(handle_ptr, size, align)
}
#[allow(non_snake_case)]
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn ISPCLaunch(handle_ptr: *mut *mut libc::c_void, f: *mut libc::c_void,
                                    data: *mut libc::c_void, count0: libc::c_int,
                                    count1: libc::c_int, count2: libc::c_int) {
    // `f` is the raw ISPC task function pointer; reinterpret it as our
    // task-function type before handing it to the task system.
    let task_fn: ISPCTaskFn = mem::transmute(f);
    get_task_system().launch(handle_ptr, task_fn, data, count0 as i32, count1 as i32, count2 as i32);
}
#[allow(non_snake_case)]
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn ISPCSync(handle: *mut libc::c_void){
    get_task_system().sync(handle);
}
#[allow(non_snake_case)]
#[doc(hidden)]
#[no_mangle]
pub unsafe extern "C" fn ISPCInstrument(cfile: *const libc::c_char, cnote: *const libc::c_char,
                                        line: libc::c_int, mask: libc::uint64_t) {
    let file_name = CStr::from_ptr(cfile);
    let note = CStr::from_ptr(cnote);
    // Number of active SIMD lanes at this instrumentation point
    let active_count = (mask as u64).count_ones();
    get_instrument().instrument(file_name, note, line as i32, mask as u64, active_count);
}
|
mod char_stream;
mod token_buffer;
//https://tools.ietf.org/html/rfc2396#appendix-A
use common_failures::prelude::*;
use std::fmt::Write;
use uri::char_stream::Char;
use uri::token_buffer::TokenStream;
use uri::token_buffer::*;
/// Attempts to parse `URI-reference = [ absoluteURI | relativeURI ] [ "#" fragment ]`
/// (RFC 2396, Appendix A). Returns `Ok(None)` when neither URI form matches.
fn uri<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Uri>>
where
    T: TokenStream<Char>,
{
    // The optional-fragment handling was previously duplicated verbatim in
    // both branches; it now lives in the `opt_fragment` helper.
    if let Some(au) = absolute_uri(tb)? {
        let f = opt_fragment(tb)?;
        return Ok(Some(Uri::AbsoluteUri(au, f)));
    }
    if let Some(ru) = relative_uri(tb)? {
        let f = opt_fragment(tb)?;
        return Ok(Some(Uri::RelativeUri(ru, f)));
    }
    Ok(None)
}
/// Parses an optional `"#" fragment` suffix; a token other than '#' is
/// pushed back so the buffer is left untouched.
fn opt_fragment<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<String>>
where
    T: TokenStream<Char>,
{
    if let Some(t) = tb.pop()? {
        if t.is(b'#') {
            return Ok(Some(fragment(tb)?.to_string()));
        }
        tb.push(t);
    }
    Ok(None)
}
/// A parsed URI per RFC 2396: either absolute or relative, each carrying
/// an optional fragment component (the text after '#').
#[derive(Debug, PartialEq)]
enum Uri {
    // absoluteURI [ "#" fragment ]
    AbsoluteUri(AbsoluteUri, Option<String>),
    // relativeURI [ "#" fragment ]
    RelativeUri(RelativeUri, Option<String>),
}
impl Uri {
    /// True when this URI is absolute, i.e. it carries a scheme.
    pub fn is_absolute(&self) -> bool {
        match self {
            Uri::AbsoluteUri(_, _) => true,
            _ => false,
        }
    }
    /// True for an absolute URI whose scheme-specific part is opaque
    /// rather than a hierarchical path.
    pub fn is_opaque(&self) -> bool {
        match self {
            Uri::AbsoluteUri(uri, _) => match &uri.1 {
                HierOrOpaquePart::OpaquePart(_) => true,
                _ => false,
            },
            _ => false,
        }
    }
    /// True when this URI is relative (it has no scheme).
    pub fn is_relative(&self) -> bool {
        match self {
            Uri::RelativeUri(_, _) => true,
            _ => false,
        }
    }
    /// The scheme of an absolute URI; `None` for relative URIs.
    pub fn scheme(&self) -> Option<&Scheme> {
        match self {
            Uri::AbsoluteUri(uri, _) => Some(&uri.0),
            _ => None,
        }
    }
    // Internal helper: the net_path ("//authority[/abs_path]") of either
    // URI form, if it has one.
    fn net_path(&self) -> Option<&NetPath> {
        match self {
            Uri::AbsoluteUri(uri, _) => match &uri.1 {
                HierOrOpaquePart::HierPart(hier_part) => match &hier_part.0 {
                    HierPartPath::NetPath(path) => Some(&path),
                    _ => None,
                },
                _ => None,
            },
            Uri::RelativeUri(uri, _) => match &uri.0 {
                RelativeUriPath::NetPath(path) => Some(&path),
                _ => None,
            },
        }
    }
    /// The userinfo component of a server-based authority, if any.
    pub fn userinfo(&self) -> Option<&String> {
        match self.net_path() {
            Some(net_path) => match &net_path.authority {
                Authority::Server(server) => match &server.0 {
                    Some(userinfo) => Some(&userinfo),
                    _ => None,
                },
                _ => None,
            },
            None => None,
        }
    }
    /// The host component of a server-based authority, if any.
    pub fn host(&self) -> Option<&Host> {
        match self.net_path() {
            Some(net_path) => match &net_path.authority {
                Authority::Server(server) => {
                    let hostport = &server.1;
                    Some(&hostport.0)
                }
                _ => None,
            },
            None => None,
        }
    }
    /// The port component of a server-based authority.
    ///
    /// Returns `None` when there is no port, when the port component is
    /// empty, or when the value does not fit in a `u16` — the previous
    /// implementation called `unwrap()` and panicked on ports > 65535.
    pub fn port(&self) -> Option<u16> {
        match self.net_path() {
            Some(net_path) => match &net_path.authority {
                Authority::Server(server) => {
                    let hostport = &server.1;
                    match &hostport.1 {
                        // parse() already fails on an empty string, so the
                        // separate is_empty() check is no longer needed;
                        // .ok() turns any parse failure into None.
                        Some(port) => port.to_string().parse::<u16>().ok(),
                        None => None,
                    }
                }
                _ => None,
            },
            None => None,
        }
    }
    /// The path component (the abs_path of a net_path, or the abs/rel path
    /// of a relative URI), if any.
    pub fn path(&self) -> Option<&String> {
        match self {
            Uri::AbsoluteUri(uri, _) => match &uri.1 {
                HierOrOpaquePart::HierPart(hier_part) => match &hier_part.0 {
                    HierPartPath::NetPath(net_path) => match &net_path.abs_path {
                        Some(abs_path) => Some(&abs_path),
                        _ => None,
                    },
                    _ => None,
                },
                _ => None,
            },
            Uri::RelativeUri(uri, _) => match &uri.0 {
                RelativeUriPath::NetPath(net_path) => match &net_path.abs_path {
                    Some(abs_path) => Some(&abs_path),
                    _ => None,
                },
                RelativeUriPath::AbsPath(abs_path) => Some(&abs_path),
                RelativeUriPath::RelPath(rel_path) => Some(&rel_path),
            },
        }
    }
    /// The query component (the text after '?'), if any.
    pub fn query(&self) -> Option<&String> {
        match self {
            Uri::AbsoluteUri(uri, _) => match &uri.1 {
                HierOrOpaquePart::HierPart(hier_part) => match &hier_part.1 {
                    Some(query) => Some(&query),
                    _ => None,
                },
                _ => None,
            },
            Uri::RelativeUri(uri, _) => match &uri.1 {
                Some(query) => Some(&query),
                _ => None,
            },
        }
    }
    /// The fragment component (the text after '#'), if any.
    pub fn fragment(&self) -> Option<&String> {
        match self {
            Uri::AbsoluteUri(_, fragment) => match fragment {
                Some(fragment) => Some(&fragment),
                _ => None,
            },
            Uri::RelativeUri(_, fragment) => match fragment {
                Some(fragment) => Some(&fragment),
                _ => None,
            },
        }
    }
    /// The opaque part of an absolute, opaque URI, if any.
    pub fn opaque_part(&self) -> Option<&String> {
        match self {
            Uri::AbsoluteUri(uri, _) => match &uri.1 {
                HierOrOpaquePart::OpaquePart(opaque_part) => Some(&opaque_part),
                _ => None,
            },
            _ => None,
        }
    }
}
impl std::fmt::Display for Uri {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Uri::AbsoluteUri(u, f) => {
fmt.write_str(u.to_string().as_str())?;
if let Some(f) = f {
fmt.write_char('#')?;
fmt.write_str(f.to_string().as_str())?;
}
}
Uri::RelativeUri(u, f) => {
fmt.write_str(u.to_string().as_str())?;
if let Some(f) = f {
fmt.write_char('#')?;
fmt.write_str(f.to_string().as_str())?;
}
}
};
Ok(())
}
}
/// Attempts to parse `absoluteURI = scheme ":" ( hier_part | opaque_part )`.
///
/// On any mismatch every consumed token is pushed back (the scheme's
/// tokens via `push_tokens`) so the buffer is left as it was found.
fn absolute_uri<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<AbsoluteUri>>
where
    T: TokenStream<Char>,
{
    let au: Option<AbsoluteUri>;
    if let Some(s) = scheme(tb)? {
        if let Some(t) = tb.pop()? {
            if t.is(b':') {
                let hop: Option<HierOrOpaquePart>;
                if let Some(hp) = hier_part(tb)? {
                    hop = Some(HierOrOpaquePart::HierPart(hp));
                } else if let Some(op) = opaque_part(tb)? {
                    hop = Some(HierOrOpaquePart::OpaquePart(op.to_string()));
                } else {
                    hop = None;
                }
                if let Some(hop) = hop {
                    au = Some(AbsoluteUri(s, hop));
                } else {
                    // ':' was not followed by a valid part: undo the ':'
                    // and then the scheme tokens (reverse consumption order).
                    tb.push(t);
                    tb.push_tokens(s.0);
                    au = None;
                }
            } else {
                // The token after the scheme was not ':', so this is not
                // an absolute URI; restore everything.
                tb.push(t);
                tb.push_tokens(s.0);
                au = None;
            }
        } else {
            // Stream ended right after the scheme.
            tb.push_tokens(s.0);
            au = None;
        }
    } else {
        au = None;
    }
    Ok(au)
}
#[derive(Debug, PartialEq)]
struct AbsoluteUri(Scheme, HierOrOpaquePart);
impl std::fmt::Display for AbsoluteUri {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
fmt.write_str(self.0.to_string().as_str())?;
fmt.write_char(':')?;
fmt.write_str(self.1.to_string().as_str())?;
Ok(())
}
}
#[derive(Debug, PartialEq)]
enum HierOrOpaquePart {
HierPart(HierPart),
OpaquePart(String),
}
impl std::fmt::Display for HierOrOpaquePart {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
HierOrOpaquePart::HierPart(o) => fmt.write_str(o.to_string().as_str()),
HierOrOpaquePart::OpaquePart(o) => fmt.write_str(o.to_string().as_str()),
}
}
}
/// Attempts to parse
/// `relativeURI = ( net_path | abs_path | rel_path ) [ "?" query ]`.
fn relative_uri<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<RelativeUri>>
where
    T: TokenStream<Char>,
{
    // Try the three path alternatives in order; each sub-parser restores
    // the buffer on failure.
    let path = if let Some(np) = net_path(tb)? {
        RelativeUriPath::NetPath(np)
    } else if let Some(ap) = abs_path(tb)? {
        RelativeUriPath::AbsPath(ap.to_string())
    } else if let Some(rp) = rel_path(tb)? {
        RelativeUriPath::RelPath(rp.to_string())
    } else {
        return Ok(None);
    };
    // A '?' introduces an optional query; any other token is pushed back.
    let q = match tb.pop()? {
        Some(t) => {
            if t.is(b'?') {
                Some(query(tb)?.to_string())
            } else {
                tb.push(t);
                None
            }
        }
        None => None,
    };
    Ok(Some(RelativeUri(path, q)))
}
#[derive(Debug, PartialEq)]
struct RelativeUri(RelativeUriPath, Option<String>);
impl std::fmt::Display for RelativeUri {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "{}", self.0)?;
        if let Some(q) = &self.1 {
            write!(fmt, "?{}", q)?;
        }
        Ok(())
    }
}
#[derive(Debug, PartialEq)]
enum RelativeUriPath {
    NetPath(NetPath),
    AbsPath(String),
    RelPath(String),
}
impl std::fmt::Display for RelativeUriPath {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            RelativeUriPath::NetPath(o) => write!(fmt, "{}", o),
            RelativeUriPath::AbsPath(o) => fmt.write_str(o),
            RelativeUriPath::RelPath(o) => fmt.write_str(o),
        }
    }
}
/// Attempts to parse `hier_part = ( net_path | abs_path ) [ "?" query ]`.
fn hier_part<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<HierPart>>
where
    T: TokenStream<Char>,
{
    // Either path alternative restores the buffer if it fails to match.
    let path = if let Some(np) = net_path(tb)? {
        HierPartPath::NetPath(np)
    } else if let Some(ap) = abs_path(tb)? {
        HierPartPath::AbsPath(ap)
    } else {
        return Ok(None);
    };
    // A '?' introduces an optional query; any other token is pushed back.
    let q = match tb.pop()? {
        Some(t) => {
            if t.is(b'?') {
                Some(query(tb)?.to_string())
            } else {
                tb.push(t);
                None
            }
        }
        None => None,
    };
    Ok(Some(HierPart(path, q)))
}
#[derive(Debug, PartialEq)]
struct HierPart(HierPartPath, Option<String>);
impl std::fmt::Display for HierPart {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "{}", self.0)?;
        if let Some(q) = &self.1 {
            write!(fmt, "?{}", q)?;
        }
        Ok(())
    }
}
#[derive(Debug, PartialEq)]
enum HierPartPath {
    NetPath(NetPath),
    AbsPath(AbsPath),
}
impl std::fmt::Display for HierPartPath {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            HierPartPath::NetPath(o) => write!(fmt, "{}", o),
            HierPartPath::AbsPath(o) => write!(fmt, "{}", o),
        }
    }
}
/// Attempts to parse `path = [ abs_path | opaque_part ]`.
fn path<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Path>>
where
    T: TokenStream<Char>,
{
    if let Some(ap) = abs_path(tb)? {
        return Ok(Some(Path::AbsPath(ap)));
    }
    if let Some(op) = opaque_part(tb)? {
        return Ok(Some(Path::OpaquePart(op)));
    }
    Ok(None)
}
#[derive(Debug, PartialEq)]
enum Path {
    AbsPath(AbsPath),
    OpaquePart(OpaquePart),
}
impl std::fmt::Display for Path {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Path::AbsPath(o) => write!(fmt, "{}", o),
            Path::OpaquePart(o) => write!(fmt, "{}", o),
        }
    }
}
/// Attempts to parse `rel_path = rel_segment [ abs_path ]`.
fn rel_path<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<RelPath>>
where
    T: TokenStream<Char>,
{
    match rel_segment(tb)? {
        Some(rs) => {
            // The trailing abs_path is optional.
            let ap = abs_path(tb)?;
            Ok(Some(RelPath(rs, ap)))
        }
        None => Ok(None),
    }
}
#[derive(Debug, PartialEq)]
struct RelPath(RelSegment, Option<AbsPath>);
impl std::fmt::Display for RelPath {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "{}", self.0)?;
        if let Some(ap) = &self.1 {
            write!(fmt, "{}", ap)?;
        }
        Ok(())
    }
}
/// Attempts to parse `net_path = "//" authority [ abs_path ]`.
///
/// On any mismatch the consumed tokens are pushed back most-recent-first,
/// leaving the buffer exactly as it was.
fn net_path<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<NetPath>>
where
    T: TokenStream<Char>,
{
    let np: Option<NetPath>;
    if let Some(t1) = tb.pop()? {
        if t1.is(b'/') {
            if let Some(t2) = tb.pop()? {
                if t2.is(b'/') {
                    if let Some(a) = authority(tb)? {
                        // The abs_path after the authority is optional
                        let ap = match abs_path(tb)? {
                            Some(ap) => Some(ap.to_string()),
                            None => None,
                        };
                        np = Some(NetPath {
                            authority: a,
                            abs_path: ap,
                        });
                    } else {
                        // No authority after "//": undo both slashes,
                        // newest first.
                        tb.push(t2);
                        tb.push(t1);
                        np = None;
                    }
                } else {
                    // Second token was not '/'
                    tb.push(t2);
                    tb.push(t1);
                    np = None;
                }
            } else {
                // Stream ended after a single '/'
                tb.push(t1);
                np = None;
            }
        } else {
            tb.push(t1);
            np = None;
        }
    } else {
        np = None;
    }
    Ok(np)
}
#[derive(Debug, PartialEq)]
struct NetPath {
authority: Authority,
abs_path: Option<String>,
}
impl std::fmt::Display for NetPath {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
fmt.write_char('/')?;
fmt.write_char('/')?;
fmt.write_str(&self.authority.to_string().as_str())?;
if let Some(ap) = &self.abs_path {
fmt.write_str(ap.to_string().as_str())?;
}
Ok(())
}
}
/// Attempts to parse `abs_path = "/" path_segments`, restoring the buffer
/// on any mismatch.
fn abs_path<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<AbsPath>>
where
    T: TokenStream<Char>,
{
    let t = match tb.pop()? {
        Some(t) => t,
        None => return Ok(None),
    };
    if !t.is(b'/') {
        tb.push(t);
        return Ok(None);
    }
    match path_segments(tb)? {
        Some(ps) => Ok(Some(AbsPath(ps))),
        None => {
            // No segments followed the '/': undo the slash as well.
            tb.push(t);
            Ok(None)
        }
    }
}
#[derive(Debug, PartialEq)]
struct AbsPath(PathSegments);
impl std::fmt::Display for AbsPath {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "/{}", self.0)
    }
}
fn opaque_part<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<OpaquePart>>
where
    T: TokenStream<Char>,
{
    // opaque_part = uric_no_slash *uric — the first character must not be a
    // slash; the remainder is any maximal run of uric characters.
    let mut tokens: Vec<Char> = Vec::new();
    match tb.pop()? {
        Some(t) if t.is_uric_no_slash() => tokens.push(t),
        Some(t) => tb.push(t),
        None => (),
    }
    if !tokens.is_empty() {
        while let Some(t) = tb.pop()? {
            if t.is_uric() {
                tokens.push(t);
            } else {
                tb.push(t);
                break;
            }
        }
    }
    if tokens.is_empty() {
        Ok(None)
    } else {
        Ok(Some(OpaquePart(tokens)))
    }
}
/// Parsed `opaque_part` production: the scheme-specific part of an opaque URI.
#[derive(Debug, PartialEq)]
struct OpaquePart(Vec<Char>);
impl std::fmt::Display for OpaquePart {
    // Writes every collected token in order.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.iter().try_for_each(|c| write!(fmt, "{}", c))
    }
}
fn rel_segment<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<RelSegment>>
where
    T: TokenStream<Char>,
{
    // rel_segment = 1*( unreserved | escaped | ";" | "@" | "&" | "=" | "+" | "$" | "," )
    let mut tokens: Vec<Char> = Vec::new();
    while let Some(t) = tb.pop()? {
        let accept = t.is_unreserved()
            || t.is_escaped()
            || match t {
                Char::Ascii(b) => match b {
                    b';' | b'@' | b'&' | b'=' | b'+' | b'$' | b',' => true,
                    _ => false,
                },
                _ => false,
            };
        if accept {
            tokens.push(t);
        } else {
            // First non-matching token terminates the segment; give it back.
            tb.push(t);
            break;
        }
    }
    if tokens.is_empty() {
        Ok(None)
    } else {
        Ok(Some(RelSegment(tokens)))
    }
}
/// Parsed `rel_segment` production: the leading segment of a relative path.
#[derive(Debug, PartialEq)]
struct RelSegment(Vec<Char>);
impl std::fmt::Display for RelSegment {
    // Writes every collected token in order.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.iter().try_for_each(|c| write!(fmt, "{}", c))
    }
}
fn scheme<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Scheme>>
where
    T: TokenStream<Char>,
{
    // scheme = alpha *( alpha | digit | "+" | "-" | "." )
    let mut tokens: Vec<Char> = Vec::new();
    match tb.pop()? {
        Some(t) if t.is_alpha() => tokens.push(t),
        Some(t) => tb.push(t),
        None => (),
    }
    if !tokens.is_empty() {
        while let Some(t) = tb.pop()? {
            if t.is_alpha() || t.is_digit() || t.is(b'+') || t.is(b'-') || t.is(b'.') {
                tokens.push(t);
            } else {
                tb.push(t);
                break;
            }
        }
    }
    if tokens.is_empty() {
        Ok(None)
    } else {
        Ok(Some(Scheme(tokens)))
    }
}
/// Parsed `scheme` production, e.g. `http` or `mailto`.
#[derive(Debug, PartialEq)]
struct Scheme(Vec<Char>);
impl std::fmt::Display for Scheme {
    // Writes every collected token in order.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.iter().try_for_each(|c| write!(fmt, "{}", c))
    }
}
fn authority<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Authority>>
where
    T: TokenStream<Char>,
{
    // authority = server | reg_name — the server form is tried first;
    // reg_name is only attempted when server parsing fails.
    if let Some(s) = server(tb)? {
        return Ok(Some(Authority::Server(s)));
    }
    if let Some(r) = reg_name(tb)? {
        return Ok(Some(Authority::RegName(r)));
    }
    Ok(None)
}
/// Parsed `authority` production: either a server (host-based) or a registry-based name.
#[derive(Debug, PartialEq)]
enum Authority {
    Server(Server),
    RegName(RegName),
}
impl std::fmt::Display for Authority {
    // Delegates to the Display impl of whichever variant is present.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Authority::Server(o) => write!(fmt, "{}", o),
            Authority::RegName(o) => write!(fmt, "{}", o),
        }
    }
}
fn reg_name<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<RegName>>
where
    T: TokenStream<Char>,
{
    // reg_name = 1*( unreserved | escaped | "$" | "," | ";" | ":" | "@" | "&" | "=" | "+" )
    let mut tokens: Vec<Char> = Vec::new();
    while let Some(t) = tb.pop()? {
        let accept = t.is_unreserved()
            || t.is_escaped()
            || match t {
                Char::Ascii(b) => match b {
                    b'$' | b',' | b';' | b':' | b'@' | b'&' | b'=' | b'+' => true,
                    _ => false,
                },
                _ => false,
            };
        if accept {
            tokens.push(t);
        } else {
            // First non-matching token terminates the name; give it back.
            tb.push(t);
            break;
        }
    }
    if tokens.is_empty() {
        Ok(None)
    } else {
        Ok(Some(RegName(tokens)))
    }
}
/// Parsed `reg_name` production: a registry-based naming authority.
#[derive(Debug, PartialEq)]
struct RegName(Vec<Char>);
impl std::fmt::Display for RegName {
    // Writes every collected token in order.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.iter().try_for_each(|c| write!(fmt, "{}", c))
    }
}
/// Parses the `server` production: `[ userinfo "@" ] hostport`.
///
/// `userinfo` itself never fails (it may match zero tokens), so the
/// following `'@'` decides whether the consumed tokens really were a
/// userinfo part.  On any failure every consumed token — including the
/// `'@'` — is pushed back so the buffer is restored.
fn server<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Server>>
where
    T: TokenStream<Char>,
{
    // `mut` removed: the binding is only moved/shadowed, never mutated.
    let ui = userinfo(tb)?;
    let ui = match tb.pop()? {
        Some(t) => match t.is(b'@') {
            true => Some(ui),
            false => {
                // No '@' after the candidate userinfo: restore everything.
                tb.push(t);
                tb.push_tokens(ui.0);
                None
            }
        },
        None => {
            tb.push_tokens(ui.0);
            None
        }
    };
    let s = match hostport(tb)? {
        Some(hp) => {
            // Only the textual form of the userinfo is kept.
            let ui = ui.map(|ui| ui.to_string());
            Some(Server(ui, hp))
        }
        None => match ui {
            Some(ui) => {
                // hostport missing: restore the consumed "userinfo@" prefix.
                tb.push(Char::Ascii(b'@'));
                tb.push_tokens(ui.0);
                None
            }
            None => None,
        },
    };
    Ok(s)
}
/// Parsed `server` production: optional userinfo text plus the hostport.
#[derive(Debug, PartialEq)]
struct Server(Option<String>, Hostport);
impl std::fmt::Display for Server {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
if let Some(u) = &self.0 {
fmt.write_str(u.to_string().as_str())?;
fmt.write_char('@')?;
}
fmt.write_str(self.1.to_string().as_str())?;
Ok(())
}
}
fn userinfo<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Userinfo>
where
    T: TokenStream<Char>,
{
    // userinfo = *( unreserved | escaped | ";" | ":" | "&" | "=" | "+" | "$" | "," )
    // Never fails: an empty run simply yields an empty Userinfo.
    let mut tokens: Vec<Char> = Vec::new();
    while let Some(t) = tb.pop()? {
        let accept = match t {
            Char::Escaped(_) => true,
            Char::Ascii(b) => {
                t.is_unreserved()
                    || match b {
                        b';' | b':' | b'&' | b'=' | b'+' | b'$' | b',' => true,
                        _ => false,
                    }
            }
        };
        if accept {
            tokens.push(t);
        } else {
            // First non-matching token ends the run; give it back.
            tb.push(t);
            break;
        }
    }
    Ok(Userinfo(tokens))
}
/// Parsed `userinfo` production (the part before '@' in a server authority).
#[derive(Debug, PartialEq)]
struct Userinfo(Vec<Char>);
impl std::fmt::Display for Userinfo {
    // Writes every collected token in order.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.iter().try_for_each(|c| write!(fmt, "{}", c))
    }
}
fn hostport<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Hostport>>
where
    T: TokenStream<Char>,
{
    // hostport = host [ ":" port ] — a host is mandatory; the port part is
    // only read after a ':' (port itself may be an empty digit run).
    let ho = match host(tb)? {
        Some(ho) => ho,
        None => return Ok(None),
    };
    let mut po: Option<Port> = None;
    if let Some(c) = tb.pop()? {
        if c.is(b':') {
            po = Some(port(tb)?);
        } else {
            tb.push(c);
        }
    }
    Ok(Some(Hostport(ho, po)))
}
/// Parsed `hostport` production: a host plus an optional port (present after ':').
#[derive(Debug, PartialEq)]
struct Hostport(Host, Option<Port>);
impl std::fmt::Display for Hostport {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
fmt.write_str(&self.0.to_string().as_str())?;
if let Some(po) = &self.1 {
fmt.write_char(':')?;
fmt.write_str(po.to_string().as_str())?;
}
Ok(())
}
}
fn host<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Host>>
where
    T: TokenStream<Char>,
{
    // host = hostname | IPv4address — hostname is tried first; the IPv4
    // form is only attempted when hostname parsing fails.
    if let Some(hn) = hostname(tb)? {
        return Ok(Some(Host::Hostname(hn)));
    }
    if let Some(ia) = ipv4_address(tb)? {
        return Ok(Some(Host::IPv4address(ia)));
    }
    Ok(None)
}
/// Parsed `host` production: a domain name or a dotted-quad IPv4 address.
#[derive(Debug, PartialEq)]
enum Host {
    Hostname(Hostname),
    IPv4address(IPv4address),
}
impl std::fmt::Display for Host {
    // Delegates to the Display impl of whichever variant is present.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Host::Hostname(hn) => write!(fmt, "{}", hn),
            Host::IPv4address(ip) => write!(fmt, "{}", ip),
        }
    }
}
// hostname = *( domainlabel "." ) toplabel [ "." ]
//
// Strategy: greedily collect dot-terminated domainlabels, then require a
// toplabel.  Because the final label of the name may have been consumed as
// a domainlabel, the function backtracks by one "label + '.'" pair when no
// toplabel follows, and undoes all consumed labels if that also fails.
fn hostname<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Hostname>>
where
    T: TokenStream<Char>,
{
    let mut domainlabels: Vec<Domainlabel> = Vec::new();
    // Phase 1: consume "domainlabel ." pairs.  A label without a trailing
    // dot is pushed back — it must be (part of) the toplabel instead.
    loop {
        if let Some(dl) = domainlabel(tb)? {
            if let Some(c) = tb.pop()? {
                if c.is(b'.') {
                    domainlabels.push(dl);
                } else {
                    tb.push(c);
                    tb.push_tokens(dl.0);
                    break;
                }
            } else {
                tb.push_tokens(dl.0);
                break;
            }
        } else {
            break;
        }
    }
    // Phase 2: the mandatory toplabel, plus an optional trailing dot.
    let mut tl: Option<Toplabel> = toplabel(tb)?;
    let mut dot: Option<Char> = None;
    if let Some(_) = tl {
        if let Some(c) = tb.pop()? {
            if c.is(b'.') {
                dot = Some(c);
            } else {
                tb.push(c);
            }
        }
    } else if !domainlabels.is_empty() {
        // No toplabel found: re-push the last "domainlabel ." pair and try
        // to re-read that label as the toplabel (e.g. "123.foo." — "foo"
        // was consumed as a domainlabel but is really the toplabel).
        let last_idx = domainlabels.len() - 1;
        let dl = domainlabels.remove(last_idx);
        tb.push(Char::Ascii(b'.'));
        tb.push_tokens(dl.0);
        tl = toplabel(tb)?;
        if tl.is_some() {
            // The re-pushed '.' is the next token; it becomes the trailing dot.
            dot = tb.pop()?
        } else {
            // Still no toplabel: restore every remaining "label + '.'" pair.
            for _ in 0..domainlabels.len() {
                tb.push(Char::Ascii(b'.'));
                let last_idx = domainlabels.len() - 1;
                let dl = domainlabels.remove(last_idx);
                tb.push_tokens(dl.0);
            }
        }
    }
    match tl {
        Some(tl) => Ok(Some(Hostname(domainlabels, tl, dot))),
        None => Ok(None),
    }
}
/// Parsed `hostname`: leading domainlabels, the toplabel, and an optional trailing dot.
#[derive(Debug, PartialEq)]
struct Hostname(Vec<Domainlabel>, Toplabel, Option<Char>);
impl std::fmt::Display for Hostname {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
for dl in &self.0 {
fmt.write_str(dl.to_string().as_str())?;
fmt.write_char('.')?;
}
fmt.write_str(&self.1.to_string().as_str())?;
if let Some(_) = &self.2 {
fmt.write_char('.')?;
}
Ok(())
}
}
fn domainlabel<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Domainlabel>>
where
    T: TokenStream<Char>,
{
    // domainlabel = alphanum [ *( alphanum | "-" ) ] ending in alphanum.
    let first = match tb.pop()? {
        None => return Ok(None),
        Some(c) => {
            if !c.is_alphanum() {
                tb.push(c);
                return Ok(None);
            }
            c
        }
    };
    let mut tokens = vec![first];
    while let Some(c) = tb.pop()? {
        if c.is_alphanum() || c.is(b'-') {
            tokens.push(c);
        } else {
            tb.push(c);
            break;
        }
    }
    if tokens.last().unwrap().is_alphanum() {
        Ok(Some(Domainlabel(tokens)))
    } else {
        // A label may not end in '-': give all tokens back.
        tb.push_tokens(tokens);
        Ok(None)
    }
}
/// Parsed `domainlabel`: one dot-separated label of a hostname.
#[derive(Debug, PartialEq)]
struct Domainlabel(Vec<Char>);
impl std::fmt::Display for Domainlabel {
    // Writes every collected token in order.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.iter().try_for_each(|c| write!(fmt, "{}", c))
    }
}
/// Parses the `toplabel` production: `alpha *( alphanum | "-" )`,
/// which must not end in `'-'`.
///
/// On failure every consumed token is pushed back and `Ok(None)` is
/// returned.  (The unused `tokens_len` local from the original has been
/// removed — it triggered a dead-code warning and served no purpose.)
fn toplabel<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Toplabel>>
where
    T: TokenStream<Char>,
{
    let mut tokens: Vec<Char> = Vec::new();
    if let Some(c) = tb.pop()? {
        if c.is_alpha() {
            tokens.push(c);
            loop {
                if let Some(c) = tb.pop()? {
                    if c.is_alphanum() || c.is(b'-') {
                        tokens.push(c);
                    } else {
                        tb.push(c);
                        break;
                    }
                } else {
                    break;
                }
            }
            // A label may not end in '-': give all tokens back in that case.
            let last_is_alphanum = tokens.last().unwrap().is_alphanum();
            if !last_is_alphanum {
                tb.push_tokens(tokens);
                tokens = Vec::new();
            }
        } else {
            tb.push(c);
        }
    }
    match tokens.is_empty() {
        true => Ok(None),
        false => Ok(Some(Toplabel(tokens))),
    }
}
/// Parsed `toplabel`: the final label of a hostname (must start with a letter).
#[derive(Debug, PartialEq)]
struct Toplabel(Vec<Char>);
impl std::fmt::Display for Toplabel {
    // Writes every collected token in order.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.iter().try_for_each(|c| write!(fmt, "{}", c))
    }
}
/// Parses the `IPv4address` production: `1*digit "." 1*digit "." 1*digit "." 1*digit`.
///
/// The original repeated the same "digits, then dot, else unwind" stanza
/// four times; it is decomposed here into `dot`/`restore` helpers.  On
/// failure every consumed token is pushed back newest-first, preserving
/// the original restore order exactly, so the buffer is left untouched.
fn ipv4_address<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<IPv4address>>
where
    T: TokenStream<Char>,
{
    // Pops a '.' separator if present; any other token is pushed back.
    fn dot<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Char>>
    where
        T: TokenStream<Char>,
    {
        Ok(match tb.pop()? {
            Some(c) if c.is(b'.') => Some(c),
            Some(c) => {
                tb.push(c);
                None
            }
            None => None,
        })
    }

    // Pushes back everything consumed so far, newest token first.  A digit
    // group is always consumed before its trailing dot, so when the counts
    // are equal the newest token is a dot.
    fn restore<T>(tb: &mut TokenBuffer<Char, T>, mut groups: Vec<Vec<Char>>, mut dots: Vec<Char>)
    where
        T: TokenStream<Char>,
    {
        while !groups.is_empty() || !dots.is_empty() {
            if groups.len() > dots.len() {
                tb.push_tokens(groups.pop().unwrap());
            } else {
                tb.push(dots.pop().unwrap());
            }
        }
    }

    let mut groups: Vec<Vec<Char>> = Vec::new();
    let mut dots: Vec<Char> = Vec::new();
    for i in 0..4 {
        let d = digits(tb)?;
        if d.is_empty() {
            restore(tb, groups, dots);
            return Ok(None);
        }
        groups.push(d);
        // Only the first three digit groups are followed by a mandatory dot;
        // any token after the fourth group is left in the buffer.
        if i < 3 {
            match dot(tb)? {
                Some(c) => dots.push(c),
                None => {
                    restore(tb, groups, dots);
                    return Ok(None);
                }
            }
        }
    }
    let d4 = groups.pop().unwrap();
    let d3 = groups.pop().unwrap();
    let d2 = groups.pop().unwrap();
    let d1 = groups.pop().unwrap();
    Ok(Some(IPv4address(d1, d2, d3, d4)))
}
/// Parsed `IPv4address`: the four dot-separated digit groups, in order.
#[derive(Debug, PartialEq)]
struct IPv4address(Vec<Char>, Vec<Char>, Vec<Char>, Vec<Char>);
impl std::fmt::Display for IPv4address {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
for c in &self.0 {
fmt.write_str(c.to_string().as_str())?;
}
fmt.write_char('.')?;
for c in &self.1 {
fmt.write_str(c.to_string().as_str())?;
}
fmt.write_char('.')?;
for c in &self.2 {
fmt.write_str(c.to_string().as_str())?;
}
fmt.write_char('.')?;
for c in &self.3 {
fmt.write_str(c.to_string().as_str())?;
}
Ok(())
}
}
/// Parsed `port`: a possibly-empty run of digit tokens.
#[derive(Debug, PartialEq)]
struct Port(Vec<Char>);
impl std::fmt::Display for Port {
    // Writes every digit token in order.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.iter().try_for_each(|d| write!(fmt, "{}", d))
    }
}
fn port<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Port>
where
T: TokenStream<Char>,
{
let d = digits(tb)?;
Ok(Port(d))
}
fn digits<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Vec<Char>>
where
    T: TokenStream<Char>,
{
    // Collects a maximal (possibly empty) run of digit tokens; the first
    // non-digit token is pushed back.
    let mut collected: Vec<Char> = Vec::new();
    while let Some(c) = tb.pop()? {
        if c.is_digit() {
            collected.push(c);
        } else {
            tb.push(c);
            break;
        }
    }
    Ok(collected)
}
/// Parsed `path_segments` production: '/'-separated path segments.
#[derive(Debug, PartialEq)]
struct PathSegments {
    // The ordered segments; a trailing "/" is represented by an empty segment.
    segments: Vec<Segment>,
}
impl std::fmt::Display for PathSegments {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
for (i, s) in self.segments.iter().enumerate() {
if i > 0 {
fmt.write_char('/')?;
}
fmt.write_str(s.to_string().as_str())?;
}
Ok(())
}
}
/// Parses the `path_segments` production: `segment *( "/" segment )`.
///
/// A `'/'` followed by no segment contributes an empty trailing segment
/// (so `"foo/"` yields two segments).  A non-`'/'` token terminates the
/// list and is pushed back.
///
/// (Replaces the original's `match … { Some => push, None => () }` with
/// idiomatic early-return / `while let` flow.)
fn path_segments<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<PathSegments>>
where
    T: TokenStream<Char>,
{
    let first = match segment(tb)? {
        Some(s) => s,
        None => return Ok(None),
    };
    let mut segments = vec![first];
    while let Some(c) = tb.pop()? {
        if !c.is(b'/') {
            tb.push(c);
            break;
        }
        match segment(tb)? {
            Some(s) => segments.push(s),
            None => {
                // "/" with nothing after it: record an empty segment.
                segments.push(Segment::new());
                break;
            }
        }
    }
    Ok(Some(PathSegments { segments }))
}
/// One path segment: its pchar run plus any ";"-separated params.
#[derive(Debug, PartialEq, Default)]
struct Segment {
    // The segment's characters (empty for a trailing "/").
    pchars: Vec<Char>,
    // Params that followed the segment, each introduced by ';'; None when absent.
    params: Option<Vec<Param>>,
}
impl Segment {
fn new() -> Segment {
Segment {
..Default::default()
}
}
}
impl std::fmt::Display for Segment {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
for c in &self.pchars {
fmt.write_str(c.to_string().as_str())?;
}
if let Some(params) = &self.params {
for p in params {
fmt.write_char(';')?;
fmt.write_str(p.to_string().as_str())?;
}
}
Ok(())
}
}
/// Parses the `segment` production: `*pchar *( ";" param )`.
///
/// A `';'` followed by no pchars contributes an empty param (so `"foo;"`
/// has one empty param).  A non-`';'` token terminates the list and is
/// pushed back.
///
/// (Fixes the redundant field initializer `pchars: pchars` and flattens
/// the `loop`/`continue` control flow.)
fn segment<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Segment>>
where
    T: TokenStream<Char>,
{
    let mut s = match pchars(tb)? {
        Some(pchars) => Segment {
            pchars,
            params: None,
        },
        None => return Ok(None),
    };
    let mut params: Vec<Param> = Vec::new();
    while let Some(c) = tb.pop()? {
        if !c.is(b';') {
            tb.push(c);
            break;
        }
        match param(tb)? {
            Some(p) => params.push(p),
            None => {
                // ";" with nothing after it: record an empty param.
                params.push(Param { pchars: Vec::new() });
                break;
            }
        }
    }
    if !params.is_empty() {
        s.params = Some(params);
    }
    Ok(Some(s))
}
/// Parsed `param` production: one ';'-introduced parameter of a path segment.
#[derive(Debug, PartialEq)]
struct Param {
    // The param's characters (may be empty for a trailing ';').
    pchars: Vec<Char>,
}
impl std::fmt::Display for Param {
    // Writes every collected token in order.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.pchars.iter().try_for_each(|c| write!(fmt, "{}", c))
    }
}
/// Parses the `param` production: a non-empty run of pchars.
/// Returns `Ok(None)` when no pchar is available.
/// (Replaces the manual `Some`/`None` match with `Option::map`.)
fn param<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Param>>
where
    T: TokenStream<Char>,
{
    Ok(pchars(tb)?.map(|pchars| Param { pchars }))
}
fn pchars<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Option<Vec<Char>>>
where
    T: TokenStream<Char>,
{
    // Collects a maximal run of pchar tokens; the first non-pchar token is
    // pushed back.  An empty run yields None.
    let mut collected: Vec<Char> = Vec::new();
    while let Some(c) = tb.pop()? {
        if c.is_pchar() {
            collected.push(c);
        } else {
            tb.push(c);
            break;
        }
    }
    if collected.is_empty() {
        Ok(None)
    } else {
        Ok(Some(collected))
    }
}
fn query<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Query>
where
T: TokenStream<Char>,
{
Ok(Query(fragment(tb)?.0))
}
/// Parsed `query` production (the part after '?').
#[derive(Debug, PartialEq)]
struct Query(Vec<Char>);
impl std::fmt::Display for Query {
    // Writes every collected token in order.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.iter().try_for_each(|c| write!(fmt, "{}", c))
    }
}
fn fragment<T>(tb: &mut TokenBuffer<Char, T>) -> Result<Fragment>
where
    T: TokenStream<Char>,
{
    // fragment = *uric — never fails; an empty run yields an empty Fragment.
    let mut tokens: Vec<Char> = Vec::new();
    while let Some(t) = tb.pop()? {
        if t.is_uric() {
            tokens.push(t);
        } else {
            tb.push(t);
            break;
        }
    }
    Ok(Fragment(tokens))
}
/// Parsed `fragment` production (the part after '#').
#[derive(Debug, PartialEq)]
struct Fragment(Vec<Char>);
impl std::fmt::Display for Fragment {
    // Writes every collected token in order.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.iter().try_for_each(|c| write!(fmt, "{}", c))
    }
}
#[cfg(test)]
mod tests {
    // End-to-end and per-production tests.  Many asserts check
    // `tb.buffer.len()` after a failed parse to verify that the parser
    // pushed every consumed token back (full backtracking).
    use super::*;
    // Full absolute URI with every component present.
    #[test]
    fn test_absolute_uri() {
        let uri_str = "http://user:pwd@www.sourcepit.org:123/foo/bar.html?query=true#fragment";
        let mut tb = TokenBuffer::from(uri_str.as_bytes());
        assert_eq!(0, tb.buffer.len());
        let u = uri(&mut tb).unwrap();
        assert!(u.is_some());
        let u = u.unwrap();
        assert_eq!(true, u.is_absolute());
        assert_eq!(false, u.is_opaque());
        assert_eq!(false, u.is_relative());
        let scheme = u.scheme();
        assert!(scheme.is_some());
        let scheme = scheme.unwrap();
        assert_eq!("http", scheme.to_string());
        let userinfo = u.userinfo();
        assert!(userinfo.is_some());
        let userinfo = userinfo.unwrap();
        assert_eq!("user:pwd", userinfo);
        let host = u.host();
        assert!(host.is_some());
        let host = host.unwrap();
        assert_eq!("www.sourcepit.org", host.to_string());
        let port = u.port();
        assert!(port.is_some());
        let port = port.unwrap();
        assert_eq!(123, port);
        let path = u.path();
        assert!(path.is_some());
        let path = path.unwrap();
        assert_eq!("/foo/bar.html", path);
        let query = u.query();
        assert!(query.is_some());
        let query = query.unwrap();
        assert_eq!("query=true", query);
        let fragment = u.fragment();
        assert!(fragment.is_some());
        let fragment = fragment.unwrap();
        assert_eq!("fragment", fragment);
        // Round-trip: formatting the parsed URI reproduces the input.
        assert_eq!(uri_str, u.to_string());
    }
    // Opaque URI: scheme + opaque part, no hierarchical components.
    #[test]
    fn test_opaque_uri() {
        let uri_str = "mailto:a@b.com#fragment";
        let mut tb = TokenBuffer::from(uri_str.as_bytes());
        assert_eq!(0, tb.buffer.len());
        let u = uri(&mut tb).unwrap();
        assert!(u.is_some());
        let u = u.unwrap();
        assert_eq!(true, u.is_absolute());
        assert_eq!(true, u.is_opaque());
        assert_eq!(false, u.is_relative());
        let scheme = u.scheme();
        assert!(scheme.is_some());
        let scheme = scheme.unwrap();
        assert_eq!("mailto", scheme.to_string());
        let opaque_part = u.opaque_part();
        assert!(opaque_part.is_some());
        let opaque_part = opaque_part.unwrap();
        assert_eq!("a@b.com", opaque_part.to_string());
        let userinfo = u.userinfo();
        assert!(userinfo.is_none());
        let host = u.host();
        assert!(host.is_none());
        let path = u.path();
        assert!(path.is_none());
        let query = u.query();
        assert!(query.is_none());
        let fragment = u.fragment();
        assert!(fragment.is_some());
        let fragment = fragment.unwrap();
        assert_eq!("fragment", fragment);
        assert_eq!(uri_str, u.to_string());
    }
    // Relative URI beginning with "//" (network path, no scheme).
    #[test]
    fn test_relative_uri_with_net_path() {
        let uri_str = "//user:pwd@www.sourcepit.org:123/foo/bar.html?query=true#fragment";
        let mut tb = TokenBuffer::from(uri_str.as_bytes());
        assert_eq!(0, tb.buffer.len());
        let u = uri(&mut tb).unwrap();
        assert!(u.is_some());
        let u = u.unwrap();
        assert_eq!(false, u.is_absolute());
        assert_eq!(false, u.is_opaque());
        assert_eq!(true, u.is_relative());
        let scheme = u.scheme();
        assert!(scheme.is_none());
        let userinfo = u.userinfo();
        assert!(userinfo.is_some());
        let userinfo = userinfo.unwrap();
        assert_eq!("user:pwd", userinfo);
        let host = u.host();
        assert!(host.is_some());
        let host = host.unwrap();
        assert_eq!("www.sourcepit.org", host.to_string());
        let port = u.port();
        assert!(port.is_some());
        let port = port.unwrap();
        assert_eq!(123, port);
        let path = u.path();
        assert!(path.is_some());
        let path = path.unwrap();
        assert_eq!("/foo/bar.html", path);
        let query = u.query();
        assert!(query.is_some());
        let query = query.unwrap();
        assert_eq!("query=true", query);
        let fragment = u.fragment();
        assert!(fragment.is_some());
        let fragment = fragment.unwrap();
        assert_eq!("fragment", fragment);
        assert_eq!(uri_str, u.to_string());
    }
    // Relative URI beginning with "/" (absolute path, no authority).
    #[test]
    fn test_relative_uri_with_abs_path() {
        let uri_str = "/foo/bar.html?query=true#fragment";
        let mut tb = TokenBuffer::from(uri_str.as_bytes());
        assert_eq!(0, tb.buffer.len());
        let u = uri(&mut tb).unwrap();
        assert!(u.is_some());
        let u = u.unwrap();
        assert_eq!(false, u.is_absolute());
        assert_eq!(false, u.is_opaque());
        assert_eq!(true, u.is_relative());
        let scheme = u.scheme();
        assert!(scheme.is_none());
        let userinfo = u.userinfo();
        assert!(userinfo.is_none());
        let host = u.host();
        assert!(host.is_none());
        let path = u.path();
        assert!(path.is_some());
        let path = path.unwrap();
        assert_eq!("/foo/bar.html", path);
        let query = u.query();
        assert!(query.is_some());
        let query = query.unwrap();
        assert_eq!("query=true", query);
        let fragment = u.fragment();
        assert!(fragment.is_some());
        let fragment = fragment.unwrap();
        assert_eq!("fragment", fragment);
        assert_eq!(uri_str, u.to_string());
    }
    // Relative URI with a relative path (no leading '/').
    #[test]
    fn test_relative_uri_with_rel_path() {
        let uri_str = "foo/bar.html?query=true#fragment";
        let mut tb = TokenBuffer::from(uri_str.as_bytes());
        assert_eq!(0, tb.buffer.len());
        let u = uri(&mut tb).unwrap();
        assert!(u.is_some());
        let u = u.unwrap();
        assert_eq!(false, u.is_absolute());
        assert_eq!(false, u.is_opaque());
        assert_eq!(true, u.is_relative());
        let scheme = u.scheme();
        assert!(scheme.is_none());
        let userinfo = u.userinfo();
        assert!(userinfo.is_none());
        let host = u.host();
        assert!(host.is_none());
        let path = u.path();
        assert!(path.is_some());
        let path = path.unwrap();
        assert_eq!("foo/bar.html", path);
        let query = u.query();
        assert!(query.is_some());
        let query = query.unwrap();
        assert_eq!("query=true", query);
        let fragment = u.fragment();
        assert!(fragment.is_some());
        let fragment = fragment.unwrap();
        assert_eq!("fragment", fragment);
        assert_eq!(uri_str, u.to_string());
    }
    // server: optional "userinfo@" prefix and backtracking when hostport fails.
    #[test]
    fn test_server() {
        let mut tb = TokenBuffer::from("".as_bytes());
        let s = server(&mut tb).unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!(None, s);
        let mut tb = TokenBuffer::from("foo".as_bytes());
        let s = server(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!(None, s.0);
        assert_eq!("foo", s.1.to_string());
        let mut tb = TokenBuffer::from("foo@bar".as_bytes());
        let s = server(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert!(s.0.is_some());
        assert_eq!("foo", s.0.unwrap().to_string());
        assert_eq!("bar", s.1.to_string());
        // "foo@" has no hostport: all four tokens must be pushed back.
        let mut tb = TokenBuffer::from("foo@".as_bytes());
        let s = server(&mut tb).unwrap();
        assert_eq!(4, tb.buffer.len());
        assert_eq!(None, s);
    }
    // hostport: host with and without a ':' / port suffix.
    #[test]
    fn test_hostport() {
        let mut tb = TokenBuffer::from("".as_bytes());
        let hp = hostport(&mut tb).unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!(None, hp);
        let mut tb = TokenBuffer::from("foo".as_bytes());
        let hp = hostport(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!("foo", hp.0.to_string());
        assert!(hp.1.is_none());
        assert_eq!("foo", hp.to_string());
        // "foo:" — a ':' with an empty digit run still yields Some(port).
        let mut tb = TokenBuffer::from("foo:".as_bytes());
        let hp = hostport(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!("foo", hp.0.to_string());
        assert!(hp.1.is_some());
        assert_eq!("foo:", hp.to_string());
        let mut tb = TokenBuffer::from("foo:123".as_bytes());
        let hp = hostport(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!("foo", hp.0.to_string());
        assert!(hp.1.is_some());
        assert_eq!("foo:123", hp.to_string());
    }
    // host: dotted-quad parses as IPv4, otherwise as a hostname.
    #[test]
    fn test_host() {
        let mut tb = TokenBuffer::from("".as_bytes());
        let ho = host(&mut tb).unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!(None, ho);
        let mut tb = TokenBuffer::from("1.2.3.4".as_bytes());
        let ho = host(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        let is_ip = match ho {
            Host::Hostname(_) => false,
            Host::IPv4address(_) => true,
        };
        assert!(is_ip);
        // Last group is not all digits, so the hostname branch wins.
        let mut tb = TokenBuffer::from("1.2.3.f".as_bytes());
        let ho = host(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        let is_hostname = match ho {
            Host::Hostname(_) => true,
            Host::IPv4address(_) => false,
        };
        assert!(is_hostname);
    }
    // hostname: toplabel requirement, trailing dot, and label backtracking.
    #[test]
    fn test_hostname() {
        let mut tb = TokenBuffer::from("".as_bytes());
        let hn = hostname(&mut tb).unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!(None, hn);
        // All-numeric labels cannot form a toplabel: full backtrack (7 tokens).
        let mut tb = TokenBuffer::from("1.2.3.4".as_bytes());
        let hn = hostname(&mut tb).unwrap();
        assert_eq!(7, tb.buffer.len());
        assert_eq!(None, hn);
        let mut tb = TokenBuffer::from("foo".as_bytes());
        let hn = hostname(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert!(hn.0.is_empty());
        assert_eq!("foo", hn.1.to_string());
        assert!(hn.2.is_none());
        assert_eq!("foo", hn.to_string());
        let mut tb = TokenBuffer::from("foo.".as_bytes());
        let hn = hostname(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert!(hn.0.is_empty());
        assert_eq!("foo", hn.1.to_string());
        assert!(hn.2.is_some());
        assert_eq!("foo.", hn.to_string());
        // "foo" is first consumed as a domainlabel, then re-read as toplabel.
        let mut tb = TokenBuffer::from("123.foo.".as_bytes());
        let hn = hostname(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!(1, hn.0.len());
        assert_eq!("foo", hn.1.to_string());
        assert!(hn.2.is_some());
        assert_eq!("123.foo.", hn.to_string());
    }
    // domainlabel: may start with a digit, must not end in '-'.
    #[test]
    fn test_domainlabel() {
        let mut tb = TokenBuffer::from("".as_bytes());
        let dl = domainlabel(&mut tb).unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!(None, dl);
        let mut tb = TokenBuffer::from("1".as_bytes());
        let dl = domainlabel(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!("1", dl.to_string());
        // Trailing '-' is illegal: both tokens are pushed back.
        let mut tb = TokenBuffer::from("f-".as_bytes());
        let dl = domainlabel(&mut tb).unwrap();
        assert_eq!(2, tb.buffer.len());
        assert_eq!(None, dl);
        let mut tb = TokenBuffer::from("fo".as_bytes());
        let dl = domainlabel(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!("fo", dl.to_string());
        let mut tb = TokenBuffer::from("f0".as_bytes());
        let dl = domainlabel(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!("f0", dl.to_string());
        let mut tb = TokenBuffer::from("f-0".as_bytes());
        let dl = domainlabel(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!("f-0", dl.to_string());
    }
    // toplabel: like domainlabel but must start with a letter.
    #[test]
    fn test_toplabel() {
        let mut tb = TokenBuffer::from("".as_bytes());
        let tl = toplabel(&mut tb).unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!(None, tl);
        // Digits cannot start a toplabel: the token is pushed back.
        let mut tb = TokenBuffer::from("1".as_bytes());
        let tl = toplabel(&mut tb).unwrap();
        assert_eq!(1, tb.buffer.len());
        assert_eq!(None, tl);
        let mut tb = TokenBuffer::from("f-".as_bytes());
        let tl = toplabel(&mut tb).unwrap();
        assert_eq!(2, tb.buffer.len());
        assert_eq!(None, tl);
        let mut tb = TokenBuffer::from("fo".as_bytes());
        let tl = toplabel(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!("fo", tl.to_string());
        let mut tb = TokenBuffer::from("f0".as_bytes());
        let tl = toplabel(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!("f0", tl.to_string());
        let mut tb = TokenBuffer::from("f-0".as_bytes());
        let tl = toplabel(&mut tb).unwrap().unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!("f-0", tl.to_string());
    }
    // ipv4_address: requires four digit groups; failures restore the buffer.
    #[test]
    fn test_ipv4_address() {
        let mut tb = TokenBuffer::from("".as_bytes());
        let ip = ipv4_address(&mut tb).unwrap();
        assert_eq!(0, tb.buffer.len());
        assert_eq!(None, ip);
        // Only the first unread token remains buffered; the stream holds the rest.
        let mut tb = TokenBuffer::from("foo".as_bytes());
        let ip = ipv4_address(&mut tb).unwrap();
        assert_eq!(None, ip);
        assert_eq!(1, tb.buffer.len());
        assert_eq!("f", tb.pop().unwrap().unwrap().to_string());
        // Fourth group missing: all ten consumed tokens are pushed back in order.
        let mut tb = TokenBuffer::from("12.34.56.foo".as_bytes());
        let ip = ipv4_address(&mut tb).unwrap();
        assert_eq!(None, ip);
        assert_eq!(10, tb.buffer.len());
        assert_eq!("1", tb.pop().unwrap().unwrap().to_string());
        // A trailing ".foo" is left unconsumed after a successful parse.
        let mut tb = TokenBuffer::from("12.34.56.78.foo".as_bytes());
        let ip = ipv4_address(&mut tb).unwrap().unwrap();
        assert_eq!("12.34.56.78", ip.to_string());
        assert_eq!(1, tb.buffer.len());
        assert_eq!(".", tb.pop().unwrap().unwrap().to_string());
        let mut tb = TokenBuffer::from("12.34.56.78".as_bytes());
        let ip = ipv4_address(&mut tb).unwrap().unwrap();
        assert_eq!("12.34.56.78", ip.to_string());
        assert_eq!(0, tb.buffer.len());
        assert_eq!(None, tb.pop().unwrap());
    }
    // path_segments: '/'-separated segments; trailing '/' adds an empty one.
    #[test]
    fn test_path_segments() {
        let mut tb = TokenBuffer::from("".as_bytes());
        let ps = path_segments(&mut tb).unwrap();
        assert_eq!(None, ps);
        let mut tb = TokenBuffer::from("foo".as_bytes());
        let ps = path_segments(&mut tb).unwrap().unwrap();
        assert_eq!("foo", ps.to_string());
        let segments = ps.segments;
        assert_eq!(1, segments.len());
        let mut tb = TokenBuffer::from("foo/".as_bytes());
        let ps = path_segments(&mut tb).unwrap().unwrap();
        assert_eq!("foo/", ps.to_string());
        let segments = ps.segments;
        assert_eq!(2, segments.len());
        let mut tb = TokenBuffer::from("foo/bar".as_bytes());
        let ps = path_segments(&mut tb).unwrap().unwrap();
        assert_eq!("foo/bar", ps.to_string());
        let segments = ps.segments;
        assert_eq!(2, segments.len());
        let mut tb = TokenBuffer::from("foo;bar/bar".as_bytes());
        let ps = path_segments(&mut tb).unwrap().unwrap();
        assert_eq!("foo;bar/bar", ps.to_string());
        let segments = ps.segments;
        assert_eq!(2, segments.len());
    }
    // segment: pchar run plus ';'-separated params (trailing ';' -> empty param).
    #[test]
    fn test_segment() -> Result<()> {
        let mut tb = TokenBuffer::from("".as_bytes());
        let p = segment(&mut tb).unwrap();
        assert_eq!(None, p);
        let mut tb = TokenBuffer::from("foo".as_bytes());
        let p = segment(&mut tb).unwrap().unwrap();
        assert_eq!(
            vec![Char::Ascii(b'f'), Char::Ascii(b'o'), Char::Ascii(b'o')],
            p.pchars
        );
        assert_eq!(None, p.params);
        let mut tb = TokenBuffer::from("foo;bar;buh".as_bytes());
        let p = segment(&mut tb).unwrap().unwrap();
        assert_eq!(
            vec![Char::Ascii(b'f'), Char::Ascii(b'o'), Char::Ascii(b'o')],
            p.pchars
        );
        let params = p.params.unwrap();
        assert_eq!(2, params.len());
        assert_eq!("bar", params[0].to_string());
        assert_eq!("buh", params[1].to_string());
        let mut tb = TokenBuffer::from("foo;".as_bytes());
        let p = segment(&mut tb).unwrap().unwrap();
        assert_eq!(
            vec![Char::Ascii(b'f'), Char::Ascii(b'o'), Char::Ascii(b'o')],
            p.pchars
        );
        let params = p.params.unwrap();
        assert_eq!(1, params.len());
        assert_eq!("", params[0].to_string());
        Ok(())
    }
    // param: stops at the first non-pchar token.
    #[test]
    fn test_param() -> Result<()> {
        let mut tb = TokenBuffer::from("foo?bar".as_bytes());
        let p = param(&mut tb)?.unwrap();
        assert_eq!("foo", p.to_string());
        let c = tb.pop()?.unwrap();
        assert_eq!(Char::Ascii(b'?'), c);
        let p = param(&mut tb)?.unwrap();
        assert_eq!("bar", p.to_string());
        assert_eq!(None, tb.pop()?);
        Ok(())
    }
    // pchars: collects a maximal pchar run and pushes back the terminator.
    #[test]
    fn test_pchars() -> Result<()> {
        let mut tb = TokenBuffer::from("foo?bar".as_bytes());
        let q = pchars(&mut tb)?.unwrap();
        assert_eq!(
            vec![Char::Ascii(b'f'), Char::Ascii(b'o'), Char::Ascii(b'o')],
            q
        );
        let c = tb.pop()?.unwrap();
        assert_eq!(Char::Ascii(b'?'), c);
        let q = pchars(&mut tb)?.unwrap();
        assert_eq!(
            vec![Char::Ascii(b'b'), Char::Ascii(b'a'), Char::Ascii(b'r')],
            q
        );
        assert_eq!(None, tb.pop()?);
        Ok(())
    }
    // query: uric run terminated by a non-uric token ('}' here).
    #[test]
    fn test_query() -> Result<()> {
        let mut tb = TokenBuffer::from("foo}bar".as_bytes());
        let q = query(&mut tb)?;
        assert_eq!(
            vec![Char::Ascii(b'f'), Char::Ascii(b'o'), Char::Ascii(b'o')],
            q.0
        );
        let c = tb.pop()?.unwrap();
        assert_eq!(Char::Ascii(b'}'), c);
        let q = query(&mut tb)?;
        assert_eq!(
            vec![Char::Ascii(b'b'), Char::Ascii(b'a'), Char::Ascii(b'r')],
            q.0
        );
        assert_eq!(None, tb.pop()?);
        Ok(())
    }
    // fragment: same character set and termination behavior as query.
    #[test]
    fn test_fragment() -> Result<()> {
        let mut tb = TokenBuffer::from("foo}bar".as_bytes());
        let f = fragment(&mut tb)?;
        assert_eq!(
            vec![Char::Ascii(b'f'), Char::Ascii(b'o'), Char::Ascii(b'o')],
            f.0
        );
        let c = tb.pop()?.unwrap();
        assert_eq!(Char::Ascii(b'}'), c);
        let f = fragment(&mut tb)?;
        assert_eq!(
            vec![Char::Ascii(b'b'), Char::Ascii(b'a'), Char::Ascii(b'r')],
            f.0
        );
        assert_eq!(None, tb.pop()?);
        Ok(())
    }
}
|
mod addressbook;
pub use self::addressbook::AddressBookFactory;
mod addressbook_tag;
pub use self::addressbook_tag::AddressBookTagFactory;
mod shared_addressbook;
pub use self::shared_addressbook::SharedAddressBookFactory;
mod email;
pub use self::email::EmailFactory;
mod phone;
pub use self::phone::PhoneFactory;
mod address;
pub use self::address::AddressFactory;
pub mod contacts;
pub mod organizations;
|
extern crate anyhow;
extern crate bio;
extern crate csv;
extern crate ndarray;
extern crate ndarray_stats;
extern crate num_traits;
extern crate pretty_env_logger;
#[macro_use]
extern crate log;
extern crate serde;
#[macro_use]
extern crate serde_derive;
use std::env::set_var;
use std::path::PathBuf;
use anyhow::Result;
use structopt::{clap, clap::arg_enum, StructOpt};
mod codon;
mod graph;
mod handlers;
mod io;
mod math;
mod rank;
mod similarity;
// Top-level CLI options for corrnet. Plain `//` comments are used on
// purpose: `///` doc comments would be picked up by structopt as help text
// and change the CLI's output.
#[derive(Debug, StructOpt)]
#[structopt(name = "corrnet")]
#[structopt(long_version(option_env!("LONG_VERSION").unwrap_or(env!("CARGO_PKG_VERSION"))))]
#[structopt(setting(clap::AppSettings::ColoredHelp))]
pub struct Opt {
    // Optional log verbosity; main() falls back to "warn" when omitted.
    #[structopt(long = "log", possible_values(&LogLevel::variants()))]
    pub log_level: Option<LogLevel>,
    // One of the SubCommands variants below.
    #[structopt(subcommand)]
    pub subcommand: SubCommands,
}
// Log verbosity choices for --log; arg_enum! generates the FromStr/variants
// plumbing used by possible_values above. Comments stay outside the macro
// body so its token matching and the generated help are untouched.
arg_enum! {
    #[derive(Debug)]
    pub enum LogLevel {
        DEBUG,
        INFO,
        WARN,
        ERROR,
    }
}
// Rank methods selectable for network construction/merging. Presumably
// HRR = highest reciprocal rank and MR = mutual rank (common co-expression
// network measures) — TODO confirm against the rank module.
arg_enum! {
    #[derive(Debug)]
    pub enum Rank {
        HRR,
        MR,
    }
}
// corrnet subcommands; each arm of main() forwards one variant's fields to
// the matching handlers::*::parse_args. `//` comments only — `///` would
// become CLI help text.
#[derive(Debug, StructOpt)]
pub enum SubCommands {
    // Build a rank-based co-expression network from an expression matrix.
    #[structopt(
        name = "construct",
        about = "construct rank based network from gene expression matrix"
    )]
    #[structopt(setting(clap::AppSettings::ColoredHelp))]
    Construct {
        #[structopt(short = "-i", long = "input")]
        input: PathBuf,
        #[structopt(short = "-m", long = "method", possible_values(&Rank::variants()))]
        method: Option<Rank>,
        #[structopt(short = "-o", long = "output")]
        output: Option<PathBuf>,
        // Apply log2 to expression values before correlation.
        #[structopt(long = "log2")]
        log2: bool,
        // Added before log2 to avoid log(0); note the field name's
        // "pseude" spelling is kept — renaming would break the handlers.
        #[structopt(long = "pseudocount", default_value = "1.")]
        pseude_count: f64,
        #[structopt(long = "rank_cutoff")]
        rank_cutoff: Option<usize>,
        #[structopt(long = "pcc_cutoff")]
        pcc_cutoff: Option<f64>,
    },
    // Extract a sub-network, optionally restricted to a gene list.
    #[structopt(name = "extract", about = "extract")]
    #[structopt(setting(clap::AppSettings::ColoredHelp))]
    Extract {
        #[structopt(short = "-i", long = "input")]
        input: PathBuf,
        #[structopt(short = "-g", long = "gene_list")]
        gene_list: Option<PathBuf>,
        #[structopt(short = "-o", long = "output")]
        output: Option<PathBuf>,
        // Note: rank_cutoff is f64 here but usize in Construct.
        #[structopt(long = "rank_cutoff")]
        rank_cutoff: Option<f64>,
        #[structopt(long = "pcc_cutoff")]
        pcc_cutoff: Option<f64>,
    },
    // Codon-usage scoring over the top fraction of the network.
    #[structopt(name = "codon-usage", about = "calculate codon score")]
    #[structopt(setting(clap::AppSettings::ColoredHelp))]
    CodonUsage {
        #[structopt(short = "-i", long = "input_graph")]
        input_graph: PathBuf,
        #[structopt(short = "-f", long = "input_fasta")]
        input_fasta: PathBuf,
        #[structopt(short = "-p", long = "percent", default_value = "0.1")]
        percent: f64,
    },
    // Neighbourhood query around a single gene.
    #[structopt(name = "query", about = "query")]
    #[structopt(setting(clap::AppSettings::ColoredHelp))]
    Query {
        #[structopt(short = "q", long = "query_gene_id")]
        gene_id: String,
        #[structopt(short = "-i", long = "input_graph")]
        input_graph: PathBuf,
        // Traversal depth; main() defaults this to 1.
        #[structopt(short = "d", long = "depth")]
        depth: Option<usize>,
        #[structopt(long = "rank_cutoff")]
        rank_cutoff: Option<f64>,
        #[structopt(long = "pcc_cutoff")]
        pcc_cutoff: Option<f64>,
    },
    // Merge an HRR network and an MR network into one graph.
    #[structopt(name = "merge", about = "merge network")]
    #[structopt(setting(clap::AppSettings::ColoredHelp))]
    Merge {
        #[structopt(long = "hrr")]
        hrr_path: PathBuf,
        #[structopt(long = "mr")]
        mr_path: PathBuf,
        #[structopt(long = "outpath", short = "-o", default_value = "merge_graph.csv.gz")]
        out_path: PathBuf,
        #[structopt(long = "priority", possible_values(&Rank::variants()))]
        priority: Rank,
        #[structopt(long = "max-rank", default_value = "2000")]
        max_rank: f64,
    },
}
/// Entry point: configure logging from the CLI flag, then dispatch the
/// chosen subcommand to its handler. Handler errors propagate via `?`.
fn main() -> Result<()> {
    let opt = Opt::from_args();
    // Map the CLI log level onto RUST_LOG before the logger is initialised;
    // absent flag means "warn".
    match &opt.log_level {
        Some(log_level) => match log_level {
            LogLevel::DEBUG => set_var("RUST_LOG", "debug"),
            LogLevel::INFO => set_var("RUST_LOG", "info"),
            LogLevel::WARN => set_var("RUST_LOG", "warn"),
            LogLevel::ERROR => set_var("RUST_LOG", "error"),
        },
        None => set_var("RUST_LOG", "warn"),
    };
    pretty_env_logger::init_timed();
    match &opt.subcommand {
        SubCommands::Construct {
            input,
            output,
            method,
            log2,
            pseude_count,
            rank_cutoff,
            pcc_cutoff,
        } => {
            handlers::construct::parse_args(
                input,
                output.as_ref(),
                method.as_ref(),
                log2,
                pseude_count,
                rank_cutoff.as_ref(),
                pcc_cutoff.as_ref(),
            )?;
        }
        SubCommands::Extract {
            input,
            gene_list,
            output,
            rank_cutoff,
            pcc_cutoff,
        } => {
            handlers::extract::parse_args(
                input,
                gene_list.as_ref(),
                output.as_ref(),
                rank_cutoff.as_ref(),
                pcc_cutoff.as_ref(),
            )?;
        }
        SubCommands::CodonUsage {
            input_graph,
            input_fasta,
            percent,
        } => {
            handlers::codon_usage::parse_args(input_graph, input_fasta, percent)?;
        }
        SubCommands::Query {
            gene_id,
            input_graph,
            depth,
            rank_cutoff,
            pcc_cutoff,
        } => {
            // NOTE(review): pcc_cutoff is passed before rank_cutoff here,
            // the opposite order from the other handlers — confirm this
            // matches handlers::query::parse_args's parameter order.
            handlers::query::parse_args(
                gene_id,
                input_graph,
                depth.unwrap_or(1),
                pcc_cutoff.as_ref(),
                rank_cutoff.as_ref(),
            )?;
        }
        SubCommands::Merge {
            hrr_path,
            mr_path,
            out_path,
            priority,
            max_rank,
        } => {
            handlers::merge::parse_args(hrr_path, mr_path, out_path, priority, max_rank)?;
        }
    }
    Ok(())
}
|
use std::path::PathBuf;
use std::sync::mpsc::{sync_channel, SyncSender};
use std::thread::spawn;
use structopt::StructOpt;
use structopt::clap::AppSettings;
mod curses;
pub mod color;
pub mod gui;
pub mod plain;
use crate::dictionary::Entry;
// Screen backend selection. The `///` comments double as CLI help text for
// each subcommand, so they are left exactly as-is.
#[derive(StructOpt, Debug)]
#[structopt(setting = AppSettings::InferSubcommands)]
pub enum Opt {
    /// Color
    Color,
    /// Curses
    Curses(curses::Opt),
    /// GUI
    Gui(gui::Opt),
    /// Plain
    Plain,
}
/// Cloneable handle to the screen thread: entries (or `None`) pushed through
/// `tx` are rendered by whichever backend was spawned in `Screen::new`.
#[derive(Clone)]
pub struct Screen {
    // Rendezvous channel (capacity 0): send blocks until the screen thread
    // receives.
    tx: SyncSender<Option<Vec<Entry>>>,
}
impl Screen {
pub fn new(opt: Opt, dictionary_path: PathBuf, bind_to: String) -> Self {
use self::Opt::*;
let (tx, rx) = sync_channel(0);
let screen = Screen { tx: tx.clone() };
spawn(move || match opt {
Curses(opt) =>
curses::main(&rx, opt, &bind_to),
Color =>
color::main(rx).unwrap(),
Gui(opt) =>
gui::main(tx, rx, opt, dictionary_path),
Plain =>
plain::main(rx).unwrap(),
});
screen
}
pub fn print_opt(&self, content: Option<Vec<Entry>>) {
self.tx.send(content).unwrap();
}
}
|
//! [Confusable detection](https://www.unicode.org/reports/tr39/#Confusable_Detection)
use core::iter;
/// An iterator yielding either exactly one item (`Once`) or the items of an
/// inner iterator (`More`).
enum OnceOrMore<T, I> {
    Once(iter::Once<T>),
    More(I),
}
impl<T, I> Iterator for OnceOrMore<T, I>
where
I: Iterator<Item = T>,
{
type Item = T;
fn next(&mut self) -> Option<T> {
use OnceOrMore::*;
match self {
Once(v) => v.next(),
More(i) => i.next(),
}
}
}
type StaticSliceIterCloned = core::iter::Cloned<core::slice::Iter<'static, char>>;

/// Map a character to its confusable-prototype sequence from the generated
/// UTS #39 table, or to itself when no prototype entry exists.
fn char_prototype(c: char) -> OnceOrMore<char, StaticSliceIterCloned> {
    use crate::tables::confusable_detection::char_confusable_prototype;
    match char_confusable_prototype(c) {
        Some(prototype) => OnceOrMore::More(prototype.iter().cloned()),
        None => OnceOrMore::Once(iter::once(c)),
    }
}
/// Calculate skeleton for string, as defined by UTS 39
///
/// Computed as NFD → per-character prototype mapping → NFD, matching the
/// skeleton(X) definition in the report.
pub fn skeleton(s: &str) -> impl Iterator<Item = char> + '_ {
    use unicode_normalization::UnicodeNormalization;
    s.chars().nfd().flat_map(char_prototype).nfd()
}
|
use regex::Regex;
use std::fs::File;
use std::io::{BufRead, BufReader};
use lib::Token;
// Int match part
// <dec int> ::= <number>{<number>}
// <number> ::= 0|1|2|3|4|5|6|7|8|9
// Regex for constant integers
/// Consume a leading decimal integer from `text` and return a CONSTANT
/// token for it. Panics if `text` does not start with a digit (the caller,
/// `lex`, guarantees it does).
fn int_match(text: &mut String) -> Token::Token {
    let int_re = Regex::new(r"(^[0-9][0-9]*)\b").unwrap();
    let int_cap = int_re.captures(text.as_str());
    let mut token = Token::new();
    match int_cap {
        Some(int_cap) => {
            let int = int_cap.get(1).unwrap();
            let int_end = int.end();
            token._type = "CONSTANT".to_string();
            // Previously this went through an intermediate String plus an
            // extra clone; one allocation is enough.
            token._value = int.as_str().to_string();
            // Drop the consumed characters from the input.
            text.replace_range(..int_end, "");
        }
        None => panic!("Lexer: int None"),
    };
    token
}
// Word match part
// <word> :: = <alpha_underscore>{<alpha>|"_"|<number>}
// <alpha_underscore> ::= <alpha>|"_"
// <alpha> ::= a-zA-Z{a-zA-Z}
// Regex for keyword, variable name + symbols
/// Consume a leading identifier/keyword from `text` and return its token
/// (type = keyword name or IDENTIFIER). Panics if `text` does not start
/// with a word character.
fn word_match(text: &mut String) -> Token::Token {
    // Anchored with `^` (the int and symbol regexes already were): without
    // it, a word occurring later in the line would match and everything
    // before it would be silently deleted by replace_range. lex() only
    // calls this when the first char is alphabetic, so behaviour for valid
    // input is unchanged.
    let word_re = Regex::new(r"^([a-zA-Z_][a-zA-Z_0-9]*)\b").unwrap();
    let word_cap = word_re.captures(text.as_str());
    let mut token = Token::new();
    match word_cap {
        Some(word_cap) => {
            let word = word_cap.get(1).unwrap();
            let word_end = word.end();
            token._value = word.as_str().to_string();
            // Classify as keyword or identifier (no intermediate clone).
            token._type = keyword_match(word.as_str()).to_string();
            // Drop the consumed characters from the input.
            text.replace_range(..word_end, "");
        }
        None => panic!("Lexer: word None"),
    };
    token
}
// Symbol match part
// <symbol> ::= "("|")"|"{"|"}"|";"
/// Consume a single leading symbol from `text` and return its token.
/// Panics if the leading character is not one of the recognised symbols.
fn symbol_match(text: &mut String) -> Token::Token {
    let syb_re = Regex::new(r"(^[\(\)\{\};])").unwrap();
    let syb_cap = syb_re.captures(text.as_str());
    let mut token = Token::new();
    match syb_cap {
        Some(syb_cap) => {
            let syb = syb_cap.get(1).unwrap();
            let syb_end = syb.end();
            token._value = syb.as_str().to_string();
            // Map the symbol to its token type (no intermediate clone).
            token._type = keysyb_match(syb.as_str()).to_string();
            // Drop the consumed character from the input.
            text.replace_range(..syb_end, "");
        }
        None => panic!("Lexer: Syb None"),
    };
    token
}
/// Classify an identifier: reserved words get a dedicated token type,
/// everything else is an IDENTIFIER.
fn keyword_match(word: &str) -> &str {
    if word == "int" {
        "INT_KEYWORD"
    } else if word == "return" {
        "RETURN_KEYWORD"
    } else {
        "IDENTIFIER"
    }
}
/// Map a punctuation symbol to its token type; panics for anything outside
/// the five recognised symbols.
fn keysyb_match(word: &str) -> &str {
    let table = [
        ("(", "OPEN_PAREN"),
        (")", "CLOSE_PAREN"),
        ("{", "OPEN_BRACE"),
        ("}", "CLOSE_BRACE"),
        (";", "SEMICOLON"),
    ];
    for (symbol, token_type) in table.iter() {
        if *symbol == word {
            return token_type;
        }
    }
    panic!("Lexer: Unrecognize symbol")
}
/// Tokenise the file at `path` line by line, dispatching on the first
/// character of the remaining text: digit → integer, letter → word,
/// punctuation → symbol. Panics on unreadable files or invalid characters.
///
/// NOTE(review): an identifier starting with '_' is routed to symbol_match
/// (since '_' is ASCII punctuation) and panics there, even though the word
/// grammar allows a leading underscore — same as the original dispatch;
/// confirm whether leading underscores should be supported.
pub fn lex(path: &str) -> Vec<Token::Token> {
    let file = File::open(path).expect("\nLexer: Failed to open the file\n");
    let reader = BufReader::new(file);
    let mut token_vec = Vec::new();
    for line in reader.lines() {
        let mut text = line.unwrap().trim().to_string();
        // `is_empty` / `chars().next()` instead of `len() != 0` /
        // `nth(0)`; the pre-declared `first`/`token` locals are gone.
        while !text.is_empty() {
            let first = text.chars().next().unwrap();
            let token = if first.is_ascii_digit() {
                int_match(&mut text)
            } else if first.is_ascii_alphabetic() {
                word_match(&mut text)
            } else if first.is_ascii_punctuation() {
                symbol_match(&mut text)
            } else {
                panic!("Lexer: Invalid char: {}", first)
            };
            token_vec.push(token);
            // Skip whitespace before the next token.
            text = text.trim_start().to_string();
        }
    }
    token_vec
}
|
/*
CIS198 Homework 1
Part 1: Implementing functions
Complete and write at least one unit test for each function you implement.
If it already has a unit test, either add assertions to it or add a new one.
Also answer the questions in text.
*/
// Remove these once you are done editing the file!
// This will result in useful warnings if you missed something.
// #![allow(dead_code)]
// #![allow(unused_variables)]
/*
Problem 1: Double
Implement the function that doubles an integer in three different ways.
What are some differences between them? Can you write unit tests
which fail (or fail to compile) for some but not others?
Which of the three do you prefer?
*/
/// Return twice `n`, taking the argument by value.
pub fn double_v1(n: i32) -> i32 {
    2 * n
}
/// Return twice the referenced value without consuming it.
pub fn double_v2(n: &i32) -> i32 {
    2 * *n
}
/// Double the referenced value in place.
pub fn double_v3(n: &mut i32) {
    let doubled = *n * 2;
    *n = doubled;
}
// Example unit test (so you can recall the syntax)
#[test]
fn test_double_v1() {
    // Positive and negative inputs, pass-by-value.
    assert_eq!(double_v1(2), 4);
    assert_eq!(double_v1(-3), -6);
}
#[test]
fn test_double_v2() {
    // Borrowing leaves the originals usable; the repeated final assert
    // shows a1 can still be borrowed after earlier calls.
    let a1 = 2;
    let a2 = 4;
    assert_eq!(double_v2(&a1), a2);
    let b1 = -3;
    let b2 = -6;
    assert_eq!(double_v2(&b1), b2);
    assert_eq!(double_v2(&a1), a2);
}
#[test]
fn test_double_v3() {
    // In-place doubling; the second call on a1 compounds (2 -> 4 -> 8).
    let mut a1 = 2;
    let a2 = 4;
    double_v3(&mut a1);
    assert_eq!(a1, a2);
    let mut b1 = -3;
    let b2 = -6;
    double_v3(&mut b1);
    assert_eq!(b1, b2);
    double_v3(&mut a1);
    assert_eq!(a1, 8);
}
/*
Problem 2: Integer square root
Implement the integer square root function: sqrt(n) should return the
largest m such that m * m <= n. For a 'harder' version, try to do it more
efficiently than trying every possibility.
*/
/// Integer square root: the largest `m` with `m * m <= n`.
///
/// Fixes two defects in the previous version: `sqrt(0)` returned 1 (the
/// accumulator started at 1 and the loop never ran), and the descending
/// linear scan was O(n). This binary search is O(log n) and uses
/// `checked_mul` so huge `n` cannot overflow `mid * mid`.
pub fn sqrt(n: usize) -> usize {
    // The answer always lies in [0, n / 2 + 1].
    let mut lo = 0usize;
    let mut hi = n / 2 + 1;
    // Invariant: lo * lo <= n and the answer is in [lo, hi].
    while lo < hi {
        // Upper-mid so the search terminates when hi == lo + 1.
        let mid = lo + (hi - lo + 1) / 2;
        match mid.checked_mul(mid) {
            Some(sq) if sq <= n => lo = mid,
            _ => hi = mid - 1,
        }
    }
    lo
}
// Remember to write unit tests here (and on all future functions)
#[test]
fn test_sqrt() {
    // Non-perfect squares round down.
    assert_eq!(sqrt(4), 2);
    assert_eq!(sqrt(8), 2);
    assert_eq!(sqrt(10), 3);
}
/*
Problem 3: Slice sum
Implement the sum function on slices in two different ways
(using different for loop patterns).
Do not use the predefined sum function.
Also, try to do it without an unnecessary `return` statement at the end --
Clippy should detect if you mess this up.
Which of the two ways do you prefer?
*/
/// Sum a slice with an index-based loop (the exercise forbids the built-in
/// `sum`).
pub fn sum_v1(slice: &[i32]) -> i32 {
    let mut total = 0;
    for idx in 0..slice.len() {
        total += slice[idx];
    }
    total
}
/// Sum a slice by iterating over element references (the exercise forbids
/// the built-in `sum`).
pub fn sum_v2(slice: &[i32]) -> i32 {
    let mut total = 0;
    for value in slice.iter() {
        total += *value;
    }
    total
}
#[test]
fn test_sum_v1() {
    // Full slice, prefix, and interior sub-slice.
    let a = [42, 8, 3, 2, 5];
    assert_eq!(sum_v1(&a[..5]), 60);
    assert_eq!(sum_v1(&a[..4]), 55);
    assert_eq!(sum_v1(&a[1..4]), 13);
}
#[test]
fn test_sum_v2() {
    // Same cases as test_sum_v1, so both implementations must agree.
    let a = [42, 8, 3, 2, 5];
    assert_eq!(sum_v2(&a[..5]), 60);
    assert_eq!(sum_v2(&a[..4]), 55);
    assert_eq!(sum_v2(&a[1..4]), 13);
}
/*
Problem 4: Unique
Make unique. Create a new vector which contains each item in the vector
only once! Much like a set would.
This doesn't need to be efficient; you can use a for loop.
*/
/// Return the distinct values of `slice` in ascending order.
///
/// Replaces the hand-rolled previous/current dedup loop with the standard
/// sort + `Vec::dedup`, which also removes the empty-slice special case.
pub fn unique(slice: &[i32]) -> Vec<i32> {
    let mut unique_vec = slice.to_vec();
    // dedup only removes *consecutive* duplicates, so sort first.
    unique_vec.sort_unstable();
    unique_vec.dedup();
    unique_vec
}
#[test]
fn test_unique() {
    // Output is sorted ascending, duplicates removed.
    let a = [42, 8, 3, 2, 5, 8, 42, 5];
    assert_eq!(unique(&a), [2, 3, 5, 8, 42]);
}
/*
Problem 5: Filter
Return a new vector containing only elements that satisfy `pred`.
This uses some unfamiliar syntax for the type of pred -- all you need
to know is that pred is a function from i32 to bool.
*/
/// Collect the elements of `slice` for which `pred` returns true,
/// preserving their order.
pub fn filter(slice: &[i32], pred: impl Fn(i32) -> bool) -> Vec<i32> {
    slice.iter().copied().filter(|&value| pred(value)).collect()
}
#[test]
fn test_filter() {
    // A named fn passed by reference also satisfies `impl Fn(i32) -> bool`.
    fn is_even(n: i32) -> bool {
        n % 2 == 0
    }
    assert_eq!(filter(&vec![1, 2, 3, 4, 5, 6], &is_even), vec![2, 4, 6]);
}
/*
Problem 6: Fibonacci
Given starting fibonacci numbers n1 and n2, compute a vector of
length 'out_size'
where v[i] is the ith fibonacci number.
*/
/// Build the first `out_size` Fibonacci numbers seeded with `n1`, `n2`
/// (v[0] = n1, v[1] = n2, v[i] = v[i-1] + v[i-2]).
pub fn fibonacci(n1: i32, n2: i32, out_size: usize) -> Vec<i32> {
    let mut seq = Vec::with_capacity(out_size);
    let (mut a, mut b) = (n1, n2);
    for _ in 0..out_size {
        seq.push(a);
        let next = a + b;
        a = b;
        b = next;
    }
    seq
}
#[test]
fn test_fibonacci() {
    // Standard sequence plus the zero-length edge case.
    let res = [1, 1, 2, 3, 5, 8, 13, 21, 34];
    assert_eq!(fibonacci(1, 1, 9), res);
    assert_eq!(fibonacci(1, 1, 0), &res[..0]);
}
/*
Problem 7: String concatenation
Create a function which concats 2 &strs and returns a String,
and a function which concats 2 Strings and returns a String.
You may use any standard library function you wish.
What are some reasons the second function is not efficient?
*/
/// Concatenate two string slices into a newly allocated String.
pub fn str_concat(s1: &str, s2: &str) -> String {
    [s1, s2].concat()
}
/// Concatenate two owned Strings; both are consumed, which is why this
/// variant is less efficient for callers who still need the inputs.
pub fn string_concat(s1: String, s2: String) -> String {
    s1 + &s2
}
#[test]
fn test_str_concat() {
    // Borrowed inputs stay usable after the call.
    let a1: &str = "hello, ";
    let a2: &str = "world!";
    assert_eq!(str_concat(a1, a2), "hello, world!");
}
#[test]
fn test_string_concat() {
    // Both owned Strings are moved into the call.
    let a1 = String::from("hello, ");
    let a2 = String::from("world!");
    assert_eq!(string_concat(a1, a2), "hello, world!");
}
/*
Problem 8: String concatenation continued
Convert a Vec<String> into a String.
Your answer to the previous part may help.
*/
/// Concatenate every string in `v` into one String.
///
/// The previous version folded through `string_concat`, copying the whole
/// accumulator on every iteration (accidentally quadratic); `[String]::concat`
/// sizes the output once and appends in O(total length).
pub fn concat_all(v: Vec<String>) -> String {
    v.concat()
}
#[test]
fn test_concat_all() {
    // Three parts joined in order with no separator.
    let a1 = String::from("hello, ");
    let a2 = String::from("world! ");
    let a3 = String::from("How is life?");
    let vec = vec![a1, a2, a3];
    assert_eq!(concat_all(vec), "hello, world! How is life?");
}
/*
Problem 9: Parsing
Convert a Vec<String> into a Vec<i32> and vice versa.
Assume all strings are correct numbers! We will do error handling later.
Use `.expect("ignoring error")` to ignore Result from parse()
See https://doc.rust-lang.org/std/primitive.str.html#method.parse
The unit tests check if your functions are inverses of each other.
A useful macro: format! is like println! but returns a String.
*/
/// Parse every decimal string in `v` into an i32; panics on malformed
/// input (the exercise assumes all strings are valid numbers).
pub fn parse_all(v: Vec<String>) -> Vec<i32> {
    v.into_iter()
        .map(|s| s.parse().expect("no errors here!"))
        .collect()
}
/// Render each integer as its decimal string.
pub fn print_all(v: Vec<i32>) -> Vec<String> {
    v.into_iter().map(|number| number.to_string()).collect()
}
#[test]
fn test_print_parse() {
    // parse_all is a left inverse of print_all on valid ints.
    assert_eq!(parse_all(print_all(vec![1, 2])), vec![1, 2]);
}
#[test]
fn test_parse_print() {
    // print_all is a left inverse of parse_all on canonical strings.
    let v = vec!["1".to_string(), "2".to_string()];
    assert_eq!(print_all(parse_all(v.clone())), v);
}
/*
Problem 10: Composing functions
Implement a function which concatenates the even Fibonacci
numbers out of the first n Fibonacci numbers.
For example: if n = 6, the first 6 Fibonacci numbers are 1, 1, 2, 3, 5, 8,
so the function should return the String "28".
Don't use a for loop! Your previous functions should be sufficient.
*/
/// Concatenate the even values among the first `n` Fibonacci numbers
/// (seeded 1, 1) into a single string — composed entirely from the
/// earlier exercise functions, no explicit loop.
pub fn concat_even_fibonaccis(n: usize) -> String {
    let evens = filter(&fibonacci(1, 1, n), |x| x % 2 == 0);
    concat_all(print_all(evens))
}
#[test]
fn test_concat_even_fibonaccis() {
    // n = 6 -> 1,1,2,3,5,8 -> evens 2,8 -> "28"; n = 9 adds 13,21,34.
    assert_eq!(&concat_even_fibonaccis(6), "28");
    assert_eq!(&concat_even_fibonaccis(9), "2834");
}
|
use cocoa::base::id;
/// Rust-side view of Metal's MTLRenderPassDepthAttachmentDescriptor,
/// implemented below for raw Objective-C `id` handles.
pub trait MTLRenderPassDepthAttachmentDescriptor {
    /// The depth to use when the depth attachment is cleared.
    ///
    /// # Discussion
    ///
    /// The default value is 1.0.
    ///
    /// If the `loadAction` property of the attachment is set to
    /// `MTLLoadActionClear`, then at the start of a rendering pass,
    /// the contents of the texture are filled with the value stored
    /// in the `clearDepth` property. Otherwise, `clearDepth` is ignored.
    unsafe fn clearDepth(self) -> f64;
    /// Setter for `clearDepth`.
    unsafe fn setClearDepth(self, clearDepth: f64);
    /// The filter used for an MSAA depth resolve operation.
    ///
    /// # Discussion
    ///
    /// The default value is `MTLMultisampleDepthResolveFilterSample0`.
    unsafe fn depthResolveFilter(self) -> MTLMultisampleDepthResolveFilter;
    /// Setter for `depthResolveFilter`.
    unsafe fn setDepthResolveFilter(self, resolveFilter: MTLMultisampleDepthResolveFilter);
    /// Returns a copy of the descriptor object.
    unsafe fn copy(self) -> Self;
}
// Objective-C bridge: each method forwards to the selector of the same
// name on the underlying `id` via msg_send!. Callers must ensure `self`
// is a valid MTLRenderPassDepthAttachmentDescriptor instance.
impl MTLRenderPassDepthAttachmentDescriptor for id {
    unsafe fn clearDepth(self) -> f64 {
        msg_send![self, clearDepth]
    }
    unsafe fn setClearDepth(self, clearDepth: f64) {
        msg_send![self, setClearDepth:clearDepth]
    }
    unsafe fn depthResolveFilter(self) -> MTLMultisampleDepthResolveFilter {
        msg_send![self, depthResolveFilter]
    }
    unsafe fn setDepthResolveFilter(self, resolveFilter: MTLMultisampleDepthResolveFilter) {
        msg_send![self, setDepthResolveFilter:resolveFilter]
    }
    unsafe fn copy(self) -> Self {
        msg_send![self, copy]
    }
}
/// Options for resolving multisampled depth values; `repr(usize)` keeps the
/// discriminants ABI-compatible with the Metal enum.
#[repr(usize)]
#[derive(Clone, Copy, Eq, Hash, PartialEq)]
pub enum MTLMultisampleDepthResolveFilter {
    // Resolve using sample 0 (the default per the trait docs above).
    MTLMultisampleDepthResolveFilterSample0 = 0,
    MTLMultisampleDepthResolveFilterMin = 1,
    MTLMultisampleDepthResolveFilterMax = 2
}
|
//! This is the documentation for the `chase` scheme.
//!
//! * Developed by Melissa Chase, "Structured Encryption and Controlled Disclosure", see Section 3
//! * Published in Proceedings of the 2017 ACM SIGSAC Conference on Computer and Communications Security
//! * Available from https://eprint.iacr.org/2017/807.pdf
//! * Type: encryption (structured)
//! * Setting: PRP, PRF
//! * Authors: Georg Bramm
//! * Date: 12/2019
//!
//! # Examples
//!
extern crate serde;
extern crate serde_json;
use rand::*;
use primitive_types::*;
//use serde::ser::{Serialize, Serializer, SerializeSeq, SerializeMap};
use serde::{Serialize, Deserialize};
use mongodb::oid::ObjectId;
use crypto::*;
use crypto::hmac::Hmac;
use crypto::mac::Mac;
use crypto::digest::Digest;
use crypto::sha3::Sha3;
use crate::utils::prp::*;
use crate::utils::prf::*;
use rulinalg::matrix::Matrix;
use rulinalg::matrix::BaseMatrix;
use rulinalg::matrix::BaseMatrixMut;
use rand::*;
use std::fs::File;
use rust_hope::schemes::she::hope::hope;
pub mod objects;
use objects::*;
use objects::comm::*;
use objects::ct::*;
/// State of the structured-encryption scheme: optional key, optional
/// ciphertext, and an order-preserving-encryption helper. (The previous
/// doc comment, "An AC17 Public Key (PK)", was copy-pasted from another
/// module and did not describe this type.)
#[derive(Serialize, Deserialize, Clone)]
pub struct SE {
    // Three-part PRF/PRP key; None until keygen/set_key.
    pub _key: Option<SEKey>,
    // Order-preserving encryption helper (rust_hope).
    pub _hope: hope,
    // Last ciphertext produced, if any.
    pub _ct: Option<SECiphertext>,
}
impl SE {
pub fn new(_name: String) -> SE {
SE {
_key: None,
_hope: hope::new(_name, 4),
_ct: None,
}
}
pub fn keygen() -> Option<SEKey> {
let mut rng = rand::thread_rng();
return Some(SEKey {
_k1: rng.gen::<U256>(),
_k2: rng.gen::<U256>(),
_k3: rng.gen::<U256>(),
});
}
pub fn get_key(&self) -> Option<SEKey> {
match &self._key {
None => None,
Some(_key) => {
return Some(_key.clone());
}
}
}
pub fn get_sp(&self) -> hope {
self._hope.clone()
}
pub fn set_key(&mut self, _key: SEKey) {
self._key = Some(_key.clone());
}
pub fn get_ct(&self) -> Option<SECiphertext> {
match &self._ct {
None => None,
Some(_ct) => {
return Some(_ct.clone());
}
}
}
pub fn encrypt(&self, _input: &osm::OSM) -> Option<SECiphertext> {
let mut rng = rand::thread_rng();
let _rows = _input.ways.len();
let _len_ways = _rows * _rows;
let _len_nodes = _input.nodes.len();
let mut _ct_gamma: Matrix<U256> =
Matrix::new(_rows, _rows, Vec::<U256>::with_capacity(_len_ways));
let mut _ct_vertex: Vec<SECTVertex> = Vec::with_capacity(_len_nodes);
let mut _ct_edge: Vec<SECTEdge> = Vec::with_capacity(_len_ways);
match &self._key {
None => return None,
Some(key) => {
let prf_k1 = PRF::new(key._k1);
let prp_k2 = PRP::new(key._k2);
let _k4: U256 = rng.gen::<U256>();
let prp_k4 = PRP::new(_k4);
for (_i, _way) in _input.ways.iter().enumerate() {}
// permutated order
//let orig: Vec<usize> = (0.._len_all).collect();
//let _permutated_new_objs = prp_k4.g(&orig);
//let _permutated_new_gamma = prp_k2.p::<U256>(_new_gamma);
/*
for (_i, _row) in _input._gamma.row_iter().enumerate() {
for (_j, _item) in _row.col_iter().enumerate() {
let _i_objs = _permutated_new_objs[(_i * _j) + _j];
match _item {
SEObject::EnumEdge(e) => {
let (_id, _label) = _item;
_ct_gamma_content[_i_permutated] = SE::encode(_id.clone(), &_label);
let _edge: SECTEdge = SECTEdge::new( , );
_new_objs[_i_permutated] = SECTObject::EnumEdge(_edge);
},
SEObject::EnumVertex(v) => {
let _vertex: SECTVertex = SECTVertex::new();
_new_objs[_i_permutated] = SECTObject::EnumVertex(_vertex);
},
_ => {}
}
}
for (_j, _item) in _row.col_iter().enumerate() {
let _permutated_index = _permutated_new_objs[(_i * _cols) + _j];
let _permutated_matrix = _permutated_new_objs[(_i * _cols) + _j];
return Some(SECiphertext {
_gamma: _new_gamma,
_objs: _new_objs
});
//let _content: U256 = U256::from(encode(_i, _v) ^ prf.f(_i, _j))
}*/
//}
return Some(SECiphertext {
_gamma: _ct_gamma,
_edge: _ct_edge,
_vertex: _ct_vertex,
});
}
}
None
}
pub fn token_edge(&self, _alpha: usize, _beta: usize, _len: usize) -> Option<SETokenEdge> {
match &self._key {
None => return None,
Some(key) => {
let prf = PRF::new(key._k1);
let prp = PRP::new(key._k2);
let s = prf.f(_alpha, _beta).unwrap();
let (_alpha_prime, _beta_prime) = prp.p_xy(_alpha, _beta, _len);
return Some(SETokenEdge::new(s, _alpha_prime, _beta_prime));
}
}
return None;
}
pub fn token(&self, _search: String) -> Option<SEToken> {
match &self._key {
None => return None,
Some(key) => {
let prf = PRF::new(key._k1 ^ key._k2);
match prf.s(_search) {
None => None,
Some(_prf) => Some(SEToken::new(_prf)),
};
}
}
return None;
}
/*
pub fn lookup(&self, _ciphertext: &SECiphertext, _token: Token) -> Option<SECTObject> {
unsafe {
match _token {
Token::Edge(e) => {
let y: U256 = _ciphertext._gamma.get_unchecked([e._alpha, e._beta]).clone();
let (_id, _v) = &SE::decode(e._s ^ y);
for _obj in _ciphertext._edge.iter() {
if _obj._id.eq(e._id) {
return Some(SECTObject::Edge(_obj.clone()));
}
}
}
Token::All(a) => {
for (_i, _obj) in _ciphertext._vertex.iter().enumerate() {
let (_id, _v) = &SE::decode(a._s ^ U256::from(_i));
if _obj._id.eq(a._id) {
return Some(SECTObject::Vertex(_v.clone()));
}
}
for (_i, _row) in _ciphertext._gamma.row_iter().enumerate() {
for (_j, _col) in _row.col_iter().enumerate() {
let y: U256 = _ciphertext._gamma.get_unchecked([_i, _j]).clone();
let (_id, _v) = &SE::decode(a._s ^ y);
if _obj._id.eq(a._id) {
return Some(SECTObject::Vertex(_obj.clone()));
}
}
}
}
}
}
return None;
}
pub fn decrypt<R>(&self, _obj: SEObject) -> Option<R> {
if _obj
}
*/
pub fn encode(_i: ObjectId, _v: H160) -> U256 {
U256::from(SE::concat_u8(&_i.bytes(), _v.as_bytes()).as_slice())
}
pub fn decode(_node: U256) -> (ObjectId, H160) {
let mut _id_b: [u8; 12] = [0u8; 12];
let mut _v_b: [u8; 20] = [0u8; 20];
for i in 0..12 {
_id_b[i] = _node.byte(i);
}
for i in 0..20 {
_v_b[i] = _node.byte(i + 12);
}
(ObjectId::with_bytes(_id_b), H160::from(_v_b))
}
pub fn concat_u8(one: &[u8], two: &[u8]) -> Vec<u8> {
[one, two].concat()
}
}
#[cfg(test)]
mod tests {
    use mongodb::oid::ObjectId;
    use std::mem;
    use std::mem::*;

    // Smoke test; also prints the in-memory size of an ObjectId for
    // reference (relevant to the 12-byte packing in SE::encode/decode).
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
        println!(
            "size of ObjectId : {:?}",
            mem::size_of_val(&ObjectId::new().unwrap())
        );
    }
}
|
use crate::read_lines::read_day;
/// Return the index of the first element (from position `len` onward) that
/// is not the sum of two distinct values among the `len` elements before it
/// (AoC 2020 day 9, part 1). Panics when every element passes.
fn check_xmas(numbers : &Vec<u64>, len: usize) -> usize {
    println!("{} {}", len, numbers.len());
    (len..numbers.len())
        .find(|&i| !check_xmas_number(&numbers[i - len..i], numbers[i]))
        .unwrap_or_else(|| panic!("did not find any numbers"))
}

/// True when some pair of distinct positions in `slice` sums to `number`.
fn check_xmas_number(slice: &[u64], number: u64) -> bool {
    slice
        .iter()
        .enumerate()
        .any(|(i, &a)| slice[i + 1..].iter().any(|&b| a + b == number))
}
/// Find a contiguous run of at least two numbers (before `fault_index`)
/// summing to `numbers[fault_index]` and return it (AoC 2020 day 9 part 2).
/// Panics if no such run exists.
///
/// Keeps a running window sum instead of re-summing the slice for every
/// (start, end) pair, dropping the cost from O(n^3) to O(n^2).
fn find_xmas_brake(numbers : &Vec<u64>, fault_index: usize) -> Vec<u64> {
    let target = numbers[fault_index];
    for start in 0..fault_index {
        let mut sum = numbers[start];
        for end in (start + 1)..fault_index {
            sum += numbers[end];
            if sum == target {
                return numbers[start..=end].to_vec();
            }
        }
    }
    panic!("Failed to find a sum")
}
/// Day 9 entry point: parse one u64 per line, locate the first number that
/// breaks the XMAS property with a 25-number window, then print the sum of
/// the min and max of the contiguous run that adds up to it.
pub fn run() {
    let numbers = read_day(9).map(|l| {
        let string = l.unwrap();
        string.parse::<u64>().unwrap()
    });
    // Collected as Vec<u64>; the type is inferred from check_xmas's
    // &Vec<u64> parameter below.
    let numbers = numbers.collect();
    let index = check_xmas(&numbers, 25);
    let xmas_brake = find_xmas_brake(&numbers, index);
    println!("Number found {}", xmas_brake.iter().max().unwrap() + xmas_brake.iter().min().unwrap())
}
use super::*;
/// Bitflag newtype over a raw `u16`, one flag per interrupt source
/// (bits 0-13, see the accessors generated in the impl block).
#[derive(Clone, Copy, Default, PartialEq, Eq)]
#[repr(transparent)]
pub struct InterruptFlags(pub(crate) u16);
impl InterruptFlags {
    const_new!();
    // One accessor trio (get / with / set) per interrupt source, each at
    // the listed bit position of the wrapped u16.
    bitfield_bool!(u16; 0, vblank, with_vblank, set_vblank);
    bitfield_bool!(u16; 1, hblank, with_hblank, set_hblank);
    bitfield_bool!(u16; 2, vcount, with_vcount, set_vcount);
    bitfield_bool!(u16; 3, timer0, with_timer0, set_timer0);
    bitfield_bool!(u16; 4, timer1, with_timer1, set_timer1);
    bitfield_bool!(u16; 5, timer2, with_timer2, set_timer2);
    bitfield_bool!(u16; 6, timer3, with_timer3, set_timer3);
    bitfield_bool!(u16; 7, serial, with_serial, set_serial);
    bitfield_bool!(u16; 8, dma0, with_dma0, set_dma0);
    bitfield_bool!(u16; 9, dma1, with_dma1, set_dma1);
    bitfield_bool!(u16; 10, dma2, with_dma2, set_dma2);
    bitfield_bool!(u16; 11, dma3, with_dma3, set_dma3);
    bitfield_bool!(u16; 12, keypad, with_keypad, set_keypad);
    bitfield_bool!(u16; 13, gamepak, with_gamepak, set_gamepak);
}
impl_bitwise_ops!(InterruptFlags);
impl core::fmt::Debug for InterruptFlags {
    /// Prints only the flags that are set, each followed by a comma,
    /// wrapped in `InterruptFlags {...}` — same output as the previous
    /// hand-unrolled version, driven by a table instead.
    #[inline(never)]
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        write!(f, "InterruptFlags {{")?;
        let labeled_flags: [(bool, &str); 14] = [
            (self.vblank(), "vblank"),
            (self.hblank(), "hblank"),
            (self.vcount(), "vcount"),
            (self.timer0(), "timer0"),
            (self.timer1(), "timer1"),
            (self.timer2(), "timer2"),
            (self.timer3(), "timer3"),
            (self.serial(), "serial"),
            (self.dma0(), "dma0"),
            (self.dma1(), "dma1"),
            (self.dma2(), "dma2"),
            (self.dma3(), "dma3"),
            (self.keypad(), "keypad"),
            (self.gamepak(), "gamepak"),
        ];
        for (is_set, label) in labeled_flags.iter() {
            if *is_set {
                write!(f, "{},", label)?;
            }
        }
        write!(f, "}}")
    }
}
|
// src/benchmark.rs
// Use mimalloc as the global allocator for the whole benchmark binary.
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
#[macro_use]
extern crate lazy_static;
mod ast;
#[macro_use]
mod code;
mod compiler;
mod evaluator;
mod lexer;
mod object;
mod parser;
mod repl;
mod token;
mod vm;
use ast::*;
use compiler::*;
use evaluator::*;
use lexer::*;
use object::*;
use parser::*;
use std::cell::*;
use std::rc::*;
use std::time::*;
use vm::*;
/// Benchmark driver: `-engine vm` runs the bytecode VM, `-engine eval` the
/// tree-walking evaluator, both over the same recursive fibonacci(35)
/// program, and prints engine, result and elapsed milliseconds.
fn main() {
    // Expect exactly: <program> -engine <vm|eval>
    let args: Vec<String> = std::env::args().collect();
    let engine: String;
    if args.len() != 3 {
        println!("use -engine 'vm' or 'eval'");
        return;
    }
    match &args[1][..] {
        "-engine" => match &args[2][..] {
            "vm" | "eval" => engine = String::from(&args[2]),
            _ => {
                println!("use 'vm or 'eval'");
                return;
            }
        },
        _ => {
            println!("use 'vm or 'eval'");
            return;
        }
    }
    // Benchmark workload: naive doubly-recursive fibonacci in the
    // interpreted language.
    let input = "
let fibonacci = fn(x) {
if (x == 0) {
0
} else {
if (x == 1) {
return 1;
} else {
fibonacci(x - 1) + fibonacci(x - 2);
}
}
};
fibonacci(35);
";
    let result: Object;
    let l = Lexer::new(input);
    let mut p = Parser::new(l);
    match p.parse_program() {
        Ok(program) => {
            let duration: u128;
            match &engine[..] {
                "vm" => {
                    // Compile outside the timed region; only machine.run()
                    // is measured.
                    let mut comp = Compiler::new();
                    match comp.compile(Node::Program(program)) {
                        Err(err) => {
                            println!("compiler error: {}", err);
                            return;
                        }
                        _ => {}
                    }
                    let mut machine = Vm::new(comp.bytecode());
                    let now = SystemTime::now();
                    match machine.run() {
                        Err(err) => {
                            println!("vm error: {}", err);
                            return;
                        }
                        _ => {}
                    }
                    duration = now.elapsed().unwrap().as_millis();
                    result = machine.last_popped_stack_elem.unwrap();
                }
                "eval" => {
                    // The evaluator has no compile step; time evaluate() only.
                    let env = Rc::new(RefCell::new(new_environment()));
                    let now = SystemTime::now();
                    result = evaluate(Node::Program(program), Rc::clone(&env));
                    duration = now.elapsed().unwrap().as_millis();
                }
                _ => {
                    println!("use 'vm or 'eval'");
                    return;
                }
            }
            println!(
                "engine={}, result={}, duration={:?}",
                engine,
                result.inspect(),
                duration
            );
        }
        Err(errors) => panic!("{:?}", errors),
    }
}
|
mod compression;
mod data;
mod entry;
mod index;
mod io_utils;
mod series;
mod commit_log;
pub mod file_system;
pub mod series_table;
pub mod error;
pub mod env;
pub use compression::Compression;
pub use entry::Entry;
pub use series::{SeriesReader, SeriesIterator, SeriesWriter};
pub use series_table::SeriesTable; |
pub mod simple_debug;
|
use id::ID;
use std::result::Result as StdResult;
/// Defines Quantized Density Fields errors.
#[derive(Debug)]
pub enum QDFError {
    /// Tells that the specified space does not exist in the container.
    SpaceDoesNotExists(ID),
    /// Tells that the specified level does not exist in the container.
    LevelDoesNotExists(ID),
    /// Tells that the specified field does not exist in the container.
    FieldDoesNotExists(ID),
}
/// Alias for standard result with `QDFError` error type.
pub type Result<T> = StdResult<T, QDFError>;
|
use board::board::Board;
use bitop::b36::B36;
use std::env;
/// Run an alpha-beta search from the position given by the black/white
/// piece bitboards and print the result plus search statistics.
/// NOTE(review): assumes turn 0 means the first player to move — confirm
/// against Board's convention.
fn execute(bp : u64, wp : u64, turn : i32, alpha : i32, beta : i32) {
    let mut board : Board<B36> = Board::<B36>::new();
    let result : i32 = board.get_best_result_with_ab(bp, wp, turn, alpha, beta);
    println!("Result = {}", result);
    println!("Initial = {}", board.get_initial());
    println!("Final = {}", board.get_final());
    println!("Moves = {}", board.get_move_list_string());
    println!("Elapsed = {}", board.get_elapsed());
}
// From the 12-piece position. The bitboard constants presumably encode
// that start position — confirm against the board encoding.
fn main12() {
    execute(1753344, 81854976, 0, -6, -2);
}
// From the 14-piece position (same alpha-beta window as main12).
fn main14() {
    execute(551158016, 69329408, 0, -6, -2);
}
// From the 16-piece position (same alpha-beta window as main12).
fn main16() {
    execute(550219776, 70271748, 0, -6, -2);
}
/// Entry point: pick the starting position from the first CLI argument
/// (12 by default; 14 and 16 select the other positions) and run it.
/// Panics if the argument is not a valid integer, as before.
fn main() -> Result<(), ()> {
    let args: Vec<String> = env::args().collect();
    let sel: i32 = match args.get(1) {
        Some(arg) => arg.parse().unwrap(),
        None => 12,
    };
    println!("Selected = {}", sel);
    match sel {
        14 => main14(),
        16 => main16(),
        _ => main12(),
    }
    Ok(())
}
|
//! Manages the saving and loading of settings, as well as providing menu data and a thread-safe API.
use std::sync::{
atomic::{AtomicBool, Ordering},
Arc,
};
use once_cell::sync::OnceCell;
use crate::{
menu::{self, RowData, RowDetail},
resources,
};
// Global settings singleton; populated exactly once by Settings::load_shared().
static SETTINGS: OnceCell<Settings> = OnceCell::new();
/// On-disk (JSON) form of the settings. `serde(default)` lets fields
/// missing from an older file fall back to `Default::default()`.
#[derive(serde::Serialize, serde::Deserialize)]
#[serde(default)]
struct StoredSettings {
    sixty_fps: bool,
    show_fps: bool,
    save_cheats: bool,
    no_ceiling: bool,
    interrupt_loops: bool,
}
impl StoredSettings {
    /// Converts the on-disk representation into the live, thread-safe
    /// `Settings`. The result starts `dirty` so the settings are written
    /// back at least once after loading.
    fn into_settings(self) -> Settings {
        Settings {
            sixty_fps: Arc::new(AtomicBool::new(self.sixty_fps)),
            show_fps: Arc::new(AtomicBool::new(self.show_fps)),
            save_cheats: Arc::new(AtomicBool::new(self.save_cheats)),
            no_ceiling: Arc::new(AtomicBool::new(self.no_ceiling)),
            interrupt_loops: Arc::new(AtomicBool::new(self.interrupt_loops)),
            dirty: AtomicBool::new(true),
        }
    }

    /// Snapshots the live `Settings` into a serializable value.
    fn from_settings(settings: &Settings) -> StoredSettings {
        StoredSettings {
            sixty_fps: settings.sixty_fps.load(Ordering::SeqCst),
            show_fps: settings.show_fps.load(Ordering::SeqCst),
            save_cheats: settings.save_cheats.load(Ordering::SeqCst),
            no_ceiling: settings.no_ceiling.load(Ordering::SeqCst),
            interrupt_loops: settings.interrupt_loops.load(Ordering::SeqCst),
        }
    }
}
impl Default for StoredSettings {
fn default() -> Self {
StoredSettings {
sixty_fps: true,
show_fps: false,
save_cheats: false,
no_ceiling: true,
interrupt_loops: true,
}
}
}
/// Live application settings. Each flag is an `Arc<AtomicBool>` so menu rows
/// and game hooks can share and flip it from any thread.
pub struct Settings {
    pub sixty_fps: Arc<AtomicBool>,
    pub show_fps: Arc<AtomicBool>,
    pub save_cheats: Arc<AtomicBool>,
    pub no_ceiling: Arc<AtomicBool>,
    pub interrupt_loops: Arc<AtomicBool>,
    // True when in-memory state differs from what was last saved to disk.
    dirty: AtomicBool,
}
impl Settings {
    /// Reads and deserializes the settings JSON at `path`.
    fn load_path(path: std::path::PathBuf) -> eyre::Result<Settings> {
        let stored: StoredSettings = serde_json::from_reader(std::fs::File::open(path)?)?;
        Ok(stored.into_settings())
    }

    /// Loads the shared settings singleton from disk, falling back to
    /// defaults when the file is missing or invalid.
    fn load_shared() {
        let path = resources::get_documents_path("cleo_settings.json");
        let settings = Self::load_path(path).unwrap_or_else(|err| {
            log::error!("Failed to load settings from JSON: {:?}", err);
            log::info!("Using default values instead.");
            StoredSettings::default().into_settings()
        });

        if SETTINGS.set(settings).is_err() {
            log::warn!("Settings structure already exists.");
        }
    }

    /// Writes the settings to disk if they changed since the last
    /// successful save.
    fn save(&self) -> eyre::Result<()> {
        // Only save if the settings have changed.
        if !self.dirty.load(Ordering::SeqCst) {
            log::info!("Settings have not changed since last save.");
            return Ok(());
        }

        // fixme: Settings::save should be non-blocking.
        serde_json::to_writer_pretty(
            std::fs::File::create(resources::get_documents_path("cleo_settings.json"))?,
            &StoredSettings::from_settings(self),
        )?;

        // Clear the dirty flag only after the write succeeded. Previously it
        // was cleared before writing, so a failed write left the pending
        // changes unsaved while future save() calls believed there was
        // nothing to do.
        self.dirty.store(false, Ordering::SeqCst);
        Ok(())
    }

    /// Marks the settings as modified so the next `save` call writes them.
    fn set_dirty(&self) {
        self.dirty.store(true, Ordering::SeqCst);
    }

    /// Returns the global settings instance.
    /// Panics if `load_shared` has not run yet.
    pub fn shared() -> &'static Settings {
        SETTINGS.get().unwrap()
    }
}
/// One toggleable row in the options menu, bound to a shared settings flag.
#[derive(Debug)]
struct OptionInfo {
    title: &'static str,
    desc: &'static str,
    // Shared with `Settings`; flipping it here changes the live setting.
    value: Arc<AtomicBool>,
}
impl OptionInfo {
fn new(title: &'static str, desc: &'static str, value: Arc<AtomicBool>) -> OptionInfo {
OptionInfo { title, desc, value }
}
}
impl crate::menu::RowData for OptionInfo {
    fn title(&self) -> String {
        self.title.into()
    }

    fn detail(&self) -> crate::menu::RowDetail {
        RowDetail::Info(self.desc.into())
    }

    /// Human-readable state of the toggle.
    fn value(&self) -> &str {
        if self.value.load(Ordering::SeqCst) {
            "On"
        } else {
            "Off"
        }
    }

    /// Green tint when the option is enabled, default otherwise.
    fn tint(&self) -> Option<(u8, u8, u8)> {
        if self.value.load(Ordering::SeqCst) {
            Some(crate::gui::colours::GREEN)
        } else {
            None
        }
    }

    /// Flips the option and marks the settings dirty so they get saved.
    fn handle_tap(&mut self) -> bool {
        // `fetch_xor(true)` toggles in a single atomic operation; the
        // previous separate load-then-store pair could lose a concurrent
        // update between the two steps.
        self.value.fetch_xor(true, Ordering::SeqCst);
        Settings::shared().set_dirty();
        true
    }
}
impl Drop for OptionInfo {
fn drop(&mut self) {
if let Err(err) = Settings::shared().save() {
log::info!("Error saving settings in OptionInfo::drop: {}", err);
}
}
}
/// Builds the "Options" menu tab, with one row per user-facing setting.
/// Each row shares its `AtomicBool` with the global `Settings`.
pub fn tab_data() -> menu::TabData {
    let settings = Settings::shared();

    let option_info = vec![
        OptionInfo::new(
            "60 FPS",
            "Increase the framerate limit from 30 FPS to 60. Default is On.",
            settings.sixty_fps.clone(),
        ),
        OptionInfo::new(
            "Save Cheat States",
            "Preserve the states of toggleable cheats between game loads/launches. Default is Off.",
            settings.save_cheats.clone(),
        ),
        // The height-limit option is deliberately hidden from the menu for now.
        // OptionInfo::new(
        //     "Remove Height Limit",
        //     "Remove the limit on how high you can fly. Default is On.",
        //     settings.no_ceiling.clone(),
        // ),
        OptionInfo::new(
            "Show FPS",
            "Display the current framerate at the top of the screen. Default is Off.",
            settings.show_fps.clone(),
        ),
        OptionInfo::new(
            "Interrupt Script Loops",
            "Reduce lag by detecting and interrupting long loops in scripts. Default is On.",
            settings.interrupt_loops.clone(),
        ),
    ];

    menu::TabData {
        name: "Options".to_string(),
        warning: None,
        row_data: option_info
            .into_iter()
            .map(|info| Box::new(info) as Box<dyn RowData>)
            .collect(),
    }
}
/// Hook for the game's settings loader: loads CLEO settings first, then
/// delegates to the original implementation.
fn load_settings(menu_manager: u64) {
    log::info!("Loading CLEO settings");
    Settings::load_shared();

    // Save the current state of the settings so we create a settings file if it didn't exist.
    if let Err(err) = Settings::shared().save() {
        log::error!("Unable to save settings after load: {:?}", err);
    }

    log::info!("Loading game settings");
    crate::call_original!(crate::targets::load_settings, menu_manager);
}
/// Installs the `load_settings` hook. Call once during startup.
pub fn init() {
    crate::targets::load_settings::install(load_settings);
}
|
use anyhow::Error;
use futures::{Stream, TryStreamExt};
use log::info;
use postgres_query::{query, Error as PqError, FromSqlRow};
use smallvec::{smallvec, SmallVec};
use stack_string::StackString;
use url::Url;
use uuid::Uuid;
use gdrive_lib::{date_time_wrapper::DateTimeWrapper, directory_info::DirectoryInfo};
use crate::pgpool::PgPool;
/// One cached file record, mapped from the `file_info_cache` table.
#[derive(FromSqlRow, Clone, Debug)]
pub struct FileInfoCache {
    pub id: Uuid,
    pub filename: StackString,
    pub filepath: StackString,
    pub urlname: StackString,
    // Optional checksums; not every backend supplies both.
    pub md5sum: Option<StackString>,
    pub sha1sum: Option<StackString>,
    // Modification time and size, presumably from the file's stat metadata
    // (per field naming) — TODO confirm units/epoch against the writer.
    pub filestat_st_mtime: i32,
    pub filestat_st_size: i32,
    pub serviceid: StackString,
    pub servicetype: StackString,
    pub servicesession: StackString,
    pub created_at: DateTimeWrapper,
    // Soft-delete marker: NULL means the entry is live.
    pub deleted_at: Option<DateTimeWrapper>,
    pub modified_at: DateTimeWrapper,
}
/// Natural key identifying one `file_info_cache` row
/// (note: `servicetype` is not part of the key).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FileInfoKey {
    pub filename: StackString,
    pub filepath: StackString,
    pub urlname: StackString,
    pub serviceid: StackString,
    pub servicesession: StackString,
}
impl FileInfoKey {
    /// Soft-deletes the cache entry matching this key by setting
    /// `deleted_at`; the row is not physically removed.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn delete_cache_entry(&self, pool: &PgPool) -> Result<(), Error> {
        info!("delete_cache_entry");
        let query = query!(
            r#"
                UPDATE file_info_cache SET deleted_at=now(),modified_at=now()
                WHERE filename=$filename
                  AND filepath=$filepath
                  AND serviceid=$serviceid
                  AND servicesession=$servicesession
                  AND urlname=$urlname
            "#,
            filename = self.filename,
            filepath = self.filepath,
            serviceid = self.serviceid,
            servicesession = self.servicesession,
            urlname = self.urlname,
        );
        let conn = pool.get().await?;
        query.execute(&conn).await?;
        Ok(())
    }
}
impl FileInfoCache {
    /// Streams all cached entries for a service session/type. When
    /// `get_deleted` is true, only soft-deleted rows are returned;
    /// otherwise only live rows.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn get_all_cached(
        servicesession: &str,
        servicetype: &str,
        pool: &PgPool,
        get_deleted: bool,
    ) -> Result<impl Stream<Item = Result<Self, PqError>>, Error> {
        // The two branches differ only in the deleted_at predicate; the
        // query! macro takes a literal SQL string, so both are spelled out.
        if get_deleted {
            let query = query!(
                r#"
                    SELECT * FROM file_info_cache
                    WHERE servicesession=$servicesession
                      AND servicetype=$servicetype
                      AND deleted_at IS NOT NULL
                "#,
                servicesession = servicesession,
                servicetype = servicetype,
            );
            let conn = pool.get().await?;
            query.fetch_streaming(&conn).await.map_err(Into::into)
        } else {
            let query = query!(
                r#"
                    SELECT * FROM file_info_cache
                    WHERE servicesession=$servicesession
                      AND servicetype=$servicetype
                      AND deleted_at IS NULL
                "#,
                servicesession = servicesession,
                servicetype = servicetype,
            );
            let conn = pool.get().await?;
            query.fetch_streaming(&conn).await.map_err(Into::into)
        }
    }

    /// Returns the most recently created live entry for the given URL,
    /// or `None` if there is no match.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn get_by_urlname(url: &Url, pool: &PgPool) -> Result<Option<Self>, Error> {
        let urlname = url.as_str();
        let query = query!(
            r#"
                SELECT * FROM file_info_cache
                WHERE urlname=$urlname
                  AND deleted_at IS NULL
                ORDER BY created_at DESC
                LIMIT 1
            "#,
            urlname = urlname,
        );
        let conn = pool.get().await?;
        query.fetch_opt(&conn).await.map_err(Into::into)
    }

    /// Builds the natural key for this row.
    /// Never returns `None` in the current implementation; the `Option`
    /// return type is kept for API compatibility with existing callers.
    #[must_use]
    pub fn get_key(&self) -> Option<FileInfoKey> {
        let filename = self.filename.clone();
        let filepath = self.filepath.clone();
        let urlname = self.urlname.clone();
        let serviceid = self.serviceid.clone();
        let servicesession = self.servicesession.clone();
        let finfo = FileInfoKey {
            filename,
            filepath,
            urlname,
            serviceid,
            servicesession,
        };
        Some(finfo)
    }

    /// Fetches the row matching this entry's full identity (including
    /// `servicetype`, unlike `FileInfoKey`), live or deleted.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn get_cache(&self, pool: &PgPool) -> Result<Option<Self>, Error> {
        let query = query!(
            r#"
                SELECT *
                FROM file_info_cache
                WHERE filename = $filename
                  AND filepath = $filepath
                  AND urlname = $urlname
                  AND serviceid = $serviceid
                  AND servicetype = $servicetype
                  AND servicesession = $servicesession
            "#,
            filename = self.filename,
            filepath = self.filepath,
            urlname = self.urlname,
            serviceid = self.serviceid,
            servicetype = self.servicetype,
            servicesession = self.servicesession,
        );
        let conn = pool.get().await?;
        query.fetch_opt(&conn).await.map_err(Into::into)
    }

    /// Upserts this entry: inserts a new row, or on identity conflict
    /// refreshes checksums/stat fields and un-deletes the row
    /// (`deleted_at=null`).
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn insert(&self, pool: &PgPool) -> Result<(), Error> {
        info!("FileInfoCache.insert");
        let query = query!(
            r#"
                INSERT INTO file_info_cache (
                    filename, filepath, urlname, md5sum, sha1sum, filestat_st_mtime,
                    filestat_st_size, serviceid, servicetype, servicesession, created_at,
                    deleted_at, modified_at
                ) VALUES (
                    $filename, $filepath, $urlname, $md5sum, $sha1sum, $filestat_st_mtime,
                    $filestat_st_size, $serviceid, $servicetype, $servicesession, now(),
                    null, now()
                ) ON CONFLICT (
                    filename,filepath,urlname,serviceid,servicetype,servicesession
                ) DO UPDATE SET
                    md5sum=EXCLUDED.md5sum,
                    sha1sum=EXCLUDED.sha1sum,
                    filestat_st_mtime=EXCLUDED.filestat_st_mtime,
                    filestat_st_size=EXCLUDED.filestat_st_size,
                    deleted_at=null,
                    modified_at=now()
            "#,
            filename = self.filename,
            filepath = self.filepath,
            urlname = self.urlname,
            md5sum = self.md5sum,
            sha1sum = self.sha1sum,
            filestat_st_mtime = self.filestat_st_mtime,
            filestat_st_size = self.filestat_st_size,
            serviceid = self.serviceid,
            servicetype = self.servicetype,
            servicesession = self.servicesession,
        );
        let conn = pool.get().await?;
        query.execute(&conn).await?;
        Ok(())
    }

    /// Soft-deletes every entry for the given session/type; returns the
    /// number of rows updated.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn delete_all(
        servicesession: &str,
        servicetype: &str,
        pool: &PgPool,
    ) -> Result<usize, Error> {
        let query = query!(
            r#"
                UPDATE file_info_cache SET deleted_at=now(),modified_at=now()
                WHERE servicesession=$servicesession
                  AND servicetype=$servicetype
            "#,
            servicesession = servicesession,
            servicetype = servicetype,
        );
        let conn = pool.get().await?;
        let n = query.execute(&conn).await?;
        Ok(n as usize)
    }

    /// Soft-deletes the entry with the given service id for the given
    /// session/type; returns the number of rows updated.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn delete_by_id(
        gdriveid: &str,
        servicesession: &str,
        servicetype: &str,
        pool: &PgPool,
    ) -> Result<usize, Error> {
        let query = query!(
            r#"
                UPDATE file_info_cache SET deleted_at=now(),modified_at=now()
                WHERE servicesession=$servicesession
                  AND servicetype=$servicetype
                  AND serviceid=$gdriveid
            "#,
            servicesession = servicesession,
            servicetype = servicetype,
            gdriveid = gdriveid,
        );
        let conn = pool.get().await?;
        let n = query.execute(&conn).await?;
        Ok(n as usize)
    }

    /// Physically removes every entry for the given session/type (unlike
    /// the soft-delete in `delete_all`); returns the number of rows removed.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn clear_all(
        servicesession: &str,
        servicetype: &str,
        pool: &PgPool,
    ) -> Result<usize, Error> {
        let query = query!(
            r#"
                DELETE FROM file_info_cache
                WHERE servicesession=$servicesession
                  AND servicetype=$servicetype
            "#,
            servicesession = servicesession,
            servicetype = servicetype,
        );
        let conn = pool.get().await?;
        let n = query.execute(&conn).await?;
        Ok(n as usize)
    }
}
/// One cached directory record, mapped from the `directory_info_cache` table.
#[derive(FromSqlRow, Clone)]
pub struct DirectoryInfoCache {
    pub id: Uuid,
    pub directory_id: StackString,
    pub directory_name: StackString,
    // None for the root directory.
    pub parent_id: Option<StackString>,
    pub is_root: bool,
    pub servicetype: StackString,
    pub servicesession: StackString,
}
impl DirectoryInfoCache {
    /// Converts this row into the service-agnostic `DirectoryInfo`,
    /// dropping the service/session columns.
    #[must_use]
    pub fn into_directory_info(self) -> DirectoryInfo {
        DirectoryInfo {
            directory_id: self.directory_id,
            directory_name: self.directory_name,
            parentid: self.parent_id.map(Into::into),
        }
    }

    /// Streams all cached directories for the given session/type.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn get_all(
        servicesession: &str,
        servicetype: &str,
        pool: &PgPool,
    ) -> Result<impl Stream<Item = Result<Self, PqError>>, Error> {
        let query = query!(
            r#"
                SELECT * FROM directory_info_cache
                WHERE servicesession=$servicesession
                  AND servicetype=$servicetype
            "#,
            servicesession = servicesession,
            servicetype = servicetype,
        );
        let conn = pool.get().await?;
        query.fetch_streaming(&conn).await.map_err(Into::into)
    }

    /// Inserts this directory row (plain INSERT; no conflict handling).
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn insert(&self, pool: &PgPool) -> Result<(), Error> {
        let query = query!(
            r#"
                INSERT INTO directory_info_cache (
                    directory_id,directory_name,parent_id,is_root,servicetype,servicesession
                ) VALUES (
                    $directory_id,$directory_name,$parent_id,$is_root,$servicetype,$servicesession
                )
            "#,
            directory_id = self.directory_id,
            directory_name = self.directory_name,
            parent_id = self.parent_id,
            is_root = self.is_root,
            servicetype = self.servicetype,
            servicesession = self.servicesession,
        );
        let conn = pool.get().await?;
        query.execute(&conn).await?;
        Ok(())
    }

    /// Physically removes every directory row for the given session/type;
    /// returns the number of rows removed.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn delete_all(
        servicesession: &str,
        servicetype: &str,
        pool: &PgPool,
    ) -> Result<usize, Error> {
        let query = query!(
            r#"
                DELETE FROM directory_info_cache
                WHERE servicesession=$servicesession
                  AND servicetype=$servicetype
            "#,
            servicesession = servicesession,
            servicetype = servicetype,
        );
        let conn = pool.get().await?;
        let n = query.execute(&conn).await?;
        Ok(n as usize)
    }

    /// Removes the directory row matching the given id for the given
    /// session/type; returns the number of rows removed.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    // NOTE(review): this filters on `serviceid`, but `directory_info_cache`
    // (per this struct) has `directory_id` and no `serviceid` column —
    // looks copy-pasted from FileInfoCache::delete_by_id and would fail at
    // runtime. Verify against the table schema before relying on it.
    pub async fn delete_by_id(
        gdriveid: &str,
        servicesession: &str,
        servicetype: &str,
        pool: &PgPool,
    ) -> Result<usize, Error> {
        let query = query!(
            r#"
                DELETE FROM directory_info_cache
                WHERE servicesession=$servicesession
                  AND servicetype=$servicetype
                  AND serviceid=$gdriveid
            "#,
            servicesession = servicesession,
            servicetype = servicetype,
            gdriveid = gdriveid,
        );
        let conn = pool.get().await?;
        let n = query.execute(&conn).await?;
        Ok(n as usize)
    }
}
/// A pending sync operation (source → destination URL), mapped from the
/// `file_sync_cache` table.
#[derive(FromSqlRow, Clone, Debug, PartialEq, Eq)]
pub struct FileSyncCache {
    pub id: Uuid,
    pub src_url: StackString,
    pub dst_url: StackString,
    pub created_at: DateTimeWrapper,
}
impl FileSyncCache {
    /// Streams all pending sync entries, ordered by source URL.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn get_cache_list(
        pool: &PgPool,
    ) -> Result<impl Stream<Item = Result<Self, PqError>>, Error> {
        let query = query!("SELECT * FROM file_sync_cache ORDER BY src_url");
        let conn = pool.get().await?;
        query.fetch_streaming(&conn).await.map_err(Into::into)
    }

    /// Fetches a sync entry by primary key.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn get_by_id(pool: &PgPool, id: Uuid) -> Result<Option<Self>, Error> {
        let query = query!("SELECT * FROM file_sync_cache WHERE id=$id", id = id);
        let conn = pool.get().await?;
        query.fetch_opt(&conn).await.map_err(Into::into)
    }

    /// Removes a sync entry by primary key (hard delete).
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn delete_by_id(pool: &PgPool, id: Uuid) -> Result<(), Error> {
        let query = query!("DELETE FROM file_sync_cache WHERE id=$id", id = id);
        let conn = pool.get().await?;
        query.execute(&conn).await?;
        Ok(())
    }

    /// Removes this entry from the cache.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn delete_cache_entry(&self, pool: &PgPool) -> Result<(), Error> {
        Self::delete_by_id(pool, self.id).await
    }

    /// Inserts this entry's URLs as a new pending sync row.
    // NOTE(review): `self.id` and `self.created_at` are not passed to the
    // INSERT — presumably the table supplies its own defaults; confirm
    // against the schema.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn cache_sync_sync(&self, pool: &PgPool) -> Result<(), Error> {
        let query = query!(
            r#"
                INSERT INTO file_sync_cache (src_url, dst_url, created_at)
                VALUES ($src_url, $dst_url, now())
            "#,
            src_url = self.src_url,
            dst_url = self.dst_url,
        );
        let conn = pool.get().await?;
        query.execute(&conn).await?;
        Ok(())
    }

    /// Validates both URLs and records a pending sync from `src_url` to
    /// `dst_url`.
    ///
    /// # Errors
    /// Returns an error if either URL fails to parse or the db query fails.
    pub async fn cache_sync(pool: &PgPool, src_url: &str, dst_url: &str) -> Result<(), Error> {
        let src_url: Url = src_url.parse()?;
        let dst_url: Url = dst_url.parse()?;
        let value = Self {
            id: Uuid::new_v4(),
            src_url: src_url.as_str().into(),
            dst_url: dst_url.as_str().into(),
            created_at: DateTimeWrapper::now(),
        };
        value.cache_sync_sync(pool).await?;
        Ok(())
    }
}
/// A configured sync pair, mapped from the `file_sync_config` table.
#[derive(FromSqlRow, Clone, PartialEq, Eq)]
pub struct FileSyncConfig {
    pub id: Uuid,
    pub src_url: StackString,
    pub dst_url: StackString,
    pub last_run: DateTimeWrapper,
    // Optional human-readable label used by `get_by_name`.
    pub name: Option<StackString>,
}
impl FileSyncConfig {
    /// Streams every configured sync pair.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn get_config_list(
        pool: &PgPool,
    ) -> Result<impl Stream<Item = Result<Self, PqError>>, Error> {
        let query = query!("SELECT * FROM file_sync_config");
        let conn = pool.get().await?;
        query.fetch_streaming(&conn).await.map_err(Into::into)
    }

    /// Returns all configured URLs as a flat list: for each config row,
    /// the source then the destination URL.
    ///
    /// # Errors
    /// Returns an error if the db query fails or any stored URL is invalid.
    pub async fn get_url_list(pool: &PgPool) -> Result<Vec<Url>, Error> {
        // SmallVec<[_; 2]> holds each row's (src, dst) pair without a heap
        // allocation before flattening.
        let proc_list: Result<Vec<SmallVec<[_; 2]>>, Error> = Self::get_config_list(pool)
            .await?
            .map_err(Into::into)
            .and_then(|v| async move {
                let u0: Url = v.src_url.parse()?;
                let u1: Url = v.dst_url.parse()?;
                Ok(smallvec![u0, u1])
            })
            .try_collect()
            .await;
        Ok(proc_list?.into_iter().flatten().collect())
    }

    /// Fetches a config row by its `name` label.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn get_by_name(pool: &PgPool, name: &str) -> Result<Option<Self>, Error> {
        let query = query!(
            "SELECT * FROM file_sync_config WHERE name = $name",
            name = name
        );
        let conn = pool.get().await?;
        query.fetch_opt(&conn).await.map_err(Into::into)
    }

    /// Inserts this config row; `last_run` is set to now() by the query,
    /// ignoring the struct field.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn insert_config(&self, pool: &PgPool) -> Result<(), Error> {
        let query = query!(
            r#"
                INSERT INTO file_sync_config (src_url, dst_url, last_run, name)
                VALUES ($src_url, $dst_url, now(), $name)
            "#,
            src_url = self.src_url,
            dst_url = self.dst_url,
            name = self.name,
        );
        let conn = pool.get().await?;
        query.execute(&conn).await?;
        Ok(())
    }
}
/// A row of the `authorized_users` table (email only).
#[derive(FromSqlRow, Clone, Debug)]
pub struct AuthorizedUsers {
    pub email: StackString,
}
impl AuthorizedUsers {
    /// Streams every authorized user.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn get_authorized_users(
        pool: &PgPool,
    ) -> Result<impl Stream<Item = Result<Self, PqError>>, Error> {
        let query = query!("SELECT * FROM authorized_users");
        let conn = pool.get().await?;
        query.fetch_streaming(&conn).await.map_err(Into::into)
    }
}
/// A row of the `file_sync_blacklist` table: a URL fragment to exclude.
#[derive(FromSqlRow, Clone, Debug)]
pub struct FileSyncBlacklist {
    pub id: Uuid,
    pub blacklist_url: StackString,
}
impl FileSyncBlacklist {
    /// Streams every blacklist entry (internal; used by `BlackList::new`).
    async fn get_blacklist(
        pool: &PgPool,
    ) -> Result<impl Stream<Item = Result<Self, PqError>>, Error> {
        let query = query!("SELECT * FROM file_sync_blacklist");
        let conn = pool.get().await?;
        query.fetch_streaming(&conn).await.map_err(Into::into)
    }
}
/// In-memory snapshot of the blacklist table, for fast substring checks.
#[derive(Default)]
pub struct BlackList {
    blacklist: Vec<FileSyncBlacklist>,
}
impl BlackList {
    /// Loads the whole blacklist table into memory.
    ///
    /// # Errors
    /// Returns an error if the db query fails.
    pub async fn new(pool: &PgPool) -> Result<Self, Error> {
        let blacklist: Vec<_> = FileSyncBlacklist::get_blacklist(pool)
            .await?
            .try_collect()
            .await?;
        Ok(Self { blacklist })
    }

    /// True if `url` contains any blacklist entry as a substring.
    #[must_use]
    pub fn is_in_blacklist(&self, url: &Url) -> bool {
        self.blacklist
            .iter()
            .any(|item| url.as_str().contains(item.blacklist_url.as_str()))
    }

    /// True if any blacklist entry contains `url` as a substring, i.e.
    /// `url` may be a prefix/ancestor of a blacklisted location.
    #[must_use]
    pub fn could_be_in_blacklist(&self, url: &Url) -> bool {
        self.blacklist
            .iter()
            .any(|item| item.blacklist_url.contains(url.as_str()))
    }
}
|
/*
Mine simulator .. the mine is represented by an array
... a really poor one :D
*/
mod modules;
use std::io;
/// Entry point: reads a mine size (1..=20) from stdin, builds a mine and
/// walks a dwarf through every spot, printing debug output along the way.
fn main() {
    let mut mine_size_str = String::new();
    let mut mine_size: usize;

    loop {
        println!("Please enter the mine size. Choose a number between 1 and 20");
        // read_line APPENDS to the buffer; without clearing it, a second
        // prompt would try to parse "5\n7" and previously panicked.
        mine_size_str.clear();
        io::stdin()
            .read_line(&mut mine_size_str)
            .expect("Failed to read line");

        // Re-prompt on non-numeric input instead of panicking via expect().
        mine_size = match mine_size_str.trim().parse() {
            Ok(n) => n,
            Err(_) => {
                println!("Please type a number between 1 and 20!");
                continue;
            }
        };

        if (mine_size > 0) && (mine_size < 21) {
            break;
        }
    }

    // this init sucks
    let mut mine: Vec<Option<modules::mine::MineSpot>> = Vec::new();
    modules::mine::init_mine_with_gold(&mut mine, mine_size);
    modules::mine::debug_print(&mine);

    let mut dwarf = modules::dwarf::Dwarf::new();
    // Visit each spot; the dwarf accumulates whatever it finds.
    for spot in &mine {
        dwarf.visit_mine_spot(spot.as_ref())
    }
    dwarf.debug_print();
}
#[cfg(test)]
mod tests {
    // this runs only if `cargo test` is called
    // make the functions outside visible
    use super::*;

    /// Statistical check that `init_gold_and_stuff` yields at least one
    /// occupied spot over ten attempts.
    // NOTE(review): despite its name, this exercises `init_gold_and_stuff`,
    // not `init_mine_with_gold`; if the generator is random it can in
    // principle fail when all ten draws come up empty.
    #[test]
    fn test_init_mine_with_gold() {
        let mut found: bool = false;
        for _index in 0..10 {
            let spot: Option<modules::mine::MineSpot> = modules::mine::init_gold_and_stuff();
            if spot.is_some() {
                found = true
            }
        }
        assert!(found);
    }
}
use std::time::Duration;
use bson::UuidRepresentation;
use pretty_assertions::assert_eq;
use serde::Deserialize;
use crate::{
bson::{Bson, Document},
client::options::{ClientOptions, ConnectionString, ServerAddress},
error::ErrorKind,
options::Compressor,
test::run_spec_test,
Client,
};
/// Top-level shape of a connection-string spec test JSON file.
#[derive(Debug, Deserialize)]
struct TestFile {
    pub tests: Vec<TestCase>,
}
/// One spec test case.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct TestCase {
    description: String,
    // Connection string under test.
    uri: String,
    // Whether the URI is expected to parse successfully.
    valid: bool,
    // When true, parsing should warn rather than hard-fail.
    warning: Option<bool>,
    // Expected parsed hosts as loosely-typed documents.
    hosts: Option<Vec<Document>>,
    auth: Option<TestAuth>,
    // Expected parsed options; keys are compared case-insensitively.
    options: Option<Document>,
}
/// Expected credential/default-database portion of a test case.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct TestAuth {
    username: Option<String>,
    password: Option<String>,
    db: Option<String>,
}
impl TestAuth {
    /// True when the expected username, password and default database all
    /// match what was parsed into `options`.
    fn matches_client_options(&self, options: &ClientOptions) -> bool {
        let credential = options.credential.as_ref();
        self.username.as_ref() == credential.and_then(|cred| cred.username.as_ref())
            && self.password.as_ref() == credential.and_then(|cred| cred.password.as_ref())
            && self.db.as_ref() == options.default_database.as_ref()
    }
}
/// Runs one connection-string spec test file: for each case, either checks
/// the parsed hosts/options/auth against the expectations, or asserts that
/// parsing fails for invalid/warning cases. Cases covering unimplemented
/// features are skipped up front.
async fn run_test(test_file: TestFile) {
    for mut test_case in test_file.tests {
        if
        // TODO: RUST-229: Implement IPv6 Support
        test_case.description.contains("ipv6")
            || test_case.description.contains("IP literal")
            // TODO: RUST-226: Investigate whether tlsCertificateKeyFilePassword is supported in rustls
            || test_case
                .description
                .contains("tlsCertificateKeyFilePassword")
            // Not Implementing
            || test_case.description.contains("tlsAllowInvalidHostnames")
            || test_case.description.contains("single-threaded")
            || test_case.description.contains("serverSelectionTryOnce")
            || test_case.description.contains("relative path")
            // Compression is implemented but will only pass the tests if all
            // the appropriate feature flags are set. That is because
            // valid compressors are only parsed correctly if the corresponding feature flag is set.
            // (otherwise they are treated as invalid, and hence ignored)
            //
            // Fixed: the cfg predicate key is `feature`, not `features`; the
            // misspelled key made this cfg! always false, so compression
            // cases were skipped even with every compression feature on.
            || (test_case.description.contains("compress") &&
                !cfg!(
                    all(feature = "zlib-compression",
                        feature = "zstd-compression",
                        feature = "snappy-compression"
                    )
                )
            )
            // The Rust driver disallows `maxPoolSize=0`.
            || test_case.description.contains("maxPoolSize=0 does not error")
        {
            continue;
        }

        #[cfg(not(unix))]
        if test_case.description.contains("Unix") {
            continue;
        }

        let warning = test_case.warning.take().unwrap_or(false);

        if test_case.valid && !warning {
            let mut is_unsupported_host_type = false;
            // hosts
            if let Some(mut json_hosts) = test_case.hosts.take() {
                // skip over unsupported host types
                #[cfg(not(unix))]
                {
                    is_unsupported_host_type = json_hosts.iter_mut().any(|h_json| {
                        matches!(
                            h_json.remove("type").as_ref().and_then(Bson::as_str),
                            Some("ip_literal") | Some("unix")
                        )
                    });
                }

                #[cfg(unix)]
                {
                    is_unsupported_host_type = json_hosts.iter_mut().any(|h_json| {
                        matches!(
                            h_json.remove("type").as_ref().and_then(Bson::as_str),
                            Some("ip_literal")
                        )
                    });
                }

                if !is_unsupported_host_type {
                    let options = ClientOptions::parse(&test_case.uri).await.unwrap();
                    let hosts: Vec<_> = options
                        .hosts
                        .into_iter()
                        .map(ServerAddress::into_document)
                        .collect();

                    assert_eq!(hosts, json_hosts);
                }
            }
            if !is_unsupported_host_type {
                // options
                let options = ClientOptions::parse(&test_case.uri)
                    .await
                    .expect(&test_case.description);
                let mut options_doc = bson::to_document(&options).unwrap_or_else(|_| {
                    panic!(
                        "{}: Failed to serialize ClientOptions",
                        &test_case.description
                    )
                });
                if let Some(json_options) = test_case.options {
                    // Null expected values mean "unset"; drop them and
                    // lowercase keys for case-insensitive comparison.
                    let mut json_options: Document = json_options
                        .into_iter()
                        .filter_map(|(k, v)| {
                            if let Bson::Null = v {
                                None
                            } else {
                                Some((k.to_lowercase(), v))
                            }
                        })
                        .collect();

                    // tlsallowinvalidcertificates and tlsinsecure must be inverse of each other
                    if !json_options.contains_key("tlsallowinvalidcertificates") {
                        if let Some(val) = json_options.remove("tlsinsecure") {
                            json_options
                                .insert("tlsallowinvalidcertificates", !val.as_bool().unwrap());
                        }
                    }

                    // The default types parsed from the test file don't match those serialized
                    // from the `ClientOptions` struct.
                    if let Ok(min) = json_options.get_i32("minpoolsize") {
                        json_options.insert("minpoolsize", Bson::Int64(min.into()));
                    }
                    if let Ok(max) = json_options.get_i32("maxpoolsize") {
                        json_options.insert("maxpoolsize", Bson::Int64(max.into()));
                    }
                    if let Ok(max_connecting) = json_options.get_i32("maxconnecting") {
                        json_options.insert("maxconnecting", Bson::Int64(max_connecting.into()));
                    }

                    // Only compare the keys the test actually specifies.
                    options_doc = options_doc
                        .into_iter()
                        .filter(|(ref key, _)| json_options.contains_key(key))
                        .collect();

                    // This is required because compressor is not serialize, but the spec tests
                    // still expect to see serialized compressors.
                    // This hardcodes the compressors into the options.
                    if let Some(compressors) = options.compressors {
                        options_doc.insert(
                            "compressors",
                            compressors
                                .iter()
                                .map(Compressor::name)
                                .collect::<Vec<&str>>(),
                        );
                        #[cfg(feature = "zlib-compression")]
                        for compressor in compressors {
                            if let Compressor::Zlib { level: Some(level) } = compressor {
                                options_doc.insert("zlibcompressionlevel", level);
                            }
                        }
                    }

                    assert_eq!(options_doc, json_options, "{}", test_case.description)
                }

                if let Some(test_auth) = test_case.auth {
                    let options = ClientOptions::parse(&test_case.uri).await.unwrap();
                    assert!(test_auth.matches_client_options(&options));
                }
            }
        } else {
            let expected_type = if warning { "warning" } else { "error" };

            match ClientOptions::parse(&test_case.uri)
                .await
                .map_err(|e| *e.kind)
            {
                Ok(_) => panic!("expected {}", expected_type),
                Err(ErrorKind::InvalidArgument { .. }) => {}
                Err(e) => panic!("expected InvalidArgument, but got {:?}", e),
            }
        }
    }
}
/// Runs the driver's URI-options spec test suite.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn run_uri_options_spec_tests() {
    run_spec_test(&["uri-options"], run_test).await;
}
/// Runs the driver's connection-string spec test suite.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn run_connection_string_spec_tests() {
    run_spec_test(&["connection-string"], run_test).await;
}
/// Asserts that parsing a URI with the given unknown `option` fails with
/// `InvalidArgument`, and that the error message contains `suggestion`
/// (a known near-miss option name) when one is provided — or no "similar"
/// hint at all when `suggestion` is `None`.
async fn parse_uri(option: &str, suggestion: Option<&str>) {
    match ConnectionString::parse(format!("mongodb://host:27017/?{}=test", option))
        .map_err(|e| *e.kind)
    {
        Ok(_) => panic!("expected error for option {}", option),
        Err(ErrorKind::InvalidArgument { message, .. }) => {
            match suggestion {
                Some(s) => assert!(message.contains(s)),
                None => assert!(!message.contains("similar")),
            };
        }
        Err(e) => panic!("expected InvalidArgument, but got {:?}", e),
    }
}
/// Checks that every supported `uuidRepresentation` value parses to the
/// matching enum variant, and that an unknown value produces the expected
/// error message.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn uuid_representations() {
    let mut uuid_repr = parse_uri_with_uuid_representation("csharpLegacy")
        .await
        .expect("expected `csharpLegacy` to be a valid argument for `uuidRepresentation`");
    assert_eq!(UuidRepresentation::CSharpLegacy, uuid_repr);

    uuid_repr = parse_uri_with_uuid_representation("javaLegacy")
        .await
        .expect("expected `javaLegacy` to be a valid argument for `uuidRepresentation`");
    assert_eq!(UuidRepresentation::JavaLegacy, uuid_repr);

    uuid_repr = parse_uri_with_uuid_representation("pythonLegacy")
        .await
        .expect("expected `pythonLegacy` to be a valid argument for `uuidRepresentation`");
    assert_eq!(UuidRepresentation::PythonLegacy, uuid_repr);

    let uuid_err = parse_uri_with_uuid_representation("unknownLegacy")
        .await
        .expect_err("expect `unknownLegacy` to be an invalid argument for `uuidRepresentation`");
    assert_eq!(
        "connection string `uuidRepresentation` option can be one of `csharpLegacy`, \
         `javaLegacy`, or `pythonLegacy`. Received invalid `unknownLegacy`"
            .to_string(),
        uuid_err
    );
}
/// Parses a connection string carrying the given `uuidRepresentation` value
/// and returns either the parsed representation or the error message text.
async fn parse_uri_with_uuid_representation(uuid_repr: &str) -> Result<UuidRepresentation, String> {
    let uri = format!(
        "mongodb://localhost:27017/?uuidRepresentation={}",
        uuid_repr
    );
    ConnectionString::parse(uri)
        .map(|cs| cs.uuid_representation.unwrap())
        .map_err(|e| e.message().unwrap())
}
/// Checks that unknown URI options are rejected, and that near-miss names
/// get a "did you mean" suggestion while unrelated names get none.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn parse_unknown_options() {
    parse_uri("invalidoption", None).await;
    parse_uri("x", None).await;
    parse_uri("max", None).await;
    parse_uri("tlstimeout", None).await;
    parse_uri("waitqueuetimeout", Some("waitqueuetimeoutms")).await;
    parse_uri("retry_reads", Some("retryreads")).await;
    parse_uri("poolsize", Some("maxpoolsize")).await;
    parse_uri(
        "tlspermitinvalidcertificates",
        Some("tlsallowinvalidcertificates"),
    )
    .await;
    parse_uri("maxstalenessms", Some("maxstalenessseconds")).await;
}
/// A bare URI with no path component should parse with no default database.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn parse_with_no_default_database() {
    let uri = "mongodb://localhost/";

    assert_eq!(
        ClientOptions::parse(uri).await.unwrap(),
        ClientOptions {
            hosts: vec![ServerAddress::Tcp {
                host: "localhost".to_string(),
                port: None
            }],
            original_uri: Some(uri.into()),
            default_database: None,
            ..Default::default()
        }
    );
}
/// The Debug output of ClientOptions must not leak credentials or the
/// original URI.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn options_debug_omits_uri() {
    let uri = "mongodb://username:password@localhost/";
    let options = ClientOptions::parse(uri).await.unwrap();

    let debug_output = format!("{:?}", options);
    assert!(!debug_output.contains("username"));
    assert!(!debug_output.contains("password"));
    assert!(!debug_output.contains("uri"));
}
/// A heartbeat frequency below the allowed minimum must be rejected at
/// client construction time.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn options_enforce_min_heartbeat_frequency() {
    let options = ClientOptions::builder()
        .hosts(vec![ServerAddress::parse("a:123").unwrap()])
        .heartbeat_freq(Duration::from_millis(10))
        .build();

    Client::with_options(options).unwrap_err();
}
|
use bevy::{asset::AssetServerSettings, prelude::*};
use bevy_prefab::prelude::*;
/// Example entry point: points the asset server at `<cwd>/assets`, installs
/// the prefab plugin with its optional built-in prefabs, and runs the app.
fn main() {
    // Build an absolute asset path so the example works regardless of how
    // the binary is launched.
    let asset_folder = std::env::current_dir()
        .unwrap()
        .as_path()
        .to_string_lossy()
        .to_string()
        + "/assets";

    App::build()
        .insert_resource(AssetServerSettings { asset_folder })
        .add_plugins(DefaultPlugins)
        .add_plugin(
            PrefabPlugin::default()
                // optional pre-built prefabs
                .with_primitives_prefabs()
                .with_objects_prefabs(),
        )
        .add_startup_system(setup.system())
        .run();
}
/// Startup system: spawns a point light, a camera aimed at the origin, and
/// the demo prefab loaded from `primitives.prefab`.
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
    commands.spawn_bundle(PointLightBundle {
        transform: Transform::from_xyz(4.0, 8.0, 4.0),
        ..Default::default()
    });
    commands.spawn_bundle(PerspectiveCameraBundle {
        transform: Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
        ..Default::default()
    });
    commands.spawn_prefab(asset_server.load("primitives.prefab"));
}
|
use std::{
fs::File,
io::{Error, ErrorKind, Read},
};
/// Opens the file at `path`, creating it first when it does not exist.
/// Panics (with the debug form of the error) on any other I/O failure.
pub fn open_file(path: &str) {
    let file = match File::open(path) {
        Ok(handle) => handle,
        Err(err) if err.kind() == ErrorKind::NotFound => {
            File::create(path).unwrap_or_else(|err| {
                panic!("{:?}", &err);
            })
        }
        Err(err) => panic!("{:?}", err),
    };
    println!("{:?}", file);
}
/// Reads the entire file at `path` into a `String`.
///
/// # Errors
/// Propagates any I/O error raised while opening or reading the file.
pub fn read_file(path: &str) -> Result<String, Error> {
    let mut contents = String::new();
    File::open(path)?.read_to_string(&mut contents)?;
    Ok(contents)
}
|
/// Returns the k-th largest element of `nums` (1-indexed).
///
/// Maintains a min-heap of the k largest values seen so far, giving
/// O(n log k) time and O(k) extra space.
///
/// # Panics
/// Panics if `k` is 0 or greater than `nums.len()`.
pub fn find_kth_largest(nums: Vec<i32>, k: i32) -> i32 {
    use std::cmp::Reverse;
    use std::collections::BinaryHeap;

    let mut heap = BinaryHeap::new();
    for num in nums {
        // `Reverse` turns the max-heap into a min-heap without negating the
        // value; the previous `-num` trick overflowed for `i32::MIN`.
        heap.push(Reverse(num));
        if heap.len() > k as usize {
            heap.pop();
        }
    }
    heap.peek().unwrap().0
}
//! PyGamer pins
use super::{hal, target_device};
use crate::hal::gpio::{self, *};
use hal::define_pins;
define_pins!(
    /// Maps the pins to their Arduino names and
    /// the numbers printed on the board.
    struct Pins,
    target_device: target_device,

    /// Analog pin 0. Can act as a true analog output
    /// as it has a DAC (which is not currently supported
    /// by this hal) as well as input.
    pin speaker = a2,
    /// Enable pin for the speaker amplifier.
    pin speaker_enable = a27,

    /// Analog pin 1
    pin a1 = a5,
    /// Analog pin 2
    pin a2 = b8,
    /// Analog pin 3
    pin a3 = b9,
    /// Analog pin 4
    pin a4 = a4,
    /// Analog pin 5
    pin a5 = a6,
    /// Analog pin 6
    pin a6 = b1,
    /// Light sensor (also Analog pin 7)
    pin light = b4,
    /// Digital pin 2 (also Analog pin 8)
    pin d2 = b3,
    /// Digital pin 3 (also Analog pin 9)
    pin d3 = b2,
    /// Digital pin 5
    pin d5 = a16,
    /// Digital pin 6
    pin d6 = a18,
    /// Accelerometer interrupt pin (also d7)
    pin accel_irq = b14,
    /// Neopixel data line (controls all 5 neopixels, also d8)
    pin neopixel = a15,
    /// Digital pin 9
    pin d9 = a19,
    /// Digital pin 10
    pin d10 = a20,
    /// Digital pin 11
    pin d11 = a21,
    /// Digital pin 12
    pin d12 = a22,
    /// D13 LED/JACDAC
    pin d13 = a23,

    // TFT (thin-film-transistor liquid-crystal display) control pins
    /// TFT MOSI
    pin tft_mosi = b15,
    /// TFT SCK
    pin tft_sck = b13,
    /// TFT Reset
    pin tft_reset = a0,
    /// TFT DC
    pin tft_dc = b5,
    /// TFT CS
    pin tft_cs = b12,
    /// TFT Backlight (also Analog pin 7)
    pin tft_backlight = a1,

    // UART - Universal Asynchronous Receiver/Transmitter
    /// Pin TX (d1)
    pin tx = b16,
    /// Pin RX (d0)
    pin rx = b17,

    // SPI - Serial Peripheral Interface (connected to sd card slot)
    /// Pin MISO
    pin miso = b22,
    /// Pin MOSI
    pin mosi = b23,
    /// Pin SCK
    pin sck = a17,

    // I2C (connected to LIS3DH accelerometer)
    /// STEMMA SDA
    pin sda = a12,
    /// STEMMA SCL
    pin scl = a13,

    // Miscellanea
    /// SD card chip select (also d4)
    pin sd_cs = a14,
    /// Joystick X
    pin joy_x = b6,
    /// Joystick Y
    pin joy_y = b7,
    /// Button Latch
    pin button_latch = b0,
    /// Button Out
    pin button_out = b30,
    /// Button Clock
    pin button_clock = b31,
);
|
mod signature;
extern crate num_bigint;
extern crate num_traits;
use signature::*;
use num_bigint::BigInt;
/// Computes and prints the signature of a fixed sample value.
///
/// NOTE(review): `signature` comes from the local `signature` module,
/// which is not visible here — its semantics are assumed, not verified.
fn main() {
    let h = BigInt::from(1234567890);
    println!("{}", signature(&h));
}
|
/*
enum Option<T>{
Some(T), -> El valor
None -> La ausencia del algun valor
}
*/
/*
fn obtener_valor(bandera: bool) -> Option<String> {
if bandera {
Some(String::from("Soy un mensaje para la tupla some!"))
} else {
None
}
}
fn main() {
// Option -> Si existe o no algun valor.
// Result -> Errores -> panic
let resultado = obtener_valor(false); //Option
// match resultado {
// Some(valor) => println!("El valor es: {}", valor),
// None => println!("No existe valor alguno")
// }
//unwrap ->intenta obtener lo que la tupla Some almacena
//metodos -> unwrap, unwrap_or, expect
let valor = resultado.expect("Se esperaba un String"); //unwrap_or("La tupla no almacena valor alguno".to_string());
println!("El valor es: {}", valor);
}
*/
/// Demo user record illustrating `Option` for optional fields.
#[derive(Debug)]
struct User {
    username: String,
    password: String,
    email: String,
    // Optional age ("edad" is Spanish for age); `None` = not provided.
    edad: Option<u32>
}
/// Builds a sample `User` with no age set and prints it; the age line
/// is only printed when `edad` holds a value.
fn main() {
    let usuario1 = User {
        username: String::from("Victor"),
        password: String::from("password"),
        email: String::from("victor@caodigofacilito.com"),
        edad: None, //Some(25)
    };
    println!("El usuario es: {:?}", usuario1);
    // `if let` replaces the match whose `None` arm did nothing.
    if let Some(edad) = usuario1.edad {
        println!("Su edad es: {}", edad);
    }
}
|
use glium::{
Vertex,
VertexBuffer,
Display,
Program,
Surface,
Frame,
index::IndicesSource,
uniforms::Uniforms
};
use crate::Color;
/// Commands interpreted by `BoundPolygonInterface::act_pos` / `act_col`.
pub enum BoundPolygonInterfaceAction <T> {
    /// Adjust the current value by `T` (presumably a relative move —
    /// the exact semantics live in the trait implementation).
    Move(T),
    /// Overwrite the current value with `T`.
    Set(T),
    /// Write the current value through the supplied raw pointer.
    Get(*mut T),
    /// Restore the default value.
    Reset
}
/// Contract a glium vertex type must fulfil to be drawn as a
/// `BoundPolygon` whose uniform state has type `U`.
pub trait BoundPolygonInterface <U> : Vertex {
    /// Value type used by position actions.
    type Move: Default;
    /// Concrete uniforms type handed to glium at draw time.
    type Uniform: Uniforms;
    // Capability flags; mirrored by BoundPolygon's MV/CO const generics.
    const MOVABLE: bool;
    const COLORABLE: bool;
    /// Returns the shader program for this vertex type (a `'static`
    /// reference — presumably cached by the implementation).
    fn program(dpy: &Display) -> &'static Program;
    /// Applies a position action to the uniform state.
    fn act_pos(_: &mut U, value: BoundPolygonInterfaceAction <Self::Move>);
    /// Applies a color action to the uniform state.
    fn act_col(_: &mut U, color: BoundPolygonInterfaceAction <Color>);
    /// Builds the glium uniforms from the stored state.
    fn uniforms(_: &U) -> Self::Uniform;
}
/// An uploaded vertex buffer plus its index source and uniform state,
/// ready to draw. The `MV`/`CO` const generics gate the position/color
/// APIs at compile time (see the conditional impl blocks below).
pub struct BoundPolygon <'a, V, U, const MV: bool, const CO: bool> where V: BoundPolygonInterface <U>, U: Default {
    buf: VertexBuffer <V>,
    indices: IndicesSource <'a>,
    uniforms: U
}
impl <'a, V, U, const MV: bool, const CO: bool> BoundPolygon <'a, V, U, MV, CO> where V: BoundPolygonInterface <U>, U: Default {
    /// Uploads `vxs` to a GPU vertex buffer and pairs it with `idx`.
    ///
    /// Panics if glium fails to create the vertex buffer.
    pub(crate) fn new <I> (vxs: Vec <V>, dpy: &Display, idx: I) -> Self where I: Into <IndicesSource <'a>> {
        Self {
            buf: VertexBuffer::new(dpy, &vxs).unwrap(),
            indices: idx.into(),
            uniforms: U::default()
        }
    }
}
// Position API — available only when MV = true.
impl <'a, V, U, const CO: bool> BoundPolygon <'a, V, U, true, CO> where V: BoundPolygonInterface <U>, U: Default {
    /// Applies a `Move` action (presumably a relative offset).
    #[inline]
    pub fn r#move(&mut self, value: V::Move) {
        V::act_pos(&mut self.uniforms, BoundPolygonInterfaceAction::Move(value))
    }
    /// Sets the position to `value`.
    #[inline]
    pub fn set_pos(&mut self, value: V::Move) {
        V::act_pos(&mut self.uniforms, BoundPolygonInterfaceAction::Set(value))
    }
    /// Reads the current position: the `Get` action writes the value
    /// through a raw pointer into the local default-initialized slot.
    #[inline]
    //noinspection RsSelfConvention
    pub fn get_pos(&mut self) -> V::Move {
        let mut v: V::Move = Default::default();
        V::act_pos(&mut self.uniforms, BoundPolygonInterfaceAction::Get(&mut v as *mut V::Move));
        v
    }
    /// Restores the default position.
    #[inline]
    pub fn reset_pos(&mut self) {
        V::act_pos(&mut self.uniforms, BoundPolygonInterfaceAction::Reset)
    }
}
// Color API — available only when CO = true.
impl <'a, V, U, const MV: bool> BoundPolygon <'a, V, U, MV, true> where V: BoundPolygonInterface <U>, U: Default {
    /// Sets the color; `mix` is converted via `Color::cv` and stored as
    /// the fourth channel of the color passed to `act_col`.
    #[inline]
    pub fn color(&mut self, color: Color, mix: f32) {
        V::act_col(&mut self.uniforms, BoundPolygonInterfaceAction::Set(Color::rgba(color.r, color.g, color.b, Color::cv(mix))))
    }
    /// Reads the current color via the raw-pointer `Get` action.
    #[inline]
    //noinspection RsSelfConvention
    pub fn get_color(&mut self) -> Color {
        let mut v: Color = Default::default();
        V::act_col(&mut self.uniforms, BoundPolygonInterfaceAction::Get(&mut v as *mut Color));
        v
    }
    /// Restores the default color.
    #[inline]
    pub fn reset_color(&mut self) {
        V::act_col(&mut self.uniforms, BoundPolygonInterfaceAction::Reset)
    }
}
impl <'a, V, U, const MV: bool, const CO: bool> crate::Drawable for BoundPolygon <'a, V, U, MV, CO> where V: BoundPolygonInterface <U>, U: Default {
    /// Draws the buffered vertices with the vertex type's shader program
    /// and current uniforms. Panics if the glium draw call fails.
    fn draw(&self, dpy: &Display, target: &mut Frame) {
        target.draw(&self.buf, self.indices.clone(), V::program(dpy), &V::uniforms(&self.uniforms), &Default::default()).unwrap()
    }
}
|
use wgs84;
pub mod consts
{
    /// Edge length of one map tile, in pixels.
    pub const PIXELS_IN_TILE_ARRIS: u32 = 256;
    //pub const critical_latitude: f64 = 85.051_128_78;
    // Calculated with Vincenty method with WGS84, WebMercator parameters
    //pub const critical_latitude_in_meters: f64 = 9_417_539.062_5;
}
/// Ground resolution in meters per pixel at the given latitude (degrees)
/// and zoom level, using the WGS84 major radius: cos(lat) * 2πR / width.
pub fn meters_per_pixel( latitude: f64, level_of_detail: u8 )
-> f64
{
    latitude.to_radians().cos() * 2.0 * std::f64::consts::PI * wgs84::consts::MAJOR_RADIUS_IN_METER / pixels_in_arris( level_of_detail ) as f64
}
/// Total map edge length in pixels at the given zoom level:
/// 256 pixels per tile, doubling with every level.
///
/// Note: the shift exceeds `u32` for `level_of_detail > 23`.
pub fn pixels_in_arris(level_of_detail: u8) -> u32 {
    consts::PIXELS_IN_TILE_ARRIS << level_of_detail
}
/// Projects a WGS84 longitude/latitude (degrees) to global pixel
/// coordinates at the given zoom level (Web Mercator projection).
///
/// Note: inputs are not clamped; latitudes beyond the Mercator critical
/// latitude (~85.05°) yield out-of-range pixel values.
pub fn angle_to_pixel(longitude: f64, latitude: f64, level_of_detail: u8) -> (u32, u32) {
    // Normalized x in [0, 1).
    let x = (longitude + 180.0) / 360.0;
    let sin_lat = latitude.to_radians().sin();
    // y = 0.5 - ln((1+sin)/(1-sin)) / 4π. `.ln()` replaces the previous
    // `.log(std::f64::consts::E)`, which computed ln(x)/ln(e) — an extra
    // division for the same (slightly less precise) result.
    let y = 0.5 - ((1.0 + sin_lat) / (1.0 - sin_lat)).ln() / (4.0 * std::f64::consts::PI);
    let scale = pixels_in_arris(level_of_detail) as f64;
    // +0.5 rounds to the nearest pixel before truncation.
    ((x * scale + 0.5) as u32, (y * scale + 0.5) as u32)
}
pub fn pixel_to_tile( x: u32, y: u32 )
-> ( u32, u32 )
{
( x / consts::PIXELS_IN_TILE_ARRIS, y / consts::PIXELS_IN_TILE_ARRIS )
}
pub fn angle_to_tile( longitude: f64, latitude: f64, level_of_detail: u8 )
-> ( u32, u32 )
{
let ( px, py ) = angle_to_pixel( longitude, latitude, level_of_detail );
pixel_to_tile( px, py )
} |
//! A `Constant` holds a single value.
//!
//! Currently, only constant values upto 64-bits are supported.
use std::fmt;
use il::*;
/// A constant value for Falcon IL
/// A constant value for Falcon IL.
///
/// `value` always holds the raw 64-bit payload; `bits` records the
/// logical width and is applied as a mask by `value()`.
#[derive(Clone, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize)]
pub struct Constant {
    value: u64,
    bits: usize
}
impl Constant {
    /// Create a new `Constant` with the given value and bitness.
    pub fn new(value: u64, bits: usize) -> Constant {
        Constant { value, bits }
    }
    /// Get the value of this `Constant`, masked to its bit width.
    ///
    /// The guard is `>= 64` rather than the previous `== 64`: for any
    /// width of 64 or more bits `1u64 << bits` would overflow the shift
    /// (a panic in debug builds, a wrong mask in release).
    pub fn value(&self) -> u64 {
        if self.bits >= 64 {
            self.value
        }
        else {
            self.value & ((1u64 << self.bits) - 1)
        }
    }
    /// Get the number of bits for this `Constant`.
    pub fn bits(&self) -> usize {
        self.bits
    }
}
impl fmt::Display for Constant {
    /// Renders as `0xVALUE:bits`, e.g. `0xFF:8` (value is masked first).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "0x{:X}:{}", self.value(), self.bits)
    }
}
impl Into<Expression> for Constant {
fn into(self) -> Expression {
Expression::constant(self)
}
} |
#![no_std]
#![feature(asm, lang_items, libc, no_std, start)]
extern crate libc;
// Byte length of OUT below (the complete FizzBuzz text for 1..=100).
const LEN: usize = 413;
// Entire FizzBuzz output precomputed at compile time; `start` below
// emits it with a single raw `write` syscall.
static OUT: [u8; LEN] = *b"\
1\n2\nFizz\n4\nBuzz\nFizz\n7\n8\nFizz\nBuzz\n11\nFizz\n13\n14\nFizzBuzz\n\
16\n17\nFizz\n19\nBuzz\nFizz\n22\n23\nFizz\nBuzz\n26\nFizz\n28\n29\nFizzBuzz\n\
31\n32\nFizz\n34\nBuzz\nFizz\n37\n38\nFizz\nBuzz\n41\nFizz\n43\n44\nFizzBuzz\n\
46\n47\nFizz\n49\nBuzz\nFizz\n52\n53\nFizz\nBuzz\n56\nFizz\n58\n59\nFizzBuzz\n\
61\n62\nFizz\n64\nBuzz\nFizz\n67\n68\nFizz\nBuzz\n71\nFizz\n73\n74\nFizzBuzz\n\
76\n77\nFizz\n79\nBuzz\nFizz\n82\n83\nFizz\nBuzz\n86\nFizz\n88\n89\nFizzBuzz\n\
91\n92\nFizz\n94\nBuzz\nFizz\n97\n98\nFizz\nBuzz\n";
#[start]
fn start(_argc: isize, _argv: *const *const u8) -> isize {
    // Raw x86-64 Linux `write(1, &OUT, LEN)` syscall: rax=1 (SYS_write),
    // rdi=1 (stdout), rsi=buffer, rdx=length. Written in the pre-2018
    // LLVM-style `asm!` syntax, so this only builds on an old nightly.
    // SAFETY: OUT is a valid static buffer of exactly LEN bytes, and the
    // clobbered registers are declared in the clobber list.
    unsafe {
        asm!(
            "
                mov $$1, %rax
                mov $$1, %rdi
                mov $0, %rsi
                mov $1, %rdx
                syscall
            "
            :
            : "r" (&OUT[0]) "r" (LEN)
            : "rax", "rdi", "rsi", "rdx"
            :
        );
    }
    0
}
// Minimal no_std runtime hooks required by the old nightly toolchain
// this file targets; all deliberately empty.
#[lang = "stack_exhausted"] extern fn stack_exhausted() {}
#[lang = "eh_personality"] extern fn eh_personality() {}
#[lang = "panic_fmt"] extern fn panic_fmt() {}
#![cfg_attr(feature = "cargo-clippy", allow(clippy::boxed_local))]
use crate::{
error::*,
persistence::{Persistence, *},
plan_creator::{channel::*, plan::*, plan_steps::*, PlanStepTrait},
search::*,
util::{self, StringAdd},
};
use fnv::FnvHashMap;
use std::boxed::Box;
/// Deduplication cache: maps a field search request to its plan step id
/// and the (placeholder) field-search step stored in the plan.
pub(crate) type FieldRequestCache = FnvHashMap<RequestSearchPart, (usize, PlanStepFieldSearchToTokenIds)>;
/// Index of a step inside `Plan::steps`.
pub(crate) type PlanStepId = usize;
/// A field search request augmented with plan-internal flags describing
/// which outputs (scores, ids, term data) the step must produce.
#[derive(Serialize, Deserialize, Default, Clone, Debug, PartialEq)]
pub struct PlanRequestSearchPart {
    pub request: RequestSearchPart,
    // Emit scores for matched anchors (normal search path).
    #[serde(default)]
    pub get_scores: bool,
    // Emit matching ids only (filter / phrase-boost path).
    #[serde(default)]
    pub get_ids: bool,
    /// Internal data used for whyfound - read and highlight fields
    #[serde(skip_deserializing)]
    #[serde(default)]
    pub store_term_id_hits: bool,
    /// Internal data used for whyfound - highlight in original document
    #[serde(skip_deserializing)]
    #[serde(default)]
    pub store_term_texts: bool,
    //TODO MOVE TO RequestSearchPart?
    /// Also return the actual text
    #[serde(skip_serializing_if = "skip_false")]
    pub return_term: bool,
    //TODO MOVE TO RequestSearchPart?
    #[serde(skip_serializing_if = "skip_false")]
    pub return_term_lowercase: bool,
}
/// Folds the header-level `explain` flag into `option`, allocating a
/// default options struct only when the flag is actually set.
fn merge_explain(option: &mut Option<search_request::SearchRequestOptions>, explain: bool) {
    match option {
        Some(opts) => opts.explain |= explain,
        None if explain => {
            *option = Some(SearchRequestOptions {
                explain: true,
                ..Default::default()
            });
        }
        None => {}
    }
}
/// Settings are currently propagated to three parts: the search request,
/// the phrase boosts, and the filter query. Collects mutable references
/// to every leaf `RequestSearchPart` into `map`.
fn get_all_field_request_parts_and_propagate_settings<'a>(header_request: &'a Request, request: &'a mut Request, map: &mut Vec<&'a mut RequestSearchPart>) {
    if let Some(phrase_boosts) = request.phrase_boosts.as_mut() {
        for el in phrase_boosts.iter_mut() {
            merge_explain(&mut el.search1.options, header_request.explain);
            merge_explain(&mut el.search2.options, header_request.explain);
            map.push(&mut el.search1);
            map.push(&mut el.search2);
        }
    }
    // NOTE(review): panics if `search_req` is `None` — presumably an
    // invariant guaranteed by the caller; confirm before relying on it.
    get_all_field_request_parts_and_propagate_settings_to_search_req(header_request, request.search_req.as_mut().unwrap(), map);
}
/// Recursive walker over a `SearchRequest` tree: propagates the header's
/// `explain` flag into every node and collects mutable references to the
/// leaf search parts into `map`.
fn get_all_field_request_parts_and_propagate_settings_to_search_req<'a>(header_request: &'a Request, request: &'a mut SearchRequest, map: &mut Vec<&'a mut RequestSearchPart>) {
    merge_explain(request.get_options_mut(), header_request.explain);
    match request {
        SearchRequest::And(SearchTree { queries, options: _ }) | SearchRequest::Or(SearchTree { queries, options: _ }) => {
            for el in queries {
                get_all_field_request_parts_and_propagate_settings_to_search_req(header_request, el, map);
            }
        }
        SearchRequest::Search(search) => {
            // Leaf: explain is merged once more on the part itself.
            merge_explain(&mut search.options, header_request.explain);
            map.push(search);
        }
    }
}
/// First we collect all searches on the fields (virtually the leaf nodes in the execution plan) to avoid duplicate searches. This could also be done on a tree level.
///
/// The function also propagates settings before collecting requests, because this changes the equality. This should probably be done separately.
///
fn collect_all_field_request_into_cache(header_request: &Request, request: &mut Request, plan: &mut Plan) -> FieldRequestCache {
    let mut field_search_cache = FnvHashMap::default();
    let mut field_requests = Vec::new();
    get_all_field_request_parts_and_propagate_settings(header_request, request, &mut field_requests);
    add_request_to_search_field_cache(field_requests, plan, &mut field_search_cache, false);
    // collect filter requests seperately and set to fetch ids
    // This way we can potentially reuse the same request to emit both, score and ids
    if let Some(filter) = request.filter.as_mut() {
        let mut field_requests = Vec::new();
        get_all_field_request_parts_and_propagate_settings_to_search_req(header_request, filter, &mut field_requests);
        add_request_to_search_field_cache(field_requests, plan, &mut field_search_cache, true);
    };
    field_search_cache
}
/// Inserts each field request into the dedup cache, creating a placeholder
/// field-search plan step for new requests and OR-ing the output flags
/// (ids vs. scores) into already-cached ones.
fn add_request_to_search_field_cache(field_requests: Vec<&mut RequestSearchPart>, plan: &mut Plan, field_search_cache: &mut FieldRequestCache, ids_only: bool) {
    for request_part in field_requests {
        // There could be the same query for filter and normal search, then we load scores and ids
        if let Some((_, field_search)) = field_search_cache.get_mut(request_part) {
            field_search.req.get_ids |= ids_only;
            field_search.req.get_scores |= !ids_only;
            continue; // else doesn't work because field_search borrow scope expands else
        }
        let plan_request_part = PlanRequestSearchPart {
            request: request_part.clone(),
            get_scores: !ids_only,
            get_ids: ids_only,
            ..Default::default()
        };
        let field_search = PlanStepFieldSearchToTokenIds {
            req: plan_request_part,
            // Channel starts with zero receivers; consumers bump the count later.
            channel: PlanStepDataChannels::open_channel(0, vec![]),
        };
        let step_id = plan.add_step(Box::new(field_search.clone())); // this is actually only a placeholder in the plan, will be replaced with the data from the field_search_cache after plan creation
        field_search_cache.insert(request_part.clone(), (step_id, field_search));
    }
}
/// Builds the full execution plan for `request`: field searches, the
/// search tree, an optional filter intersection, anchor boosts, and
/// phrase boosts. The plan's final output channel is stored in
/// `plan.plan_result`.
pub fn plan_creator(mut request: Request, plan: &mut Plan) {
    // Snapshot of the unmodified request; used as the settings source.
    let request_header = request.clone();
    let mut field_search_cache = collect_all_field_request_into_cache(&request_header, &mut request, plan);
    // Plan the filter subtree first (ids only) so the main tree can
    // subscribe to its filter channel.
    let filter_final_step_id: Option<PlanStepId> = if let Some(filter) = request.filter.as_mut() {
        // get_all_field_request_parts_and_propagate_settings_to_search_req(header_request, filter, map);
        // collect_all_field_request_into_cache(&request_header, filter, &mut field_search_cache, plan, true);
        let final_output_filter = plan_creator_2(true, true, None, &request_header, &*filter, vec![], plan, None, None, &mut field_search_cache);
        Some(final_output_filter)
    } else {
        None
    };
    let boost = request.boost.clone();
    let mut final_step_id = {
        plan_creator_2(
            false,
            false,
            filter_final_step_id,
            &request_header,
            &request.search_req.unwrap(),
            boost.unwrap_or_default(),
            plan,
            None,
            filter_final_step_id,
            &mut field_search_cache,
        )
    };
    // Add intersect step the search result with the filter
    if let Some(filter_final_step_id) = filter_final_step_id {
        let final_step_channel = plan.get_step_channel(final_step_id).clone();
        let filter_receiver = plan.get_step_channel(filter_final_step_id).receiver_for_next_step.clone();
        let channel = PlanStepDataChannels::open_channel(1, vec![final_step_channel.receiver_for_next_step, filter_receiver]);
        let step = IntersectScoresWithIds { channel };
        // step.get_channel().input_prev_steps = vec![final_output.0, filter_data_output.0];
        let id_step = plan.add_step(Box::new(step));
        plan.add_dependency(id_step, filter_final_step_id);
        plan.add_dependency(id_step, final_step_id);
        final_step_id = id_step;
    }
    // Apply Boost from anchor
    if let Some(boosts) = request.boost {
        // Only boosts on non-array (anchor-level) paths are applied here.
        let anchor_boosts: Vec<&RequestBoostPart> = boosts.iter().filter(|el| !el.path.contains("[]")).collect();
        for boost in anchor_boosts {
            let final_step_channel = plan.get_step_channel(final_step_id).clone();
            let channel = PlanStepDataChannels::open_channel(1, vec![final_step_channel.receiver_for_next_step.clone()]);
            let step = BoostPlanStepFromBoostRequest {
                req: boost.clone(),
                channel: channel.clone(),
            };
            let id_step = plan.add_step(Box::new(step.clone()));
            plan.add_dependency(id_step, final_step_id);
            final_step_id = id_step;
        }
    }
    if let Some(phrase_boosts) = request.phrase_boosts {
        final_step_id = add_phrase_boost_plan_steps(phrase_boosts, &mut field_search_cache, final_step_id, plan);
    }
    //update the field search steps in the plan from the field_search_cache
    for (_k, v) in field_search_cache.drain() {
        plan.steps[v.0] = Box::new(v.1);
    }
    plan.plan_result = Some(plan.get_step_channel(final_step_id).receiver_for_next_step.clone());
    // final_output.0
}
/// Adds a phrase-pair step for every phrase boost (fed by the two cached
/// field searches) and a final step boosting the search result with all
/// phrase outputs. Returns the id of that final step.
fn add_phrase_boost_plan_steps(
    phrase_boosts: Vec<RequestPhraseBoost>,
    field_search_cache: &mut FieldRequestCache,
    // search_output: PlanStepReceiverAndId,
    search_output_step: PlanStepId,
    plan: &mut Plan,
) -> PlanStepId {
    let mut phrase_outputs = vec![];
    for boost in phrase_boosts {
        // Looks up a cached field search, switches it to also emit ids,
        // and subscribes one more receiver to its output channel.
        let mut get_field_search = |req: &RequestSearchPart| -> (PlanDataReceiver, usize) {
            // let field_search1 = field_search_cache
            //     .get_mut(req)
            //     .unwrap_or_else(|| panic!("PlanCreator: Could not find request in field_search_cache {:?}", req));
            // NOTE: written as is_none + unwrap (not unwrap_or_else) so the
            // panic message can borrow field_search_cache.keys().
            let val = field_search_cache.get_mut(req);
            let field_search1 = {
                if val.is_none() {
                    panic!(
                        "PlanCreator: Could not find phrase request in field_search_cache Req: {:#?}, \n Cache: {:#?}",
                        req,
                        field_search_cache.keys()
                    )
                }
                val.unwrap()
            };
            field_search1.1.req.get_ids = true;
            field_search1.1.channel.num_receivers += 1;
            let field_rx = field_search1.1.channel.receiver_for_next_step.clone();
            (field_rx, field_search1.0)
        };
        let (field_rx1, plan_id1) = get_field_search(&boost.search1);
        let (field_rx2, plan_id2) = get_field_search(&boost.search2);
        let channel = PlanStepDataChannels::open_channel(1, vec![field_rx1, field_rx2]);
        let step = PlanStepPhrasePairToAnchorId {
            req: boost.clone(),
            channel: channel.clone(),
        };
        phrase_outputs.push(channel.clone());
        let id_step = plan.add_step(Box::new(step));
        plan.add_dependency(id_step, plan_id1);
        plan.add_dependency(id_step, plan_id2);
    }
    //first is search result channel, rest are boost results
    let mut vecco = vec![plan.get_step_channel(search_output_step).receiver_for_next_step.clone()];
    for channel in phrase_outputs {
        vecco.push(channel.receiver_for_next_step);
    }
    //boost all results with phrase results
    let channel = PlanStepDataChannels::open_channel(1, vecco);
    let step = BoostAnchorFromPhraseResults { channel };
    let id_step = plan.add_step(Box::new(step));
    plan.add_dependency(id_step, search_output_step);
    id_step
}
/// Returns an owned copy of `boost` with the optional extra slice appended.
fn merge_vec(boost: &[RequestBoostPart], opt: &Option<&[RequestBoostPart]>) -> Vec<RequestBoostPart> {
    let mut merged = boost.to_vec();
    if let Some(extra) = opt {
        merged.extend_from_slice(extra);
    }
    merged
}
/// Recursively translates a `SearchRequest` tree into plan steps.
///
/// Or/And nodes become Union/Intersect steps whose inputs are the child
/// steps' output channels; leaves are delegated to
/// `plan_creator_search_part`. Returns the id of the created step.
fn plan_creator_2(
    is_filter: bool,
    is_filter_channel: bool,
    filter_channel_step: Option<usize>, // this channel is used to receive the result from the filter step
    request_header: &Request,
    request: &SearchRequest,
    mut boost: Vec<RequestBoostPart>,
    plan: &mut Plan,
    parent_step_dependecy: Option<usize>,
    depends_on_step: Option<usize>,
    field_search_cache: &mut FieldRequestCache,
) -> PlanStepId {
    // request.explain |= request_header.explain;
    match request {
        SearchRequest::Or(SearchTree { queries, options: _ }) => {
            let mut channel = PlanStepDataChannels::default();
            // Subscribe this step to the filter output, if any.
            if let Some(step_id) = filter_channel_step {
                plan.get_step_channel(step_id).filter_channel.as_mut().unwrap().num_receivers += 1;
                channel.filter_receiver = Some(plan.get_step_channel(step_id).filter_channel.as_mut().unwrap().filter_receiver.clone());
            }
            if is_filter_channel {
                channel.filter_channel = Some(FilterChannel::default());
            }
            let step = Union { ids_only: is_filter, channel };
            let step_id = plan.add_step(Box::new(step));
            // Recurse into children; their output receivers become this
            // step's inputs. Boosts accumulate down the tree.
            let result_channels_from_prev_steps = queries
                .iter()
                .map(|x| {
                    // x.explain = request_header.explain;
                    let boost = merge_vec(&boost, &x.get_boost());
                    let step_id = plan_creator_2(
                        is_filter,
                        false,
                        filter_channel_step,
                        request_header,
                        x,
                        boost,
                        plan,
                        Some(step_id),
                        depends_on_step,
                        field_search_cache,
                    );
                    plan.get_step_channel(step_id).receiver_for_next_step.clone()
                })
                .collect();
            plan.get_step_channel(step_id).input_prev_steps = result_channels_from_prev_steps;
            if let Some(parent_step_dependecy) = parent_step_dependecy {
                plan.add_dependency(parent_step_dependecy, step_id);
            }
            if let Some(depends_on_step) = depends_on_step {
                plan.add_dependency(step_id, depends_on_step);
            }
            step_id
        }
        // Mirrors the Or arm with an Intersect step instead of Union.
        SearchRequest::And(SearchTree { queries, options: _ }) => {
            let mut channel = PlanStepDataChannels::default();
            if let Some(step_id) = filter_channel_step {
                plan.get_step_channel(step_id).filter_channel.as_mut().unwrap().num_receivers += 1;
                channel.filter_receiver = Some(plan.get_step_channel(step_id).filter_channel.as_mut().unwrap().filter_receiver.clone());
            }
            if is_filter_channel {
                channel.filter_channel = Some(FilterChannel::default());
            }
            let step = Intersect { ids_only: is_filter, channel };
            let step_id = plan.add_step(Box::new(step));
            let result_channels_from_prev_steps = queries
                .iter()
                .map(|x| {
                    // x.explain = request_header.explain;
                    let boost = merge_vec(&boost, &x.get_boost());
                    let step_id = plan_creator_2(
                        is_filter,
                        false,
                        filter_channel_step,
                        request_header,
                        x,
                        boost,
                        plan,
                        Some(step_id),
                        depends_on_step,
                        field_search_cache,
                    );
                    plan.get_step_channel(step_id).receiver_for_next_step.clone()
                })
                .collect();
            plan.get_step_channel(step_id).input_prev_steps = result_channels_from_prev_steps;
            if let Some(parent_step_dependecy) = parent_step_dependecy {
                plan.add_dependency(parent_step_dependecy, step_id);
            }
            if let Some(depends_on_step) = depends_on_step {
                plan.add_dependency(step_id, depends_on_step);
            }
            step_id
        }
        SearchRequest::Search(part) => {
            // TODO Tokenize query according to field
            // part.terms = part.terms.iter().map(|el| util::normalize_text(el)).collect::<Vec<_>>();
            plan_creator_search_part(
                is_filter_channel,
                filter_channel_step,
                part,
                request_header,
                &mut boost,
                plan,
                parent_step_dependecy,
                depends_on_step,
                field_search_cache,
            )
        }
    }
}
/// Plans a single leaf field search: hooks up the cached field-search
/// step, and either resolves tokens directly to anchors or — for 1:n
/// (`[]`) paths with a matching boost — builds the boost pipeline shown
/// in the diagram below. Returns the id of the final step created.
fn plan_creator_search_part(
    is_filter_channel: bool,
    filter_channel_step: Option<usize>,
    request_part: &RequestSearchPart,
    request: &Request,
    boosts: &mut [RequestBoostPart],
    plan: &mut Plan,
    parent_step_dependecy: Option<usize>,
    depends_on_step: Option<usize>,
    field_search_cache: &mut FieldRequestCache,
) -> PlanStepId {
    let paths = util::get_steps_to_anchor(&request_part.path);
    let store_term_id_hits = request.why_found || request.text_locality;
    // NOTE: is_none + unwrap instead of unwrap_or_else so the panic
    // message can still borrow field_search_cache.keys().
    let val = field_search_cache.get_mut(request_part);
    let (field_search_step_id, field_search_step) = {
        if val.is_none() {
            panic!(
                "PlanCreator: Could not find request in field_search_cache.\nReq: {:#?}, \nCache: {:#?}",
                request_part,
                field_search_cache.keys()
            )
        }
        val.unwrap()
    };
    field_search_step.req.store_term_texts |= request.why_found;
    field_search_step.req.store_term_id_hits |= store_term_id_hits;
    field_search_step.channel.num_receivers += 1;
    let field_rx = field_search_step.channel.receiver_for_next_step.clone();
    //Check if is 1 to n field
    if let Some(pos) = request_part.path.rfind("[]") {
        let end_obj = &request_part.path[..pos];
        //find where boost matches last path
        let boosto: Vec<&RequestBoostPart> = boosts
            .iter()
            .flat_map(|el| {
                if let Some(pos) = el.path.rfind("[]") {
                    if &el.path[..pos] == end_obj {
                        return Some(el);
                    }
                }
                None
            })
            .collect();
        if !boosto.is_empty() {
            assert!(boosto.len() == 1);
            // RESOLVE TO ANCHOR (ANCHOR, SCORE)      --------------------------------------------------------------------------------------------------------------------
            //        /                 \
            //    SEARCH FIELD          APPLY BOOST
            //        \                 /
            // Token to text ids (TEXT_IDS) -> text ids to parent valueid (VALUE_IDS)    -> ValueIds to boost values (VALUE_IDS, BOOST_VALUES) -> value ids to anchor (ANCHOR_IDS, ANCHOR_IDS)
            //+1 for boost
            field_search_step.channel.num_receivers += 1;
            // STEP1.1: RESOLVE TO ANCHOR (ANCHOR, SCORE)
            let mut channel = PlanStepDataChannels::open_channel(1, vec![field_rx.clone()]);
            //connect to incoming filter channel (optional)
            if let Some(step_id) = filter_channel_step {
                plan.get_step_channel(step_id).filter_channel.as_mut().unwrap().num_receivers += 1;
                channel.filter_receiver = Some(plan.get_step_channel(step_id).filter_channel.as_mut().unwrap().filter_receiver.clone());
            }
            let token_to_anchor_step = ResolveTokenIdToAnchor {
                request: request_part.clone(),
                channel: channel.clone(),
            };
            let token_to_anchor_step_id = plan.add_step(Box::new(token_to_anchor_step));
            // add dependencies to ensure correct execution order
            plan.add_dependency(token_to_anchor_step_id, *field_search_step_id);
            // if let Some(parent_step_dependecy) = parent_step_dependecy {
            //     plan.add_dependency(parent_step_dependecy, token_to_anchor_step_id);
            // }
            if let Some(depends_on_step) = depends_on_step {
                plan.add_dependency(token_to_anchor_step_id, depends_on_step);
            }
            // STEP1.2: resolve anchor boost values
            let boost_to_anchor_channel = PlanStepDataChannels::open_channel(1, vec![field_rx]);
            let boost_step = Box::new(BoostToAnchor {
                path: paths.last().unwrap().add(VALUE_ID_TO_PARENT),
                trace_info: "BoostToAnchor".to_string(),
                channel: boost_to_anchor_channel.clone(),
                request: request_part.clone(),
                boost: boosto[0].clone(),
            });
            let boost_step_id = plan.add_step(boost_step);
            plan.add_dependency(boost_step_id, *field_search_step_id); // TODO instead adding the dependency manually here, we should deduce the dependency by dataflow. In open_channel the output is connected (field_rx) and should be added as depedency
            // STEP2: APPLY BOOST on anchor
            let token_to_anchor_rx = channel.receiver_for_next_step;
            let boost_vals_rx = boost_to_anchor_channel.receiver_for_next_step;
            let mut apply_boost_to_anchor_channel = PlanStepDataChannels::open_channel(1, vec![token_to_anchor_rx, boost_vals_rx]);
            // the last step gets set a filter channel to which he will send the result
            if is_filter_channel {
                apply_boost_to_anchor_channel.filter_channel = Some(FilterChannel::default());
            }
            let step = Box::new(ApplyAnchorBoost {
                trace_info: "ApplyAnchorBoost".to_string(),
                channel: apply_boost_to_anchor_channel,
                request: request_part.clone(),
                boost: boosto[0].clone(),
            });
            let step_id = plan.add_step(step);
            plan.add_dependency(step_id, boost_step_id);
            plan.add_dependency(step_id, token_to_anchor_step_id);
            if let Some(parent_step_dependecy) = parent_step_dependecy {
                plan.add_dependency(parent_step_dependecy, step_id);
            }
            if let Some(depends_on_step) = depends_on_step {
                plan.add_dependency(step_id, depends_on_step);
            }
            return step_id;
        }
    }
    // This is the normal case, resolve field directly to anchor ids
    let mut channel = PlanStepDataChannels::open_channel(1, vec![field_rx]);
    if let Some(step_id) = filter_channel_step {
        plan.get_step_channel(step_id).filter_channel.as_mut().unwrap().num_receivers += 1;
        channel.filter_receiver = Some(plan.get_step_channel(step_id).filter_channel.as_mut().unwrap().filter_receiver.clone());
    }
    if is_filter_channel {
        channel.filter_channel = Some(FilterChannel::default());
    }
    let token_to_anchor_step = ResolveTokenIdToAnchor {
        request: request_part.clone(),
        channel,
    };
    let id1 = plan.add_step(Box::new(token_to_anchor_step));
    plan.add_dependency(id1, *field_search_step_id);
    if let Some(parent_step_dependecy) = parent_step_dependecy {
        plan.add_dependency(parent_step_dependecy, id1);
    }
    if let Some(depends_on_step) = depends_on_step {
        plan.add_dependency(id1, depends_on_step);
    }
    id1
}
use rayon::prelude::*;
/// Executes all plan steps in parallel via rayon.
///
/// Collecting into `Result` short-circuits on the first error; the
/// collected per-step results themselves are discarded. Replaces the
/// previous redundant `if let Err … else Ok(())` re-wrap.
///
/// # Errors
/// Returns the first `VelociError` produced by any step.
pub fn execute_steps(steps: Vec<Box<dyn PlanStepTrait>>, persistence: &Persistence) -> Result<(), VelociError> {
    let r: Result<Vec<_>, VelociError> = steps.into_par_iter().map(|step: Box<dyn PlanStepTrait>| step.execute_step(persistence)).collect();
    r.map(|_| ())
}
|
use anyhow::{anyhow, Result};
use sha2::{Digest, Sha256};
use std::borrow::Borrow;
use std::collections::HashMap;
use std::convert::TryInto;
use std::hash::Hash;
use std::io::{stdout, Write};
use std::path::Path;
use std::process::Command;
use std::str::from_utf8;
use crate::types::*;
/// Loads components from `path`, topologically sorts them, computes a
/// git commit hash and a dependency-aware tree hash per component, runs
/// `post_process` on each, and writes the components as JSON to stdout.
///
/// # Errors
/// Fails on toposort cycles, `post_process` errors, JSON serialization
/// errors, or stdout write errors.
pub fn run_hasher<F>(
    path: &Path,
    pretty_print: bool,
    remove_dependencies: bool,
    include_short_shas: bool,
    post_process: F,
) -> Result<(), anyhow::Error>
where
    F: Fn(&mut Component) -> anyhow::Result<()>,
{
    let mut x = load_components(path);
    // Toposort so every component's dependencies are hashed before it.
    x = toposort_components(x)?;
    // dir name -> (depth, tree hash) of already-processed components.
    let mut n: HashMap<String, (i32, [u8; 32])> = HashMap::new();
    let x: Result<Vec<_>> = x
        .iter_mut()
        .map(|comp| {
            log::debug!(
                "Calculating hashes for {}, dependencies: {:?}",
                comp.dir,
                comp.dependencies
            );
            // NOTE(review): unwrap panics if git fails for this dir;
            // consider propagating with `?` instead.
            let commit_hash = hash_for_dir(path, &path.join(&comp.dir)).unwrap();
            let res = hash_for_node(&commit_hash, &comp.depsorted(), &n);
            n.insert(comp.dir.to_owned(), res);
            let tree_hex = hex::encode(res.1);
            if include_short_shas {
                comp.commit_sha_short = Some(commit_hash[..8].to_owned());
                comp.tree_sha_short = Some(tree_hex[..16].to_owned());
            }
            comp.commit_sha = Some(commit_hash);
            comp.tree_sha = Some(tree_hex);
            if remove_dependencies {
                comp.dependencies = Vec::new();
            }
            post_process(comp)?;
            Ok(comp)
        })
        .collect();
    let y = x?;
    let json = if pretty_print {
        serde_json::to_string_pretty(&y)?
    } else {
        serde_json::to_string(&y)?
    };
    stdout().write_all(json.as_ref())?;
    Ok(())
}
/// Returns the hash of the most recent git commit touching `path`, by
/// running `git -C <git_dir> log -1 --pretty=format:%H <path>`.
///
/// # Errors
/// Fails if git cannot be spawned, exits non-zero, is killed by a
/// signal, or produces non-UTF-8 output.
fn hash_for_dir(git_dir: &Path, path: &Path) -> Result<String> {
    let out = Command::new("git")
        .args(&[
            "-C".as_ref(),
            git_dir,
            "log".as_ref(),
            "-1".as_ref(),
            "--pretty=format:%H".as_ref(),
            path,
        ])
        .output()?;
    if out.status.success() {
        let r = String::from(from_utf8(&out.stdout)?.trim());
        Ok(r)
    } else {
        match out.status.code() {
            Some(c) => Err(anyhow!(
                "git command exited with error code: {}\n{}",
                c,
                from_utf8(&out.stderr)?
            )),
            // code() is None when the process was terminated by a signal.
            None => Err(anyhow!("git command exited with signal")),
        }
    }
}
/// Computes the (depth, SHA-256) pair for a node whose own git hash is
/// `node_hash` (hex) and whose dependency hashes are already in `hashes`.
/// Depth is 1 + the maximum dependency depth (-1 base for leaves).
///
/// # Panics
/// Panics if `node_hash` is not valid hex, or if any dependency is
/// missing from `hashes` (via `build_hash`).
fn hash_for_node<S, T>(
    node_hash: &str,
    deps: &[S],
    hashes: &HashMap<T, (i32, [u8; 32])>,
) -> (i32, [u8; 32])
where
    S: Borrow<T> + std::fmt::Display,
    T: Hash + Eq,
{
    // hash format:
    // [depth u16] hash [child data] [root node u8 = 2]
    // child data = * [offset u32] [depth u16] hash [end flag u8 = 0/1]
    let mut hasher = Sha256::new();
    let (d, data) = build_hash(deps, hashes);
    let mydepth = d + 1;
    let root_hash = hex::decode(node_hash).unwrap();
    hasher.update((mydepth as u16).to_be_bytes());
    hasher.update(&root_hash);
    hasher.update(&data);
    // Trailing 0x02 marks the root record (children end with 0/1).
    hasher.update([2]);
    log::debug!(
        "root: [depth: {}] {:?} [child data: {:?}] [2]",
        mydepth as u16,
        root_hash,
        data
    );
    (
        mydepth,
        hasher
            .finalize()
            .as_slice()
            .try_into()
            .expect("Wrong length hash"),
    )
}
/// Serializes the dependency records (see format comment in
/// `hash_for_node`) into a byte vector and returns the maximum
/// dependency depth (-1 when there are no dependencies).
///
/// # Panics
/// Panics if any dependency is missing from `hashes` (via `translate`).
fn build_hash<S, T>(deps: &[S], hashes: &HashMap<T, (i32, [u8; 32])>) -> (i32, Vec<u8>)
where
    S: Borrow<T> + std::fmt::Display,
    T: Hash + Eq,
{
    let x = translate(deps, hashes);
    // Per child: offset u32 + depth u16 + 32-byte hash + 1-byte end flag.
    let mut r = Vec::with_capacity(deps.len() * (4 + 2 + 32 + 1));
    let d1 = x.iter().enumerate().fold(-1, |acc, (i, (v, hash))| {
        let last_node = deps.len() - 1 == i;
        r.extend((i as u32).to_be_bytes().iter());
        r.extend((*v as u16).to_be_bytes().iter());
        r.extend(hash.as_ref());
        r.push(if last_node { 1 } else { 0 });
        log::debug!(
            "child ({}): [offset: {}] [depth: {}] {:?} [last node: {}]",
            deps[i],
            i as u32,
            *v as u16,
            hash,
            last_node
        );
        // Fold tracks the deepest child seen so far.
        std::cmp::max(acc, *v)
    });
    (d1, r)
}
/// Looks each element of `inp` up in `map` and returns references to the
/// mapped values, preserving order.
///
/// # Panics
/// Panics if any element of `inp` has no entry in `map`.
fn translate<'a, S, T, V>(inp: &[S], map: &'a HashMap<T, V>) -> Vec<&'a V>
where
    S: Borrow<T>,
    T: Hash + Eq,
{
    let mut resolved = Vec::with_capacity(inp.len());
    for item in inp {
        resolved.push(map.get(item.borrow()).unwrap());
    }
    resolved
}
|
use rocksdb::{self, DBIterator, IteratorMode, Snapshot as RocksSnapshot};
extern crate varint;
use std::f32;
use std::io::Cursor;
use std::iter::Peekable;
use std::mem::transmute;
use std::str;
use self::varint::VarintRead;
use crate::index::Index;
use crate::json_value::JsonValue;
use crate::key_builder::{KeyBuilder, Segment};
use crate::query::{DocResult, QueryScoringInfo};
use crate::returnable::{PathSegment, ReturnPath};
/// Read-only view over the RocksDB index at a fixed point in time; all
/// iterators, scorers and fetchers created from it observe the same state.
pub struct Snapshot<'a> {
    rocks: RocksSnapshot<'a>,
}
impl<'a> Snapshot<'a> {
    /// Wraps a raw RocksDB snapshot.
    pub fn new(rocks: RocksSnapshot) -> Snapshot {
        Snapshot { rocks }
    }
    /// Creates an iterator over the doc results indexed under `term` at the
    /// key path described by `kb`.
    pub fn new_term_doc_result_iterator(&self, term: &str, kb: &KeyBuilder) -> DocResultIterator {
        DocResultIterator {
            iter: self.rocks.iterator(IteratorMode::Start),
            keypathword: kb.get_kp_word_only(term),
        }
    }
    /// Point lookup of a raw key.
    /// NOTE(review): unwraps the RocksDB result — a storage failure panics here.
    pub fn get(&self, key: &[u8]) -> Option<rocksdb::DBVector> {
        self.rocks.get(key).unwrap()
    }
    /// Builds a TF-IDF scorer for `term`. `idf` starts as NaN until `init`
    /// runs; `should_score` uses that to detect an uninitialized scorer.
    pub fn new_scorer(&self, term: &str, kb: &KeyBuilder, boost: f32) -> Scorer {
        Scorer {
            iter: self.rocks.iterator(IteratorMode::Start),
            idf: f32::NAN,
            boost,
            kb: kb.clone(),
            term: term.to_string(),
            term_ordinal: 0,
        }
    }
    /// Creates a fetcher that can materialize JSON values stored in the index.
    pub fn new_json_fetcher(&self) -> JsonFetcher {
        JsonFetcher {
            iter: self.rocks.iterator(IteratorMode::Start),
        }
    }
    /// Raw iterator over the whole snapshot, positioned at the start.
    pub fn new_iterator(&self) -> DBIterator {
        self.rocks.iterator(IteratorMode::Start)
    }
    /// R-tree (spatial) iterator for the given query bytes.
    pub fn new_rtree_iterator(&self, query: &[u8]) -> DBIterator {
        self.rocks.rtree_iterator(query)
    }
    /// Iterator over all document sequence entries (keys starting at "S").
    pub fn new_all_docs_iterator(&self) -> AllDocsIterator {
        let mut iter = self.rocks.iterator(IteratorMode::Start);
        iter.set_mode(IteratorMode::From(b"S", rocksdb::Direction::Forward));
        AllDocsIterator { iter }
    }
}
/// Iterates the `(DocResult, TermPositions)` entries stored under a single
/// keypath+word prefix of the index.
pub struct DocResultIterator {
    // Underlying RocksDB iterator; repositioned by `advance_gte`.
    iter: DBIterator,
    // Key prefix "<keypath><word>" every yielded entry must share.
    keypathword: String,
}
impl DocResultIterator {
    /// Seeks the iterator to the first entry whose doc result is >= `start`.
    ///
    /// Temporarily appends `start` to the stored prefix to form the seek
    /// key, then truncates it back so `next` can keep prefix-matching.
    pub fn advance_gte(&mut self, start: &DocResult) {
        KeyBuilder::add_doc_result_to_kp_word(&mut self.keypathword, start);
        // Seek in index to >= entry
        self.iter.set_mode(IteratorMode::From(
            self.keypathword.as_bytes(),
            rocksdb::Direction::Forward,
        ));
        KeyBuilder::truncate_to_kp_word(&mut self.keypathword);
    }
    /// Returns the next doc result under the prefix, or `None` once the
    /// iterator is exhausted or keys stop matching the keypath+word prefix.
    pub fn next(&mut self) -> Option<(DocResult, TermPositions)> {
        if let Some((key, value)) = self.iter.next() {
            if !key.starts_with(self.keypathword.as_bytes()) {
                // We passed the key path we are interested in; nothing left to do.
                return None;
            }
            // Assumes index keys are valid UTF-8 — `from_utf8_unchecked` is
            // UB otherwise; TODO confirm the writer guarantees this.
            let key_str = unsafe { str::from_utf8_unchecked(&key) };
            let dr = KeyBuilder::parse_doc_result_from_kp_word_key(key_str);
            Some((
                dr,
                TermPositions {
                    pos: value.into_vec(),
                },
            ))
        } else {
            None
        }
    }
}
/// Raw varint-encoded word positions for one term occurrence list.
pub struct TermPositions {
    // Concatenated unsigned 32-bit varints.
    pos: Vec<u8>,
}
impl TermPositions {
    /// Decodes the varint buffer into the list of word positions.
    /// Decoding stops at the end of the buffer (or the first decode error).
    pub fn positions(self) -> Vec<u32> {
        let mut bytes = Cursor::new(self.pos);
        let mut positions = Vec::new();
        while let Ok(pos) = bytes.read_unsigned_varint_32() {
            positions.push(pos);
        }
        positions
    }
}
/// TF-IDF scorer for a single term at a single key path.
pub struct Scorer {
    // Iterator used for point lookups of count/length keys.
    iter: DBIterator,
    // Inverse document frequency; NaN until `init` runs (see `should_score`).
    idf: f32,
    // Query-supplied score multiplier.
    boost: f32,
    kb: KeyBuilder,
    term: String,
    // Slot index assigned by `init` for this term's score contribution.
    term_ordinal: usize,
}
impl Scorer {
    /// Computes the idf from the stored term/field counts and registers this
    /// term with the query's scoring info, claiming an ordinal slot.
    /// idf = 1 + ln(num_docs / (doc_freq + 1)); missing counts default to 0.
    pub fn init(&mut self, qsi: &mut QueryScoringInfo) {
        let key = self.kb.kp_word_count_key(&self.term);
        let doc_freq = if let Some(bytes) = self.get_value(&key) {
            Index::convert_bytes_to_i32(bytes.as_ref()) as f32
        } else {
            0.0
        };
        let key = self.kb.kp_field_count_key();
        let num_docs = if let Some(bytes) = self.get_value(&key) {
            Index::convert_bytes_to_i32(bytes.as_ref()) as f32
        } else {
            0.0
        };
        self.idf = 1.0 + (num_docs / (doc_freq + 1.0)).ln();
        self.term_ordinal = qsi.num_terms;
        qsi.num_terms += 1;
        qsi.sum_of_idt_sqs += self.idf * self.idf;
    }
    /// Point lookup implemented as an iterator seek: returns the value only
    /// when the first key at/after `key` is exactly `key` (length equal and
    /// prefix match).
    pub fn get_value(&mut self, key: &str) -> Option<Box<[u8]>> {
        self.iter.set_mode(IteratorMode::From(
            key.as_bytes(),
            rocksdb::Direction::Forward,
        ));
        if let Some((ret_key, ret_value)) = self.iter.next() {
            if ret_key.len() == key.len() && ret_key.starts_with(key.as_bytes()) {
                Some(ret_value)
            } else {
                None
            }
        } else {
            None
        }
    }
    /// Adds this term's score for one matching doc:
    /// idf^2 * sqrt(num_matches) * (1 / sqrt(field_length)) * boost.
    /// Panics if the field-length entry is missing for a matched doc.
    pub fn add_match_score(&mut self, num_matches: u32, dr: &mut DocResult) {
        if self.should_score() {
            let key = self.kb.kp_field_length_key_from_doc_result(dr);
            let total_field_words = if let Some(bytes) = self.get_value(&key) {
                Index::convert_bytes_to_i32(bytes.as_ref()) as f32
            } else {
                panic!("Couldn't find field length for a match!! WHAT!");
            };
            let tf: f32 = (num_matches as f32).sqrt();
            let norm = 1.0 / (total_field_words as f32).sqrt();
            let score = self.idf * self.idf * tf * norm * self.boost;
            dr.add_score(self.term_ordinal, score);
        }
    }
    /// False until `init` has been called (idf is initialized from NaN).
    pub fn should_score(&self) -> bool {
        !self.idf.is_nan()
    }
}
/// Reassembles JSON values from their flattened keypath/value entries in
/// the index.
pub struct JsonFetcher {
    iter: DBIterator,
}
impl JsonFetcher {
    /// Fetches the JSON value for document `seq` at the return path `rp`,
    /// descending from the key prefix already in `kb_base`.
    pub fn fetch(
        &mut self,
        seq: u64,
        kb_base: &mut KeyBuilder,
        rp: &ReturnPath,
    ) -> Option<JsonValue> {
        JsonFetcher::descend_return_path(&mut self.iter, seq, kb_base, rp, 0)
    }
    /// Decodes a stored value. The first byte is a type tag ('s' string,
    /// 'f' f64 as 8 native-order bytes, 'T'/'F'/'N' scalars, 'o'/'a' empty
    /// containers); the rest is the payload.
    pub fn bytes_to_json_value(bytes: &[u8]) -> JsonValue {
        match bytes[0] as char {
            's' => {
                // Assumes stored strings are valid UTF-8 — from_utf8_unchecked
                // is UB otherwise; TODO confirm the indexer guarantees this.
                let string = unsafe { str::from_utf8_unchecked(&bytes[1..]) }.to_string();
                JsonValue::String(string)
            }
            'f' => {
                assert!(bytes.len() == 9);
                let mut bytes2: [u8; 8] = [0; 8];
                for (n, b) in bytes[1..9].iter().enumerate() {
                    bytes2[n] = *b;
                }
                // SAFETY: [u8; 8] -> f64 is a same-size transmute and every
                // bit pattern is a valid f64. NOTE(review): equivalent to
                // f64::from_ne_bytes(bytes2), which needs no unsafe.
                let double: f64 = unsafe { transmute(bytes2) };
                JsonValue::Number(double)
            }
            'T' => JsonValue::True,
            'F' => JsonValue::False,
            'N' => JsonValue::Null,
            'o' => JsonValue::Object(vec![]),
            'a' => JsonValue::Array(vec![]),
            what => panic!("unexpected type tag in value: {}", what),
        }
    }
    /// Elements are encountered in lexical key order; sort by the stored
    /// ordinal to restore the array's real element order.
    fn return_array(mut array: Vec<(u64, JsonValue)>) -> JsonValue {
        array.sort_by_key(|tuple| tuple.0);
        JsonValue::Array(array.into_iter().map(|(_i, json)| json).collect())
    }
    /// Walks the remaining return-path segments (from `rp_index`), extending
    /// `kb` as it goes, then seeks to the resulting value key and delegates
    /// to `do_fetch`. `ArrayAll` loops over successive indices, recursing
    /// with a cloned KeyBuilder per element.
    fn descend_return_path(
        iter: &mut DBIterator,
        seq: u64,
        kb: &mut KeyBuilder,
        rp: &ReturnPath,
        mut rp_index: usize,
    ) -> Option<JsonValue> {
        while let Some(segment) = rp.nth(rp_index) {
            rp_index += 1;
            match *segment {
                PathSegment::ObjectKey(ref string) => {
                    kb.push_object_key(string);
                }
                PathSegment::ArrayAll => {
                    let mut i = 0;
                    let mut vec = Vec::new();
                    loop {
                        kb.push_array_index(i);
                        i += 1;
                        // Recurse on a clone so the deeper descent cannot
                        // disturb this loop's KeyBuilder state.
                        if let Some(json) = JsonFetcher::descend_return_path(
                            iter,
                            seq,
                            &mut kb.clone(),
                            rp,
                            rp_index,
                        ) {
                            vec.push(json);
                            kb.pop_array();
                        } else {
                            // we didn't get a value, is it because the array ends or the
                            // full path isn't there? check as there might be more array elements
                            // with a full path that does match.
                            let value_key = kb.kp_value_key(seq);
                            kb.pop_array();
                            // Seek in index to >= entry
                            iter.set_mode(IteratorMode::From(
                                value_key.as_bytes(),
                                rocksdb::Direction::Forward,
                            ));
                            if let Some((key, _value)) = iter.next() {
                                if key.starts_with(value_key.as_bytes()) {
                                    // yes it exists. loop again.
                                    continue;
                                }
                            }
                            if vec.is_empty() {
                                return None;
                            } else {
                                return Some(JsonValue::Array(vec));
                            }
                        }
                    }
                }
                PathSegment::Array(ref index) => {
                    kb.push_array_index(*index);
                }
            }
        }
        let value_key = kb.kp_value_key(seq);
        // Seek in index to >= entry
        iter.set_mode(IteratorMode::From(
            value_key.as_bytes(),
            rocksdb::Direction::Forward,
        ));
        let (key, value) = match iter.next() {
            Some((key, value)) => (key, value),
            None => return None,
        };
        // Assumes index keys are valid UTF-8 (from_utf8_unchecked is UB otherwise).
        if !KeyBuilder::is_kp_value_key_prefix(&value_key, unsafe {
            str::from_utf8_unchecked(&key)
        }) {
            return None;
        }
        Some(JsonFetcher::do_fetch(
            &mut iter.peekable(),
            &value_key,
            key,
            value,
        ))
    }
    /// When do_fetch is called it means we know we are going to find a value because
    /// we prefix matched the keypath. What we are doing here is parsing the remaining
    /// keypath to figure out the nested structure of the remaining keypath. So we
    /// depth first recursively parse the keypath and return the value and inserting into
    /// containers (arrays or objects) then iterate keys until the keypath no longer matches.
    fn do_fetch(
        iter: &mut Peekable<&mut DBIterator>,
        value_key: &str,
        mut key: Box<[u8]>,
        mut value: Box<[u8]>,
    ) -> JsonValue {
        if key.len() == value_key.len() {
            // we have a key match!
            return JsonFetcher::bytes_to_json_value(value.as_ref());
        }
        let segment = {
            let key_str = unsafe { str::from_utf8_unchecked(&key) };
            let remaining = &key_str[value_key.len()..];
            KeyBuilder::parse_first_kp_value_segment(remaining)
        };
        match segment {
            Some((Segment::ObjectKey(mut unescaped), escaped)) => {
                let mut object: Vec<(String, JsonValue)> = Vec::new();
                let mut value_key_next = value_key.to_string() + &escaped;
                loop {
                    // Recursively fetch the current field's (possibly nested) value.
                    let json_val = JsonFetcher::do_fetch(iter, &value_key_next, key, value);
                    object.push((unescaped, json_val));
                    // Peek: does the next key continue this same object?
                    let segment = match iter.peek() {
                        Some(&(ref k, ref _v)) => {
                            let key = unsafe { str::from_utf8_unchecked(k) };
                            if !KeyBuilder::is_kp_value_key_prefix(value_key, key) {
                                return JsonValue::Object(object);
                            }
                            let key_str = unsafe { str::from_utf8_unchecked(k) };
                            let remaining = &key_str[value_key.len()..];
                            KeyBuilder::parse_first_kp_value_segment(remaining)
                        }
                        None => return JsonValue::Object(object),
                    };
                    if let Some((Segment::ObjectKey(unescaped2), escaped2)) = segment {
                        unescaped = unescaped2;
                        // advance the peeked iter
                        match iter.next() {
                            Some((k, v)) => {
                                key = k;
                                value = v;
                            }
                            None => panic!("couldn't advanced already peeked iter"),
                        };
                        value_key_next.truncate(value_key.len());
                        value_key_next.push_str(&escaped2);
                    } else {
                        return JsonValue::Object(object);
                    }
                }
            }
            Some((Segment::Array(mut i), escaped)) => {
                // we use a tuple with ordinal because we encounter
                // elements in lexical sorting order instead of ordinal order
                let mut array: Vec<(u64, JsonValue)> = Vec::new();
                let mut value_key_next = value_key.to_string() + &escaped;
                loop {
                    let json_val = JsonFetcher::do_fetch(iter, &value_key_next, key, value);
                    array.push((i, json_val));
                    let segment = match iter.peek() {
                        Some(&(ref k, ref _v)) => {
                            let key = unsafe { str::from_utf8_unchecked(k) };
                            if !KeyBuilder::is_kp_value_key_prefix(value_key, key) {
                                return JsonFetcher::return_array(array);
                            }
                            let key_str = unsafe { str::from_utf8_unchecked(k) };
                            let remaining = &key_str[value_key.len()..];
                            KeyBuilder::parse_first_kp_value_segment(remaining)
                        }
                        None => return JsonFetcher::return_array(array),
                    };
                    if let Some((Segment::Array(i2), escaped2)) = segment {
                        i = i2;
                        // advance the already peeked iter
                        match iter.next() {
                            Some((k, v)) => {
                                key = k;
                                value = v;
                            }
                            None => panic!("couldn't advanced already peeked iter"),
                        };
                        value_key_next.truncate(value_key.len());
                        value_key_next.push_str(&escaped2);
                    } else {
                        return JsonFetcher::return_array(array);
                    }
                }
            }
            None => {
                let key_str = unsafe { str::from_utf8_unchecked(&key) };
                panic!(
                    "somehow couldn't parse key segment {} {}",
                    value_key, key_str
                );
            }
        }
    }
}
/// Iterates every document sequence key in the snapshot (created positioned
/// at the "S" prefix by `Snapshot::new_all_docs_iterator`).
pub struct AllDocsIterator {
    iter: DBIterator,
}
impl AllDocsIterator {
pub fn next(&mut self) -> Option<DocResult> {
match self.iter.next() {
Some((k, _v)) => {
let key = unsafe { str::from_utf8_unchecked(&k) };
if let Some(seq) = KeyBuilder::parse_seq_key(key) {
let mut dr = DocResult::new();
dr.seq = seq;
Some(dr)
} else {
None
}
}
None => None,
}
}
}
|
// This file was generated by gir (https://github.com/gtk-rs/gir @ fbb95f4)
// from gir-files (https://github.com/gtk-rs/gir-files @ 77d1f70)
// DO NOT EDIT
use FilterOutputStream;
use OutputStream;
use Seekable;
use ffi;
use glib;
use glib::object::Downcast;
use glib::object::IsA;
use glib::signal::SignalHandlerId;
use glib::signal::connect;
use glib::translate::*;
use glib_ffi;
use gobject_ffi;
use std::boxed::Box as Box_;
use std::mem;
use std::mem::transmute;
use std::ptr;
// Generated GObject wrapper: declares `BufferedOutputStream` over the C
// `GBufferedOutputStream`/class pair, wires up its superclasses and the
// `Seekable` interface, and hooks the runtime type via get_type.
glib_wrapper! {
    pub struct BufferedOutputStream(Object<ffi::GBufferedOutputStream, ffi::GBufferedOutputStreamClass>): FilterOutputStream, OutputStream, Seekable;
    match fn {
        get_type => || ffi::g_buffered_output_stream_get_type(),
    }
}
impl BufferedOutputStream {
    /// Creates a buffered stream wrapping `base_stream`.
    pub fn new<P: IsA<OutputStream>>(base_stream: &P) -> BufferedOutputStream {
        unsafe {
            // The FFI constructor hands back a full (owned) reference; the
            // unchecked downcast relies on the C API returning this type.
            OutputStream::from_glib_full(ffi::g_buffered_output_stream_new(base_stream.to_glib_none().0)).downcast_unchecked()
        }
    }
    /// Creates a buffered stream with an explicit buffer size in bytes.
    pub fn new_sized<P: IsA<OutputStream>>(base_stream: &P, size: usize) -> BufferedOutputStream {
        unsafe {
            OutputStream::from_glib_full(ffi::g_buffered_output_stream_new_sized(base_stream.to_glib_none().0, size)).downcast_unchecked()
        }
    }
}
/// Generated convenience trait exposing the stream's `auto-grow` and
/// `buffer-size` properties plus their change-notification signals.
pub trait BufferedOutputStreamExt {
    fn get_auto_grow(&self) -> bool;
    fn get_buffer_size(&self) -> usize;
    fn set_auto_grow(&self, auto_grow: bool);
    fn set_buffer_size(&self, size: usize);
    fn connect_property_auto_grow_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_buffer_size_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
// Blanket implementation: each method is a thin FFI forward; the connect_*
// methods register a notify trampoline for the respective property.
impl<O: IsA<BufferedOutputStream> + IsA<glib::object::Object>> BufferedOutputStreamExt for O {
    fn get_auto_grow(&self) -> bool {
        unsafe {
            from_glib(ffi::g_buffered_output_stream_get_auto_grow(self.to_glib_none().0))
        }
    }
    fn get_buffer_size(&self) -> usize {
        unsafe {
            ffi::g_buffered_output_stream_get_buffer_size(self.to_glib_none().0)
        }
    }
    fn set_auto_grow(&self, auto_grow: bool) {
        unsafe {
            ffi::g_buffered_output_stream_set_auto_grow(self.to_glib_none().0, auto_grow.to_glib());
        }
    }
    fn set_buffer_size(&self, size: usize) {
        unsafe {
            ffi::g_buffered_output_stream_set_buffer_size(self.to_glib_none().0, size);
        }
    }
    fn connect_property_auto_grow_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            // Double-Box so the C-side gpointer is a thin pointer that the
            // trampoline can transmute back into the closure trait object.
            let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
            connect(self.to_glib_none().0, "notify::auto-grow",
                transmute(notify_auto_grow_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
        }
    }
    fn connect_property_buffer_size_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
            connect(self.to_glib_none().0, "notify::buffer-size",
                transmute(notify_buffer_size_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
        }
    }
}
unsafe extern "C" fn notify_auto_grow_trampoline<P>(this: *mut ffi::GBufferedOutputStream, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<BufferedOutputStream> {
callback_guard!();
let f: &&(Fn(&P) + 'static) = transmute(f);
f(&BufferedOutputStream::from_glib_borrow(this).downcast_unchecked())
}
unsafe extern "C" fn notify_buffer_size_trampoline<P>(this: *mut ffi::GBufferedOutputStream, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<BufferedOutputStream> {
callback_guard!();
let f: &&(Fn(&P) + 'static) = transmute(f);
f(&BufferedOutputStream::from_glib_borrow(this).downcast_unchecked())
}
|
// camera sub modules
/// Fish-eye camera model.
pub mod fish_eye;
/// Radial-tangential camera model.
pub mod radial_tangential;
|
mod lib;
use lib::*;
use std::collections::HashSet;
use std::iter::FromIterator;
/// Exercises the graph library end to end. Each helper below mirrors one of
/// the unit tests that were commented out in the original body (`// #[test]`
/// markers); splitting them out lets a failing assertion name its scenario.
fn main() {
    node_eq();
    edge_eq();
    add_node_test();
    get_node_test();
    add_edge_test();
    // reachable_test is still disabled; kept for reference:
    // let mut g = AdjListGraph::new();
    // let n1 = g.add_node("hello");
    // let n2 = g.add_node("world");
    // let n3 = g.add_node("excel");
    // g.add_edge(1, &n1, &n2);
    // g.add_edge(2, &n2, &n3);
    // g.add_edge(3, &n3, &n1);
    // let nodes = g.all_reachable(&n1);
    // assert_eq!(nodes, HashSet::<String>::from_iter(vec!["hello".into(), "world".into(), "excel".into()]));
    // g.clear();
    connectivity_test();
    empty_test();
    get_weight_test();
    neighbors_test();
    edges_test();
    remove_edge_test();
    remove_node_test();
    dijkstra_test();
}
/// Nodes with equal names compare equal.
fn node_eq() {
    let n01 = NodeType::new("node0");
    let n02 = NodeType::new("node0");
    let n11 = NodeType::new("node1");
    assert_eq!(n01, n02);
    assert_ne!(n01, n11);
}
/// Edges compare by endpoints, not weight: e01 (weight 1) equals e02 (weight 2).
fn edge_eq() {
    let n01 = NodeType::new("node0");
    let n02 = NodeType::new("node0");
    let n11 = NodeType::new("node1");
    let e01 = ArcType::new(1, &n01, &n11);
    let e02 = ArcType::new(2, &n02, &n11);
    let e11 = ArcType::new(3, &n11, &n02);
    assert_eq!(e01, e02);
    assert_ne!(e01, e11);
}
/// `get_value` returns the stored value for known nodes, `None` otherwise.
fn add_node_test() {
    let mut g = AdjListGraph::new();
    let n1 = g.add_node("hello");
    let n2 = g.add_node("world");
    let n3 = NodeType::new("node");
    assert_eq!(g.get_value(&n1), Some("hello".into()));
    assert_eq!(g.get_value(&n2), Some("world".into()));
    assert_eq!(g.get_value(&n3), None);
    g.clear();
}
/// `get_node` finds nodes by value.
fn get_node_test() {
    let mut g = AdjListGraph::new();
    let n1 = g.add_node("hello");
    let n2 = g.add_node("world");
    assert_eq!(g.get_node("hello"), Some(n1));
    assert_eq!(g.get_node("world"), Some(n2));
    assert_eq!(g.get_node("abcde"), None);
    g.clear();
}
/// Adding edges between existing nodes must not panic.
fn add_edge_test() {
    let mut g = AdjListGraph::new();
    let n1 = g.add_node("hello");
    let n2 = g.add_node("world");
    let n3 = g.add_node("excel");
    g.add_edge(1, &n1, &n2);
    g.add_edge(2, &n2, &n3);
    g.add_edge(3, &n3, &n1);
    g.clear();
}
/// Connectivity is directed (n2 -> n1 fails) and transitive (n1 -> n3 holds).
fn connectivity_test() {
    let mut g = AdjListGraph::new();
    let n1 = g.add_node("hello");
    let n2 = g.add_node("world");
    let n3 = g.add_node("excel");
    g.add_edge(1, &n1, &n2);
    g.add_edge(2, &n2, &n3);
    assert!(g.connected(&n1, &n2));
    assert!(!g.connected(&n2, &n1));
    assert!(g.connected(&n1, &n3));
    g.clear();
}
/// `is_empty` flips once a node is added.
fn empty_test() {
    let mut g = AdjListGraph::new();
    assert!(g.is_empty());
    g.add_node("hello");
    assert!(!g.is_empty());
    g.clear();
}
/// Weights are reported per directed edge only.
fn get_weight_test() {
    let mut g = AdjListGraph::new();
    let n1 = g.add_node("hello");
    let n2 = g.add_node("world");
    let n3 = g.add_node("excel");
    g.add_edge(1, &n1, &n2);
    g.add_edge(2, &n2, &n3);
    assert_eq!(g.get_weight(&n1, &n2), Some(1));
    assert_eq!(g.get_weight(&n2, &n3), Some(2));
    assert_eq!(g.get_weight(&n2, &n1), None);
    g.clear();
}
/// Neighbors follow outgoing edges only.
fn neighbors_test() {
    let mut g = AdjListGraph::new();
    let n1 = g.add_node("hello");
    let n2 = g.add_node("world");
    let n3 = g.add_node("excel");
    g.add_edge(1, &n1, &n2);
    g.add_edge(2, &n1, &n3);
    assert_eq!(g.neighbors(&n1), HashSet::<String>::from_iter(vec!["world".into(), "excel".into()]));
    assert_eq!(g.neighbors(&n2), HashSet::<String>::from_iter(vec![]));
    assert_eq!(g.neighbors(&n3), HashSet::<String>::from_iter(vec![]));
    g.clear();
}
/// `edges` returns the directed edge set as (from, to) value pairs.
fn edges_test() {
    let mut g = AdjListGraph::new();
    let n1 = g.add_node("hello");
    let n2 = g.add_node("world");
    let n3 = g.add_node("excel");
    g.add_edge(1, &n1, &n2);
    g.add_edge(2, &n2, &n3);
    let vector = vec![("hello".into(), "world".into()), ("world".into(), "excel".into())];
    assert_eq!(g.edges(), HashSet::<(String, String)>::from_iter(vector));
    g.clear();
}
/// Removing an edge cuts direct and transitive reachability through it.
fn remove_edge_test() {
    let mut g = AdjListGraph::new();
    let n1 = g.add_node("hello");
    let n2 = g.add_node("world");
    let n3 = g.add_node("excel");
    g.add_edge(1, &n1, &n2);
    g.add_edge(2, &n2, &n3);
    assert!(g.connected(&n1, &n3));
    assert!(g.connected(&n1, &n2));
    assert!(g.connected(&n2, &n3));
    g.remove_edge(&n1, &n2);
    assert!(g.connected(&n2, &n3));
    assert!(!g.connected(&n1, &n2));
    assert!(!g.connected(&n1, &n3));
    let vector = vec![("world".into(), "excel".into())];
    assert_eq!(g.edges(), HashSet::<(String, String)>::from_iter(vector));
    g.clear();
}
/// Removing a node drops it together with its incident edges.
fn remove_node_test() {
    let mut g = AdjListGraph::new();
    let n1 = g.add_node("hello");
    let n2 = g.add_node("world");
    let n3 = g.add_node("excel");
    g.add_edge(1, &n1, &n2);
    g.add_edge(2, &n2, &n3);
    g.add_edge(3, &n3, &n1);
    assert!(g.connected(&n1, &n2));
    assert!(g.connected(&n1, &n3));
    assert!(g.connected(&n2, &n1));
    assert!(g.connected(&n2, &n3));
    assert!(g.connected(&n3, &n1));
    assert!(g.connected(&n3, &n2));
    g.remove_node(&n1);
    assert!(g.connected(&n2, &n3));
    let vector = vec!["world".into(), "excel".into()];
    assert_eq!(g.nodes(), HashSet::<String>::from_iter(vector));
    assert_eq!(g.neighbors(&n2), HashSet::<String>::from_iter(vec!["excel".into()]));
    let vector = vec![("world".into(), "excel".into())];
    assert_eq!(g.edges(), HashSet::<(String, String)>::from_iter(vector));
    g.clear();
}
/// Dijkstra returns the min-cost path a -> d -> c -> b (cost 1+1+1 beats 3+1).
fn dijkstra_test() {
    let mut g: AdjListGraph = AdjListGraph::new();
    let a = g.add_node("a".into());
    let b = g.add_node("b".into());
    let c = g.add_node("c".into());
    let d = g.add_node("d".into());
    g.add_edge(3, &a, &c);
    g.add_edge(1, &a, &d);
    g.add_edge(2, &c, &d);
    g.add_edge(1, &d, &c);
    g.add_edge(1, &c, &b);
    g.add_edge(3, &d, &b);
    assert_eq!(g.dijkstra(&a, &b), vec!["a", "d", "c", "b"]);
    g.clear();
}
|
use crate::parse;
#[test]
fn parse_24_7() {
    // "24/7" is the always-open shorthand and must be accepted.
    assert!(parse("24/7").is_ok());
}
#[test]
fn parse_invalid() {
    // Arbitrary prose is not a valid expression.
    assert!(parse("this is not a valid expression").is_err());
    // Hour values past 24 must be rejected.
    assert!(parse("10:00-100:00").is_err());
    // Trailing unknown tokens must fail the whole parse.
    assert!(parse("10:00-12:00 tomorrow").is_err());
}
|
/// Bus interface for addressable memory: every fetch/load/store reports the
/// `Waitstates` the access cost alongside its result so callers can account
/// for timing.
pub trait Memory {
    /// Used to fetch a 32bit opcode in ARM mode.
    /// Defaults to a plain 32-bit load; implementors may override.
    fn fetch32(&mut self, address: u32, access: AccessType) -> (u32, Waitstates) {
        self.load32(address, access)
    }
    /// Used to fetch a 16bit opcode in THUMB mode.
    /// Defaults to a plain 16-bit load; implementors may override.
    fn fetch16(&mut self, address: u32, access: AccessType) -> (u16, Waitstates) {
        self.load16(address, access)
    }
    /// Loads a 32-bit value.
    fn load32(&mut self, address: u32, access: AccessType) -> (u32, Waitstates);
    /// Loads a 16-bit value.
    fn load16(&mut self, address: u32, access: AccessType) -> (u16, Waitstates);
    /// Loads a byte.
    fn load8(&mut self, address: u32, access: AccessType) -> (u8, Waitstates);
    /// Stores a 32-bit value, returning the access cost.
    fn store32(&mut self, address: u32, value: u32, access: AccessType) -> Waitstates;
    /// Stores a 16-bit value, returning the access cost.
    fn store16(&mut self, address: u32, value: u16, access: AccessType) -> Waitstates;
    /// Stores a byte, returning the access cost.
    fn store8(&mut self, address: u32, value: u8, access: AccessType) -> Waitstates;
    /// Stalling for some number of internal cycles.
    /// Default is a no-op for memories that do not track internal time.
    fn stall(&mut self, _cycles: super::Cycles) {
        /* NOP */
    }
}
/// Classifies a memory access for waitstate accounting: `Seq` marks an
/// access that continues the previous one, `NonSeq` a fresh access.
#[derive(Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum AccessType {
    NonSeq = 0,
    Seq = 1,
}
impl AccessType {
    /// True for a sequential access.
    #[inline]
    pub fn is_seq(self) -> bool {
        matches!(self, AccessType::Seq)
    }
    /// True for a non-sequential access.
    #[inline]
    pub fn is_nonseq(self) -> bool {
        matches!(self, AccessType::NonSeq)
    }
}
/// The number of waitstates (single clock cycles) that were required to perform a memory operation.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct Waitstates(u8);
impl Waitstates {
    /// No wait cycles.
    pub const ZERO: Waitstates = Waitstates(0);
    /// Exactly one wait cycle.
    pub const ONE: Waitstates = Waitstates(1);
}
impl std::fmt::Display for Waitstates {
    /// Formats exactly like the inner counter (delegation preserves
    /// width/fill/alignment flags).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.0, f)
    }
}
impl std::ops::Add<Self> for Waitstates {
    type Output = Self;
    #[inline]
    fn add(self, rhs: Waitstates) -> Self {
        Self(self.0 + rhs.0)
    }
}
impl std::ops::AddAssign<Self> for Waitstates {
    #[inline]
    fn add_assign(&mut self, rhs: Waitstates) {
        self.0 += rhs.0
    }
}
impl std::ops::Sub<Self> for Waitstates {
    type Output = Self;
    #[inline]
    fn sub(self, rhs: Waitstates) -> Self {
        Self(self.0 - rhs.0)
    }
}
impl From<u8> for Waitstates {
    #[inline]
    fn from(value: u8) -> Self {
        Self(value)
    }
}
impl From<Waitstates> for u32 {
    #[inline]
    fn from(wait: Waitstates) -> Self {
        u32::from(wait.0)
    }
}
|
/*pub trait GenericEmiter2Arg<X,Y> {
fn mov(&mut self,x64: bool,_: X,_: Y);
fn add(&mut self,x64: bool,_: X,_: Y);
fn sub(&mut self,x64: bool,_: X,_: Y);
}
*/
use crate::assembler::*;
use crate::assembler_x64::*;
use crate::*;
use crate::avx::*;
use crate::constants_x64::*;
/// Generates an operation trait plus concrete `impl`s for `Assembler`.
///
/// Arm 1 (main): takes `trait Name<...> { fn f(&mut self, ...); }` followed
/// by an `impl { (types) => fn ... {emitter(args)} }` section. The trait's
/// methods default to `unimplemented!()`; each `(types)` group produces an
/// `impl Trait<types> for Assembler` that forwards to a free emitter fn.
/// Arm 2 (`expand`): emits forwarding methods only; not used by the
/// invocations visible in this file.
macro_rules! generic_gen {
    (
        trait $trait_name : ident < $($x: ident),* > {
            $(
                fn $fun_name: ident ( &mut self,$($a: tt),*);
            )*
        }
        impl {
            $(($($tname: tt),*) => $(fn $fname: ident (&mut self,$($arg: ident : $typ: tt),*) {$call: ident ($($argc: expr),*) } ),*)*
        }
    ) => {
        pub trait $trait_name < $($x),*> {
            $(
                // Default body panics so an unsupported operand combination
                // fails loudly at runtime rather than silently doing nothing.
                fn $fun_name (&mut self,$(_: $a),*) {unimplemented!()}
            )*
        }
        $(
            impl $trait_name< $($tname),*> for Assembler {
                $(
                    fn $fname (&mut self, $($arg: $typ),*) {
                        $call(self,$($argc),*);
                    }
                )*
            }
        )*
    };
    (expand ( $($tname: tt),*) => $($fnname : ident ($($arg: ident ),*) => $call: ident ($($argc: expr),*) )*) => {
        $(
            fn $fnname (&mut self,$($arg: $tname),*) {
                $call (self,$($argc),*)
            }
        )*
    }
}
// Stack push/pop for general-purpose registers.
generic_gen! (
    trait PushPop<X> {
        fn push(&mut self,X);
        fn pop(&mut self,X);
    }
    impl {
        (Register) =>
        fn push(&mut self,r: Register) {emit_pushq_reg(r) },
        fn pop(&mut self,r: Register) {emit_popq_reg(r)}
    }
);
// Moves between registers, immediates and XMM registers. NOTE: the `x64`
// flag is only honoured for reg-reg moves; the immediate/XMM emitters
// ignore it by construction of the underlying emit functions.
generic_gen!(
    trait Mov<X,Y> {
        fn mov(&mut self,bool,X,Y);
    }
    impl {
        (Register,Register) =>
        fn mov(&mut self,x64: bool,r1: Register,r2: Register) {
            emit_mov_reg_reg(x64 as u8, r1, r2)
        }
        (i32,Register) =>
        fn mov(&mut self,x64: bool,r1: i32,r2: Register) {
            emit_movl_imm_reg(r1 as i32, r2)
        }
        (i64,Register) =>
        fn mov(&mut self,x64: bool,r1: i64,r2: Register) {
            emit_movq_imm64_reg(r1, r2)
        }
        (XMMRegister,XMMRegister) =>
        fn mov(&mut self,x64: bool,r1: XMMRegister,r2: XMMRegister) {
            movsd(r1,r2)
        }
        (XMMRegister,Register) =>
        fn mov(&mut self,x64: bool,r1: XMMRegister,r2: Register) {
            movq_freg_reg(r1, r2)
        }
    }
);
// Integer and scalar-double addition.
generic_gen!(
    trait Add<X,Y> {
        fn add(&mut self,bool,X,Y);
    }
    impl {
        (Register,Register) =>
        fn add(&mut self,x64: bool,r1: Register,r2: Register) {
            emit_add_reg_reg(x64 as u8, r1, r2)
        }
        (XMMRegister,XMMRegister) =>
        fn add(&mut self,x64: bool,r1: XMMRegister,r2: XMMRegister) {
            addsd(r1,r2)
        }
        (i32,Register) =>
        fn add(&mut self,x64: bool, r1: i32,r2: Register) {
            emit_addq_imm_reg(r1, r2)
        }
    }
);
// Integer and scalar-double subtraction.
generic_gen!(
    trait Sub<X,Y> {
        fn sub(&mut self,bool,X,Y);
    }
    impl {
        (Register,Register) =>
        fn sub(&mut self,x64: bool,r1: Register,r2: Register) {
            emit_sub_reg_reg(x64 as u8, r1, r2)
        }
        (XMMRegister,XMMRegister) =>
        fn sub(&mut self,x64: bool,r1: XMMRegister,r2: XMMRegister) {
            subsd(r1,r2)
        }
        (i32,Register) =>
        fn sub(&mut self,x64: bool,r1: i32,r2: Register) {
            emit_subq_imm_reg(r1, r2)
        }
    }
);
// Function return.
generic_gen!(
    trait Ret<> {
        fn ret(&mut self,);
    }
    impl {
        () =>
        fn ret(&mut self,) {
            emit_retq()
        }
    }
);
// Indirect call through a register.
generic_gen!(
    trait Call<X> {
        fn call(&mut self,X);
    }
    impl {
        (Register) =>
        fn call(&mut self,r: Register) {
            emit_callq_reg(r)
        }
    }
);
#![feature(proc_macro_hygiene, decl_macro, try_trait, backtrace)]
#[macro_use]
extern crate rocket;
extern crate anyhow;
extern crate hmac;
extern crate jsonwebtoken;
extern crate sha2;
extern crate sled;
use rocket_contrib::json::Json;
use serde::{Deserialize, Serialize};
// use std::backtrace::Backtrace;
use std::collections::HashSet;
use std::env;
use std::option::NoneError;
use thiserror::Error;
use anyhow::anyhow;
use anyhow::Context;
use argon2::{self};
use rocket::fairing::AdHoc;
use rocket::http::Status;
use rocket::request::{self, FromRequest, Request};
use rocket::Outcome;
use rocket::State;
use sled_extensions::bincode::Tree;
use sled_extensions::DbExt;
/// JWT claim set: the subject's email plus the expiry timestamp in seconds
/// since the epoch (the registered `exp` claim).
#[derive(Debug, Serialize, Deserialize)]
struct Claims {
    email: String,
    exp: usize,
}
/// One property-investment scenario as posted by the client (serialized as
/// camelCase JSON). Monetary/ratio fields are plain `f64`s with no unit
/// enforcement here.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
#[serde(rename_all = "camelCase")]
struct Investment {
    financing_option: String,
    purchase_price: f64,
    down_payment: f64,
    interest_rate: f64,
    rent: f64,
    annual_taxes: f64,
    insurance: f64,
    expense_ratio: f64,
    prop_management: f64,
    misc_expenses: f64,
    hoa: f64,
    /// Display name.
    title: String,
    /// Record key in the `houses` tree; supplied by the client payload.
    uuid: String,
}
/// Typed sled trees that make up the application database.
struct Database {
    /// investment uuid -> Investment body.
    houses: Tree<Investment>,
    /// email -> stored account record.
    users: Tree<User>,
    /// email -> uuids of that user's investments.
    user_investments: Tree<Vec<String>>,
    /// investment uuid -> owner (presumably an email). NOTE(review): read
    /// in `patch_investment` but never written anywhere in this file —
    /// verify it is populated elsewhere or the ownership check is vacuous.
    investment_users: Tree<String>,
}
#[get("/")]
fn index() -> &'static str {
"Hello, world!"
}
#[get("/investments")]
fn get_investments(db: State<Database>, user: User) -> anyhow::Result<Json<Vec<Investment>>> {
let house_uuids = db
.user_investments
.get(user.email)?
.ok_or_else(|| anyhow!("No user investments"))?;
let mut houses: Vec<Investment> = vec![];
for house_uuid in house_uuids {
let house = db
.houses
.get(&house_uuid)?
.ok_or_else(|| anyhow!("No investment"))?;
// let mut house: Investment = bincode::deserialize(&house)?;
houses.push(house);
}
Ok(Json(houses))
}
/// Error type surfaced by the request guards.
#[derive(Error, Debug)]
pub enum ServerError {
    #[error("unauthorized")]
    Unauthorized,
    #[error("unknown data store error")]
    Unknown,
}
/// Maps a failed `?` on `Option` (nightly `try_trait`) to the generic
/// storage error.
impl From<NoneError> for ServerError {
    fn from(_err: NoneError) -> Self {
        ServerError::Unknown
    }
}
/// Authenticates a request from its `Authorization` header: the raw header
/// value is treated as a JWT and resolved to a stored `User`.
/// NOTE(review): no "Bearer " prefix is stripped — clients must send the
/// bare token.
impl<'a, 'r> FromRequest<'a, 'r> for User {
    type Error = ServerError;
    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
        // The Database handle is rocket-managed state; failing to obtain it
        // is a 500, not a 401.
        let db = request
            .guard::<State<Database>>()
            .map_failure(|_e| (Status::InternalServerError, ServerError::Unknown))?;
        let authorization: Option<&str> = request.headers().get("Authorization").next();
        match authorization {
            Some(authorization) => match resolve_jwt(authorization, &db) {
                Err(e) => {
                    // Log the root cause but return a bare 401 to the client.
                    eprintln!("{:?}", e);
                    Outcome::Failure((Status::Unauthorized, ServerError::Unauthorized))
                }
                Ok(user) => Outcome::Success(user),
            },
            None => Outcome::Failure((Status::Unauthorized, ServerError::Unauthorized)),
        }
    }
}
#[get("/investments/<uuid>")]
fn get_investment(db: State<Database>, uuid: String) -> anyhow::Result<Json<Investment>> {
let house = db
.houses
.get(uuid)?
.ok_or_else(|| anyhow!("Investment does not exist"))?;
// let house = serde_json::to_string(&house)?;
Ok(Json(house))
}
#[post("/investments", data = "<houses>")]
fn patch_investment(
db: State<Database>,
houses: Json<Vec<Investment>>,
user: User,
) -> anyhow::Result<&'static str> {
let house_uuids = db.user_investments.get(&user.email)?;
let mut house_uuids: Vec<String> = house_uuids.unwrap_or_default();
dbg!(house_uuids.clone());
let existing_house_uuids: HashSet<String> = house_uuids.clone().into_iter().collect();
dbg!(existing_house_uuids.clone());
let mut new_house_uuids: Vec<_> = houses
.iter()
.filter(|house| !existing_house_uuids.contains(&house.uuid))
.map(|h| h.uuid.clone())
.collect();
dbg!(new_house_uuids.clone());
new_house_uuids = new_house_uuids
.into_iter()
.filter(|new_house_uuid| {
let existing_owner = db.investment_users.get(new_house_uuid);
match existing_owner {
Ok(Some(_)) | Err(_) => false,
Ok(None) => true,
}
})
.collect::<Vec<String>>();
// existing_house_uuids.
house_uuids.append(&mut new_house_uuids);
db.user_investments
.insert(user.email.as_bytes(), house_uuids)?;
for house in houses.iter() {
db.houses.insert(house.uuid.as_bytes(), house.clone())?;
}
Ok("Blah")
}
#[get("/health")]
fn health_check() -> anyhow::Result<&'static str> {
Ok("Healthy")
}
/// Login/signup request payload.
#[derive(Deserialize, PartialEq, Debug)]
struct Credentials {
    email: String,
    password: String,
}
/// Stored account record; `hash` is the argon2-encoded password hash and
/// `salt` the string it was derived with (see `hash_credentials`).
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct User {
    email: String,
    hash: String,
    salt: String,
}
/// Response carrying a freshly issued JWT.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
struct AuthResponse {
    token: String,
}
/// Minimal public view of a user.
/// NOTE(review): not referenced anywhere in this file — possibly dead code.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
struct UserResponse {
    email: String,
}
#[post("/signup", data = "<credentials>")]
fn signup(
db: State<Database>,
credentials: Json<Credentials>,
) -> anyhow::Result<Json<AuthResponse>> {
let (hash, salt) = hash_credentials(&*credentials);
let user = User {
email: credentials.email.clone(),
hash,
salt,
};
let existing_user = db.users.get(user.email.as_bytes())?;
if existing_user.is_some() {
Err(anyhow!("Existing user"))
} else {
db.users.insert(user.email.as_bytes(), user.clone())?;
Ok(Json(AuthResponse {
token: create_jwt(user)?,
// token: "BLAH".to_string(),
}))
}
}
/// Derives the argon2-encoded hash for the given credentials.
/// Returns `(encoded_hash, salt)`.
///
/// SECURITY NOTE(review): the salt is deterministic (email + fixed suffix),
/// so it adds no per-user randomness beyond the email itself. A random
/// per-user salt would be stronger, but changing it would invalidate every
/// stored hash, so it is only flagged here.
fn hash_credentials(credentials: &Credentials) -> (String, String) {
    let config = argon2::Config::default();
    let salt = credentials.email.clone() + "r u salty";
    // `as_bytes` already yields `&[u8]`; the extra `&` borrows the original
    // code carried were redundant (clippy::needless_borrow).
    let hash = argon2::hash_encoded(credentials.password.as_bytes(), salt.as_bytes(), &config)
        .expect("argon2 hash_encoded failed");
    (hash, salt)
}
/// Validates `jwt` (signature + default claims validation, including `exp`)
/// and loads the user named in its claims.
///
/// Errors when the token is invalid/expired or the user no longer exists.
fn resolve_jwt(jwt: &str, db: &Database) -> anyhow::Result<User> {
    // `unwrap_or_else` avoids allocating the fallback when SECRET is set
    // (clippy::or_fun_call).
    let secret = env::var("SECRET").unwrap_or_else(|_| "DEV_SECRET".to_string());
    let claims = jsonwebtoken::decode::<Claims>(
        jwt,
        &jsonwebtoken::DecodingKey::from_secret(secret.as_ref()),
        &jsonwebtoken::Validation::default(),
    )
    .context("Failed to resolve JWT")?;
    let user = db
        .users
        .get(claims.claims.email)?
        .ok_or_else(|| anyhow!("BLAH"))?;
    Ok(user)
}
/// Issues a JWT for `user` that expires one day from now.
fn create_jwt(user: User) -> anyhow::Result<String> {
    let now = chrono::Local::now();
    let claims = Claims {
        email: user.email,
        exp: (now + chrono::Duration::days(1)).timestamp() as usize,
    };
    // Lazily build the fallback secret only when the env var is missing
    // (clippy::or_fun_call).
    let secret = env::var("SECRET").unwrap_or_else(|_| "DEV_SECRET".to_string());
    jsonwebtoken::encode(
        &jsonwebtoken::Header::default(),
        &claims,
        &jsonwebtoken::EncodingKey::from_secret(secret.as_ref()),
    )
    .map_err(|_e| anyhow!("jwt creation failed"))
}
#[post("/login", data = "<credentials>")]
fn login(
db: State<Database>,
credentials: Json<Credentials>,
) -> anyhow::Result<Json<AuthResponse>> {
let (hash, _) = hash_credentials(&*credentials);
let user = db
.users
.get(&credentials.email)?
.ok_or_else(|| anyhow!("no user investments"))?;
if hash == user.hash {
Ok(Json(AuthResponse {
token: create_jwt(user)?,
}))
} else {
Err(anyhow!("hash doesn't match"))
}
}
/// Opens the sled database, wires up the routes under `/api/` and launches
/// rocket. `launch()` blocks for the lifetime of the server; this function
/// only returns early on tree-open failure.
fn launch_server() -> anyhow::Result<&'static str> {
    // Lazily build the default path only when DB_PATH is unset
    // (clippy::or_fun_call).
    let path = env::var("DB_PATH").unwrap_or_else(|_| "./data/sled".to_string());
    let db = sled_extensions::Config::default()
        .path(path)
        .open()
        .expect("Failed to open sled db");
    let db_clone = db.clone();
    let db = Database {
        houses: db.open_bincode_tree("houses")?,
        users: db.open_bincode_tree("users")?,
        user_investments: db.open_bincode_tree("user_investments")?,
        investment_users: db.open_bincode_tree("investment_users")?,
    };
    rocket::ignite()
        .manage(db)
        .attach(AdHoc::on_response("Flush sled", move |_req, _res| {
            // Flush after every response. A flush failure is deliberately
            // non-fatal, but the Result must not be silently dropped.
            if let Err(e) = db_clone.flush() {
                eprintln!("sled flush failed: {:?}", e);
            }
        }))
        .mount(
            "/api/",
            routes![
                index,
                get_investments,
                get_investment,
                health_check,
                patch_investment,
                signup,
                login
            ],
        )
        .launch();
    Ok("")
}
/// Entry point. The original discarded `launch_server`'s `Result`; surface
/// startup failures and exit non-zero instead.
fn main() {
    if let Err(e) = launch_server() {
        eprintln!("server failed to start: {:?}", e);
        std::process::exit(1);
    }
}
|
use std::fmt::{Display, Formatter, Result};
use std::result::Result as StdResult;
use std::rc::Rc;
use std::io;
use vec_map::VecMap;
use Arg;
use args::{AnyArg, HelpWriter};
use args::settings::{ArgFlags, ArgSettings};
#[allow(missing_debug_implementations)]
#[doc(hidden)]
/// Internal builder describing a single positional argument: its index,
/// value-count constraints, relationships to other arguments, and the
/// metadata used for help output.
pub struct PosBuilder<'n, 'e> {
    // Argument name (also used as the value name in help output).
    pub name: &'n str,
    // Help text for this positional, if any.
    pub help: Option<&'e str>,
    // Names of arguments this one requires when present.
    pub requires: Option<Vec<&'e str>>,
    // Names of arguments this one conflicts with.
    pub blacklist: Option<Vec<&'e str>>,
    // Whitelist of allowed values, if restricted.
    pub possible_vals: Option<Vec<&'e str>>,
    // Position of this argument on the command line.
    pub index: u64,
    // Exact / maximum / minimum number of values accepted.
    pub num_vals: Option<u64>,
    pub max_vals: Option<u64>,
    pub min_vals: Option<u64>,
    // Per-value display names, keyed by value position.
    pub val_names: Option<VecMap<&'e str>>,
    // Optional user-supplied value validator.
    pub validator: Option<Rc<Fn(String) -> StdResult<(), String>>>,
    // Names of arguments this one overrides.
    pub overrides: Option<Vec<&'e str>>,
    // Bit flags holding the ArgSettings state.
    pub settings: ArgFlags,
    // Delimiter used to split one shell word into multiple values.
    pub val_delim: Option<char>,
    // Default value used when the argument is absent.
    pub default_val: Option<&'n str>,
    // Ordering key for help display.
    pub disp_ord: usize,
}
impl<'n, 'e> Default for PosBuilder<'n, 'e> {
    /// An unnamed positional with no constraints. Note the non-`None`
    /// defaults: values split on ',' and a display order of 999 (sorts last).
    fn default() -> Self {
        PosBuilder {
            name: "",
            help: None,
            requires: None,
            blacklist: None,
            possible_vals: None,
            index: 0,
            num_vals: None,
            min_vals: None,
            max_vals: None,
            val_names: None,
            validator: None,
            overrides: None,
            settings: ArgFlags::new(),
            val_delim: Some(','),
            default_val: None,
            disp_ord: 999,
        }
    }
}
impl<'n, 'e> PosBuilder<'n, 'e> {
    /// Creates a positional builder with just a name and index; everything
    /// else takes the `Default` values.
    pub fn new(name: &'n str, idx: u64) -> Self {
        PosBuilder {
            name: name,
            index: idx,
            ..Default::default()
        }
    }
    /// Converts a user-facing `Arg` into a positional builder at index `idx`.
    ///
    /// If the argument is required, its own requirements are appended to the
    /// master `reqs` list.
    pub fn from_arg(a: &Arg<'n, 'e>, idx: u64, reqs: &mut Vec<&'e str>) -> Self {
        // Pass the format string and args straight to debug_assert! rather
        // than pre-building a String with format!(): the old form allocated
        // even when the assertion held, and a non-literal panic message is a
        // deprecated form.
        debug_assert!(a.short.is_none() || a.long.is_none(),
                      "Argument \"{}\" has conflicting requirements, both index() and short(), \
                       or long(), were supplied", a.name);
        // Create the Positional Argument Builder with each collection = None
        // to only allocate those that require it.
        let mut pb = PosBuilder {
            name: a.name,
            index: idx,
            num_vals: a.num_vals,
            min_vals: a.min_vals,
            max_vals: a.max_vals,
            val_names: a.val_names.clone(),
            blacklist: a.blacklist.clone(),
            overrides: a.overrides.clone(),
            requires: a.requires.clone(),
            possible_vals: a.possible_vals.clone(),
            help: a.help,
            val_delim: a.val_delim,
            settings: a.settings,
            default_val: a.default_val,
            disp_ord: a.disp_ord,
            ..Default::default()
        };
        // Any min/max bound, or an exact count above one, implies multiple values.
        if a.max_vals.is_some()
            || a.min_vals.is_some()
            || (a.num_vals.is_some() && a.num_vals.unwrap() > 1) {
            pb.settings.set(ArgSettings::Multiple);
        }
        if let Some(ref p) = a.validator {
            pb.validator = Some(p.clone());
        }
        // If the arg is required, add all its requirements to the master list.
        if a.is_set(ArgSettings::Required) {
            if let Some(ref areqs) = a.requires {
                for r in areqs { reqs.push(*r); }
            }
        }
        pb
    }
    /// Renders this argument's help line into `w`.
    ///
    /// `longest` is the width of the longest argument (for column alignment),
    /// `skip_pv` suppresses the possible-values list, and `nlh` forces
    /// next-line help placement.
    pub fn write_help<W: io::Write>(&self, w: &mut W, longest: usize, skip_pv: bool, nlh: bool) -> io::Result<()> {
        let mut hw = HelpWriter::new(self, longest, nlh);
        hw.skip_pv = skip_pv;
        hw.write_to(w)
    }
}
impl<'n, 'e> Display for PosBuilder<'n, 'e> {
fn fmt(&self, f: &mut Formatter) -> Result {
if self.settings.is_set(ArgSettings::Required) {
if let Some(ref names) = self.val_names {
try!(write!(f, "{}", names.values().map(|n| format!("<{}>", n)).collect::<Vec<_>>().join(" ")));
} else {
try!(write!(f, "<{}>", self.name));
}
} else {
if let Some(ref names) = self.val_names {
try!(write!(f, "{}", names.values().map(|n| format!("[{}]", n)).collect::<Vec<_>>().join(" ")));
} else {
try!(write!(f, "[{}]", self.name));
}
}
if self.settings.is_set(ArgSettings::Multiple) && self.val_names.is_none() {
try!(write!(f, "..."));
}
Ok(())
}
}
impl<'n, 'e> Clone for PosBuilder<'n, 'e> {
    // Hand-written clone: copies borrows and flags field-by-field, deep-clones
    // the owned collections, and refcount-bumps the validator Rc.
    fn clone(&self) -> Self {
        PosBuilder {
            name: self.name,
            help: self.help,
            blacklist: self.blacklist.clone(),
            overrides: self.overrides.clone(),
            requires: self.requires.clone(),
            settings: self.settings,
            disp_ord: self.disp_ord,
            num_vals: self.num_vals,
            min_vals: self.min_vals,
            max_vals: self.max_vals,
            val_names: self.val_names.clone(),
            val_delim: self.val_delim,
            possible_vals: self.possible_vals.clone(),
            default_val: self.default_val,
            validator: self.validator.clone(),
            index: self.index,
        }
    }
}
/// Uniform accessor interface used by the parser and help writer. Positionals
/// have no switch (`has_switch` is false, `short`/`long` are `None`) and
/// always take a value.
impl<'n, 'e> AnyArg<'n, 'e> for PosBuilder<'n, 'e> {
    fn name(&self) -> &'n str { self.name }
    fn overrides(&self) -> Option<&[&'e str]> { self.overrides.as_ref().map(|o| &o[..]) }
    fn requires(&self) -> Option<&[&'e str]> { self.requires.as_ref().map(|o| &o[..]) }
    fn blacklist(&self) -> Option<&[&'e str]> { self.blacklist.as_ref().map(|o| &o[..]) }
    fn val_names(&self) -> Option<&VecMap<&'e str>> { self.val_names.as_ref() }
    fn is_set(&self, s: ArgSettings) -> bool { self.settings.is_set(s) }
    fn set(&mut self, s: ArgSettings) { self.settings.set(s) }
    fn has_switch(&self) -> bool { false }
    fn max_vals(&self) -> Option<u64> { self.max_vals }
    fn num_vals(&self) -> Option<u64> { self.num_vals }
    fn possible_vals(&self) -> Option<&[&'e str]> { self.possible_vals.as_ref().map(|o| &o[..]) }
    fn validator(&self) -> Option<&Rc<Fn(String) -> StdResult<(), String>>> {
        self.validator.as_ref()
    }
    fn min_vals(&self) -> Option<u64> { self.min_vals }
    fn short(&self) -> Option<char> { None }
    fn long(&self) -> Option<&'e str> { None }
    fn val_delim(&self) -> Option<char> { self.val_delim }
    fn takes_value(&self) -> bool { true }
    fn help(&self) -> Option<&'e str> { self.help }
    fn default_val(&self) -> Option<&'n str> { self.default_val }
}
#[cfg(test)]
mod test {
    use super::PosBuilder;
    use args::settings::ArgSettings;
    use vec_map::VecMap;
    // Multiple-setting positional renders with a trailing ellipsis.
    #[test]
    fn display_mult() {
        let mut p = PosBuilder::new("pos", 1);
        p.settings.set(ArgSettings::Multiple);
        assert_eq!(&*format!("{}", p), "[pos]...");
    }
    // Required positional renders in angle brackets.
    #[test]
    fn display_required() {
        let mut p2 = PosBuilder::new("pos", 1);
        p2.settings.set(ArgSettings::Required);
        assert_eq!(&*format!("{}", p2), "<pos>");
    }
    // Explicit value names replace the argument name, space-separated.
    #[test]
    fn display_val_names() {
        let mut p2 = PosBuilder::new("pos", 1);
        let mut vm = VecMap::new();
        vm.insert(0, "file1");
        vm.insert(1, "file2");
        p2.val_names = Some(vm);
        assert_eq!(&*format!("{}", p2), "[file1] [file2]");
    }
    // Required + value names: each name gets angle brackets.
    #[test]
    fn display_val_names_req() {
        let mut p2 = PosBuilder::new("pos", 1);
        p2.settings.set(ArgSettings::Required);
        let mut vm = VecMap::new();
        vm.insert(0, "file1");
        vm.insert(1, "file2");
        p2.val_names = Some(vm);
        assert_eq!(&*format!("{}", p2), "<file1> <file2>");
    }
}
|
// Pull in the shared test-suite macros from mod_tests.rs; the explicit
// #[path] is needed because the file sits beside this one rather than in a
// mod_tests/ subdirectory.
#[macro_use]
#[path = "./mod_tests.rs"]
mod mod_tests;
use self::mod_tests::*;
use bc::mem::Mem;
// Instantiate the shared test suite against the `Mem` backend.
define_tests!(Mem);
|
use actix::fut;
use actix::prelude::*;
use actix_web::*;
use futures::future::Future;
use serde_json;
use std::time::Instant;
use model::person::{PersonList, PersonUpdate};
use ws_server;
use AppState;
/// GET handler: asks the DB actor for the full person list and renders the
/// result as JSON; any actor-side failure becomes a 500 response.
pub fn person_list(req: HttpRequest<AppState>) -> FutureResponse<HttpResponse> {
    req.state()
        .db
        .send(PersonList)
        .from_err()
        .and_then(|res| match res {
            Ok(person_list) => Ok(HttpResponse::Ok().json(person_list)),
            Err(_) => Ok(HttpResponse::InternalServerError().into()),
        })
        .responder()
}
/*
pub fn person_update(
person_update: Json<PersonUpdate>,
state: State<AppState>,
) -> FutureResponse<HttpResponse> {
state
.db
.send(PersonUpdate {
person_id: person_update.person_id,
op: person_update.op.clone(),
})
.from_err()
.and_then(|res| match res {
Ok(msg) => Ok(HttpResponse::Ok().json(msg)),
Err(_) => Ok(HttpResponse::InternalServerError().into()),
})
.responder()
}
*/
/// Upgrades the HTTP request to a websocket backed by a new `PersonSession`.
pub fn ws(req: HttpRequest<AppState>) -> Result<HttpResponse, Error> {
    ws::start(
        req,
        PersonSession {
            // Placeholder id; the ws server assigns the real one in `started`.
            id: 0,
            hb: Instant::now(),
        },
    )
}
/// Per-connection websocket actor state.
struct PersonSession {
    // Session id assigned by the ws server on connect.
    id: usize,
    // Time of the last Pong frame received (heartbeat tracking).
    hb: Instant,
}
impl Actor for PersonSession {
    type Context = ws::WebsocketContext<Self, AppState>;
    /// On start, register this session with the shared ws server and record
    /// the id it hands back; if registration fails, stop the session.
    fn started(&mut self, ctx: &mut Self::Context) {
        let addr: Addr<Syn, _> = ctx.address();
        ctx.state()
            .ws
            .send(ws_server::Connect {
                addr: addr.recipient(),
            })
            .into_actor(self)
            .then(|res, act, ctx| {
                match res {
                    Ok(res) => act.id = res,
                    // Mailbox/send error: no server-side session, so close.
                    _ => ctx.stop(),
                }
                fut::ok(())
            })
            .wait(ctx);
    }
    /// On shutdown, tell the ws server to drop this session id.
    fn stopping(&mut self, ctx: &mut Self::Context) -> Running {
        ctx.state()
            .ws
            .do_send(ws_server::Disconnect { id: self.id });
        Running::Stop
    }
}
impl Handler<ws_server::Message> for PersonSession {
    type Result = ();
    /// Forwards a broadcast from the ws server to this client as a text frame.
    fn handle(&mut self, msg: ws_server::Message, ctx: &mut Self::Context) {
        ctx.text(msg.0);
    }
}
impl StreamHandler<ws::Message, ws::ProtocolError> for PersonSession {
    /// Dispatches incoming websocket frames: answers pings, records pongs,
    /// applies text frames as `PersonUpdate`s (then broadcasts the raw text
    /// to all clients), and stops the actor on a close frame. Binary frames
    /// are not supported.
    fn handle(&mut self, msg: ws::Message, ctx: &mut Self::Context) {
        match msg {
            ws::Message::Ping(msg) => ctx.pong(&msg),
            ws::Message::Pong(_msg) => self.hb = Instant::now(),
            ws::Message::Text(text) => {
                // Parse defensively: the previous `.unwrap()` let any
                // malformed client frame panic and kill this session actor.
                let person_update: PersonUpdate = match serde_json::from_str(&text) {
                    Ok(update) => update,
                    Err(e) => {
                        println!("ignoring malformed PersonUpdate frame: {}", e);
                        return;
                    }
                };
                ctx.state()
                    .db
                    .send(PersonUpdate {
                        person_id: person_update.person_id,
                        op: person_update.op.clone(),
                    })
                    .into_actor(self)
                    .then(|_res, _act, ctx| {
                        // Echo the raw update text to every connected client.
                        ctx.state()
                            .ws
                            .do_send(ws_server::ClientMessage { msg: text });
                        fut::ok(())
                    })
                    .wait(ctx);
            }
            ws::Message::Binary(_bin) => println!("binary is not supported."),
            ws::Message::Close(_) => {
                ctx.stop();
            }
        }
    }
}
|
/// A 2-D vector of `f32` components.
///
/// Derives `Clone`/`Copy` (it is just two floats) and `PartialEq` so vectors
/// can be compared and passed by value; this is backward compatible with all
/// existing uses.
#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct Vector2 {
    pub x: f32,
    pub y: f32,
}
#[allow(dead_code)]
impl Vector2 {
    /// Builds a vector from its components.
    pub fn new(x: f32, y: f32) -> Vector2 {
        Vector2 { x, y }
    }
    /// The zero vector (0, 0).
    pub fn zero() -> Vector2 {
        Vector2::new(0.0, 0.0)
    }
    /// The all-ones vector (1, 1).
    pub fn one() -> Vector2 {
        Vector2::new(1.0, 1.0)
    }
    /// Unit vector along +x.
    pub fn right() -> Vector2 {
        Vector2::new(1.0, 0.0)
    }
    /// Unit vector along -x.
    pub fn left() -> Vector2 {
        Vector2::new(-1.0, 0.0)
    }
    /// Unit vector along +y.
    pub fn up() -> Vector2 {
        Vector2::new(0.0, 1.0)
    }
    /// Unit vector along -y.
    pub fn down() -> Vector2 {
        Vector2::new(0.0, -1.0)
    }
    /// Component-wise sum; neither operand is modified.
    pub fn add(&self, adder: &Vector2) -> Vector2 {
        Vector2::new(self.x + adder.x, self.y + adder.y)
    }
    /// Component-wise difference (`self - subber`).
    pub fn sub(&self, subber: &Vector2) -> Vector2 {
        Vector2::new(self.x - subber.x, self.y - subber.y)
    }
}
|
use std::cell::RefCell;
use std::{mem, os, iter, ptr};
use std::vec::Vec;
use std::rc::Rc;
use def::*;
use core::{raw, slice};
use core::cell::{Ref};
use page;
use page::{Page, PageHeader, DbRecord, RecordFlags, DbKey, DbValue, PageFlags};
use db::{Transaction, FlagTxnError, FlagDbValid};
use environ::{Environ, DbParams, DatabaseInfo, DbFlags};
// Lifecycle of a cursor: positioned on data (`Initialized`) or moved past the
// last record (`EOF`).
#[derive(Clone)]
enum CursorState {
    Initialized,
    EOF
}
// Failure modes of cursor operations.
enum CursorOpError {
    CursorStackFull,
    KeyPairExist,
    KeyPairNotFound,
    PageNotFound,
    DatabaseCorrupted,
    PageFull
}
type CursorOpResult = Result<(), CursorOpError>;
// Page-navigation commands used by tree searches.
enum SearchOp {
    FirstPage,
    PrevPage,
    NextPage,
    LastPage,
    RootOnly
}
// A search either follows a navigation command or seeks a specific key.
enum SearchCond {
    SearchCmd(SearchOp),
    SearchByKey(DbKey)
}
// One level of the cursor's descent: a page plus the record index within it.
#[derive(Clone)]
struct StackElem {
    p: Rc<RefCell<Box<Page>>>,
    idx: usize
}
// A cursor over one database inside a transaction; `stack` records the path
// of pages from the root down to the current position.
pub struct Cursor {
    txn: Rc<RefCell<Transaction>>,
    cursor_state: CursorState,
    db_handle: DbHandle,
    db: Rc<RefCell<DatabaseInfo>>,
    stack: Vec<StackElem>
}
impl Cursor {
pub fn open(txn: Rc<RefCell<Transaction>>, db_handle: DbHandle) -> Option<Cursor> {
fn is_database_existed(txn: Ref<Transaction>, db_handle: DbHandle) -> bool {
if db_handle < txn.num_of_dbs {
if (txn.dbs_flags[db_handle].contains(FlagDbValid)) {
true
} else {
false
}
} else {
false
}
}
if !is_database_existed(txn.borrow(), db_handle) {
return None;
}
let cursor = Cursor {
txn: txn.clone(),
cursor_state: CursorState::Initialized,
db_handle: db_handle,
db: txn.borrow().dbs[db_handle].clone(),
stack: Vec::with_capacity(CURSOR_STACK_SIZE)
};
Some(cursor)
}
    // True while the cursor points at a valid position.
    fn is_initialized(&self) -> bool {
        match self.cursor_state {
            CursorState::Initialized => { true }
            _ => { false }
        }
    }
    // True once the cursor has run past the end of the data.
    fn is_last(&self) -> bool {
        match self.cursor_state {
            CursorState::EOF => { true }
            _ => { false }
        }
    }
//fn touch_page(&mut self) {
//let p = self.alloc_page(1);
//let e = StackElem { p: p.clone(), idx: 0 };
//self.stack.push(e);
//}
//fn calc_record_size_in_leaf(&self, key: &DbKey, val: &DbValue) -> usize {
//let mut sz: usize = mem::size_of::<PageNum>() + mem::size_of::<RecordFlags>() + key._data.len() + val._data.len();
//let record_maxsize = {
//let t1 = self.txn.borrow();
//let t2 = t1.env.borrow();
//t2.record_maxsize
//};
//if sz > record_maxsize {
//sz -= mem::size_of::<PageNum>() + val._data.len();
//}
//round_up_even(sz)
//}
//fn calc_record_size_in_branch(&self, key: &DbKey) -> usize {
//let sz: usize = mem::size_of::<PageNum>() + mem::size_of::<RecordFlags>() + key._data.len();
//sz
//}
//pub fn put(&mut self, key: DbKey, val: DbValue) {
//let txn = self.txn.clone();
//if txn.borrow().is_read_only() {
//return;
//}
//if self.db.borrow().root_page.is_none() {
////New Database, No root page, the cursor would be in an invalid state
//} else {
//}
//}
    // Looks up the value stored under `key`.
    // TODO: unimplemented stub — always returns None.
    pub fn get(&mut self, key: DbKey) -> Option<DbValue> {
        None
    }
    // Deletes the record at the current cursor position.
    // TODO: unimplemented stub — currently a no-op.
    pub fn del(&mut self) {
    }
//fn push(&mut self, page: Rc<RefCell<Box<Page>>>) -> CursorOpResult {
//debug!("pushing page {} on db {}", page.borrow().get_page_number(), self.db_handle);
//if self.stack.len() >= CURSOR_STACK_SIZE {
//return Err(CursorOpError::CursorStackFull);
//};
//let e = StackElem { p: page.clone(), idx: 0 };
//self.stack.push(e);
//Ok(())
//}
//fn pop(&mut self) -> Option<(Rc<RefCell<Box<Page>>>, usize)> {
//if self.stack.len() > 0 {
//match self.stack.pop() {
//None => {
//None
//},
//Some(elem) => {
//Some((elem.p.clone(), elem.idx.clone()))
//}
//}
//} else {
//None
//}
//}
    // Releases cursor resources; currently a no-op. Also invoked from Drop.
    pub fn close(&mut self) -> () {
    }
//fn alloc_page(&mut self, n: PageNum) -> Option<Rc<RefCell<Box<Page>>>> {
//let mut txn = self.txn.borrow_mut();
//if n == 1 && !txn.loose_pages.is_empty() {
//let new_page = txn.loose_pages.pop().unwrap();
//return Some(new_page);
//}
//if txn.page_manager.is_dirty_page_full() {
//return None;
//}
////Search Free Pages from FREE DB
////FIXME:
//let mut op: SearchOp = SearchOp::FirstPage;
//loop {
//match op {
//SearchOp::FirstPage => {
//let c = Cursor::open(self.txn.clone(), FREE_DBI_HANDLE);
//},
//_ => {}
//}
//break;
//op = SearchOp::NextPage;
//}
////If no page with enough space was found, alloc from MMap
//let next_pgno: u64 = txn.next_pgno;
//let pgno_upperbound: u64 = txn.env.borrow().db_pgno_upperbound;
//if (next_pgno + n >= pgno_upperbound) {
//debug!("db size maxed out");
//return None;
//}
//let mut new_page = Box::new(Page { _data: RefCell::new(Vec::with_capacity(txn.env.borrow().db_pagesize)) });
//txn.next_pgno += n.to_u64().unwrap();
//new_page.set_page_number(next_pgno);
//let rc_new_page = Rc::new(RefCell::new(new_page));
//txn.dirty_pages.push_back(rc_new_page.clone());
//Some(rc_new_page)
//}
//fn loose_page(&mut self, page: Rc<RefCell<Box<Page>>>) -> () {
//let mut page = page.borrow_mut();
//let mut txn = self.txn.borrow_mut();
//fn is_safe_to_loose(p: Ref<Box<Page>>) -> bool {
//if (*p).is_dirty() {
//true
//} else {
//false
//}
//}
//if is_safe_to_loose(page.borrow()) {
//debug!("looseen page {}", page.get_page_number());
//page.set_loose();
//txn.loose_pages.push(page.clone());
//}
//}
//fn new_page(&mut self, flags: PageFlags, n: PageNum) -> Rc<RefCell<Box<Page>>> {
//let mut rc_new_page = self.alloc_page(n).unwrap();
//{
//let mut new_page = rc_new_page.borrow_mut();
//debug!("allocate a new page {}", new_page.get_page_number());
//let txn = self.txn.borrow();
//let env = txn.env.borrow();
//let db_pagesize: Offset = env.db_pagesize.to_u16().unwrap();
//new_page.set_page_flag(flags);
//new_page.set_dirty();
//new_page.set_space_indicator(SpaceIndicator::Bounds(0, db_pagesize));
//if new_page.is_branch_page() {
//self.db.borrow_mut().branch_page_num += 1;
//} else if new_page.is_leaf_page() {
//self.db.borrow_mut().leaf_page_num += 1;
//} else if new_page.is_overflow_page() {
//self.db.borrow_mut().overflow_page_num += n.to_u64().unwrap();
//new_page.set_space_indicator(SpaceIndicator::OverflowPageNum(n.to_u32().unwrap()));
//}
//}
//rc_new_page
//}
//fn search_page_from_root(&mut self, search_cond: SearchCond, touch: bool) -> CursorOpResult {
//let mut page_ref = self.stack.last().unwrap().p.clone();
//while page_ref.borrow().is_branch_page() {
//let key_num = page_ref.borrow().number_of_keys();
//debug!("branch page {} has {} keys", page_ref.borrow().get_page_number(), key_num);
//assert!(key_num > 1);
//let mut i: usize = match search_cond {
//SearchCond::SearchCmd(ref op) => {
//match op {
//&SearchOp::FirstPage => { 0 },
//&SearchOp::LastPage => { key_num - 1 },
//_ => { assert!(true, "shouldn't reach here"); 0 }
//}
//},
//SearchCond::SearchByKey(ref k) => {
//match self.search_dbrecord(k) {
//Some(ref r) => { self.idx_in_pages.last().unwrap().clone() },
//None => { key_num - 1 }
//}
//}
//};
//assert!(i < key_num);
//let db_record = page_ref.borrow().get_dbrecord(i).unwrap();
//let pgno = db_record.page_number;
//page_ref = match self.txn.borrow_mut().read_page(pgno) {
//Some(p) => { p },
//None => { return Err(CursorOpError::PageNotFound); }
//};
//self.idx_in_pages.push(i);
//match self.push(page_ref.clone()) {
//Ok(_) => {},
//Err(e) => { return Err(e); }
//}
//if touch {
//self.touch_page();
//page_ref = self.db_pages.last().unwrap().p.clone();
//}
//}
//if page_ref.borrow().is_leaf_page() {
//debug!("internal error!, index point to a ??? page");
//self.txn.borrow_mut().txn_flags.insert(FlagTxnError);
//return Err(CursorOpError::DatabaseCorrupted);
//}
//debug!("found leaf page {}", page_ref.borrow().get_page_number());
//self.cursor_state = CursorState::Initialized;
//Ok(())
//}
//fn search_dbrecord(&mut self, key: &DbKey) -> Option<DbRecord> {
//None
//}
//fn search_page(&mut self, search_cond: SearchCond, touch: bool) -> CursorOpResult {
//if !self.txn.borrow().is_valid() {
//return Ok(());
//}
//let root_pgno = match self.db.borrow().root_page {
//Some(pgno) => { pgno },
//None => {
//debug!("btree is empty");
//return Err(CursorOpError::KeyPairNotFound);
//}
//};
//if self.stack.is_empty() || (self.stack[0].p.borrow().get_page_number() != root_pgno) {
//let mut rc_root_page = match self.txn.borrow_mut().read_page(root_pgno) {
//Some(p) => { p },
//None => { return Err(CursorOpError::PageNotFound)}
//};
//let elem = StackElem { p: rc_root_page.clone(), idx: 0 };
//self.db_pages.push(elem);
//}
//if touch {
////touch the page on the top of the cursor stack
//self.touch_page()
//}
//match search_cond {
//SearchCond::SearchCmd(op) => {
//match op {
//SearchOp::RootOnly => { return Ok(()); }
//_ => {
//return Ok(());
//}
//}
//},
//SearchCond::SearchByKey(k) =>{
//return Ok(());
//}
//}
//}
//fn merge_page(&mut self, dst_cursor: &Cursor) {
//}
//fn split_branch_page(&mut self, new_key: &DbKey, new_page_number: PageNum) {
//assert!(self.db_pages.last().unwrap().borrow().is_branch_page());
//let key_num = self.db_pages.last().unwrap().borrow().number_of_keys();
//let new_idx = self.idx_in_pages.last().unwrap().clone();
//debug!("splitting page: {} at index {}/{}", self.db_pages.last().unwrap().borrow().get_page_number(), new_idx, key_num);
//let flags = self.db_pages.last().unwrap().borrow().get_page_flag().clone();
//let rc_page = self.new_page(flags, 1);
//debug!("new right sibling page {}", rc_page.borrow().get_page_number());
//let mut parent_top = if self.db_pages.len() < 2 {
//let flags = PageFlags::empty() | page::P_BRANCH;
//let p = self.new_page(flags, 1);
//self.db_pages[1] = self.db_pages[0].clone();
//self.idx_in_pages[1] = self.idx_in_pages[0].clone();
//self.db_pages[0] = p;
//self.idx_in_pages[0] = 0;
//self.db.borrow_mut().root_page = Some(self.db_pages[0].borrow().get_page_number());
//debug!("root split, the new root is {}", self.db_pages[0].borrow().get_page_number());
//self.db.borrow_mut().btree_depth += 1;
//0
//} else {
//self.db_pages.len() - 2
//};
//let mut new_cursor = self.clone();
//(*new_cursor.db_pages.last_mut().unwrap()) = rc_page.clone();
//new_cursor.idx_in_pages[parent_top] = self.idx_in_pages[parent_top] + 1;
//let split_idx = (key_num + 1) / 2;
//let db_pagesize = {
//let t1 = self.txn.borrow();
//let t2 = t1.env.borrow();
//t2.db_pagesize
//};
//let free_space_upperbound = {
//let t1 = self.txn.borrow();
//let t2 = t1.env.borrow();
//t2.db_pagesize - PAGE_HEADER_SIZE
//};
//let seperator_key = {
//let p = self.db_pages.last().unwrap();
//let mut record_size = self.calc_record_size_in_branch(new_key);
//record_size = round_up_even(record_size);
//let buf_page: Box<Page> = Box::new(Page { _data: RefCell::new(Vec::with_capacity(db_pagesize)) });
//let seperator_key = if split_idx == new_idx {
//DbKey {
//_data: new_key._data.clone()
//}
//} else {
//let record = p.borrow().get_dbrecord(split_idx).unwrap();
//DbKey {
//_data: record.key._data.clone()
//}
//};
//seperator_key
//};
//debug!("seperator idx is {}", split_idx);
//let size_left = {
//let t1 = new_cursor.db_pages[parent_top].borrow();
//t1.size_left()
//};
//// copy seperator key to the parent
//if size_left < self.calc_record_size_in_branch(&seperator_key) {
//new_cursor.pop();
//new_cursor.split_branch_page(&seperator_key, rc_page.borrow().get_page_number());
//// root split
//if new_cursor.db_pages.len() == self.db_pages.len() {
//let last_idx = self.db_pages.len() - 1;
//let p = self.db_pages[last_idx].clone();
//let idx = self.idx_in_pages[last_idx].clone();
//self.db_pages.push(p);
//self.idx_in_pages.push(idx);
//self.db_pages[last_idx] = self.db_pages[parent_top].clone();
//self.idx_in_pages[last_idx] = self.idx_in_pages[parent_top].clone();
//parent_top += 1;
//}
//// right page could have dirrent parent
//if (new_cursor.db_pages[parent_top].borrow().get_page_number() != self.db_pages[parent_top].borrow().get_page_number())
//&& (self.idx_in_pages[parent_top] >= self.db_pages[parent_top].borrow().number_of_keys()) {
//for i in range(0, parent_top) {
//self.db_pages[i] = new_cursor.db_pages[i].clone();
//self.idx_in_pages[i] = new_cursor.idx_in_pages[i].clone();
//}
//self.db_pages[parent_top] = new_cursor.db_pages[parent_top].clone();
//if new_cursor.idx_in_pages[parent_top] > 0 {
//self.idx_in_pages[parent_top] = new_cursor.idx_in_pages[parent_top] - 1;
//} else {
//self.idx_in_pages[parent_top] = new_cursor.idx_in_pages[parent_top];
//let to_right = false;
//self.sibling(to_right);
//}
//}
//} else {
////FIXME
//let r = new_cursor.pop().unwrap();
//let idx = new_cursor.idx_in_pages[parent_top].to_u16().unwrap();
//new_cursor.add_dbrecord_to_branch(idx, seperator_key, rc_page.borrow().get_page_number());
//new_cursor.push(r.0);
//}
//}
//fn add_dbrecord_to_branch(&mut self, idx: RecordIndex, key: DbKey, page_number: PageNum) -> CursorOpResult {
//let page_ref = self.db_pages.last().unwrap();
//debug!("add to page {} index {}", page_ref.borrow().get_page_number(), idx);
//let room = page_ref.borrow().size_left() - mem::size_of::<RecordIndex>();
//let mut node_size = mem::size_of::<PageNum>() + mem::size_of::<RecordFlags>() + key._data.len();
//node_size = round_up_even(node_size);
//if node_size > room {
//debug!("not enough room in page {}, node_size = {}", page_ref.borrow().get_page_number(), node_size);
//self.txn.borrow_mut().txn_flags.insert(FlagTxnError);
//return Err(CursorOpError::PageFull);
//}
//let usize_idx = idx.to_uint().unwrap();
//let offset: RecordIndex = page_ref.borrow().get_upper_bound() - node_size.to_u16().unwrap();
//{
//let mut p = page_ref.borrow_mut();
//let mut s = p.get_mut_idx_slice();
//for i in iter::range_step(s.len()-1, usize_idx, -1) {
//s[i] = s[i-1];
//}
//s[usize_idx] = offset;
//}
//{
//let mut p = page_ref.borrow_mut();
//let mut d = p._data.borrow_mut();
//let mut slot = d.slice_mut(offset.to_uint().unwrap(), offset.to_uint().unwrap() + node_size);
//slice::bytes::copy_memory(slot, key._data.as_slice());
//}
//Ok(())
//}
//fn add_dbrecord_to_leaf(&mut self, idx: RecordIndex, key: DbKey, val: DbValue, mut flags: RecordFlags) -> CursorOpResult {
//debug!("add to page {} index {}", self.db_pages.last().unwrap().borrow().get_page_number(), idx);
//let room = self.db_pages.last().unwrap().borrow().size_left() - mem::size_of::<RecordIndex>();
//let mut node_size = mem::size_of::<PageNum>() + mem::size_of::<RecordFlags>() + key._data.len();
//let record_maxsize = {
//let t1 = self.txn.borrow();
//let t2 = t1.env.borrow();
//t2.record_maxsize
//};
//if flags.contains(page::FlagRecordBigData) {
//node_size += mem::size_of::<PageNum>();
//node_size = round_up_even(node_size);
//if node_size > room {
//debug!("not enough room in page {}, node_size = {}", self.db_pages.last().unwrap().borrow().get_page_number(), node_size);
//self.txn.borrow_mut().txn_flags.insert(FlagTxnError);
//return Err(CursorOpError::PageFull);
//}
//} else if node_size + val._data.len() > record_maxsize {
//let ovpages: u64 = {
//let t1 = self.txn.borrow();
//let t2 = t1.env.borrow();
//t2.overflow_pages_needed_num(val._data.len())
//};
//node_size = round_up_even(node_size + mem::size_of::<PageNum>());
//if node_size > room {
//debug!("not enough room in page {}, node_size = {}", self.db_pages.last().unwrap().borrow().get_page_number(), node_size);
//self.txn.borrow_mut().txn_flags.insert(FlagTxnError);
//return Err(CursorOpError::PageFull);
//}
//let rc_page = self.new_page(page::P_OVERFLOW, ovpages);
//debug!("allocated overflow page {}", rc_page.borrow().get_page_number());
//flags.insert(page::FlagRecordBigData);
//} else {
//node_size += val._data.len();
//node_size = round_up_even(node_size);
//if node_size > room {
//debug!("not enough room in page {}, node_size = {}", self.db_pages.last().unwrap().borrow().get_page_number(), node_size);
//self.txn.borrow_mut().txn_flags.insert(FlagTxnError);
//return Err(CursorOpError::PageFull);
//}
//}
//let page_ref = self.db_pages.last().unwrap();
//let usize_idx = idx.to_uint().unwrap();
//let offset: RecordIndex = page_ref.borrow().get_upper_bound() - node_size.to_u16().unwrap();
//{
//let mut p = page_ref.borrow_mut();
//let mut s = p.get_mut_idx_slice();
//for i in iter::range_step(s.len()-1, usize_idx, -1) {
//s[i] = s[i-1];
//}
//s[usize_idx] = offset;
//}
//{
//let mut p = page_ref.borrow_mut();
//let mut d = p._data.borrow_mut();
//let mut slot = d.slice_mut(offset.to_uint().unwrap(), offset.to_uint().unwrap() + node_size);
//slice::bytes::copy_memory(slot, key._data.as_slice());
//}
//Ok(())
//}
//fn first(&mut self) -> Option<(DbKey, DbValue)> {
//if !self.is_initialized() {
//let touch = false;
//let search_cond = SearchCond::SearchCmd(SearchOp::FirstPage);
//match self.search_page(search_cond, touch) {
//Ok(_) => {},
//Err(_) => { return None; }
//};
//}
//let mut page_ref = self.db_pages.last().unwrap();
//assert!(page_ref.borrow().is_leaf_page());
//let db_record = match page_ref.borrow().get_dbrecord(0) {
//Some(r) => { r },
//None => { return None; }
//};
//self.cursor_flags.insert(FlagInitialized);
//self.cursor_flags.remove(FlagEOF);
//self.idx_in_pages.push(0);
//let r = (db_record.key.clone(), db_record.value.clone());
//Some(r)
//}
//fn last(&mut self) -> Option<(DbKey, DbValue)> {
//None
//}
//// Move to the sibling
//fn sibling(&mut self, to_right: bool) -> CursorOpResult {
//Ok(())
//}
//// Move to the next data item
//fn next(&mut self) -> Option<(DbKey, DbValue)> {
//if self.is_last() {
//return None;
//}
//assert!(self.is_initialized());
//let key_num = self.db_pages.last().unwrap().borrow().number_of_keys();
//debug!("top page is {}", self.db_pages.last().unwrap().borrow().get_page_number());
//let x = self.idx_in_pages.last().unwrap().clone();
//let mut page_ref = if (x + 1) >= key_num {
//debug!("move to the next sibling page");
//let to_right = true;
//match self.sibling(to_right) {
//Ok(_) => {},
//Err(e) => {
//self.cursor_flags.insert(FlagEOF);
//}
//}
//debug!("next page is {}", self.db_pages.last().unwrap().borrow().get_page_number());
//self.db_pages.last().unwrap()
//} else {
//self.idx_in_pages.last().unwrap();
//self.db_pages.last().unwrap()
//};
//assert!(page_ref.borrow().is_leaf_page());
//let i = self.idx_in_pages.last().unwrap().clone();
//let db_record: DbRecord = match page_ref.borrow().get_dbrecord(i) {
//Some(r) => { r },
//None => { return None; }
//};
//let r: (DbKey, DbValue) = (db_record.key.clone(), db_record.value.clone());
//Some(r)
//}
//// Move to the previous data item
//fn prev(&mut self) -> Option<(DbKey, DbValue)> {
//assert!(self.is_initialized());
//debug!("top page is {}", self.db_pages.last().unwrap().borrow().get_page_number());
//let page_ref = if (self.idx_in_pages.last().unwrap().clone() == 0) {
//debug!("move to prev sibling page");
//let to_right = false;
//match self.sibling(to_right) {
//Ok(_) => {},
//Err(e) => {
//self.cursor_flags.insert(FlagEOF);
//}
//}
//debug!("prev page is {}", self.db_pages.last().unwrap().borrow().get_page_number());
//let key_num = self.db_pages.last().unwrap().borrow().number_of_keys();
//(*self.idx_in_pages.last_mut().unwrap()) = key_num;
//self.db_pages.last().unwrap()
//} else {
//(*self.idx_in_pages.last_mut().unwrap()) -= 1;
//self.db_pages.last().unwrap()
//};
//self.cursor_flags.remove(FlagEOF);
//assert!(page_ref.borrow().is_leaf_page());
//let i = self.idx_in_pages.last().unwrap().clone();
//let db_record = match page_ref.borrow().get_dbrecord(i) {
//Some(r) => { r },
//None => { return None; }
//};
//let r = (db_record.key.clone(), db_record.value.clone());
//Some(r)
//}
}
impl Clone for Cursor {
    // Hand-written clone: the shared handles (txn, db) are Rc-bumped while the
    // page stack is cloned so the copy can navigate independently.
    #[inline]
    fn clone(&self) -> Cursor {
        Cursor {
            txn: self.txn.clone(),
            cursor_state: self.cursor_state.clone(),
            db_handle: self.db_handle,
            db: self.db.clone(),
            stack: self.stack.clone()
        }
    }
}
impl Drop for Cursor {
    // Ensure close() runs even when the caller forgets to call it.
    fn drop(&mut self) {
        self.close();
    }
}
#[cfg(test)]
mod tests {
    extern crate test;
    // Placeholder test/bench scaffolding; no real cursor behavior is
    // exercised yet.
    #[test]
    fn test_cursor_new() {
        let a: usize = 1;
        assert_eq!(a, 1);
    }
    #[bench]
    fn bench_txn_begin(b: &mut test::Bencher) {
        b.iter(|| {
            let a: usize = 1+1;
            assert_eq!(a, 2);
        })
    }
}
|
use crate::{qjs, Artifact, Mut, Ref, Set, Weak, WeakElement, WeakKey, WeakSet};
use derive_deref::Deref;
use std::{
borrow::Borrow,
fmt::{Display, Formatter, Result as FmtResult},
hash::{Hash, Hasher},
};
/// Shared state behind a `Goal`. Identity is the `name` alone (see the
/// `PartialEq`/`Hash` impls below); description and artifact set are
/// independently mutable behind `Mut`.
pub struct Internal {
    name: String,
    description: Mut<String>,
    artifacts: Mut<Set<Artifact>>,
}
impl Drop for Internal {
    // Debug-log goal teardown for lifetime tracing.
    fn drop(&mut self) {
        log::debug!("Goal::drop `{}`", self.name);
    }
}
// Allow set lookups keyed by &str / &String without building an Internal.
impl Borrow<str> for Internal {
    fn borrow(&self) -> &str {
        &self.name
    }
}
impl Borrow<String> for Internal {
    fn borrow(&self) -> &String {
        &self.name
    }
}
// Equality and hashing deliberately consider only the name, consistent with
// the Borrow impls above so by-name lookups behave correctly.
impl PartialEq for Internal {
    fn eq(&self, other: &Self) -> bool {
        self.name == other.name
    }
}
impl Eq for Internal {}
impl Hash for Internal {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.name.hash(state);
    }
}
/// Named goal: a cheap, clonable handle (`Ref`) around shared `Internal` state.
#[derive(Clone)]
#[repr(transparent)]
pub struct Goal(Ref<Internal>);
impl Display for Goal {
    /// Formats the goal as: Goal `name` — a single `write!` producing the
    /// same output as formatting the three pieces separately.
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        write!(f, "Goal `{}`", self.0.name)
    }
}
impl Goal {
    /// Creates a goal without registering it in any set.
    pub fn new_raw<S: Into<String>>(name: S) -> Self {
        let name = name.into();
        log::debug!("Goal::new `{}`", name);
        Self(Ref::new(Internal {
            name,
            description: Mut::new(Default::default()),
            artifacts: Mut::new(Default::default()),
        }))
    }
    /// Returns the goal named `name` from `set`, creating and registering it
    /// on first use.
    ///
    /// NOTE(review): the read lock is released before the write below, so two
    /// racing callers could each create a goal for the same name; confirm
    /// whether usage is effectively single-threaded here.
    pub fn new<W: AsRef<WeakGoalSet>, N: Into<String>>(set: W, name: N) -> Self {
        let set = set.as_ref();
        let name = name.into();
        {
            // try reuse already existing goal
            if let Some(goal) = set.read().get(&name) {
                return goal;
            }
        }
        let goal = Self::new_raw(name);
        set.write().insert(goal.clone());
        goal
    }
    /// Replaces the description.
    pub fn set_description<S: Into<String>>(&self, description: S) {
        *self.0.description.write() = description.into();
    }
    /// Replaces the artifact set with the contents of the given slice.
    pub fn set_artifacts(&self, artifacts: &[Artifact]) {
        *self.0.artifacts.write() = artifacts.iter().cloned().collect();
    }
    /// Empties the artifact set.
    pub fn clear_artifacts(&self) {
        *self.0.artifacts.write() = Default::default();
    }
    /// Downgrades to a non-owning handle.
    pub fn weak(&self) -> WeakGoal {
        WeakGoal(Ref::downgrade(&self.0))
    }
}
/// Non-owning counterpart of `Goal`; upgrade with `try_ref`.
#[derive(Clone)]
#[repr(transparent)]
pub struct WeakGoal(Weak<Internal>);
impl WeakGoal {
    /// Upgrades to a strong `Goal` if the target is still alive.
    pub fn try_ref(&self) -> Option<Goal> {
        self.0.upgrade().map(Goal)
    }
}
// Lets the weak set key goals by their Internal (i.e. by name, via its
// Hash/Eq impls).
impl WeakKey for WeakGoal {
    type Key = Internal;
    fn with_key<F, R>(view: &Self::Strong, f: F) -> R
    where
        F: FnOnce(&Self::Key) -> R,
    {
        f(&view.0)
    }
}
// Glue for storing goals in a WeakSet: downgrade on insert, upgrade on read.
impl WeakElement for WeakGoal {
    type Strong = Goal;
    fn new(view: &Self::Strong) -> Self {
        view.weak()
    }
    fn view(&self) -> Option<Self::Strong> {
        self.try_ref()
    }
    fn clone(view: &Self::Strong) -> Self::Strong {
        view.clone()
    }
}
/// Shared, mutable set of weakly-held goals; `Deref`s to the inner lock.
#[derive(Clone, Default, Deref)]
pub struct WeakGoalSet(Ref<Mut<WeakSet<WeakGoal>>>);
impl AsRef<WeakGoalSet> for WeakGoalSet {
    /// Identity conversion so generic `W: AsRef<WeakGoalSet>` APIs accept a
    /// `WeakGoalSet` directly. `self` already has the right type; the old
    /// `&*self` was a needless deref-reborrow.
    fn as_ref(&self) -> &WeakGoalSet {
        self
    }
}
// QuickJS bindings generated by the qjs::bind proc-macro: exposes `Goal` to
// scripts with name/description/artifacts properties and a toString().
#[qjs::bind(module, public)]
#[quickjs(bare)]
mod js {
    pub use super::*;
    #[quickjs(rename = "Goal")]
    impl Goal {
        // Goals cannot be constructed from JS; they are obtained via the
        // goal set on the Rust side.
        #[quickjs(rename = "new")]
        pub fn ctor() -> Self {
            unimplemented!()
        }
        #[quickjs(get)]
        pub fn name(&self) -> &String {
            &self.0.name
        }
        // Returns a copy of the description under the read lock.
        #[quickjs(get)]
        pub fn description(&self) -> String {
            self.0.description.read().clone()
        }
        #[quickjs(rename = "description", set)]
        pub fn set_description_js(&self, description: String) {
            self.set_description(description)
        }
        // Snapshot of the artifact set as an owned Vec.
        #[quickjs(get)]
        pub fn artifacts(&self) -> Vec<Artifact> {
            self.0.artifacts.read().iter().cloned().collect()
        }
        #[quickjs(rename = "artifacts", set)]
        pub fn set_artifacts_js(&self, artifacts: Vec<&Artifact>) {
            *self.0.artifacts.write() = artifacts.into_iter().cloned().collect();
        }
        /*pub fn insert_artifact(&self, artifact: &Artifact) {
            self.0.artifacts.write().insert(artifact.clone());
        }
        pub fn remove_artifact(&self, artifact: &Artifact) {
            self.0.artifacts.write().remove(artifact);
        }*/
        #[quickjs(rename = "toString")]
        pub fn to_string_js(&self) -> String {
            self.to_string()
        }
    }
}
|
use libbeaglebone::enums::DeviceState;
use libbeaglebone::gpio::GPIO;
use libbeaglebone::gpio::PinDirection;
use libbeaglebone::gpio::PinState;
use libbeaglebone::pins::Pin;
use crate::pinouts::digital::input::DigitalInput;
use crate::pinouts::digital::output::DigitalOutput;
/// A digital GPIO pin on the BeagleBone: exported via sysfs on
/// construction and unexported again when dropped.
pub struct GpioPinout {
pin: GPIO,
}
impl DigitalOutput for GpioPinout {
    /// Drives the pin high for `true` and low for `false`.
    ///
    /// A failed write is logged and the pin's direction is re-applied
    /// as output, since a wrong direction is the usual cause.
    fn set_value(&mut self, val: bool) {
        let state = if val { PinState::High } else { PinState::Low };
        if let Err(e) = self.pin.write(state) {
            error!("{}", e);
            self.set_output();
        }
    }
}
impl DigitalInput for GpioPinout {
    /// Reads the pin level, returning `true` when it is high.
    /// Read failures are logged and reported as `false`.
    fn get_value(&self) -> bool {
        self.pin
            .read()
            .map(|level| level == PinState::High)
            .unwrap_or_else(|e| {
                error!("{}", e);
                false
            })
    }
}
impl GpioPinout {
/// Creates the pin wrapper and exports it via sysfs; an export
/// failure is logged but does not prevent construction.
pub fn new(pin_number: Pin) -> Self {
let pin = GPIO::new(pin_number);
if let Err(e) = pin.set_export(DeviceState::Exported) {
error!("{}", e);
}
Self {
pin,
}
}
/// Configures the pin as an output, logging any failure.
pub fn set_output(&mut self) {
if let Err(error) = self.pin.set_direction(PinDirection::Out) {
error!("{}", error);
}
}
/// Configures the pin as an output, retrying once (with logging via
/// `set_output`) if the first attempt fails.
pub fn set_output_twice(&mut self) {
if self.pin.set_direction(PinDirection::Out).is_err() {
self.set_output();
}
}
/// Configures the pin as an input, logging any failure.
pub fn set_input(&mut self) {
if let Err(error) = self.pin.set_direction(PinDirection::In) {
error!("{}", error);
}
}
/// Configures the pin as an input, retrying once (with logging via
/// `set_input`) if the first attempt fails.
pub fn set_input_twice(&mut self) {
if self.pin.set_direction(PinDirection::In).is_err() {
self.set_input();
}
}
}
// Unexport the pin on drop so the sysfs entry is released; errors are
// logged because `drop` cannot propagate them.
impl Drop for GpioPinout {
fn drop(&mut self) {
if let Err(e) = self.pin.set_export(DeviceState::Unexported) {
error!("{}", e);
}
}
}
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
use crate::gdbstub::commands::*;
use crate::gdbstub::hex::*;
/// The GDB `QDisableRandomization` packet; `val` mirrors the packet
/// argument (`:1` parses to `true`, `:0` to `false`).
#[derive(Debug, PartialEq)]
pub struct QDisableRandomization {
pub val: bool,
}
impl ParseCommand for QDisableRandomization {
    /// Parses the packet argument: `:0` yields `val == false`, `:1`
    /// yields `val == true`, and anything else is rejected.
    fn parse(bytes: BytesMut) -> Option<Self> {
        let val = match &bytes[..] {
            b":0" => false,
            b":1" => true,
            _ => return None,
        };
        Some(QDisableRandomization { val })
    }
}
|
use embedded_nal::{IpAddr, Ipv4Addr};
use mqttrust::encoding::v4::LastWill;
/// Address of an MQTT broker: either a DNS hostname or a raw IP address.
#[derive(Clone, Debug, PartialEq)]
pub enum Broker<'a> {
Hostname(&'a str),
IpAddr(IpAddr),
}
// Conversions so callers can pass a hostname string or any IP address
// type directly where a `Broker` is expected.
impl<'a> From<&'a str> for Broker<'a> {
fn from(s: &'a str) -> Self {
Broker::Hostname(s)
}
}
impl<'a> From<IpAddr> for Broker<'a> {
fn from(ip: IpAddr) -> Self {
Broker::IpAddr(ip)
}
}
impl<'a> From<Ipv4Addr> for Broker<'a> {
fn from(ip: Ipv4Addr) -> Self {
Broker::IpAddr(ip.into())
}
}
/// Options to configure the behaviour of mqtt connection
///
/// **Lifetimes**:
/// - 'a: The lifetime of option fields, not referenced in any MQTT packets at any point
/// - 'b: The lifetime of the packet fields, backed by a slice buffer
#[derive(Clone, Debug)]
pub struct MqttOptions<'a> {
/// broker address that you want to connect to
broker_addr: Broker<'a>,
/// broker port
port: u16,
/// keep alive time (in milliseconds) to send pingreq to broker when the connection is idle
keep_alive_ms: u32,
/// clean (or) persistent session
clean_session: bool,
/// client identifier (must be non-empty and not start with a space; see `MqttOptions::new`)
client_id: &'a str,
// alpn settings
// alpn: Option<Vec<Vec<u8>>>,
/// username and password
credentials: Option<(&'a str, &'a [u8])>,
// Minimum delay time between consecutive outgoing packets
// throttle: Duration,
/// Last will that will be issued on unexpected disconnect
last_will: Option<LastWill<'a>>,
}
impl<'a> MqttOptions<'a> {
/// New mqtt options with a 60 s keep-alive, clean session, and no
/// credentials or last will.
///
/// # Panics
///
/// Panics if `id` is empty or starts with a space.
pub fn new(id: &'a str, broker: Broker<'a>, port: u16) -> MqttOptions<'a> {
if id.starts_with(' ') || id.is_empty() {
panic!("Invalid client id")
}
MqttOptions {
broker_addr: broker,
port,
keep_alive_ms: 60_000,
clean_session: true,
client_id: id,
// alpn: None,
credentials: None,
// throttle: Duration::from_micros(0),
last_will: None,
}
}
/// Broker address
pub fn broker(&self) -> (Broker, u16) {
(self.broker_addr.clone(), self.port)
}
/// Set the broker address (builder style: consumes and returns `Self`)
pub fn set_broker(self, broker: Broker<'a>) -> Self {
Self {
broker_addr: broker,
..self
}
}
/// Set the broker port
pub fn set_port(self, port: u16) -> Self {
Self { port, ..self }
}
/// Set the last will issued on unexpected disconnect
pub fn set_last_will(self, will: LastWill<'a>) -> Self {
Self {
last_will: Some(will),
..self
}
}
/// Last will, if one has been set
pub fn last_will(&self) -> Option<LastWill<'a>> {
self.last_will.clone()
}
// pub fn set_alpn(self, alpn: Vec<Vec<u8>>) -> Self {
// Self {
// alpn: Some(alpn),
// ..self
// }
// }
// pub fn alpn(&self) -> Option<Vec<Vec<u8>>> {
// self.alpn.clone()
// }
/// Set number of seconds after which client should ping the broker
/// if there is no other data exchange
///
/// # Panics
///
/// Panics if `secs` is less than 5.
pub fn set_keep_alive(self, secs: u16) -> Self {
if secs < 5 {
panic!("Keep alives should be >= 5 secs");
}
Self {
keep_alive_ms: secs as u32 * 1000,
..self
}
}
/// Keep alive time, in milliseconds
pub fn keep_alive_ms(&self) -> u32 {
self.keep_alive_ms
}
/// Client identifier
pub fn client_id(&self) -> &'a str {
self.client_id
}
/// `clean_session = true` removes all the state from queues & instructs the broker
/// to clean all the client state when client disconnects.
///
/// When set `false`, broker will hold the client state and performs pending
/// operations on the client when reconnection with same `client_id`
/// happens. Local queue state is also held to retransmit packets after reconnection.
pub fn set_clean_session(self, clean_session: bool) -> Self {
Self {
clean_session,
..self
}
}
/// Clean session
pub fn clean_session(&self) -> bool {
self.clean_session
}
/// Username and password
pub fn set_credentials(self, username: &'a str, password: &'a [u8]) -> Self {
Self {
credentials: Some((username, password)),
..self
}
}
/// Security options: both parts are `Some` when credentials were set,
/// both `None` otherwise.
pub fn credentials(&self) -> (Option<&'a str>, Option<&'a [u8]>) {
if let Some((username, password)) = self.credentials {
(Some(username), Some(password))
} else {
(None, None)
}
}
// /// Enables throttling and sets outoing message rate to the specified 'rate'
// pub fn set_throttle(self, duration: Duration) -> Self {
// self.throttle = duration;
// self
// }
// /// Outgoing message rate
// pub fn throttle(&self) -> Duration {
// self.throttle
// }
}
// Tests covering defaults, builder setters, and the panicking
// validation paths of `MqttOptions`.
#[cfg(test)]
mod test {
use super::{Ipv4Addr, MqttOptions};
use embedded_nal::{IpAddr, Ipv6Addr};
use mqttrust::{encoding::v4::LastWill, QoS};
// A leading space in the client id must be rejected with a panic.
#[test]
#[should_panic]
fn client_id_starts_with_space() {
let _mqtt_opts = MqttOptions::new(" client_a", Ipv4Addr::new(127, 0, 0, 1).into(), 1883)
.set_clean_session(true);
}
// An empty client id must be rejected with a panic.
#[test]
#[should_panic]
fn no_client_id() {
let _mqtt_opts =
MqttOptions::new("", Ipv4Addr::localhost().into(), 1883).set_clean_session(true);
}
// Broker address round-trips for hostname, IPv4, and IPv6 forms.
#[test]
fn broker() {
let opts = MqttOptions::new("client_a", Ipv4Addr::localhost().into(), 1883);
assert_eq!(opts.broker_addr, Ipv4Addr::localhost().into());
assert_eq!(opts.port, 1883);
assert_eq!(opts.broker(), (Ipv4Addr::localhost().into(), 1883));
assert_eq!(
MqttOptions::new("client_a", "localhost".into(), 1883).broker_addr,
"localhost".into()
);
assert_eq!(
MqttOptions::new("client_a", IpAddr::V4(Ipv4Addr::localhost()).into(), 1883)
.broker_addr,
IpAddr::V4(Ipv4Addr::localhost()).into()
);
assert_eq!(
MqttOptions::new("client_a", IpAddr::V6(Ipv6Addr::localhost()).into(), 1883)
.broker_addr,
IpAddr::V6(Ipv6Addr::localhost()).into()
);
}
#[test]
fn client_id() {
let opts = MqttOptions::new("client_a", Ipv4Addr::localhost().into(), 1883);
assert_eq!(opts.client_id(), "client_a");
}
// Default keep-alive is 60 s; the setter takes seconds but stores ms.
#[test]
fn keep_alive_ms() {
let opts = MqttOptions::new("client_a", Ipv4Addr::localhost().into(), 1883);
assert_eq!(opts.keep_alive_ms, 60_000);
assert_eq!(opts.set_keep_alive(120).keep_alive_ms(), 120_000);
}
// Keep-alives below 5 s must panic.
#[test]
#[should_panic]
fn keep_alive_panic() {
let opts = MqttOptions::new("client_a", Ipv4Addr::localhost().into(), 1883);
assert_eq!(opts.keep_alive_ms, 60_000);
assert_eq!(opts.set_keep_alive(4).keep_alive_ms(), 120_000);
}
#[test]
fn last_will() {
let opts = MqttOptions::new("client_a", Ipv4Addr::localhost().into(), 1883);
assert_eq!(opts.last_will, None);
let will = LastWill {
topic: "topic",
message: b"Will message",
qos: QoS::AtLeastOnce,
retain: false,
};
assert_eq!(opts.set_last_will(will.clone()).last_will(), Some(will));
}
#[test]
fn clean_session() {
let opts = MqttOptions::new("client_a", Ipv4Addr::localhost().into(), 1883);
assert_eq!(opts.clean_session, true);
assert_eq!(opts.set_clean_session(false).clean_session(), false);
}
#[test]
fn credentials() {
let opts = MqttOptions::new("client_a", Ipv4Addr::localhost().into(), 1883);
assert_eq!(opts.credentials, None);
assert_eq!(opts.credentials(), (None, None));
assert_eq!(
opts.set_credentials("some_user", &[]).credentials(),
(Some("some_user"), Some(&b""[..]))
);
}
}
|
use byteorder::{BigEndian, ByteOrder};
use tiny_keccak::Keccak;
use parity_hash::H256;
use lib::*;
use super::{Signature, ValueType};
use super::util::Error;
/// A signature paired with its 32-bit dispatch selector (the first four
/// big-endian bytes of the keccak-256 hash of the canonical signature).
#[derive(Clone)]
pub struct HashSignature {
pub hash: u32,
pub signature: Signature,
}
/// A signature paired with its human-readable method name; convertible
/// into a `HashSignature` for dispatch.
#[derive(Clone)]
pub struct NamedSignature {
name: Cow<'static, str>,
signature: Signature,
}
/// Dispatch table mapping 32-bit method selectors to signatures, with an
/// optional fallback/constructor signature for unmatched calls.
#[derive(Default)]
pub struct Table {
// slice instead of hashmap since dispatch table is usually small (todo: maybe add variant with hash tables)
pub inner: Cow<'static, [HashSignature]>,
// anonymous signature/constructor
pub fallback: Option<Signature>,
}
impl From<NamedSignature> for HashSignature {
    /// Derives the dispatch entry for a named signature: the selector is
    /// the first four big-endian bytes of its keccak-256 hash.
    fn from(named: NamedSignature) -> HashSignature {
        let full_hash = named.hash();
        let selector = BigEndian::read_u32(&full_hash.as_ref()[0..4]);
        HashSignature {
            hash: selector,
            signature: named.signature,
        }
    }
}
impl Table {
/// Creates a table with the given dispatch entries and no fallback.
pub fn new<T>(inner: T) -> Self
where T: Into<Cow<'static, [HashSignature]>>
{
Table { inner: inner.into(), fallback: None }
}
/// Creates a table with dispatch entries plus a fallback/constructor
/// signature used by `fallback_dispatch`.
pub fn with_fallback<T>(inner: T, fallback: Signature) -> Self
where T: Into<Cow<'static, [HashSignature]>>
{
Table { inner: inner.into(), fallback: Some(fallback) }
}
/// Appends a dispatch entry (clones borrowed storage into an owned
/// vector on first push, via `Cow::to_mut`).
pub fn push<S>(&mut self, signature: S)
where S: Into<HashSignature>
{
self.inner.to_mut().push(signature.into())
}
/// Inbound dispatch: decodes `payload` (4-byte big-endian selector
/// followed by encoded arguments), invokes `d` with the selector and
/// decoded args, and encodes `d`'s optional result.
pub fn dispatch<D>(&self, payload: &[u8], mut d: D) -> Result<Vec<u8>, Error>
where D: FnMut(u32, Vec<ValueType>) -> Option<ValueType>
{
// Need at least the 4 selector bytes before anything else.
if payload.len() < 4 { return Err(Error::NoLengthForSignature); }
let method_id = BigEndian::read_u32(&payload[0..4]);
let hash_signature = self.hash_signature(method_id)?;
let args = hash_signature.signature.decode_invoke(&payload[4..]);
let result = d(method_id, args);
Ok(hash_signature.signature.encode_result(result)?)
}
/// Fallback/constructor dispatch cannot return anything
pub fn fallback_dispatch<D>(&self, payload: &[u8], mut d: D)
-> Result<(), Error>
where D: FnMut(Vec<ValueType>)
{
if let Some(ref fallback_signature) = self.fallback {
d(fallback_signature.decode_invoke(payload));
Ok(())
} else {
Err(Error::NoFallback)
}
}
/// Linear scan for the entry whose selector equals `method_id`.
pub fn hash_signature(&self, method_id: u32) -> Result<&HashSignature, Error> {
self.inner.iter().find(|x| x.hash == method_id).ok_or(Error::UnknownSignature)
}
/// Outbound call: encodes selector + args into a payload, hands it to
/// `d`, and decodes the optional 32-byte word `d` returns.
pub fn call<D>(&self, hash: u32, args: &[ValueType], mut d: D)
-> Result<Option<ValueType>, Error>
where D: FnMut(Vec<u8>) -> Option<[u8; 32]>
{
let hash_signature = self.hash_signature(hash)?;
let args_payload = hash_signature.signature.encode_invoke(args);
let mut payload = Vec::with_capacity(args_payload.len() + 4);
let mut encoded_signature = [0u8; 4];
BigEndian::write_u32(&mut encoded_signature, hash);
payload.extend_from_slice(&encoded_signature);
payload.extend(args_payload);
let result = d(payload);
Ok(match result {
Some(ref result_slice) => hash_signature.signature.decode_result(&result_slice[..])?,
None => None,
})
}
}
impl NamedSignature {
/// Creates a named signature from anything convertible into a
/// `'static` string cow.
pub fn new<T>(name: T, signature: Signature) -> Self
where T: Into<Cow<'static, str>>
{
NamedSignature {
name: name.into(),
signature: signature,
}
}
/// Method name.
pub fn name(&self) -> &str {
self.name.as_ref()
}
/// Type signature.
pub fn signature(&self) -> &Signature {
&self.signature
}
/// Computes the keccak-256 hash of the canonical signature string
/// `name(type1,type2,...)`, Solidity-selector style.
pub fn hash(&self) -> H256 {
let mut signature_str = self.name.to_string();
signature_str.push('(');
for (i, p) in self.signature.params().iter().enumerate() {
p.to_member(&mut signature_str);
// comma-separate every parameter except the last
if i != self.signature.params().len()-1 { signature_str.push(','); }
}
signature_str.push(')');
let mut keccak = Keccak::new_keccak256();
let mut res = H256::zero();
keccak.update(signature_str.as_bytes());
keccak.finalize(res.as_mut());
res
}
}
impl HashSignature {
    /// Builds a dispatch entry from an already-computed selector.
    pub fn new(hash: u32, signature: Signature) -> Self {
        HashSignature { hash, signature }
    }

    /// The 32-bit method selector.
    pub fn hash(&self) -> u32 {
        self.hash
    }

    /// The method's type signature.
    pub fn signature(&self) -> &Signature {
        &self.signature
    }
}
// Selector for `baz(uint32,bool)` must match the well-known Solidity
// ABI example value 0xcdcd77c0.
#[test]
fn match_signature() {
use super::ParamType;
let named = NamedSignature {
name: Cow::Borrowed("baz"),
signature: Signature::new_void(vec![ParamType::U32, ParamType::Bool]),
};
let hashed: HashSignature = named.into();
assert_eq!(hashed.hash, 0xcdcd77c0);
}
// Selector for `sam(bytes,bool,uint256[])` must match the well-known
// Solidity ABI example value 0xa5643bf2.
#[test]
fn match_signature_2() {
use super::ParamType;
let named = NamedSignature {
name: Cow::Borrowed("sam"),
signature: Signature::new_void(vec![ParamType::Bytes, ParamType::Bool, ParamType::Array(ParamType::U256.into())]),
};
let hashed: HashSignature = named.into();
assert_eq!(hashed.hash, 0xa5643bf2);
}
// End-to-end dispatch: two entries are registered, then ABI-encoded
// payloads (selector + arguments) are decoded and routed to the
// callback, which checks both the selector and the decoded values.
#[test]
fn table() {
use super::ParamType;
let mut table = Table::default();
table.push(
NamedSignature {
name: Cow::Borrowed("baz"),
signature: Signature::new_void(vec![ParamType::U32, ParamType::Bool]),
}
);
table.push(
NamedSignature {
name: Cow::Borrowed("sam"),
signature: Signature::new_void(vec![ParamType::Bytes, ParamType::Bool, ParamType::Array(ParamType::U256.into())]),
}
);
// Payload: selector for `baz`, then two 32-byte words: 0x45 and true.
table.dispatch(
&[
0xcd, 0xcd, 0x77, 0xc0,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01
],
|method_id, values| {
assert_eq!(method_id, 0xcdcd77c0);
assert_eq!(values[0], ValueType::U32(69));
assert_eq!(values[1], ValueType::Bool(true));
None
}
).expect("dispatch failed");
// Payload: selector for `sam`, then head words (offsets for the
// dynamic `bytes` and array, plus the bool), followed by the tails:
// the 4-byte string "dave" and a 3-element uint256[] of 1, 2, 3.
table.dispatch(
&[
0xa5, 0x64, 0x3b, 0xf2,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa0,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
0x64, 0x61, 0x76, 0x65, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
],
|method_id, values| {
assert_eq!(method_id, 0xa5643bf2);
assert_eq!(values[0], ValueType::Bytes(vec![100, 97, 118, 101]));
assert_eq!(values[1], ValueType::Bool(true));
assert_eq!(values[2], ValueType::Array(
vec![
ValueType::U256([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01]),
ValueType::U256([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02]),
ValueType::U256([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03])
]
));
None
}
).expect("dispatch failed");
}
|
// Copyright © 2020, Oracle and/or its affiliates.
//
// Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
//
// Portions Copyright 2017 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE-BSD-3-Clause file.
//
// SPDX-License-Identifier: Apache-2.0 AND BSD-3-Clause
/*
* automatically generated by rust-bindgen using:
*
* # bindgen --with-derive-default elf.h > elf.rs
*
* From upstream linux include/uapi/linux/elf.h at commit:
* 48b1320a674e1ff5de2fad8606bee38f724594dc
* and then edited to eliminate unnecessary definitions, add comments,
* and relocate definitions and tests for clarity.
*/
// ELF program header segment types: loadable segment and note segment.
pub const PT_LOAD: u32 = 1;
pub const PT_NOTE: u32 = 4;
// Byte indices into `e_ident` for the magic bytes and data encoding.
pub const EI_MAG0: u32 = 0;
pub const EI_MAG1: u32 = 1;
pub const EI_MAG2: u32 = 2;
pub const EI_MAG3: u32 = 3;
pub const EI_DATA: u32 = 5;
// First magic byte: 0x7f.
pub const ELFMAG0: u32 = 127;
// The values for the following definitions have been edited
// to use their equivalent byte literal representations.
pub const ELFMAG1: u8 = b'E';
pub const ELFMAG2: u8 = b'L';
pub const ELFMAG3: u8 = b'F';
// `e_ident[EI_DATA]` value for two's-complement little-endian.
pub const ELFDATA2LSB: u32 = 1;
// Kernel-style fixed-width integer aliases used by the ELF structs below.
pub type __s8 = ::std::os::raw::c_schar;
pub type __u8 = ::std::os::raw::c_uchar;
pub type __s16 = ::std::os::raw::c_short;
pub type __u16 = ::std::os::raw::c_ushort;
pub type __s32 = ::std::os::raw::c_int;
pub type __u32 = ::std::os::raw::c_uint;
pub type __s64 = ::std::os::raw::c_longlong;
pub type __u64 = ::std::os::raw::c_ulonglong;
// 64-bit ELF scalar types, matching include/uapi/linux/elf.h.
pub type Elf64_Addr = __u64;
pub type Elf64_Half = __u16;
pub type Elf64_Off = __u64;
pub type Elf64_Sword = __s32;
pub type Elf64_Word = __u32;
pub type Elf64_Xword = __u64;
/// 64-bit ELF file header (`Elf64_Ehdr`), layout-compatible with the C
/// definition in include/uapi/linux/elf.h.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct elf64_hdr {
pub e_ident: [::std::os::raw::c_uchar; 16usize],
pub e_type: Elf64_Half,
pub e_machine: Elf64_Half,
pub e_version: Elf64_Word,
pub e_entry: Elf64_Addr,
pub e_phoff: Elf64_Off,
pub e_shoff: Elf64_Off,
pub e_flags: Elf64_Word,
pub e_ehsize: Elf64_Half,
pub e_phentsize: Elf64_Half,
pub e_phnum: Elf64_Half,
pub e_shentsize: Elf64_Half,
pub e_shnum: Elf64_Half,
pub e_shstrndx: Elf64_Half,
}
pub type Elf64_Ehdr = elf64_hdr;
/// 64-bit ELF program header (`Elf64_Phdr`), layout-compatible with the
/// C definition in include/uapi/linux/elf.h.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct elf64_phdr {
pub p_type: Elf64_Word,
pub p_flags: Elf64_Word,
pub p_offset: Elf64_Off,
pub p_vaddr: Elf64_Addr,
pub p_paddr: Elf64_Addr,
pub p_filesz: Elf64_Xword,
pub p_memsz: Elf64_Xword,
pub p_align: Elf64_Xword,
}
pub type Elf64_Phdr = elf64_phdr;
/// 64-bit ELF note header (`Elf64_Nhdr`), layout-compatible with the C
/// definition in include/uapi/linux/elf.h.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct elf64_note {
pub n_namesz: Elf64_Word,
pub n_descsz: Elf64_Word,
pub n_type: Elf64_Word,
}
pub type Elf64_Nhdr = elf64_note;
// Layout tests generated by bindgen, factored through two helper macros
// to eliminate ~300 lines of copy-pasted assertion boilerplate while
// keeping the same assertions and failure messages.
#[cfg(test)]
mod tests {
    use super::*;

    /// Asserts the overall size and alignment of `$ty`.
    macro_rules! check_size_align {
        ($ty:ident, $size:expr, $align:expr) => {
            assert_eq!(
                ::std::mem::size_of::<$ty>(),
                $size,
                concat!("Size of: ", stringify!($ty))
            );
            assert_eq!(
                ::std::mem::align_of::<$ty>(),
                $align,
                concat!("Alignment of ", stringify!($ty))
            );
        };
    }

    /// Asserts the byte offset of each listed field within `$ty`.
    ///
    /// Offsets are computed from a `MaybeUninit` value: it provides a
    /// valid (if uninitialized) pointer from which field addresses can
    /// be taken soundly with `addr_of!` — no value is ever read.
    macro_rules! check_offsets {
        ($ty:ident, $(($field:ident, $offset:expr)),+ $(,)?) => {{
            const UNINIT: ::std::mem::MaybeUninit<$ty> = ::std::mem::MaybeUninit::uninit();
            let ptr = UNINIT.as_ptr();
            $(
                assert_eq!(
                    unsafe { ::std::ptr::addr_of!((*ptr).$field) as usize - ptr as usize },
                    $offset,
                    concat!(
                        "Offset of field: ",
                        stringify!($ty),
                        "::",
                        stringify!($field)
                    )
                );
            )+
        }};
    }

    #[test]
    fn bindgen_test_layout_elf64_hdr() {
        check_size_align!(elf64_hdr, 64usize, 8usize);
        check_offsets!(
            elf64_hdr,
            (e_ident, 0usize),
            (e_type, 16usize),
            (e_machine, 18usize),
            (e_version, 20usize),
            (e_entry, 24usize),
            (e_phoff, 32usize),
            (e_shoff, 40usize),
            (e_flags, 48usize),
            (e_ehsize, 52usize),
            (e_phentsize, 54usize),
            (e_phnum, 56usize),
            (e_shentsize, 58usize),
            (e_shnum, 60usize),
            (e_shstrndx, 62usize),
        );
    }

    #[test]
    fn bindgen_test_layout_elf64_phdr() {
        check_size_align!(elf64_phdr, 56usize, 8usize);
        check_offsets!(
            elf64_phdr,
            (p_type, 0usize),
            (p_flags, 4usize),
            (p_offset, 8usize),
            (p_vaddr, 16usize),
            (p_paddr, 24usize),
            (p_filesz, 32usize),
            (p_memsz, 40usize),
            (p_align, 48usize),
        );
    }

    #[test]
    fn bindgen_test_layout_elf64_note() {
        check_size_align!(elf64_note, 12usize, 4usize);
        check_offsets!(
            elf64_note,
            (n_namesz, 0usize),
            (n_descsz, 4usize),
            (n_type, 8usize),
        );
    }
}
|
use crate::pubnub::PubNub;
use crate::runtime::Runtime;
use crate::subscription::subscribe_loop::ExitTx as SubscribeLoopExitTx;
use crate::subscription::subscribe_loop_supervisor::{
SubscribeLoopSupervisor, SubscribeLoopSupervisorParams,
};
use crate::transport::Transport;
use futures_util::lock::Mutex;
use std::sync::Arc;
/// # PubNub Client Builder
///
/// Create a [`crate::PubNub`] client using the builder pattern.
/// Optional items can be overridden using this.
///
/// The transport and runtime type parameters default to `()` so an
/// "empty" builder can exist before components are supplied.
#[derive(Clone, Debug)]
pub struct Builder<TTransport = (), TRuntime = ()> {
/// Transport to use for communication.
transport: TTransport,
/// Runtime to use for managing resources.
runtime: TRuntime,
/// Subscription related configuration params.
/// If set, gets a signal when subscribe loop exits.
subscribe_loop_exit_tx: Option<SubscribeLoopExitTx>,
}
impl<TTransport, TRuntime> Builder<TTransport, TRuntime>
where
    TTransport: Transport,
    TRuntime: Runtime,
{
    /// Build the [`PubNub`] client to begin streaming messages.
    ///
    /// # Example
    ///
    /// ```
    /// use pubnub_core::mock::{runtime::MockRuntime, transport::MockTransport};
    /// use pubnub_core::Builder;
    ///
    /// let transport = MockTransport::new();
    /// let runtime = MockRuntime::new();
    ///
    /// let pubnub = Builder::with_components(transport, runtime).build();
    /// ```
    #[must_use]
    pub fn build(self) -> PubNub<TTransport, TRuntime> {
        // Hand the optional exit notifier to the supervisor that owns
        // the subscribe loop lifecycle.
        let supervisor = SubscribeLoopSupervisor::new(SubscribeLoopSupervisorParams {
            exit_tx: self.subscribe_loop_exit_tx,
        });
        PubNub {
            transport: self.transport,
            runtime: self.runtime,
            subscribe_loop_supervisor: Arc::new(Mutex::new(supervisor)),
        }
    }
}
impl<TTransport, TRuntime> Builder<TTransport, TRuntime> {
/// Create a new [`Builder`] that can configure a [`PubNub`] client
/// with custom component implementations.
///
/// # Example
///
/// ```
/// use pubnub_core::mock::{runtime::MockRuntime, transport::MockTransport};
/// use pubnub_core::Builder;
///
/// let transport = MockTransport::new();
/// let runtime = MockRuntime::new();
///
/// let pubnub = Builder::with_components(transport, runtime).build();
/// ```
#[must_use]
pub fn with_components(transport: TTransport, runtime: TRuntime) -> Self {
Self {
subscribe_loop_exit_tx: None,
transport,
runtime,
}
}
/// Set the subscribe loop exit tx.
///
/// If set, subscribe loop sends a message to it when it exits.
///
/// # Example
///
/// ```
/// # use pubnub_core::mock::{transport::MockTransport, runtime::MockRuntime};
/// # let transport = MockTransport::new();
/// # let runtime = MockRuntime::new();
/// use pubnub_core::Builder;
///
/// let (tx, _rx) = futures_channel::mpsc::channel(1);
///
/// let pubnub = Builder::with_components(transport, runtime)
///     .subscribe_loop_exit_tx(tx)
///     .build();
/// ```
#[must_use]
pub fn subscribe_loop_exit_tx(mut self, tx: SubscribeLoopExitTx) -> Self {
self.subscribe_loop_exit_tx = Some(tx);
self
}
/// Set the transport to use.
///
/// This allows changing the [`Transport`] type on the builder and,
/// therefore, on the resulting [`PubNub`] client.
#[must_use]
pub fn transport<U: Transport>(self, transport: U) -> Builder<U, TRuntime> {
Builder {
transport,
// Copy the rest of the fields.
runtime: self.runtime,
subscribe_loop_exit_tx: self.subscribe_loop_exit_tx,
}
}
/// Set the runtime to use.
///
/// This allows changing the [`Runtime`] type on the builder and,
/// therefore, on the resulting [`PubNub`] client.
#[must_use]
pub fn runtime<U: Runtime>(self, runtime: U) -> Builder<TTransport, U> {
Builder {
runtime,
// Copy the rest of the fields.
transport: self.transport,
subscribe_loop_exit_tx: self.subscribe_loop_exit_tx,
}
}
}
impl Builder<(), ()> {
/// Create a new [`Builder`] that can configure a [`PubNub`] client.
///
/// Starts with unit placeholders for both components; set real ones
/// via [`Builder::transport`] and [`Builder::runtime`].
///
/// # Example
///
/// ```
/// use pubnub_core::mock::{runtime::MockRuntime, transport::MockTransport};
/// use pubnub_core::Builder;
///
/// let transport = MockTransport::new();
/// let runtime = MockRuntime::new();
///
/// let pubnub = Builder::new().transport(transport).runtime(runtime).build();
/// ```
#[must_use]
pub fn new() -> Self {
Self::with_components((), ())
}
}
impl<TTransport, TRuntime> Default for Builder<TTransport, TRuntime>
where
TTransport: Default,
TRuntime: Default,
{
/// Create a new [`Builder`] that can configure a [`PubNub`] client
/// with default components.
#[must_use]
fn default() -> Self {
Self::with_components(TTransport::default(), TRuntime::default())
}
}
|
//! A demonstration of an offchain worker that submits onchain callbacks
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(test)]
mod tests;
use frame_support::{
debug,
dispatch::DispatchResult, decl_module, decl_storage, decl_event, decl_error,
weights::SimpleDispatchInfo,
};
use core::convert::{TryInto};
use frame_system::{self as system, ensure_signed, ensure_none, offchain};
use sp_core::crypto::KeyTypeId;
use sp_runtime::{
offchain as rt_offchain,
transaction_validity::{
InvalidTransaction, ValidTransaction, TransactionValidity, TransactionSource
},
};
use sp_std::prelude::*;
use sp_std::str as str;
// Because we are parsing json in a `no_std` env, we cannot use `serde_json` library.
// `simple_json2` is a small tool written by our community member to handle json parsing in
// no_std env.
use simple_json2::{ json::{ JsonObject, JsonValue }, parse_json };
/// Defines application identifier for crypto keys of this module.
///
/// Every module that deals with signatures needs to declare its unique identifier for
/// its crypto keys.
/// When offchain worker is signing transactions it's going to request keys of type
/// `KeyTypeId` from the keystore and use the ones it finds to sign the transaction.
/// The keys can be inserted manually via RPC (see `author_insertKey`).
pub const KEY_TYPE: KeyTypeId = KeyTypeId(*b"demo");
/// Maximum number of submitted numbers kept in the `Numbers` storage vector.
pub const NUM_VEC_LEN: usize = 10;
// We are fetching information from github public API about organisation `substrate-developer-hub`.
pub const HTTP_REMOTE_REQUEST_BYTES: &[u8] = b"https://api.github.com/orgs/substrate-developer-hub";
/// User-Agent header value sent with the HTTP request above.
pub const HTTP_HEADER_USER_AGENT: &[u8] = b"jimmychu0807";
/// Based on the above `KeyTypeId` we need to generate a pallet-specific crypto type wrappers.
/// We can use from supported crypto kinds (`sr25519`, `ed25519` and `ecdsa`) and augment
/// the types with this pallet-specific identifier.
pub mod crypto {
use crate::KEY_TYPE;
use sp_runtime::app_crypto::{app_crypto, sr25519};
// Generates the sr25519 key/signature wrapper types tagged with KEY_TYPE.
app_crypto!(sr25519, KEY_TYPE);
}
/// This is the pallet's configuration trait
pub trait Trait: system::Trait {
/// The overarching dispatch call type.
type Call: From<Call<Self>>;
/// The overarching event type.
type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;
/// The type to sign and submit transactions.
type SubmitSignedTransaction: offchain::SubmitSignedTransaction<Self, <Self as Trait>::Call>;
/// The type to submit unsigned transactions.
type SubmitUnsignedTransaction: offchain::SubmitUnsignedTransaction<Self, <Self as Trait>::Call>;
}
// Custom data type
// Selects which kind of work the offchain worker performs for a given block;
// chosen round-robin from the block number by `choose_tx_type`.
#[derive(Debug)]
enum TransactionType {
    // Submit the block number back on-chain via a signed transaction.
    SignedSubmitNumber,
    // Submit the block number back on-chain via an unsigned transaction.
    UnsignedSubmitNumber,
    // Perform an HTTP fetch plus JSON parsing (no transaction submitted).
    HttpFetching,
    // Do nothing this block (unreachable with the current `% 3` selector).
    None,
}
decl_storage! {
    trait Store for Module<T: Trait> as Example {
        /// A vector of recently submitted numbers. Should be bounded
        /// (kept to at most `NUM_VEC_LEN` entries by `append_or_replace_number`).
        Numbers get(fn numbers): Vec<u64>;
    }
}
decl_event!(
    /// Events generated by the module.
    pub enum Event<T> where AccountId = <T as system::Trait>::AccountId {
        /// Event generated when a new number is accepted to contribute to the average.
        /// The first field is `Some(submitter)` for signed calls and `None` for unsigned ones.
        NewNumber(Option<AccountId>, u64),
    }
);
decl_error! {
    pub enum Error for Module<T: Trait> {
        // Error returned when making signed transactions in off-chain worker
        SignedSubmitNumberError,
        // Error returned when making unsigned transactions in off-chain worker
        UnsignedSubmitNumberError,
        // Error returned when the off-chain HTTP GET fails or returns a non-200 status
        HttpFetchingError,
        // Error returned when the fetched body is not valid UTF-8/JSON or lacks a key
        JsonParsingError,
    }
}
decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
        // Boilerplate that lets the module deposit the events declared in `decl_event!`.
        fn deposit_event() = default;
        /// Record `number` on-chain; callable by any signed origin.
        #[weight = SimpleDispatchInfo::default()]
        pub fn submit_number_signed(origin, number: u64) -> DispatchResult {
            debug::info!("submit_number_signed: {:?}", number);
            let who = ensure_signed(origin)?;
            Self::append_or_replace_number(Some(who), number)
        }
        /// Record `number` on-chain via an unsigned transaction. `_block` is
        /// checked in `validate_unsigned` so only one such call is accepted
        /// per block generation phase.
        #[weight = SimpleDispatchInfo::default()]
        pub fn submit_number_unsigned(origin, _block: T::BlockNumber, number: u64) -> DispatchResult {
            debug::info!("submit_number_unsigned: {:?}", number);
            let _ = ensure_none(origin)?;
            Self::append_or_replace_number(None, number)
        }
        // Offchain worker entry point, invoked after each block import.
        fn offchain_worker(block_number: T::BlockNumber) {
            debug::info!("Entering off-chain workers");
            // Rotate between the three kinds of work based on the block number.
            let result = match Self::choose_tx_type(block_number) {
                TransactionType::SignedSubmitNumber => Self::signed_submit_number(block_number),
                TransactionType::UnsignedSubmitNumber => Self::unsigned_submit_number(block_number),
                TransactionType::HttpFetching => Self::fetch_n_parse(block_number),
                TransactionType::None => Ok(())
            };
            // Offchain workers cannot return errors to the runtime; log and move on.
            if let Err(e) = result { debug::error!("Error: {:?}", e); }
        }
    }
}
impl<T: Trait> Module<T> {
    /// Add a new number to the bounded on-chain list and emit a `NewNumber` event.
    ///
    /// `who` is `Some(account)` for signed submissions and `None` for unsigned ones.
    fn append_or_replace_number(who: Option<T::AccountId>, number: u64) -> DispatchResult {
        Numbers::mutate(|numbers| {
            // Keep the vector bounded to `NUM_VEC_LEN` entries, evicting the
            // oldest entry first. (The previous code indexed with
            // `num_len % NUM_VEC_LEN`, which always overwrote slot 0 once the
            // vector was full, because `num_len` stays pinned at `NUM_VEC_LEN`.)
            if numbers.len() >= NUM_VEC_LEN {
                numbers.remove(0);
            }
            numbers.push(number);
            // Display the average over the vector as it is *after* insertion.
            // (The previous code divided the post-insertion sum by the
            // pre-insertion length, skewing the reported value.)
            let average = numbers.iter().sum::<u64>() / (numbers.len() as u64);
            debug::info!("Current average of numbers is: {}", average);
        });
        // Raise the NewNumber event
        Self::deposit_event(RawEvent::NewNumber(who, number));
        Ok(())
    }
    /// Pick which kind of off-chain work to perform for `block_number`.
    fn choose_tx_type(block_number: T::BlockNumber) -> TransactionType {
        // Decide what type of transaction to submit based on block number.
        // Each block the offchain worker will submit one type of transaction back to the chain.
        // First a signed transaction, then an unsigned transaction, then an http fetch and json parsing.
        match block_number.try_into().ok().unwrap() % 3 {
            0 => TransactionType::SignedSubmitNumber,
            1 => TransactionType::UnsignedSubmitNumber,
            2 => TransactionType::HttpFetching,
            // Unreachable with `% 3`, but keeps the match exhaustive.
            _ => TransactionType::None,
        }
    }
    /// Fetch the remote JSON document and log its `login` and `blog` fields.
    fn fetch_n_parse(_block_number: T::BlockNumber) -> Result<(), Error<T>> {
        let resp_bytes = Self::fetch_from_remote()
            .map_err(|e| {
                debug::error!("fetch_from_remote error: {:?}", e);
                <Error<T>>::HttpFetchingError
            })?;
        let resp_str = str::from_utf8(&resp_bytes)
            .map_err(|_| <Error<T>>::HttpFetchingError)?;
        // The json shape is as follow.
        // {
        //   "login":"substrate-developer-hub",
        //   "blog":"https://substrate.dev",
        //   ...
        // }
        debug::info!("{}", resp_str);
        let login_bytes = Self::parse_for_value(&resp_str, "login")?;
        let blog_bytes = Self::parse_for_value(&resp_str, "blog")?;
        debug::info!("login: {}", str::from_utf8(&login_bytes)
            .map_err(|_| <Error<T>>::JsonParsingError)?);
        debug::info!("blog: {}", str::from_utf8(&blog_bytes)
            .map_err(|_| <Error<T>>::JsonParsingError)?);
        Ok(())
    }
    /// Perform the HTTP GET against `HTTP_REMOTE_REQUEST_BYTES` with a 3s deadline,
    /// returning the raw response body.
    fn fetch_from_remote() -> Result<Vec<u8>, Error<T>> {
        let remote_url_bytes = HTTP_REMOTE_REQUEST_BYTES.to_vec();
        let user_agent = HTTP_HEADER_USER_AGENT.to_vec();
        let remote_url = str::from_utf8(&remote_url_bytes)
            .map_err(|_| <Error<T>>::HttpFetchingError)?;
        debug::info!("sending request to: {}", remote_url);
        // Initiate an external HTTP GET request. This is using high-level wrappers from `sp_runtime`.
        let request = rt_offchain::http::Request::get(remote_url);
        // Keeping the offchain worker execution time reasonable, so limiting the call to be within 3s.
        let timeout = sp_io::offchain::timestamp().add(rt_offchain::Duration::from_millis(3000));
        // For github API request, we also need to specify `user-agent` in http request header.
        // See: https://developer.github.com/v3/#user-agent-required
        let pending = request
            .add_header("User-Agent", str::from_utf8(&user_agent)
                .map_err(|_| <Error<T>>::HttpFetchingError)?)
            .deadline(timeout) // Setting the timeout time
            .send() // Sending the request out by the host
            .map_err(|_| <Error<T>>::HttpFetchingError)?;
        // By default, the http request is async from the runtime perspective. So we are asking the
        // runtime to wait here.
        // The returning value here is a `Result` of `Result`, so we are unwrapping it twice by two `?`
        // ref: https://substrate.dev/rustdocs/master/sp_runtime/offchain/http/struct.PendingRequest.html#method.try_wait
        let response = pending.try_wait(timeout)
            .map_err(|_| <Error<T>>::HttpFetchingError)?
            .map_err(|_| <Error<T>>::HttpFetchingError)?;
        if response.code != 200 {
            debug::error!("Unexpected http request status code: {}", response.code);
            return Err(<Error<T>>::HttpFetchingError);
        }
        // Next we fully read the response body and collect it to a vector of bytes.
        Ok(response.body().collect::<Vec<u8>>())
    }
    /// Look up `key` in the top-level JSON object and return its string value as bytes.
    fn parse_for_value(json_str: &str, key: &str) -> Result<Vec<u8>, Error<T>> {
        // Parse the whole string into a Json object
        let json: JsonValue = parse_json(&json_str)
            .map_err(|_| <Error<T>>::JsonParsingError)?;
        let json_obj: &JsonObject = json.get_object()
            .map_err(|_| <Error<T>>::JsonParsingError)?;
        // We iterate through the key and retrieve the (key, value) pair that match the `key`
        // parameter.
        // `key_val.0` contains the key and `key_val.1` contains the value.
        let key_val = json_obj
            .iter()
            .find(|(k, _)| *k == key.chars().collect::<Vec<char>>())
            .ok_or(<Error<T>>::JsonParsingError)?;
        // We assume the value is a string, so we use `get_bytes()` to collect them back.
        // In a real app, you may need to catch the error and further process it if the value is not
        // a string.
        key_val.1.get_bytes()
            .map_err(|_| <Error<T>>::JsonParsingError)
    }
    /// Submit the current block number on-chain, signed by every local key of `KEY_TYPE`.
    fn signed_submit_number(block_number: T::BlockNumber) -> Result<(), Error<T>> {
        use offchain::SubmitSignedTransaction;
        if !T::SubmitSignedTransaction::can_sign() {
            debug::error!("No local account available");
            return Err(<Error<T>>::SignedSubmitNumberError);
        }
        // We are just submitting the current block number back on-chain
        let submission: u64 = block_number.try_into().ok().unwrap() as u64;
        let call = Call::submit_number_signed(submission);
        // Using `SubmitSignedTransaction` associated type we create and submit a transaction
        // representing the call, we've just created.
        // Submit signed will return a vector of results for all accounts that were found in the
        // local keystore with expected `KEY_TYPE`.
        let results = T::SubmitSignedTransaction::submit_signed(call);
        for (_acc, res) in &results {
            match res {
                Ok(()) => { debug::native::info!("off-chain send_signed: acc: {}| number: {}", _acc, submission); },
                Err(e) => {
                    debug::error!("[{:?}] Failed in signed_submit_number: {:?}", _acc, e);
                    return Err(<Error<T>>::SignedSubmitNumberError);
                }
            };
        }
        Ok(())
    }
    /// Submit the current block number on-chain via an unsigned transaction.
    fn unsigned_submit_number(block_number: T::BlockNumber) -> Result<(), Error<T>> {
        use offchain::SubmitUnsignedTransaction;
        let submission: u64 = block_number.try_into().ok().unwrap() as u64;
        // Submitting the current block number back on-chain.
        // `blocknumber` and `submission` params are always the same value but in different
        // data type. They seem redundant, but in reality they have different purposes.
        // `submission` is the number to be recorded back on-chain. `block_number` is checked in
        // `validate_unsigned` function so only one `Call::submit_number_unsigned` is accepted in
        // each block generation phase.
        let call = Call::submit_number_unsigned(block_number, submission);
        T::SubmitUnsignedTransaction::submit_unsigned(call).map_err(|e| {
            debug::error!("Failed in unsigned_submit_number: {:?}", e);
            <Error<T>>::UnsignedSubmitNumberError
        })
    }
}
impl<T: Trait> frame_support::unsigned::ValidateUnsigned for Module<T> {
    type Call = Call<T>;
    /// Accepts only `submit_number_unsigned` calls; every other call is rejected.
    fn validate_unsigned(
        _source: TransactionSource,
        call: &Self::Call
    ) -> TransactionValidity {
        match call {
            Call::submit_number_unsigned(block_num, number) => {
                debug::native::info!("off-chain send_unsigned: block_num: {}| number: {}", block_num, number);
                // The `provides` tag includes the block number, so only one
                // such submission is accepted per block generation phase.
                Ok(ValidTransaction {
                    priority: 1 << 20,
                    requires: vec![],
                    provides: vec![codec::Encode::encode(&(KEY_TYPE.0, block_num))],
                    longevity: 3,
                    propagate: false,
                })
            }
            _ => InvalidTransaction::Call.into(),
        }
    }
}
|
use crate::domain::domain::SysRes;
use chrono::NaiveDateTime;
use rbatis::utils::table_util::FatherChildRelationship;
/// Permission resource table (`sys_res`) view object.
#[crud_enable(table_name: "sys_res" | table_columns: "id,parent_id,name,permission,path,del")]
#[derive(Clone, Debug)]
pub struct SysResVO {
    pub id: Option<String>,
    // Parent id (nullable)
    pub parent_id: Option<String>,
    pub name: Option<String>,
    // Permission code
    pub permission: Option<String>,
    // Front-end menu path
    pub path: Option<String>,
    // Deletion flag (presumably a soft-delete marker — confirm against schema)
    pub del: Option<i32>,
    pub create_date: Option<NaiveDateTime>,
    // Child resources, populated later via `FatherChildRelationship::set_childs`
    pub childs: Option<Vec<SysResVO>>,
}
impl From<&SysRes> for SysResVO {
    /// Builds a view object from a `SysRes` row; `childs` starts out unset
    /// and is filled in later by `set_childs`.
    fn from(arg: &SysRes) -> Self {
        Self {
            id: arg.id.clone(),
            parent_id: arg.parent_id.clone(),
            name: arg.name.clone(),
            permission: arg.permission.clone(),
            path: arg.path.clone(),
            // `Option<i32>` and `Option<NaiveDateTime>` are `Copy`, so the
            // former `.clone()` calls were redundant (clippy::clone_on_copy).
            del: arg.del,
            create_date: arg.create_date,
            childs: None,
        }
    }
}
impl FatherChildRelationship for SysResVO {
    /// The parent id links a resource node to its father in the tree.
    fn get_father_id(&self) -> Option<&Self::IdType> {
        self.parent_id.as_ref()
    }
    /// Stores the resolved children under this node.
    fn set_childs(&mut self, arg: Vec<Self>) {
        self.childs = Some(arg);
    }
}
|
//! VapourSynth plugins.
use std::ffi::{CStr, CString, NulError};
use std::marker::PhantomData;
use std::ops::Deref;
use std::ptr::NonNull;
use vapoursynth_sys as ffi;
use crate::api::API;
use crate::map::{Map, OwnedMap};
use crate::plugins::{self, FilterFunction};
/// A VapourSynth plugin.
///
/// A thin, copyable wrapper around a raw `VSPlugin` handle whose lifetime is
/// tied to the core via `'core`.
#[derive(Debug, Clone, Copy)]
pub struct Plugin<'core> {
    handle: NonNull<ffi::VSPlugin>,
    _owner: PhantomData<&'core ()>,
}
// NOTE(review): these impls assert that a `VSPlugin` handle may be used from
// any thread; confirm against the VapourSynth API's thread-safety guarantees.
unsafe impl<'core> Send for Plugin<'core> {}
unsafe impl<'core> Sync for Plugin<'core> {}
impl<'core> Plugin<'core> {
    /// Wraps `handle` in a `Plugin`.
    ///
    /// # Safety
    /// The caller must ensure `handle` is valid and API is cached.
    #[inline]
    pub(crate) unsafe fn from_ptr(handle: *mut ffi::VSPlugin) -> Self {
        Self {
            // SAFETY: the caller guarantees `handle` is valid (hence non-null).
            handle: NonNull::new_unchecked(handle),
            _owner: PhantomData,
        }
    }
    /// Returns a map containing a list of the filters exported by a plugin.
    ///
    /// Keys: the filter names;
    ///
    /// Values: the filter name followed by its argument string, separated by a semicolon.
    // TODO: parse the values on the crate side and return a nice struct.
    #[inline]
    pub fn functions(&self) -> OwnedMap<'core> {
        // SAFETY: `self.handle` is valid and the API is cached, per the
        // `from_ptr` contract.
        unsafe { OwnedMap::from_ptr(API::get_cached().get_functions(self.handle.as_ptr())) }
    }
    /// Returns the absolute path to the plugin, including the plugin's file name. This is the real
    /// location of the plugin, i.e. there are no symbolic links in the path.
    ///
    /// Path elements are always delimited with forward slashes.
    ///
    /// Returns `None` when the API reports no path for this plugin.
    #[cfg(feature = "gte-vapoursynth-api-31")]
    #[inline]
    pub fn path(&self) -> Option<&'core CStr> {
        let ptr = unsafe { API::get_cached().get_plugin_path(self.handle.as_ptr()) };
        if ptr.is_null() {
            None
        } else {
            // SAFETY: `ptr` was just checked to be non-null; `CStr::from_ptr`
            // requires it to point at a NUL-terminated string owned by the core.
            Some(unsafe { CStr::from_ptr(ptr) })
        }
    }
    /// Invokes a filter.
    ///
    /// `invoke()` makes sure the filter has no compat input nodes, checks that the args passed to
    /// the filter are consistent with the argument list registered by the plugin that contains the
    /// filter, creates the filter, and checks that the filter doesn't return any compat nodes. If
    /// everything goes smoothly, the filter will be ready to generate frames after `invoke()`
    /// returns.
    ///
    /// Returns a map containing the filter's return value(s). Use `Map::error()` to check if the
    /// filter was invoked successfully.
    ///
    /// Most filters will either add an error to the map, or one or more clips with the key `clip`.
    /// The exception to this are functions, for example `LoadPlugin`, which doesn't return any
    /// clips for obvious reasons.
    #[inline]
    pub fn invoke(&self, name: &str, args: &Map<'core>) -> Result<OwnedMap<'core>, NulError> {
        // Fails with `NulError` if `name` contains interior NUL bytes.
        let name = CString::new(name)?;
        // SAFETY: the handle, name and args pointers are all valid for the
        // duration of the call; the API is cached per the `from_ptr` contract.
        Ok(unsafe {
            OwnedMap::from_ptr(API::get_cached().invoke(
                self.handle.as_ptr(),
                name.as_ptr(),
                args.deref(),
            ))
        })
    }
    /// Registers a filter function to be exported by a non-readonly plugin.
    #[inline]
    pub fn register_function<F: FilterFunction>(&self, filter_function: F) -> Result<(), NulError> {
        // TODO: this is almost the same code as plugins::ffi::call_register_function().
        let name_cstring = CString::new(filter_function.name())?;
        let args_cstring = CString::new(filter_function.args())?;
        // Boxed so the C side can keep the data alive past this call;
        // ownership is transferred via `Box::into_raw` below.
        let data = Box::new(plugins::ffi::FilterFunctionData::<F> {
            filter_function,
            name: name_cstring,
        });
        unsafe {
            API::get_cached().register_function(
                data.name.as_ptr(),
                args_cstring.as_ptr(),
                plugins::ffi::create::<F>,
                Box::into_raw(data) as _,
                self.handle.as_ptr(),
            );
        }
        Ok(())
    }
}
|
/// Allocates a zero-filled pixel buffer of `rows * cols` bytes.
///
/// Panics if `rows * cols` overflows `usize`; the previous `rows * cols`
/// expression would silently wrap in release builds and allocate a
/// wrong-sized buffer.
pub fn new_pixel_buffer(rows: usize, cols: usize) -> Vec<u8> {
    let len = rows
        .checked_mul(cols)
        .expect("pixel buffer dimensions overflow usize");
    vec![0; len]
}
|
//! Contains a different flavour of the [`Aggregate`] trait,
//! while still maintaining compatibility through [`IntoAggregate`] type.
//!
//! Check out [`optional::Aggregate`](Aggregate) for more information.
use async_trait::async_trait;
/// An [`Option`]-flavoured, [`Aggregate`]-compatible trait
/// to model Aggregates having an optional [`State`](Aggregate::State).
///
/// Use [`into_aggregate`](Aggregate::into_aggregate) to get an
/// [`Aggregate`]-compatible instance of this trait.
#[async_trait]
pub trait Aggregate {
    /// Identifier type of the Aggregate.
    ///
    /// Check out [`Aggregate::Id`] for more information.
    type Id: Eq;
    /// State of the Aggregate.
    ///
    /// Check out [`Aggregate::State`] for more information.
    type State;
    /// Events produced and supported by the Aggregate.
    ///
    /// Check out [`Aggregate::Event`] for more information.
    type Event;
    /// Commands supported by the Aggregate.
    ///
    /// Check out [`Aggregate::Command`] for more information.
    type Command;
    /// Error produced by the Aggregate while applying
    /// [`Event`](Aggregate::Event)s or handling
    /// [`Command`](Aggregate::Command)s.
    type Error;
    /// Applies the specified [`Event`](Aggregate::Event) when the
    /// [`State`](Aggregate::State) is empty.
    fn apply_first(event: Self::Event) -> Result<Self::State, Self::Error>;
    /// Applies the specified [`Event`](Aggregate::Event) on a pre-existing
    /// [`State`](Aggregate::State) value.
    fn apply_next(state: Self::State, event: Self::Event) -> Result<Self::State, Self::Error>;
    /// Handles the specified [`Command`](Aggregate::Command) when the
    /// [`State`](Aggregate::State) is empty.
    async fn handle_first(
        &self,
        id: &Self::Id,
        command: Self::Command,
    ) -> Result<Vec<Self::Event>, Self::Error>;
    /// Handles the specified [`Command`](Aggregate::Command) on a pre-existing
    /// [`State`](Aggregate::State) value.
    async fn handle_next(
        &self,
        id: &Self::Id,
        state: &Self::State,
        command: Self::Command,
    ) -> Result<Vec<Self::Event>, Self::Error>;
    /// Translates the current [`optional::Aggregate`](Aggregate) instance into
    /// a _newtype instance_ compatible with the core
    /// [`Aggregate`](eventually_core::aggregate::Aggregate) trait.
    #[inline]
    fn into_aggregate(self) -> IntoAggregate<Self>
    where
        Self: Sized,
    {
        IntoAggregate::from(self)
    }
}
/// _Newtype pattern_ to ensure compatibility between
/// [`optional::Aggregate`](Aggregate) trait and the core
/// [`Aggregate`](eventually_core::aggregate::Aggregate) trait.
///
/// ## Usage
///
/// 1. Use `From<Aggregate>` trait implementation:
/// ```text
/// use eventually_util::optional::IntoAggregate;
///
/// let aggregate = IntoAggregate::from(MyOptionalAggregate);
/// ```
/// 2. Use the [`Aggregate::into_aggregate`] method:
/// ```text
/// let aggregate = MyOptionalAggregate.into_aggregate();
/// ```
#[derive(Clone)]
pub struct IntoAggregate<A>(A);
impl<A> From<A> for IntoAggregate<A> {
#[inline]
fn from(value: A) -> Self {
IntoAggregate(value)
}
}
#[async_trait]
impl<A> eventually_core::aggregate::Aggregate for IntoAggregate<A>
where
    A: Aggregate,
    A: Send + Sync,
    A::Id: Send + Sync,
    A::Command: Send + Sync,
    A::State: Send + Sync,
{
    type Id = A::Id;
    // The core trait's state wraps the optional flavour's state in an `Option`.
    type State = Option<A::State>;
    type Event = A::Event;
    type Command = A::Command;
    type Error = A::Error;
    /// Routes to `apply_first` when no state exists yet, `apply_next` otherwise.
    #[inline]
    fn apply(state: Self::State, event: Self::Event) -> Result<Self::State, Self::Error> {
        match state {
            None => A::apply_first(event).map(Some),
            Some(state) => A::apply_next(state, event).map(Some),
        }
    }
    /// Routes to `handle_first`/`handle_next` depending on state presence.
    async fn handle(
        &self,
        id: &Self::Id,
        state: &Self::State,
        command: Self::Command,
    ) -> Result<Vec<Self::Event>, Self::Error> {
        match state {
            None => self.0.handle_first(id, command).await,
            Some(state) => self.0.handle_next(id, state, command).await,
        }
    }
}
|
use vec3::Vec3;
use ray::Ray;
/// Camera described by its image-plane corner and spanning vectors.
#[derive(Debug)]
pub struct Camera {
    // Lower-left corner of the image plane, in world space.
    pub lower_left_corner: Vec3,
    // Vector spanning the image plane horizontally.
    pub horizontal: Vec3,
    // Vector spanning the image plane vertically.
    pub vertical: Vec3,
    // Shared origin of all generated rays (eye position).
    pub origin: Vec3,
}
impl Camera {
    /// Builds the ray from the camera origin toward the image-plane point
    /// addressed by the `(u, v)` coordinates along the horizontal and
    /// vertical spans.
    pub fn get_ray(&self, u: f32, v: f32) -> Ray {
        let across = u * &self.horizontal;
        let up = v * &self.vertical;
        let plane_offset = &across + &up;
        Ray {
            origin: self.origin.clone(),
            direction: &self.lower_left_corner + &plane_offset,
        }
    }
}
|
use std::thread;
use std::time::Duration;
mod mutexes;
// Runs each concurrency demo in sequence.
fn main() {
    // Thread spawn/join demonstrations defined below.
    basic_threads();
    threads_with_ownership();
    // Mutex demonstrations from the `mutexes` module.
    mutexes::basic_mutexes();
    mutexes::shared_mutex();
}
/// Spawns a worker thread that prints alongside the main thread, then joins it.
fn basic_threads() {
    let worker = thread::spawn(|| {
        for n in 1..10 {
            println!("hi number {} from the spawned thread!", n);
            thread::sleep(Duration::from_millis(1));
        }
    });
    for n in 1..5 {
        println!("hi number {} from the main thread!", n);
        thread::sleep(Duration::from_millis(1));
    }
    // Block until the spawned thread has finished all of its prints.
    worker.join().unwrap();
}
/// Shows that a spawned thread must take ownership of captured data via `move`.
fn threads_with_ownership() {
    let v = vec![1, 2, 3];
    // A plain borrow would not compile: the spawned thread might outlive `v`,
    // so the closure takes ownership with `move`.
    let worker = thread::spawn(move || {
        println!("This is a vector {:?}", v);
    });
    // `v` is unusable here — ownership moved into the spawned thread.
    worker.join().unwrap();
    // After the join, `v` has already been dropped by the thread that owned it.
}
|
//! # Low level reader for PK files
use super::file::{PKEntry, PKTrailer};
use super::parser;
use crate::sd0;
use crate::sd0::read::SegmentedDecoder;
use nom::{Finish, IResult, Offset};
use std::convert::TryFrom;
use std::error::Error;
use std::fmt;
use std::io::{self, ErrorKind};
use std::io::{BufRead, Read, Seek, SeekFrom};
use std::marker::{Send, Sync};
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
/// Failure when parsing
///
/// Copyable summary of a nom failure, locating it within the stream.
pub struct ParseError {
    /// The structure that failed to parse
    structure: &'static str,
    /// Address of the error
    addr: u64,
    /// How far the parser got beyond addr
    offset: usize,
    /// The nom error kind
    code: nom::error::ErrorKind,
}
impl ParseError {
    /// Returns a closure that converts a borrowed nom error into a
    /// `ParseError`, recording how far into `slice` the parser advanced.
    fn map<'r>(
        structure: &'static str,
        addr: u64,
        slice: &'r [u8],
    ) -> impl FnOnce(nom::error::Error<&'r [u8]>) -> Self {
        move |err| Self {
            structure,
            addr,
            offset: slice.offset(err.input),
            code: err.code,
        }
    }
}
// `ParseError` carries all its context in fields, so the `Error` impl needs
// no overrides beyond `Display`/`Debug`.
impl std::error::Error for ParseError {}
impl fmt::Display for ParseError {
    // Human-readable form: structure name, base address, relative offset, nom error code.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "Failed to parse {} at {} (+{}) with code {:?}",
            self.structure, self.addr, self.offset, self.code
        )
    }
}
// Lets `?` convert a `ParseError` into an `io::Error` (kind `Other`).
impl From<ParseError> for io::Error {
    fn from(error: ParseError) -> Self {
        io::Error::new(io::ErrorKind::Other, error)
    }
}
/// A low level pack file reader
///
/// Borrows the underlying stream `T` mutably for its whole lifetime.
pub struct PackFile<'a, T> {
    inner: &'a mut T,
}
/// A low level random access to the entries
pub struct PackEntryAccessor<'b, 'a, T> {
    // Address of the first entry (4 bytes past the stored entry count).
    base_addr: u32,
    // Number of entries in the table.
    count: u32,
    file: &'b mut PackFile<'a, T>,
}
/// A low level read for a file
pub struct PackStreamReader<'b, 'a, T> {
    // Start of this file's data within the pack.
    base_addr: u32,
    // Current read position relative to `base_addr`.
    offset: u32,
    // Total size in bytes of this file's data.
    size: u32,
    file: &'b mut PackFile<'a, T>,
}
// Helper trait for fixed-size structures that can be read and parsed in one go.
trait Readable {
    /// Fixed-size byte buffer matching the serialized size of the structure.
    type Buf: AsMut<[u8]> + AsRef<[u8]>;
    /// The parsed value produced from the buffer.
    type Output: Sized;
    /// Structure name used in `ParseError` messages.
    const NAME: &'static str;
    /// Create a zeroed buffer of the right size.
    fn make() -> Self::Buf;
    /// Parse the structure from the raw bytes.
    fn parse(input: &[u8]) -> IResult<&[u8], Self::Output>;
}
// Marker type for the magic bytes at the start of a pack file.
struct MagicBytes;
// Implements `Readable` for a type given its parser function and serialized size.
macro_rules! readable_impl {
    ($ty:ty ; $parser:ident([u8;$size:literal]) -> $out:ty) => {
        impl Readable for $ty {
            type Buf = [u8; $size];
            type Output = $out;
            const NAME: &'static str = std::stringify!($ty);
            fn make() -> Self::Buf {
                [0; $size]
            }
            fn parse(input: &[u8]) -> IResult<&[u8], Self::Output> {
                parser::$parser(input)
            }
        }
    };
    // Shorthand for when the parsed output type is the implementing type itself.
    ($ty:ty ; $parser:ident([u8;$size:literal])) => {
        readable_impl!($ty; $parser([u8;$size]) -> $ty);
    }
}
readable_impl!(MagicBytes; parse_pk_magic([u8;4]) -> ());
readable_impl!(PKTrailer; parse_pk_trailer([u8;8]));
readable_impl!(PKEntry; parse_pk_entry([u8;100]));
/// Reads one fixed-size `Readable` structure from `reader`.
///
/// `addr` is only used to locate the failure in the resulting `ParseError`.
fn read_value<V: Readable, R: Read + Seek>(reader: &mut R, addr: u64) -> io::Result<V::Output> {
    let mut buf = V::make();
    reader.read_exact(buf.as_mut())?;
    V::parse(buf.as_ref())
        .finish()
        .map(|(_rest, value)| value)
        .map_err(ParseError::map(V::NAME, addr, buf.as_ref()))
        .map_err(io::Error::from)
}
impl<'a, T> PackFile<'a, T>
where
    T: Seek + BufRead,
{
    /// Open a file from a stream
    pub fn open<'b: 'a>(inner: &'b mut T) -> Self {
        PackFile { inner }
    }
    /// Check for the magic bytes at the beginning of the file
    pub fn check_magic(&mut self) -> io::Result<()> {
        self.inner.seek(SeekFrom::Start(0))?;
        read_value::<MagicBytes, _>(&mut self.inner, 0)
    }
    /// Load the header from the end of the file
    ///
    /// The trailer occupies the final 8 bytes of the stream.
    pub fn get_header(&mut self) -> io::Result<PKTrailer> {
        let addr = self.inner.seek(SeekFrom::End(-8))?;
        read_value::<PKTrailer, _>(&mut self.inner, addr)
    }
    /// Load a single 100-byte entry stored at `addr`.
    pub fn get_entry(&mut self, addr: u32) -> io::Result<PKEntry> {
        let addr = u64::from(addr);
        self.inner.seek(SeekFrom::Start(addr))?;
        read_value::<PKEntry, _>(&mut self.inner, addr)
    }
    /// Get an random access wrapper for the entries
    pub fn get_entry_accessor<'b>(
        &'b mut self,
        addr: u32,
    ) -> io::Result<PackEntryAccessor<'b, 'a, T>> {
        // The table at `addr` starts with a little-endian u32 entry count.
        let mut count_bytes: [u8; 4] = [0; 4];
        self.inner.seek(SeekFrom::Start(u64::from(addr)))?;
        self.inner.read_exact(&mut count_bytes)?;
        let count = u32::from_le_bytes(count_bytes);
        Ok(PackEntryAccessor::<'b, 'a> {
            // Entries begin right after the 4-byte count.
            base_addr: addr + 4,
            count,
            file: self,
        })
    }
    /// Get a list of all entries
    pub fn get_entry_list(&mut self, addr: u32) -> io::Result<Vec<PKEntry>> {
        let mut bytes: Vec<u8> = Vec::new();
        let addr = self.inner.seek(SeekFrom::Start(u64::from(addr)))?;
        // Read everything from `addr` to EOF and parse it as an entry list.
        self.inner.read_to_end(&mut bytes)?;
        let (_rest, entry_list) = parser::parse_pk_entry_list(&bytes)
            .finish()
            .map_err(ParseError::map("Vec<PKEntry>", addr, &bytes))?;
        Ok(entry_list)
    }
    /// Get a boxed reader for the file stream
    pub fn get_file_stream<'b>(&'b mut self, entry: PKEntry) -> PackStreamReader<'b, 'a, T> {
        let base_addr = entry.file_data_addr;
        // The on-disk stream length depends on whether the entry is compressed.
        let size = if (entry.is_compressed & 0xff) == 0 {
            entry.orig_file_size
        } else {
            entry.compr_file_size
            //entry.orig_file_size
        };
        //println!("{:?}", entry);
        PackStreamReader::<'b, 'a, T> {
            file: self,
            base_addr,
            offset: 0,
            size,
        }
    }
    /// Get some object with a read trait representing the data
    ///
    /// Compressed entries are wrapped in an sd0 decoder; plain entries are
    /// returned as a raw range reader.
    pub fn get_file_data<'b>(
        &'b mut self,
        entry: PKEntry,
    ) -> std::result::Result<PackDataStream<'b, 'a, T>, sd0::read::Error> {
        let is_compr = (entry.is_compressed & 0xff) > 0;
        let file_stream = self.get_file_stream(entry);
        Ok(if is_compr {
            let compr_stream = SegmentedDecoder::new(file_stream)?;
            PackDataStream::Compressed(compr_stream)
        } else {
            PackDataStream::Plain(file_stream)
        })
    }
}
/// A stream that is either compressed or plain
///
/// Produced by `PackFile::get_file_data` depending on the entry's
/// `is_compressed` flag.
pub enum PackDataStream<'b, 'a, T> {
    /// The stream is *not* sd0 compressed
    Plain(PackStreamReader<'b, 'a, T>),
    /// The stream *is* sd0 compressed
    Compressed(SegmentedDecoder<PackStreamReader<'b, 'a, T>>),
}
impl<'b, 'a, T: Seek + BufRead> std::io::Read for PackDataStream<'b, 'a, T> {
    // Delegates to whichever reader backs this stream.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        match self {
            Self::Plain(inner) => inner.read(buf),
            Self::Compressed(inner) => inner.read(buf),
        }
    }
}
impl<'b, 'a, T> PackEntryAccessor<'b, 'a, T>
where
    T: Seek + BufRead,
{
    /// Get a mutable reference to the underlying pack file
    pub fn get_file_mut(&'b mut self) -> &'b mut PackFile<'a, T> {
        self.file
    }
    /// Get the specified entry, or `None` when `index` falls outside `0..count`.
    ///
    /// The previous version only rejected negative indices, so an index at or
    /// past `count` would read past the end of the entry table.
    pub fn get_entry(&mut self, index: i32) -> Option<io::Result<PKEntry>> {
        if index >= 0 && (index as u32) < self.count {
            // Each serialized entry is exactly 100 bytes.
            Some(self.file.get_entry(self.base_addr + (index as u32) * 100))
        } else {
            None
        }
    }
    /// Get the root entry (middle of the table), or `None` when the table is
    /// empty (previously an empty table still attempted a read at index 0).
    pub fn get_root_entry(&mut self) -> Option<io::Result<PKEntry>> {
        self.get_entry((self.count / 2) as i32)
    }
    /// Return the number of entries
    pub fn get_count(&self) -> u32 {
        self.count
    }
}
/// Wraps any boxable error into an `io::Error` with kind `Other`.
fn other_io_err<E>(err: E) -> io::Error
where
    E: Into<Box<dyn Error + Send + Sync>>,
{
    io::Error::new(io::ErrorKind::Other, err)
}
impl<'b, 'a, T> Read for PackStreamReader<'b, 'a, T>
where
    T: Seek + BufRead,
{
    // Reads from the pack, clamped to this file's [base_addr, base_addr + size) window.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        // Seek to the absolute position of the next unread byte.
        // NOTE(review): `base_addr + self.offset` is a u32 addition and could
        // overflow for data near the 4 GiB boundary — confirm pack files
        // cannot reach that size.
        let pos = u64::from(self.base_addr + self.offset);
        self.file.inner.seek(SeekFrom::Start(pos))?;
        let buf_len = buf.len();
        //println!("P-BUF: {}", buf_len);
        let offset = usize::try_from(self.offset).map_err(other_io_err)?;
        //println!("P-OFF: {}", offset);
        let size = usize::try_from(self.size).map_err(other_io_err)?;
        //println!("P-SIZ: {}", size);
        // Never read past the end of this file's slice of the pack.
        if offset + buf_len > size {
            let max = size - offset;
            //println!("P-MAX: {}", max);
            self.file.inner.read(&mut buf[..max])
        } else {
            self.file.inner.read(buf)
        }
        .and_then(|n| {
            //println!("P-RES: {}", n);
            // Advance the window position by however many bytes were produced.
            self.offset += u32::try_from(n).map_err(other_io_err)?;
            Ok(n)
        })
    }
}
|
/// Reads one line from stdin and returns it with trailing whitespace removed.
fn read_line() -> String {
    let mut buf = String::new();
    std::io::stdin()
        .read_line(&mut buf)
        .unwrap();
    buf.trim_end().to_owned()
}
/// Reads whitespace-separated integers from one stdin line and prints the minimum.
fn main() {
    let numbers = read_line()
        .split_whitespace()
        .map(|tok| tok.parse::<i64>().unwrap())
        .collect::<Vec<i64>>();
    let smallest = numbers.iter().min().unwrap();
    println!("{}", smallest);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.